From 2b1d7b6a5c27bb1df6aaa1843ec1d0f888cbb8d2 Mon Sep 17 00:00:00 2001
From: unknown <n.santhanam@redkarma.eu>
Date: Thu, 12 Nov 2020 13:16:26 +0530
Subject: [PATCH] Merge branch 'master' into TMSS-380

---
 .gitignore                                    |   1 +
 CMake/LofarPackageList.cmake                  |   4 +-
 Docker/lofar-ci/Dockerfile_ci_sas             |   2 +-
 LCS/Messaging/python/messaging/exceptions.py  |  12 +
 LCS/Messaging/python/messaging/messagebus.py  |   3 +
 LCS/PyCommon/postgres.py                      |  75 +-
 LCS/PyCommon/test/postgres.py                 |   5 +-
 MAC/Services/src/PipelineControl.py           |  40 +-
 QA/QA_Common/lib/hdf5_io.py                   |   5 +-
 QA/QA_Common/test/t_hdf5_io.py                |  14 +-
 QA/QA_Service/lib/qa_service.py               |  23 +-
 QA/QA_Service/test/t_qa_service.py            |   1 -
 .../radb/sql/add_notifications.sql            | 114 ++-
 .../radb/sql/create_add_notifications.sql.py  |   1 +
 .../radbpglistener.py                         |  30 +-
 SAS/Scheduler/src/taskdialog.cpp              |   5 +
 SAS/TMSS/client/lib/tmss_http_rest_client.py  |   7 +-
 SAS/TMSS/client/lib/tmssbuslistener.py        | 260 ++++---
 .../components/Timeline/CalendarTimeline.js   |  33 +-
 .../tmss_webapp/src/components/ViewTable.js   |   2 +-
 .../src/layout/sass/_timeline.scss            |  16 +-
 .../tmss_webapp/src/routes/Project/view.js    |   4 +-
 .../Scheduling/Scheduling.Constraints.js      |   4 +
 .../routes/Scheduling/SchedulingUnitList.js   |  69 +-
 .../tmss_webapp/src/routes/Timeline/view.js   | 101 ++-
 .../src/routes/Timeline/week.view.js          |   8 +-
 .../src/services/schedule.service.js          |  21 +-
 .../tmss_webapp/src/services/task.service.js  |  10 +-
 SAS/TMSS/services/CMakeLists.txt              |   1 +
 .../test/t_feedback_handling_service.py       |   1 -
 .../lib/subtask_scheduling.py                 |  59 +-
 .../test/t_subtask_scheduling_service.py      |   6 +-
 .../tmss_postgres_listener/CMakeLists.txt     |   8 +
 .../tmss_postgres_listener/bin/CMakeLists.txt |   4 +
 .../bin/tmss_postgres_listener_service        |  24 +
 .../bin/tmss_postgres_listener_service.ini    |   9 +
 .../tmss_postgres_listener/lib/CMakeLists.txt |  10 +
 .../lib/tmss_postgres_listener.py             | 237 ++++++
 .../test/CMakeLists.txt                       |   7 +
 .../test/t_tmss_postgres_listener_service.py  | 176 +++++
 .../test/t_tmss_postgres_listener_service.run |   6 +
 .../test/t_tmss_postgres_listener_service.sh  |   3 +
 SAS/TMSS/src/migrate_momdb_to_tmss.py         | 687 ++++++++++++------
 SAS/TMSS/src/tmss/settings.py                 |  10 +-
 SAS/TMSS/src/tmss/tmssapp/adapters/sip.py     |  35 +-
 SAS/TMSS/src/tmss/tmssapp/conversions.py      |  54 ++
 .../tmss/tmssapp/migrations/0001_initial.py   |  34 +-
 .../src/tmss/tmssapp/models/scheduling.py     |  70 +-
 .../src/tmss/tmssapp/models/specification.py  |  36 +-
 .../tmss/tmssapp/schemas/sap_template-1.json  |  12 -
 .../schemas/subtask_template-ingest-1.json    |  12 +
 .../schemas/task_template-ingest-1.json       |  12 +
 .../src/tmss/tmssapp/schemas/templates.json   |  10 +
 .../tmss/tmssapp/serializers/scheduling.py    |  16 +-
 .../tmss/tmssapp/serializers/specification.py |   2 +-
 SAS/TMSS/src/tmss/tmssapp/subtasks.py         | 113 ++-
 SAS/TMSS/src/tmss/tmssapp/views.py            |  53 +-
 .../src/tmss/tmssapp/viewsets/scheduling.py   |  52 +-
 SAS/TMSS/src/tmss/urls.py                     |   2 +
 SAS/TMSS/test/t_adapter.py                    |  35 +-
 SAS/TMSS/test/t_conversions.py                |  38 +
 SAS/TMSS/test/t_scheduling.py                 | 170 ++++-
 SAS/TMSS/test/t_subtasks.py                   |  65 +-
 SAS/TMSS/test/test_utils.py                   |  27 +-
 .../tmss_test_environment_unittest_setup.py   |   3 +-
 65 files changed, 2262 insertions(+), 707 deletions(-)
 create mode 100644 SAS/TMSS/services/tmss_postgres_listener/CMakeLists.txt
 create mode 100644 SAS/TMSS/services/tmss_postgres_listener/bin/CMakeLists.txt
 create mode 100644 SAS/TMSS/services/tmss_postgres_listener/bin/tmss_postgres_listener_service
 create mode 100644 SAS/TMSS/services/tmss_postgres_listener/bin/tmss_postgres_listener_service.ini
 create mode 100644 SAS/TMSS/services/tmss_postgres_listener/lib/CMakeLists.txt
 create mode 100644 SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py
 create mode 100644 SAS/TMSS/services/tmss_postgres_listener/test/CMakeLists.txt
 create mode 100644 SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.py
 create mode 100644 SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.run
 create mode 100644 SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.sh
 create mode 100644 SAS/TMSS/src/tmss/tmssapp/schemas/subtask_template-ingest-1.json
 create mode 100644 SAS/TMSS/src/tmss/tmssapp/schemas/task_template-ingest-1.json

diff --git a/.gitignore b/.gitignore
index fe3291cd80a..93d22e6d47a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,3 +22,4 @@ SAS/OTB/jRSP/configure.in
 **/.idea
 SAS/TMSS/frontend/tmss_webapp/package-lock.json
 SAS/TMSS/frontend/tmss_webapp/node_modules/
+SAS/TMSS/frontend/tmss_webapp/debug.log
diff --git a/CMake/LofarPackageList.cmake b/CMake/LofarPackageList.cmake
index 740c52181b3..db28a087be7 100644
--- a/CMake/LofarPackageList.cmake
+++ b/CMake/LofarPackageList.cmake
@@ -1,7 +1,7 @@
 # - Create for each LOFAR package a variable containing the absolute path to
 # its source directory. 
 #
-# Generated by gen_LofarPackageList_cmake.sh at do 28 mei 2020 11:22:44 CEST
+# Generated by gen_LofarPackageList_cmake.sh at do 29 okt 2020  7:42:34 CET
 #
 #                      ---- DO NOT EDIT ----
 #
@@ -208,6 +208,8 @@ if(NOT DEFINED LOFAR_PACKAGE_LIST_INCLUDED)
   set(RACommon_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/ResourceAssignment/Common)
   set(TMSSClient_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/client)
   set(TMSSSubtaskSchedulingService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/subtask_scheduling)
+  set(TMSSFeedbackHandlingService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/feedback_handling)
+  set(TMSSPostgresListenerService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/tmss_postgres_listener)
   set(TriggerEmailServiceCommon_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TriggerEmailService/Common)
   set(TriggerEmailServiceServer_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TriggerEmailService/Server)
   set(CCU_MAC_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SubSystems/CCU_MAC)
diff --git a/Docker/lofar-ci/Dockerfile_ci_sas b/Docker/lofar-ci/Dockerfile_ci_sas
index 1aa8f6689b5..b515298af20 100644
--- a/Docker/lofar-ci/Dockerfile_ci_sas
+++ b/Docker/lofar-ci/Dockerfile_ci_sas
@@ -16,7 +16,7 @@ RUN yum erase -y postgresql postgresql-server postgresql-devel && \
     cd /bin && ln -s /usr/pgsql-9.6/bin/initdb && ln -s /usr/pgsql-9.6/bin/postgres
 ENV PATH /usr/pgsql-9.6/bin:$PATH 
 
-RUN pip3 install cython kombu lxml requests pygcn xmljson mysql-connector-python python-dateutil Django==3.0.9 djangorestframework==3.11.1 djangorestframework-xml ldap==1.0.2 flask fabric coverage python-qpid-proton PyGreSQL numpy h5py psycopg2 testing.postgresql Flask-Testing scipy Markdown django-filter python-ldap python-ldap-test ldap3 django-jsonforms django-json-widget django-jsoneditor drf-yasg flex swagger-spec-validator django-auth-ldap mozilla-django-oidc jsonschema comet pyxb==1.2.5 graphviz isodate astropy packaging django-debug-toolbar 
+RUN pip3 install cython kombu lxml requests pygcn xmljson mysql-connector-python python-dateutil Django==3.0.9 djangorestframework==3.11.1 djangorestframework-xml ldap==1.0.2 flask fabric coverage python-qpid-proton PyGreSQL numpy h5py psycopg2 testing.postgresql Flask-Testing scipy Markdown django-filter python-ldap python-ldap-test ldap3 django-jsonforms django-json-widget django-jsoneditor drf-yasg flex swagger-spec-validator django-auth-ldap mozilla-django-oidc jsonschema comet pyxb==1.2.5 graphviz isodate astropy packaging django-debug-toolbar pymysql astroplan
 
 #Viewflow package 
 RUN pip3 install django-material django-viewflow
diff --git a/LCS/Messaging/python/messaging/exceptions.py b/LCS/Messaging/python/messaging/exceptions.py
index 52e023d2145..003324cc41f 100644
--- a/LCS/Messaging/python/messaging/exceptions.py
+++ b/LCS/Messaging/python/messaging/exceptions.py
@@ -65,3 +65,15 @@ class MessagingTimeoutError(MessagingError, TimeoutError):
     """
     pass
 
+
+class MessageHandlerError(MessagingError):
+    """
+    raised when handling a message fails
+    """
+    pass
+
+class MessageHandlerUnknownSubjectError(MessageHandlerError):
+    """
+    raised upon handling a message with an unknown subject
+    """
+    pass
diff --git a/LCS/Messaging/python/messaging/messagebus.py b/LCS/Messaging/python/messaging/messagebus.py
index 734e5db18de..66feb5bad87 100644
--- a/LCS/Messaging/python/messaging/messagebus.py
+++ b/LCS/Messaging/python/messaging/messagebus.py
@@ -1570,6 +1570,9 @@ class BusListener:
                             if isinstance(e, TimeoutError):
                                 logger.error("Handling of %s timed out: %s", lofar_msg, e)
                                 receiver.reject(lofar_msg, requeue=True)
+                            elif isinstance(e, MessageHandlerError):
+                                logger.error("Could not handle message %s: %s", lofar_msg, e)
+                                receiver.reject(lofar_msg, requeue=False)
                             else:
                                 logger.exception("Handling of %s failed. Rejecting message. Error: %s", lofar_msg, e)
                                 receiver.reject(lofar_msg, requeue=False)
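
For illustration (not part of this patch; handler and subject names are hypothetical), a minimal sketch of how the new exception types interact with the reject logic above: raising a MessageHandlerError marks a message as permanently unhandlable, so the BusListener rejects it without requeueing, whereas a TimeoutError still causes a requeue.

    from lofar.messaging.messagebus import AbstractMessageHandler
    from lofar.messaging.exceptions import MessageHandlerError

    class MyHandler(AbstractMessageHandler):
        def handle_message(self, msg):
            if msg.subject != 'My.Known.Subject':      # hypothetical filter
                # caught in the BusListener loop above: reject, no requeue
                raise MessageHandlerError("cannot handle subject %s" % msg.subject)
            print(msg.content)                         # stand-in for real work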
diff --git a/LCS/PyCommon/postgres.py b/LCS/PyCommon/postgres.py
index 13d6cee1db2..9c6d36e6e43 100644
--- a/LCS/PyCommon/postgres.py
+++ b/LCS/PyCommon/postgres.py
@@ -49,45 +49,66 @@ def makePostgresNotificationQueries(schema, table, action, column_name='id'):
     if column_name != 'id':
         change_name += '_column_' + column_name
     function_name = '''NOTIFY_{change_name}'''.format(change_name=change_name)
+
+    if action == 'UPDATE':
+        if column_name == 'id':
+            select_payload = '''SELECT '{"id": ' || CAST(NEW.id AS text) || '}' INTO payload;'''
+        else:
+            select_payload = '''SELECT '{"id": ' || CAST(NEW.id AS text) || ', "''' + column_name + '''": "' || CAST(NEW.''' + column_name + ''' AS text)  || '"}' INTO payload;'''
+    elif action == 'INSERT':
+        select_payload = '''SELECT '{"id": ' || CAST(NEW.id AS text) || '}' INTO payload;'''
+    elif action == 'DELETE':
+        select_payload = '''SELECT '{"id": ' || CAST(OLD.id AS text) || '}' INTO payload;'''
+
+    if action == 'UPDATE':
+        begin_update_check = 'IF ROW(NEW.{what}) IS DISTINCT FROM ROW(OLD.{what}) THEN'.format(what='*' if column_name == 'id' else column_name)
+        end_update_check = 'END IF;'
+    else:
+        begin_update_check = ''
+        end_update_check = ''
+
     function_sql = '''
-    CREATE OR REPLACE FUNCTION {schema}.{function_name}()
+    CREATE OR REPLACE FUNCTION {schema}{function_name}()
     RETURNS TRIGGER AS $$
     DECLARE payload text;
     BEGIN
-    {begin_update_check}SELECT CAST({column_value} AS text) INTO payload;
-    PERFORM pg_notify(CAST('{change_name}' AS text), payload);{end_update_check}
+    {begin_update_check}
+    {select_payload}
+    PERFORM pg_notify(CAST('{change_name}' AS text), payload);
+    {end_update_check}
     RETURN {value};
     END;
     $$ LANGUAGE plpgsql;
-    '''.format(schema=schema,
+    '''.format(schema=schema+'.' if schema else '',
                 function_name=function_name,
                 table=table,
                 action=action,
-                column_value=('OLD' if action == 'DELETE' else 'NEW') + '.' + column_name,
+                old_or_new=('OLD' if action == 'DELETE' else 'NEW') + '.' + column_name,
                 value='OLD' if action == 'DELETE' else 'NEW',
                 change_name=change_name.lower(),
-                begin_update_check='IF ROW(NEW.*) IS DISTINCT FROM ROW(OLD.*) THEN\n' if action == 'UPDATE' else '',
-                end_update_check='\nEND IF;' if action == 'UPDATE' else '')
+                begin_update_check=begin_update_check,
+                select_payload=select_payload,
+                end_update_check=end_update_check)
 
     trigger_name = 'T_%s' % function_name
 
     trigger_sql = '''
     CREATE TRIGGER {trigger_name}
-    AFTER {action} ON {schema}.{table}
+    AFTER {action} ON {schema}{table}
     FOR EACH ROW
-    EXECUTE PROCEDURE {schema}.{function_name}();
+    EXECUTE PROCEDURE {schema}{function_name}();
     '''.format(trigger_name=trigger_name,
                 function_name=function_name,
-                schema=schema,
+                schema=schema+'.' if schema else '',
                 table=table,
                 action=action)
 
     drop_sql = '''
-    DROP TRIGGER IF EXISTS {trigger_name} ON {schema}.{table} CASCADE;
-    DROP FUNCTION IF EXISTS {schema}.{function_name}();
+    DROP TRIGGER IF EXISTS {trigger_name} ON {schema}{table} CASCADE;
+    DROP FUNCTION IF EXISTS {schema}{function_name}();
     '''.format(trigger_name=trigger_name,
                function_name=function_name,
-               schema=schema,
+               schema=schema+'.' if schema else '',
                table=table)
 
     sql = drop_sql + '\n' + function_sql + '\n' + trigger_sql
@@ -321,7 +342,7 @@ class PostgresDatabaseConnection:
         try:
             if self._connection.notices:
                 for notice in self._connection.notices:
-                    logger.info('database log message %s', notice.strip())
+                    logger.debug('database log message %s', notice.strip())
                 if isinstance(self._connection.notices, collections.deque):
                     self._connection.notices.clear()
                 else:
@@ -331,19 +352,19 @@ class PostgresDatabaseConnection:
 
     def commit(self):
         if self.is_connected:
-            logger.info('commit')
+            logger.debug('commit')
             self._connection.commit()
 
     def rollback(self):
         if self.is_connected:
-            logger.info('rollback')
+            logger.debug('rollback')
             self._connection.rollback()
 
 
 class PostgresListener(PostgresDatabaseConnection):
-    ''' This class lets you listen to postgress notifications
-    It execute callbacks when a notifocation occurs.
-    Make your own subclass with your callbacks and subscribe them to the appriate channel.
+    ''' This class lets you listen to postgres notifications.
+    It executes callbacks when a notification occurs.
+    Make your own subclass with your callbacks and subscribe them to the appropriate channel.
     Example:
 
     class MyListener(PostgresListener):
@@ -385,18 +406,20 @@ class PostgresListener(PostgresDatabaseConnection):
     def subscribe(self, notification, callback):
         '''Subscribe to a certain postgres notification.
         Call callback method in case such a notification is received.'''
-        logger.info("Subscribed %sto %s" % ('and listening ' if self.isListening() else '', notification))
+        logger.debug("Subscribing %sto %s" % ('and listening ' if self.isListening() else '', notification))
         with self.__lock:
             self.executeQuery("LISTEN %s;", (psycopg2.extensions.AsIs(notification),))
             self.__callbacks[notification] = callback
+        logger.info("Subscribed %sto %s" % ('and listening ' if self.isListening() else '', notification))
 
     def unsubscribe(self, notification):
         '''Unsubscribe from a certain postgres notification.'''
-        logger.info("Unsubscribed from %s" % notification)
+        logger.debug("Unsubscribing from %s" % notification)
         with self.__lock:
             self.executeQuery("UNLISTEN %s;", (psycopg2.extensions.AsIs(notification),))
             if notification in self.__callbacks:
                 del self.__callbacks[notification]
+        logger.info("Unsubscribed from %s" % notification)
 
     def isListening(self):
         '''Are we listening? Has the listener been started?'''
@@ -459,12 +482,16 @@ class PostgresListener(PostgresDatabaseConnection):
         self.disconnect()
 
     def __enter__(self):
-        '''starts the listener upon contect enter'''
-        self.start()
+        '''starts the listener upon entering the 'with' context'''
+        try:
+            self.start()
+        except Exception as e:
+            logger.exception(str(e))
+            self.stop()
         return self
 
     def __exit__(self, exc_type, exc_val, exc_tb):
-        '''stops the listener upon contect enter'''
+        '''stops the listener upon exiting the 'with' context'''
         self.stop()
 
     def _callCallback(self, channel, payload = None):
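
A minimal usage sketch tying the postgres.py changes together (dbcreds, waitWhileListening and the 'task_update' channel are assumed, following their use elsewhere in this codebase): the trigger functions generated by makePostgresNotificationQueries() now notify with a JSON payload such as {"id": 42}, so subscribed callbacks should json.loads() the payload instead of treating it as a bare id.

    import json
    from lofar.common.postgres import PostgresListener

    class MyListener(PostgresListener):
        def __init__(self, dbcreds):
            super().__init__(dbcreds=dbcreds)
            self.subscribe('task_update', self.onTaskUpdated)

        def onTaskUpdated(self, payload):
            task_id = json.loads(payload)['id']    # payload is now a JSON dict
            print('task', task_id, 'updated')

    # dbcreds: a DBCredentials object, as used elsewhere in LOFAR
    with MyListener(dbcreds) as listener:
        listener.waitWhileListening()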
diff --git a/LCS/PyCommon/test/postgres.py b/LCS/PyCommon/test/postgres.py
index f98092c3d7f..51e3be001e0 100755
--- a/LCS/PyCommon/test/postgres.py
+++ b/LCS/PyCommon/test/postgres.py
@@ -90,6 +90,8 @@ class PostgresTestDatabaseInstance():
                     # make the user known in the new test database
                     self._create_superuser(dsn)
 
+                    logger.info('Created test-database instance. It is available at: %s', self.dbcreds.stringWithHiddenPassword())
+
                     logger.info('Applying test-database schema...')
                     self.apply_database_schema()
                     return
@@ -106,9 +108,6 @@ class PostgresTestDatabaseInstance():
             # create user role
             query = "CREATE USER %s WITH SUPERUSER PASSWORD '%s'" % (self.dbcreds.user, self.dbcreds.password)
             cursor.execute(query)
-
-            logger.info('Created test-database instance. It is available at: %s',
-                        self.dbcreds.stringWithHiddenPassword())
         finally:
             cursor.close()
             conn.commit()
diff --git a/MAC/Services/src/PipelineControl.py b/MAC/Services/src/PipelineControl.py
index 0f6febfe8ce..8fb09299fd0 100755
--- a/MAC/Services/src/PipelineControl.py
+++ b/MAC/Services/src/PipelineControl.py
@@ -75,7 +75,7 @@ from lofar.common.subprocess_utils import communicate_returning_strings
 from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC
 from lofar.sas.otdb.config import DEFAULT_OTDB_NOTIFICATION_SUBJECT
 from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession
-from lofar.sas.tmss.client.tmssbuslistener import TMSSSubTaskEventMessageHandler, TMSSSubTaskBusListener
+from lofar.sas.tmss.client.tmssbuslistener import TMSSEventMessageHandler, TMSSBusListener
 
 import subprocess
 import pipes
@@ -342,7 +342,7 @@ class PipelineDependencies(object):
         return self.rarpc.getTasks(task_status=task_status, task_type=task_type)
 
 
-class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler):
+class PipelineControlTMSSHandler(TMSSEventMessageHandler):
 
     def __init__(self):
         super(PipelineControlTMSSHandler, self).__init__()
@@ -394,22 +394,24 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler):
         except Exception as e:
             logger.error(e)
 
-    def onSubTaskScheduled(self, subtask_id: int, old_state: str, new_state: str):
-        try:
-            subtask = self.tmss_client.get_subtask(subtask_id)
-            subtask_template = self.tmss_client.get_url_as_json_object(subtask['specifications_template'])
-            if 'pipeline' not in subtask_template['type_value']:
-                logger.info("skipping scheduled subtask id=%s of non-pipeline type '%s'", subtask_id, subtask_template['type_value'])
-                return
-
-            logger.info("getting parset for scheduled subtask id=%s of type '%s'", subtask_id, subtask_template['type_value'])
-            parset = self.tmss_client.get_subtask_parset(subtask_id)
-            parset = parameterset.fromString(parset)
-            parset = Parset(parset.dict())
-            if parset and self._shouldHandle(parset):
-                self._startPipeline(subtask_id, parset)
-        except Exception as e:
-            logger.error(e)
+    def onSubTaskStatusChanged(self, id: int, status: str):
+        if status == "scheduled":
+            try:
+                subtask = self.tmss_client.get_subtask(id)
+                subtask_template = self.tmss_client.get_url_as_json_object(subtask['specifications_template'])
+                if 'pipeline' not in subtask_template['type_value']:
+                    logger.info("skipping scheduled subtask id=%s of non-pipeline type '%s'", id, subtask_template['type_value'])
+                    return
+
+                logger.info("getting parset for scheduled subtask id=%s of type '%s'", id, subtask_template['type_value'])
+                parset = self.tmss_client.get_subtask_parset(id)
+                parset = parameterset.fromString(parset)
+                parset = Parset(parset.dict())
+                if parset and self._shouldHandle(parset):
+                    self._startPipeline(id, parset)
+            except Exception as e:
+                logger.error(e)
+
 
     @staticmethod
     def _shouldHandle(parset):
@@ -978,7 +980,7 @@ class PipelineControl(OTDBBusListener):
 
 
 
-class PipelineControlTMSS(TMSSSubTaskBusListener):
+class PipelineControlTMSS(TMSSBusListener):
     def __init__(self, handler_kwargs: dict = None,
                  exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER):
 
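
The same migration applies to any handler ported from TMSSSubTaskEventMessageHandler: the per-state hooks (onSubTaskScheduled and friends, each receiving subtask_id/old_state/new_state) collapse into a single onSubTaskStatusChanged(id, status), and the status filtering moves into the handler body. A generic sketch:

    from lofar.sas.tmss.client.tmssbuslistener import TMSSEventMessageHandler

    class MyTMSSHandler(TMSSEventMessageHandler):
        def onSubTaskStatusChanged(self, id: int, status: str):
            # filter on the status string instead of overriding a per-state hook
            if status == "scheduled":
                pass  # the work the old onSubTaskScheduled() did goes here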
diff --git a/QA/QA_Common/lib/hdf5_io.py b/QA/QA_Common/lib/hdf5_io.py
index b6a9e1a192a..ec1814dbfb1 100644
--- a/QA/QA_Common/lib/hdf5_io.py
+++ b/QA/QA_Common/lib/hdf5_io.py
@@ -290,7 +290,7 @@ def read_sap_numbers(path):
 
         return sorted([int(sap_nr) for sap_nr in file['measurement/saps'].keys()])
 
-def read_version(h5_path):
+def read_version(h5_path: str) -> str:
     with SharedH5File(h5_path, "r") as file:
         version = file['version'][0]
         if isinstance(version, bytes):
@@ -661,7 +661,8 @@ def combine_hypercubes(input_paths, output_dir, output_filename=None, do_compres
         if len(versions) != 1:
             raise ValueError('Cannot combine h5 files of multiple versions: %s' % (', '.join(versions),))
 
-        version_str = list(versions)[0]
+        versions = [v.decode('utf-8') if isinstance(v, bytes) else v for v in versions]
+        version_str = versions[0]
 
         if version_str != '1.4':
             raise ValueError('Cannot read version %s' % (version_str,))
diff --git a/QA/QA_Common/test/t_hdf5_io.py b/QA/QA_Common/test/t_hdf5_io.py
index a4ad76cd421..9dccf7bd8f4 100755
--- a/QA/QA_Common/test/t_hdf5_io.py
+++ b/QA/QA_Common/test/t_hdf5_io.py
@@ -85,7 +85,7 @@ class TestHdf5_IO(unittest.TestCase):
             file_annotations = read_file_annotations(path)
 
             self.assertEqual(1, len(file_annotations))
-            self.assertEqual('This file was recorded in front of a live audience ;-)', file_annotations[0]['annotation'])
+            self.assertEqual(b'This file was recorded in front of a live audience ;-)', file_annotations[0]['annotation'])
             self.assertEqual('test_user', file_annotations[0]['user'])
 
         finally:
@@ -362,9 +362,7 @@ class TestHdf5_IO(unittest.TestCase):
             write_hypercube(path, saps_in, sas_id=123456)
 
             # check if version is 1.4
-            with h5py.File(path, "r") as file:
-                version_str = file['version'][0]
-                self.assertEqual('1.4', version_str)
+            self.assertEqual('1.4', read_version(path))
 
             # change version back to 1.2
             # and modify visibility data to have the 1.2 incorrect phases
@@ -408,17 +406,13 @@ class TestHdf5_IO(unittest.TestCase):
                     sap_group.create_dataset('visibilities', data=scaled_visibilities)
 
             # check if version is 1.2
-            with h5py.File(path, "r") as file:
-                version_str = file['version'][0]
-                self.assertEqual('1.2', version_str)
+            self.assertEqual('1.2', read_version(path))
 
             # reading the 1.2 file should result in automatic conversion via 1.3 to 1.4 and correction of phases
             result_raw = read_hypercube(path, visibilities_in_dB=False, python_datetimes=True)
 
             # check if version is now 1.4
-            with h5py.File(path, "r") as file:
-                version_str = file['version'][0]
-                self.assertEqual('1.4', version_str)
+            self.assertEqual('1.4', read_version(path))
 
             # read in dB as well because we usually plot the visibilities in dB
             result_dB = read_hypercube(path, visibilities_in_dB=True, python_datetimes=True)
diff --git a/QA/QA_Service/lib/qa_service.py b/QA/QA_Service/lib/qa_service.py
index d189be1560b..18bd13f9c1f 100644
--- a/QA/QA_Service/lib/qa_service.py
+++ b/QA/QA_Service/lib/qa_service.py
@@ -23,7 +23,7 @@ from subprocess import call
 from optparse import OptionParser, OptionGroup
 from lofar.common.util import waitForInterrupt
 from lofar.sas.otdb.OTDBBusListener import OTDBBusListener, OTDBEventMessageHandler
-from lofar.sas.tmss.client.tmssbuslistener import TMSSSubTaskEventMessageHandler, TMSSSubTaskBusListener
+from lofar.sas.tmss.client.tmssbuslistener import TMSSEventMessageHandler, TMSSBusListener
 from lofar.messaging import UsingToBusMixin, BusListener, ToBus, AbstractMessageHandler
 from lofar.messaging.messages import EventMessage, CommandMessage
 from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME
@@ -67,8 +67,8 @@ class QAFilteringOTDBBusListener(OTDBBusListener):
                          broker=broker)
 
 
-class QAFilteringTMSSSubTaskBusListener(TMSSSubTaskBusListener):
-    class QAFilteringTMSSSubTaskEventMessageHandler(UsingToBusMixin, TMSSSubTaskEventMessageHandler):
+class QAFilteringTMSSSubTaskBusListener(TMSSBusListener):
+    class QAFilteringTMSSSubTaskEventMessageHandler(UsingToBusMixin, TMSSEventMessageHandler):
         def _send_qa_command_message(self, subtask_id: int, command_subject: str):
             with TMSSsession.create_from_dbcreds_for_ldap() as tmsssession:
                 tmsssession.set_subtask_status(subtask_id, 'queueing')
@@ -83,14 +83,15 @@ class QAFilteringTMSSSubTaskBusListener(TMSSSubTaskBusListener):
 
                 tmsssession.set_subtask_status(subtask_id, 'queued')
 
-        def onSubTaskScheduled(self, subtask_id: int, old_state: str, new_state:str):
-            with TMSSsession.create_from_dbcreds_for_ldap() as tmsssession:
-                subtask = tmsssession.get_subtask(subtask_id)
-                spec = tmsssession.get_url_as_json_object(subtask['specifications_template'])
-                if 'qa_files' == spec['type_value']:
-                    self._send_qa_command_message(subtask_id, DEFAULT_DO_QAFILE_CONVERSION_SUBJECT)
-                elif 'qa_plots' == spec['type_value']:
-                    self._send_qa_command_message(subtask_id, DEFAULT_DO_QAPLOTS_SUBJECT)
+        def onSubTaskStatusChanged(self, id: int, status: str):
+            if status == "scheduled":
+                with TMSSsession.create_from_dbcreds_for_ldap() as tmsssession:
+                    subtask = tmsssession.get_subtask(id)
+                    spec = tmsssession.get_url_as_json_object(subtask['specifications_template'])
+                    if 'qa_files' == spec['type_value']:
+                        self._send_qa_command_message(id, DEFAULT_DO_QAFILE_CONVERSION_SUBJECT)
+                    elif 'qa_plots' == spec['type_value']:
+                        self._send_qa_command_message(id, DEFAULT_DO_QAPLOTS_SUBJECT)
 
     def __init__(self, exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER):
         super().__init__(handler_type=QAFilteringTMSSSubTaskBusListener.QAFilteringTMSSSubTaskEventMessageHandler,
diff --git a/QA/QA_Service/test/t_qa_service.py b/QA/QA_Service/test/t_qa_service.py
index 477ba9dc491..fe5bfc908ac 100755
--- a/QA/QA_Service/test/t_qa_service.py
+++ b/QA/QA_Service/test/t_qa_service.py
@@ -27,7 +27,6 @@ from datetime import datetime
 
 import logging
 
-from lofar.sas.tmss.client.tmssbuslistener import DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX
 from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment
 
 logger = logging.getLogger(__name__)
diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_notifications.sql b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_notifications.sql
index 75b70085890..5bbf683cac0 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_notifications.sql
+++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/add_notifications.sql
@@ -12,12 +12,18 @@ BEGIN;
 SET LOCAL client_min_messages=warning;
 
 
+DROP TRIGGER IF EXISTS T_NOTIFY_task_INSERT ON resource_allocation.task CASCADE;
+DROP FUNCTION IF EXISTS resource_allocation.NOTIFY_task_INSERT();
+
+
 CREATE OR REPLACE FUNCTION resource_allocation.NOTIFY_task_INSERT()
 RETURNS TRIGGER AS $$
 DECLARE payload text;
 BEGIN
-SELECT CAST(NEW.id AS text) INTO payload;
+
+SELECT '{"id": ' || CAST(NEW.id AS text) || '}' INTO payload;
 PERFORM pg_notify(CAST('task_insert' AS text), payload);
+
 RETURN NEW;
 END;
 $$ LANGUAGE plpgsql;
@@ -29,12 +35,16 @@ FOR EACH ROW
 EXECUTE PROCEDURE resource_allocation.NOTIFY_task_INSERT();
 
 
+DROP TRIGGER IF EXISTS T_NOTIFY_task_UPDATE ON resource_allocation.task CASCADE;
+DROP FUNCTION IF EXISTS resource_allocation.NOTIFY_task_UPDATE();
+
+
 CREATE OR REPLACE FUNCTION resource_allocation.NOTIFY_task_UPDATE()
 RETURNS TRIGGER AS $$
 DECLARE payload text;
 BEGIN
 IF ROW(NEW.*) IS DISTINCT FROM ROW(OLD.*) THEN
-SELECT CAST(NEW.id AS text) INTO payload;
+SELECT '{"id": ' || CAST(NEW.id AS text) || '}' INTO payload;
 PERFORM pg_notify(CAST('task_update' AS text), payload);
 END IF;
 RETURN NEW;
@@ -48,12 +58,18 @@ FOR EACH ROW
 EXECUTE PROCEDURE resource_allocation.NOTIFY_task_UPDATE();
 
 
+DROP TRIGGER IF EXISTS T_NOTIFY_task_DELETE ON resource_allocation.task CASCADE;
+DROP FUNCTION IF EXISTS resource_allocation.NOTIFY_task_DELETE();
+
+
 CREATE OR REPLACE FUNCTION resource_allocation.NOTIFY_task_DELETE()
 RETURNS TRIGGER AS $$
 DECLARE payload text;
 BEGIN
-SELECT CAST(OLD.id AS text) INTO payload;
+
+SELECT '{"id": ' || CAST(OLD.id AS text) || '}' INTO payload;
 PERFORM pg_notify(CAST('task_delete' AS text), payload);
+
 RETURN OLD;
 END;
 $$ LANGUAGE plpgsql;
@@ -65,12 +81,18 @@ FOR EACH ROW
 EXECUTE PROCEDURE resource_allocation.NOTIFY_task_DELETE();
 
 
+DROP TRIGGER IF EXISTS T_NOTIFY_task_predecessor_INSERT_column_task_id ON resource_allocation.task_predecessor CASCADE;
+DROP FUNCTION IF EXISTS resource_allocation.NOTIFY_task_predecessor_INSERT_column_task_id();
+
+
 CREATE OR REPLACE FUNCTION resource_allocation.NOTIFY_task_predecessor_INSERT_column_task_id()
 RETURNS TRIGGER AS $$
 DECLARE payload text;
 BEGIN
-SELECT CAST(NEW.task_id AS text) INTO payload;
+
+SELECT '{"id": ' || CAST(NEW.id AS text) || '}' INTO payload;
 PERFORM pg_notify(CAST('task_predecessor_insert_column_task_id' AS text), payload);
+
 RETURN NEW;
 END;
 $$ LANGUAGE plpgsql;
@@ -82,12 +104,16 @@ FOR EACH ROW
 EXECUTE PROCEDURE resource_allocation.NOTIFY_task_predecessor_INSERT_column_task_id();
 
 
+DROP TRIGGER IF EXISTS T_NOTIFY_task_predecessor_UPDATE_column_task_id ON resource_allocation.task_predecessor CASCADE;
+DROP FUNCTION IF EXISTS resource_allocation.NOTIFY_task_predecessor_UPDATE_column_task_id();
+
+
 CREATE OR REPLACE FUNCTION resource_allocation.NOTIFY_task_predecessor_UPDATE_column_task_id()
 RETURNS TRIGGER AS $$
 DECLARE payload text;
 BEGIN
-IF ROW(NEW.*) IS DISTINCT FROM ROW(OLD.*) THEN
-SELECT CAST(NEW.task_id AS text) INTO payload;
+IF ROW(NEW.task_id) IS DISTINCT FROM ROW(OLD.task_id) THEN
+SELECT '{"id": ' || CAST(NEW.id AS text) || ', "task_id": "' || CAST(NEW.task_id AS text)  || '"}' INTO payload;
 PERFORM pg_notify(CAST('task_predecessor_update_column_task_id' AS text), payload);
 END IF;
 RETURN NEW;
@@ -101,12 +127,18 @@ FOR EACH ROW
 EXECUTE PROCEDURE resource_allocation.NOTIFY_task_predecessor_UPDATE_column_task_id();
 
 
+DROP TRIGGER IF EXISTS T_NOTIFY_task_predecessor_DELETE_column_task_id ON resource_allocation.task_predecessor CASCADE;
+DROP FUNCTION IF EXISTS resource_allocation.NOTIFY_task_predecessor_DELETE_column_task_id();
+
+
 CREATE OR REPLACE FUNCTION resource_allocation.NOTIFY_task_predecessor_DELETE_column_task_id()
 RETURNS TRIGGER AS $$
 DECLARE payload text;
 BEGIN
-SELECT CAST(OLD.task_id AS text) INTO payload;
+
+SELECT '{"id": ' || CAST(OLD.id AS text) || '}' INTO payload;
 PERFORM pg_notify(CAST('task_predecessor_delete_column_task_id' AS text), payload);
+
 RETURN OLD;
 END;
 $$ LANGUAGE plpgsql;
@@ -118,12 +150,18 @@ FOR EACH ROW
 EXECUTE PROCEDURE resource_allocation.NOTIFY_task_predecessor_DELETE_column_task_id();
 
 
+DROP TRIGGER IF EXISTS T_NOTIFY_task_predecessor_INSERT_column_predecessor_id ON resource_allocation.task_predecessor CASCADE;
+DROP FUNCTION IF EXISTS resource_allocation.NOTIFY_task_predecessor_INSERT_column_predecessor_id();
+
+
 CREATE OR REPLACE FUNCTION resource_allocation.NOTIFY_task_predecessor_INSERT_column_predecessor_id()
 RETURNS TRIGGER AS $$
 DECLARE payload text;
 BEGIN
-SELECT CAST(NEW.predecessor_id AS text) INTO payload;
+
+SELECT '{"id": ' || CAST(NEW.id AS text) || '}' INTO payload;
 PERFORM pg_notify(CAST('task_predecessor_insert_column_predecessor_id' AS text), payload);
+
 RETURN NEW;
 END;
 $$ LANGUAGE plpgsql;
@@ -135,12 +173,16 @@ FOR EACH ROW
 EXECUTE PROCEDURE resource_allocation.NOTIFY_task_predecessor_INSERT_column_predecessor_id();
 
 
+DROP TRIGGER IF EXISTS T_NOTIFY_task_predecessor_UPDATE_column_predecessor_id ON resource_allocation.task_predecessor CASCADE;
+DROP FUNCTION IF EXISTS resource_allocation.NOTIFY_task_predecessor_UPDATE_column_predecessor_id();
+
+
 CREATE OR REPLACE FUNCTION resource_allocation.NOTIFY_task_predecessor_UPDATE_column_predecessor_id()
 RETURNS TRIGGER AS $$
 DECLARE payload text;
 BEGIN
-IF ROW(NEW.*) IS DISTINCT FROM ROW(OLD.*) THEN
-SELECT CAST(NEW.predecessor_id AS text) INTO payload;
+IF ROW(NEW.predecessor_id) IS DISTINCT FROM ROW(OLD.predecessor_id) THEN
+SELECT '{"id": ' || CAST(NEW.id AS text) || ', "predecessor_id": "' || CAST(NEW.predecessor_id AS text)  || '"}' INTO payload;
 PERFORM pg_notify(CAST('task_predecessor_update_column_predecessor_id' AS text), payload);
 END IF;
 RETURN NEW;
@@ -154,12 +196,18 @@ FOR EACH ROW
 EXECUTE PROCEDURE resource_allocation.NOTIFY_task_predecessor_UPDATE_column_predecessor_id();
 
 
+DROP TRIGGER IF EXISTS T_NOTIFY_task_predecessor_DELETE_column_predecessor_id ON resource_allocation.task_predecessor CASCADE;
+DROP FUNCTION IF EXISTS resource_allocation.NOTIFY_task_predecessor_DELETE_column_predecessor_id();
+
+
 CREATE OR REPLACE FUNCTION resource_allocation.NOTIFY_task_predecessor_DELETE_column_predecessor_id()
 RETURNS TRIGGER AS $$
 DECLARE payload text;
 BEGIN
-SELECT CAST(OLD.predecessor_id AS text) INTO payload;
+
+SELECT '{"id": ' || CAST(OLD.id AS text) || '}' INTO payload;
 PERFORM pg_notify(CAST('task_predecessor_delete_column_predecessor_id' AS text), payload);
+
 RETURN OLD;
 END;
 $$ LANGUAGE plpgsql;
@@ -171,12 +219,16 @@ FOR EACH ROW
 EXECUTE PROCEDURE resource_allocation.NOTIFY_task_predecessor_DELETE_column_predecessor_id();
 
 
+DROP TRIGGER IF EXISTS T_NOTIFY_specification_UPDATE ON resource_allocation.specification CASCADE;
+DROP FUNCTION IF EXISTS resource_allocation.NOTIFY_specification_UPDATE();
+
+
 CREATE OR REPLACE FUNCTION resource_allocation.NOTIFY_specification_UPDATE()
 RETURNS TRIGGER AS $$
 DECLARE payload text;
 BEGIN
 IF ROW(NEW.*) IS DISTINCT FROM ROW(OLD.*) THEN
-SELECT CAST(NEW.id AS text) INTO payload;
+SELECT '{"id": ' || CAST(NEW.id AS text) || '}' INTO payload;
 PERFORM pg_notify(CAST('specification_update' AS text), payload);
 END IF;
 RETURN NEW;
@@ -190,12 +242,18 @@ FOR EACH ROW
 EXECUTE PROCEDURE resource_allocation.NOTIFY_specification_UPDATE();
 
 
+DROP TRIGGER IF EXISTS T_NOTIFY_resource_claim_INSERT ON resource_allocation.resource_claim CASCADE;
+DROP FUNCTION IF EXISTS resource_allocation.NOTIFY_resource_claim_INSERT();
+
+
 CREATE OR REPLACE FUNCTION resource_allocation.NOTIFY_resource_claim_INSERT()
 RETURNS TRIGGER AS $$
 DECLARE payload text;
 BEGIN
-SELECT CAST(NEW.id AS text) INTO payload;
+
+SELECT '{"id": ' || CAST(NEW.id AS text) || '}' INTO payload;
 PERFORM pg_notify(CAST('resource_claim_insert' AS text), payload);
+
 RETURN NEW;
 END;
 $$ LANGUAGE plpgsql;
@@ -207,12 +265,16 @@ FOR EACH ROW
 EXECUTE PROCEDURE resource_allocation.NOTIFY_resource_claim_INSERT();
 
 
+DROP TRIGGER IF EXISTS T_NOTIFY_resource_claim_UPDATE ON resource_allocation.resource_claim CASCADE;
+DROP FUNCTION IF EXISTS resource_allocation.NOTIFY_resource_claim_UPDATE();
+
+
 CREATE OR REPLACE FUNCTION resource_allocation.NOTIFY_resource_claim_UPDATE()
 RETURNS TRIGGER AS $$
 DECLARE payload text;
 BEGIN
 IF ROW(NEW.*) IS DISTINCT FROM ROW(OLD.*) THEN
-SELECT CAST(NEW.id AS text) INTO payload;
+SELECT '{"id": ' || CAST(NEW.id AS text) || '}' INTO payload;
 PERFORM pg_notify(CAST('resource_claim_update' AS text), payload);
 END IF;
 RETURN NEW;
@@ -226,12 +288,18 @@ FOR EACH ROW
 EXECUTE PROCEDURE resource_allocation.NOTIFY_resource_claim_UPDATE();
 
 
+DROP TRIGGER IF EXISTS T_NOTIFY_resource_claim_DELETE ON resource_allocation.resource_claim CASCADE;
+DROP FUNCTION IF EXISTS resource_allocation.NOTIFY_resource_claim_DELETE();
+
+
 CREATE OR REPLACE FUNCTION resource_allocation.NOTIFY_resource_claim_DELETE()
 RETURNS TRIGGER AS $$
 DECLARE payload text;
 BEGIN
-SELECT CAST(OLD.id AS text) INTO payload;
+
+SELECT '{"id": ' || CAST(OLD.id AS text) || '}' INTO payload;
 PERFORM pg_notify(CAST('resource_claim_delete' AS text), payload);
+
 RETURN OLD;
 END;
 $$ LANGUAGE plpgsql;
@@ -243,12 +311,16 @@ FOR EACH ROW
 EXECUTE PROCEDURE resource_allocation.NOTIFY_resource_claim_DELETE();
 
 
+DROP TRIGGER IF EXISTS T_NOTIFY_resource_availability_UPDATE_column_resource_id ON resource_monitoring.resource_availability CASCADE;
+DROP FUNCTION IF EXISTS resource_monitoring.NOTIFY_resource_availability_UPDATE_column_resource_id();
+
+
 CREATE OR REPLACE FUNCTION resource_monitoring.NOTIFY_resource_availability_UPDATE_column_resource_id()
 RETURNS TRIGGER AS $$
 DECLARE payload text;
 BEGIN
-IF ROW(NEW.*) IS DISTINCT FROM ROW(OLD.*) THEN
-SELECT CAST(NEW.resource_id AS text) INTO payload;
+IF ROW(NEW.resource_id) IS DISTINCT FROM ROW(OLD.resource_id) THEN
+SELECT '{"id": ' || CAST(NEW.id AS text) || ', "resource_id": "' || CAST(NEW.resource_id AS text)  || '"}' INTO payload;
 PERFORM pg_notify(CAST('resource_availability_update_column_resource_id' AS text), payload);
 END IF;
 RETURN NEW;
@@ -262,12 +334,16 @@ FOR EACH ROW
 EXECUTE PROCEDURE resource_monitoring.NOTIFY_resource_availability_UPDATE_column_resource_id();
 
 
+DROP TRIGGER IF EXISTS T_NOTIFY_resource_capacity_UPDATE_column_resource_id ON resource_monitoring.resource_capacity CASCADE;
+DROP FUNCTION IF EXISTS resource_monitoring.NOTIFY_resource_capacity_UPDATE_column_resource_id();
+
+
 CREATE OR REPLACE FUNCTION resource_monitoring.NOTIFY_resource_capacity_UPDATE_column_resource_id()
 RETURNS TRIGGER AS $$
 DECLARE payload text;
 BEGIN
-IF ROW(NEW.*) IS DISTINCT FROM ROW(OLD.*) THEN
-SELECT CAST(NEW.resource_id AS text) INTO payload;
+IF ROW(NEW.resource_id) IS DISTINCT FROM ROW(OLD.resource_id) THEN
+SELECT '{"id": ' || CAST(NEW.id AS text) || ', "resource_id": "' || CAST(NEW.resource_id AS text)  || '"}' INTO payload;
 PERFORM pg_notify(CAST('resource_capacity_update_column_resource_id' AS text), payload);
 END IF;
 RETURN NEW;
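
All of the triggers above now NOTIFY with a JSON payload such as {"id": 42} or {"id": 42, "resource_id": "7"}. A minimal stand-alone sketch (assumed DSN) showing how such a payload is received and decoded with plain psycopg2:

    import json
    import select
    import psycopg2

    conn = psycopg2.connect("dbname=resourceassignment")   # assumed DSN
    conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    cur = conn.cursor()
    cur.execute("LISTEN task_update;")

    while True:
        if select.select([conn], [], [], 5) != ([], [], []):
            conn.poll()
            while conn.notifies:
                notification = conn.notifies.pop(0)
                payload = json.loads(notification.payload)   # e.g. {"id": 42}
                print(notification.channel, payload['id'])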
diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/create_add_notifications.sql.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/create_add_notifications.sql.py
index 81410073abe..fc0406c7200 100755
--- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/create_add_notifications.sql.py
+++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/create_add_notifications.sql.py
@@ -29,6 +29,7 @@ from lofar.common.postgres import makePostgresNotificationQueries
 logger = logging.getLogger(__name__)
 
 if __name__ == '__main__':
+    print("ToDo: the new notifications send a json dict as payload instead on just the id. Adapt RADB code to handle that. For now it's ok, as we don't change the RADB schema or notifications.")
     with open('add_notifications.sql', 'wt') as f:
         f.write('--this file was generated by create_add_notifications.sql.py\n')
         f.write('--it creates triggers and functions which fire postgres notify events upon the given table actions\n')
diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py
index 89a69bac043..6a1786252db 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py
+++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py
@@ -74,7 +74,7 @@ class RADBPGListener(PostgresListener):
 
     def onTaskUpdated(self, payload = None):
         # Send notification for the given updated task
-        task_id = payload        
+        task_id = json.loads(payload)['id']
         task = self.radb.getTask(task_id)
         self._sendNotification('TaskUpdated', task)
 
@@ -86,39 +86,39 @@ class RADBPGListener(PostgresListener):
             self._sendNotification('TaskUpdated', suc_sched_task)
 
     def onTaskInserted(self, payload = None):
-        self._sendNotification('TaskInserted', self.radb.getTask(payload))
+        self._sendNotification('TaskInserted', self.radb.getTask(json.loads(payload)['id']))
 
     def onTaskDeleted(self, payload = None):
-        self._sendNotification('TaskDeleted', payload)
+        self._sendNotification('TaskDeleted', json.loads(payload)['id'])
 
-    def onTaskPredecessorChanged(self, task_id):
-        logger.info('onTaskPredecessorChanged(task_id=%s)', task_id)
-        self._sendNotification('TaskUpdated', self.radb.getTask(task_id))
+    def onTaskPredecessorChanged(self, payload):
+        logger.info('onTaskPredecessorChanged(task_id=%s)', json.loads(payload)['task_id'])
+        self._sendNotification('TaskUpdated', self.radb.getTask(json.loads(payload)['task_id']))
 
-    def onTaskSuccessorChanged(self, task_id):
-        logger.info('onTaskSuccessorChanged(task_id=%s)', task_id)
-        self._sendNotification('TaskUpdated', self.radb.getTask(task_id))
+    def onTaskSuccessorChanged(self, payload):
+        logger.info('onTaskSuccessorChanged(task_id=%s)', json.loads(payload)['task_id'])
+        self._sendNotification('TaskUpdated', self.radb.getTask(json.loads(payload)['task_id']))
 
     def onSpecificationUpdated(self, payload = None):
         # when the specification starttime and endtime are updated, that affects the task as well
-        self._sendNotification('TaskUpdated', self.radb.getTask(specification_id=payload))
+        self._sendNotification('TaskUpdated', self.radb.getTask(specification_id=json.loads(payload)['id']))
 
     def onResourceClaimUpdated(self, payload = None):
-        self._sendNotification('ResourceClaimUpdated', self.radb.getResourceClaim(payload))
+        self._sendNotification('ResourceClaimUpdated', self.radb.getResourceClaim(json.loads(payload)['id']))
 
     def onResourceClaimInserted(self, payload = None):
-        self._sendNotification('ResourceClaimInserted', self.radb.getResourceClaim(payload))
+        self._sendNotification('ResourceClaimInserted', self.radb.getResourceClaim(json.loads(payload)['id']))
 
     def onResourceClaimDeleted(self, payload = None):
-        self._sendNotification('ResourceClaimDeleted', payload)
+        self._sendNotification('ResourceClaimDeleted', json.loads(payload)['id'])
 
     def onResourceAvailabilityUpdated(self, payload = None):
-        r = self.radb.getResources(resource_ids=[payload], include_availability=True)[0]
+        r = self.radb.getResources(resource_ids=[json.loads(payload)['id']], include_availability=True)[0]
         r = {k:r[k] for k in ['id', 'active']}
         self._sendNotification('ResourceAvailabilityUpdated', r)
 
     def onResourceCapacityUpdated(self, payload = None):
-        r = self.radb.getResources(resource_ids=[payload], include_availability=True)[0]
+        r = self.radb.getResources(resource_ids=[json.loads(payload)['id']], include_availability=True)[0]
         r = {k:r[k] for k in ['id', 'total_capacity', 'available_capacity', 'used_capacity']}
         self._sendNotification('ResourceCapacityUpdated', r)
 
diff --git a/SAS/Scheduler/src/taskdialog.cpp b/SAS/Scheduler/src/taskdialog.cpp
index 49b1ef01849..339ef7a5457 100644
--- a/SAS/Scheduler/src/taskdialog.cpp
+++ b/SAS/Scheduler/src/taskdialog.cpp
@@ -1655,6 +1655,8 @@ void TaskDialog::apply(bool close) {
 		close = false;
 	}
 	if (close) {
+		// SW-933
+		clearMultiTasks();
 		this->close();
 	}
 	else {
@@ -4647,6 +4649,9 @@ void TaskDialog::showMultiEdit(std::vector<Task *> &tasks) {
 	isMultiTasks = true;
 	enableTabs();
 
+	// SW-933
+	clearMultiTasks();
+
 	itsDataSlotDialog.clear();
 
     ui.lineEditCreationDate->clear();
diff --git a/SAS/TMSS/client/lib/tmss_http_rest_client.py b/SAS/TMSS/client/lib/tmss_http_rest_client.py
index 0fb13c71c65..48df33a1cab 100644
--- a/SAS/TMSS/client/lib/tmss_http_rest_client.py
+++ b/SAS/TMSS/client/lib/tmss_http_rest_client.py
@@ -156,10 +156,13 @@ class TMSSsession(object):
 
         return self.get_path_as_json_object("subtask", clauses)
 
+    def get_full_url_for_path(self, path: str) -> str:
+        '''get the full URL for the given path'''
+        return '%s/%s' % (self.base_url, path.strip('/'))
+
     def get_path_as_json_object(self, path: str, params={}) -> object:
         '''get resource at the given path, interpret it as json, and return it as a native object (usually a dict or a list of dicts)'''
-        full_url = '%s/%s' % (self.base_url, path.strip('/'))
-        return self.get_url_as_json_object(full_url, params=params)
+        return self.get_url_as_json_object(self.get_full_url_for_path(path=path), params=params)
 
     def get_url_as_json_object(self, full_url: str, params={}) -> object:
         '''get resource at the given full url (including http://<base_url>), interpret it as json, and return it as a native object (usually a dict or a list of dicts)'''
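
A short usage sketch for the new helper (URLs illustrative): get_full_url_for_path() exposes the URL construction that get_path_as_json_object() previously did inline, so callers can build a full URL once and reuse it.

    from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession

    with TMSSsession.create_from_dbcreds_for_ldap() as session:
        url = session.get_full_url_for_path('/subtask/42/')
        # -> '<base_url>/subtask/42' (surrounding slashes stripped from path)
        subtask = session.get_url_as_json_object(url)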
diff --git a/SAS/TMSS/client/lib/tmssbuslistener.py b/SAS/TMSS/client/lib/tmssbuslistener.py
index 30c49bb7ce9..81448e9a16c 100644
--- a/SAS/TMSS/client/lib/tmssbuslistener.py
+++ b/SAS/TMSS/client/lib/tmssbuslistener.py
@@ -28,180 +28,209 @@ Typical usage is to derive your own subclass from TMSSBusListener and implement
 """
 
 from lofar.messaging.messagebus import BusListener, AbstractMessageHandler
-from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME, EventMessage
+from lofar.messaging import DEFAULT_BUSNAME, DEFAULT_BROKER, EventMessage
+from lofar.messaging.exceptions import MessageHandlerUnknownSubjectError
 from lofar.common.util import waitForInterrupt, single_line_with_single_spaces
 
 import logging
 logger = logging.getLogger(__name__)
 
 
-_DEFAULT_TMSS_NOTIFICATION_PREFIX_TEMPLATE = 'TMSS.%s.notification'
-DEFAULT_TMSS_TASK_NOTIFICATION_PREFIX = _DEFAULT_TMSS_NOTIFICATION_PREFIX_TEMPLATE % 'Task'
-DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX = _DEFAULT_TMSS_NOTIFICATION_PREFIX_TEMPLATE % 'SubTask'
-DEFAULT_TMSS_ALL_NOTIFICATION_PREFIX = _DEFAULT_TMSS_NOTIFICATION_PREFIX_TEMPLATE + '#'
+_TMSS_EVENT_PREFIX_TEMPLATE                      = 'TMSS.Event.%s'
+TMSS_SUBTASK_OBJECT_EVENT_PREFIX                 = _TMSS_EVENT_PREFIX_TEMPLATE % 'SubTask.Object'
+TMSS_SUBTASK_STATUS_EVENT_PREFIX                 = _TMSS_EVENT_PREFIX_TEMPLATE % 'SubTask.Status'
+TMSS_TASKBLUEPRINT_OBJECT_EVENT_PREFIX           = _TMSS_EVENT_PREFIX_TEMPLATE % 'TaskBlueprint.Object'
+TMSS_TASKBLUEPRINT_STATUS_EVENT_PREFIX           = _TMSS_EVENT_PREFIX_TEMPLATE % 'TaskBlueprint.Status'
+TMSS_TASKDRAFT_OBJECT_EVENT_PREFIX               = _TMSS_EVENT_PREFIX_TEMPLATE % 'TaskDraft.Object'
+TMSS_SCHEDULINGUNITBLUEPRINT_OBJECT_EVENT_PREFIX = _TMSS_EVENT_PREFIX_TEMPLATE % 'SchedulingUnitBlueprint.Object'
+TMSS_SCHEDULINGUNITBLUEPRINT_STATUS_EVENT_PREFIX = _TMSS_EVENT_PREFIX_TEMPLATE % 'SchedulingUnitBlueprint.Status'
+TMSS_SCHEDULINGUNITDRAFT_OBJECT_EVENT_PREFIX     = _TMSS_EVENT_PREFIX_TEMPLATE % 'SchedulingUnitDraft.Object'
+TMSS_ALL_OBJECT_EVENTS_FILTER                    = _TMSS_EVENT_PREFIX_TEMPLATE % '*.Object.#'
+TMSS_ALL_STATUS_EVENTS_FILTER                    = _TMSS_EVENT_PREFIX_TEMPLATE % '*.Status.#'
+TMSS_ALL_EVENTS_FILTER                           = _TMSS_EVENT_PREFIX_TEMPLATE % '#'
 
 
-class TMSSSubTaskEventMessageHandler(AbstractMessageHandler):
+class TMSSEventMessageHandler(AbstractMessageHandler):
     '''
-    Base-type messagehandler for handling TMSS event messages.
-    Typical usage is to derive your own subclass from TMSSSubTaskEventMessageHandler and implement the specific on<SomeMessage> methods that you are interested in.
+    Base-type messagehandler for handling all TMSS event messages.
+    Typical usage is to derive your own subclass from TMSSEventMessageHandler and implement the specific on<SomeMessage> methods that you are interested in.
     '''
     def handle_message(self, msg: EventMessage):
         if not isinstance(msg, EventMessage):
             raise ValueError("%s: Ignoring non-EventMessage: %s" % (self.__class__.__name__, msg))
 
-        stripped_subject = msg.subject.replace("%s." % DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX, '')
-
-        logger.info("%s.on%s: %s" % (self.__class__.__name__, stripped_subject, single_line_with_single_spaces(msg.content)))
-
-        if stripped_subject == 'Defining':
-            self.onSubTaskDefining(**msg.content)
-        elif stripped_subject == 'Defined':
-            self.onSubTaskDefined(**msg.content)
-        elif stripped_subject == 'Scheduling':
-            self.onSubTaskScheduling(**msg.content)
-        elif stripped_subject == 'Scheduled':
-            self.onSubTaskScheduled(**msg.content)
-        elif stripped_subject == 'Queueing':
-            self.onSubTaskQueueing(**msg.content)
-        elif stripped_subject == 'Queued':
-            self.onSubTaskQueued(**msg.content)
-        elif stripped_subject == 'Starting':
-            self.onSubTaskStarting(**msg.content)
-        elif stripped_subject == 'Started':
-            self.onSubTaskStarted(**msg.content)
-        elif stripped_subject == 'Finishing':
-            self.onSubTaskFinishing(**msg.content)
-        elif stripped_subject == 'Finished':
-            self.onSubTaskFinished(**msg.content)
-        elif stripped_subject == 'Cancelling':
-            self.onSubTaskCancelling(**msg.content)
-        elif stripped_subject == 'Cancelled':
-            self.onSubTaskCancelled(**msg.content)
-        elif stripped_subject == 'Error':
-            self.onSubTaskError(**msg.content)
+        stripped_subject = msg.subject.replace(_TMSS_EVENT_PREFIX_TEMPLATE % ('',), '')
+
+        logger.info("%s %s: %s" % (self.__class__.__name__, stripped_subject, single_line_with_single_spaces(msg.content)))
+
+        # sorry, very big if/elif/else tree.
+        # it just maps all possible event subjects for all possible objects and statuses onto handler methods.
+        if stripped_subject == 'SubTask.Object.Created':
+            self.onSubTaskCreated(**msg.content)
+        elif stripped_subject == 'SubTask.Object.Updated':
+            self.onSubTaskUpdated(**msg.content)
+        elif stripped_subject == 'SubTask.Object.Deleted':
+            self.onSubTaskDeleted(**msg.content)
+        elif stripped_subject == 'TaskBlueprint.Object.Created':
+            self.onTaskBlueprintCreated(**msg.content)
+        elif stripped_subject == 'TaskBlueprint.Object.Updated':
+            self.onTaskBlueprintUpdated(**msg.content)
+        elif stripped_subject == 'TaskBlueprint.Object.Deleted':
+            self.onTaskBlueprintDeleted(**msg.content)
+        elif stripped_subject == 'TaskDraft.Object.Created':
+            self.onTaskDraftCreated(**msg.content)
+        elif stripped_subject == 'TaskDraft.Object.Updated':
+            self.onTaskDraftUpdated(**msg.content)
+        elif stripped_subject == 'TaskDraft.Object.Deleted':
+            self.onTaskDraftDeleted(**msg.content)
+        elif stripped_subject == 'SchedulingUnitBlueprint.Object.Created':
+            self.onSchedulingUnitBlueprintCreated(**msg.content)
+        elif stripped_subject == 'SchedulingUnitBlueprint.Object.Updated':
+            self.onSchedulingUnitBlueprintUpdated(**msg.content)
+        elif stripped_subject == 'SchedulingUnitBlueprint.Object.Deleted':
+            self.onSchedulingUnitBlueprintDeleted(**msg.content)
+        elif stripped_subject == 'SchedulingUnitDraft.Object.Created':
+            self.onSchedulingUnitDraftCreated(**msg.content)
+        elif stripped_subject == 'SchedulingUnitDraft.Object.Updated':
+            self.onSchedulingUnitDraftUpdated(**msg.content)
+        elif stripped_subject == 'SchedulingUnitDraft.Object.Deleted':
+            self.onSchedulingUnitDraftDeleted(**msg.content)
+        elif stripped_subject.startswith('SubTask.Status.'):
+            self.onSubTaskStatusChanged(**msg.content)
+        elif stripped_subject.startswith('TaskBlueprint.Status.'):
+            self.onTaskBlueprintStatusChanged(**msg.content)
+        elif stripped_subject.startswith('SchedulingUnitBlueprint.Status.'):
+            self.onSchedulingUnitBlueprintStatusChanged(**msg.content)
         else:
-            raise ValueError("TMSSBusListener.handleMessage: unknown subject: %s" %  msg.subject)
+            raise MessageHandlerUnknownSubjectError("TMSSEventMessageHandler.handle_message: unknown subject: %s" % msg.subject)
 
-    def onSubTaskDefining(self, subtask_id: int, old_state: str, new_state:str):
-        '''onSubTaskDefining is called upon receiving a SubTaskDefining message, which is sent when a SubTasks changes state to "Defining".
-        :param subtask_id: the TMSS id of the SubTask
-        :param old_state: the previous state of the SubTask
-        :param new_state: the new state of the SubTask
+
+    def onSubTaskStatusChanged(self, id: int, status: str):
+        '''onSubTaskStatusChanged is called upon receiving a SubTask.Status.* message, which is sent when a SubTask changes status.
+        :param id: the TMSS id of the SubTask
+        :param status: the new status of the SubTask
+        '''
+        pass
+
+    def onTaskBlueprintStatusChanged(self, id: int, status: str):
+        '''onTaskBlueprintStatusChanged is called upon receiving a TaskBlueprint.Status.* message, which is sent when a TaskBlueprint changes status.
+        :param id: the TMSS id of the TaskBlueprint
+        :param status: the new status of the TaskBlueprint
+        '''
+        pass
+
+    def onSchedulingUnitBlueprintStatusChanged(self, id: int, status: str):
+        '''onSchedulingUnitBlueprintStatusChanged is called upon receiving a SchedulingUnitBlueprint.Status.* message, which is sent when a SchedulingUnitBlueprint changes status.
+        :param id: the TMSS id of the SchedulingUnitBlueprint
+        :param status: the new status of the SchedulingUnitBlueprint
+        '''
+        pass
+
+    def onSubTaskCreated(self, id: int):
+        '''onSubTaskCreated is called upon receiving a SubTask.Object.Created message, which is sent when a SubTask is created.
+        :param id: the TMSS id of the SubTask
+        '''
+        pass
+
+    def onSubTaskUpdated(self, id: int):
+        '''onSubTaskUpdated is called upon receiving a SubTask.Object.Updated message, which is sent when a SubTask is updated.
+        :param id: the TMSS id of the SubTask
+        '''
+        pass
+
+    def onSubTaskDeleted(self, id: int):
+        '''onSubTaskDeleted is called upon receiving a SubTask.Object.Deleted message, which is sent when a SubTask is deleted.
+        :param id: the TMSS id of the SubTask
         '''
         pass
 
-    def onSubTaskDefined(self, subtask_id: int, old_state: str, new_state:str):
-        '''onSubTaskDefined is called upon received a SubTaskDefined message, which is sent when a SubTasks changes state to "Defined".
-        :param subtask_id: the TMSS id of the SubTask
-        :param old_state: the previous state of the SubTask
-        :param new_state: the new state of the SubTask
+    def onTaskDraftCreated(self, id: int):
+        '''onTaskDraftCreated is called upon receiving a TaskDraft.Object.Created message, which is sent when a TaskDraft was created.
+        :param id: the TMSS id of the TaskDraft
         '''
         pass
 
-    def onSubTaskScheduling(self, subtask_id: int, old_state: str, new_state:str):
-        '''onSubTaskScheduling is called upon receiving a SubTaskScheduling message, which is sent when a SubTasks changes state to "Scheduling".
-        :param subtask_id: the TMSS id of the SubTask
-        :param old_state: the previous state of the SubTask
-        :param new_state: the new state of the SubTask
+    def onTaskDraftUpdated(self, id: int):
+        '''onTaskDraftUpdated is called upon receiving a TaskDraft.Object.Updated message, which is sent when a TaskDraft was updated.
+        :param id: the TMSS id of the TaskDraft
         '''
         pass
 
-    def onSubTaskScheduled(self, subtask_id: int, old_state: str, new_state:str):
-        '''onSubTaskScheduled is called upon received a SubTaskScheduled message, which is sent when a SubTasks changes state to "Scheduled".
-        :param subtask_id: the TMSS id of the SubTask
-        :param old_state: the previous state of the SubTask
-        :param new_state: the new state of the SubTask
+    def onTaskDraftDeleted(self, id: int):
+        '''onTaskDraftDeleted is called upon receiving a TaskDraft.Object.Deleted message, which is sent when a TaskDraft was deleted.
+        :param id: the TMSS id of the TaskDraft
         '''
         pass
 
-    def onSubTaskQueueing(self, subtask_id: int, old_state: str, new_state:str):
-        '''onSubTaskQueueing is called upon receiving a SubTaskQueueing message, which is sent when a SubTasks changes state to "Queueing".
-        :param subtask_id: the TMSS id of the SubTask
-        :param old_state: the previous state of the SubTask
-        :param new_state: the new state of the SubTask
+    def onTaskBlueprintCreated(self, id: int):
+        '''onTaskBlueprintCreated is called upon receiving a TaskBlueprint.Object.Created message, which is sent when a TaskBlueprint was created.
+        :param id: the TMSS id of the TaskBlueprint
         '''
         pass
 
-    def onSubTaskQueued(self, subtask_id: int, old_state: str, new_state:str):
-        '''onSubTaskQueued is called upon received a SubTaskQueued message, which is sent when a SubTasks changes state to "Queued".
-        :param subtask_id: the TMSS id of the SubTask
-        :param old_state: the previous state of the SubTask
-        :param new_state: the new state of the SubTask
+    def onTaskBlueprintUpdated(self, id: int):
+        '''onTaskBlueprintUpdated is called upon receiving a TaskBlueprint.Object.Updated message, which is sent when a TaskBlueprint was updated.
+        :param id: the TMSS id of the TaskBlueprint
         '''
         pass
 
-    def onSubTaskStarting(self, subtask_id: int, old_state: str, new_state:str):
-        '''onSubTaskStarting is called upon receiving a SubTaskStarting message, which is sent when a SubTasks changes state to "Starting".
-        :param subtask_id: the TMSS id of the SubTask
-        :param old_state: the previous state of the SubTask
-        :param new_state: the new state of the SubTask
+    def onTaskBlueprintDeleted(self, id: int):
+        '''onTaskBlueprintDeleted is called upon receiving a TaskBlueprint.Object.Deleted message, which is sent when a TaskBlueprint was deleted.
+        :param id: the TMSS id of the TaskBlueprint
         '''
         pass
 
-    def onSubTaskStarted(self, subtask_id: int, old_state: str, new_state:str):
-        '''onSubTaskStarted is called upon received a SubTaskStarted message, which is sent when a SubTasks changes state to "Started".
-        :param subtask_id: the TMSS id of the SubTask
-        :param old_state: the previous state of the SubTask
-        :param new_state: the new state of the SubTask
+    def onSchedulingUnitDraftCreated(self, id: int):
+        '''onSchedulingUnitDraftCreated is called upon receiving a SchedulingUnitDraft.Object.Created message, which is sent when a SchedulingUnitDraft was created.
+        :param id: the TMSS id of the SchedulingUnitDraft
         '''
         pass
 
-    def onSubTaskFinishing(self, subtask_id: int, old_state: str, new_state:str):
-        '''onSubTaskFinishing is called upon receiving a SubTaskFinishing message, which is sent when a SubTasks changes state to "Finishing".
-        :param subtask_id: the TMSS id of the SubTask
-        :param old_state: the previous state of the SubTask
-        :param new_state: the new state of the SubTask
+    def onSchedulingUnitDraftUpdated(self, id: int):
+        '''onSchedulingUnitDraftUpdated is called upon receiving a SchedulingUnitDraft.Object.Updated message, which is sent when a SchedulingUnitDraft was updated.
+        :param id: the TMSS id of the SchedulingUnitDraft
         '''
         pass
 
-    def onSubTaskFinished(self, subtask_id: int, old_state: str, new_state:str):
-        '''onSubTaskFinished is called upon received a SubTaskFinished message, which is sent when a SubTasks changes state to "Finished".
-        :param subtask_id: the TMSS id of the SubTask
-        :param old_state: the previous state of the SubTask
-        :param new_state: the new state of the SubTask
+    def onSchedulingUnitDraftDeleted(self, id: int):
+        '''onSchedulingUnitDraftDeleted is called upon receiving a SchedulingUnitDraft.Object.Deleted message, which is sent when a SchedulingUnitDraft was deleted.
+        :param id: the TMSS id of the SchedulingUnitDraft
         '''
         pass
 
-    def onSubTaskCancelling(self, subtask_id: int, old_state: str, new_state:str):
-        '''onSubTaskCancelling is called upon receiving a SubTaskCancelling message, which is sent when a SubTasks changes state to "Cancelling".
-        :param subtask_id: the TMSS id of the SubTask
-        :param old_state: the previous state of the SubTask
-        :param new_state: the new state of the SubTask
+    def onSchedulingUnitBlueprintCreated(self, id: int):
+        '''onSchedulingUnitBlueprintCreated is called upon receiving a SchedulingUnitBlueprint.Object.Created message, which is sent when a SchedulingUnitBlueprint was created.
+        :param id: the TMSS id of the SchedulingUnitBlueprint
         '''
         pass
 
-    def onSubTaskCancelled(self, subtask_id: int, old_state: str, new_state:str):
-        '''onSubTaskCancelled is called upon received a SubTaskCancelled message, which is sent when a SubTasks changes state to "Cancelled".
-        :param subtask_id: the TMSS id of the SubTask
-        :param old_state: the previous state of the SubTask
-        :param new_state: the new state of the SubTask
+    def onSchedulingUnitBlueprintUpdated(self, id: int):
+        '''onSchedulingUnitBlueprintUpdated is called upon receiving a SchedulingUnitBlueprint.Object.Updated message, which is sent when a SchedulingUnitBlueprint was updated.
+        :param id: the TMSS id of the SchedulingUnitBlueprint
         '''
         pass
 
-    def onSubTaskError(self, subtask_id: int, old_state: str, new_state:str):
-        '''onSubTaskError is called upon receiving a SubTaskError message, which is sent when a SubTasks changes state to "Error".
-        :param subtask_id: the TMSS id of the SubTask
-        :param old_state: the previous state of the SubTask
-        :param new_state: the new state of the SubTask
+    def onSchedulingUnitBlueprintDeleted(self, id: int):
+        '''onSchedulingUnitBlueprintDeleted is called upon receiving a SchedulingUnitBlueprint.Object.Deleted message, which is sent when a SchedulingUnitBlueprint was deleted.
+        :param id: the TMSS id of the SchedulingUnitBlueprint
         '''
         pass
 
 
-class TMSSSubTaskBusListener(BusListener):
+class TMSSBusListener(BusListener):
     def __init__(self,
-                 handler_type: TMSSSubTaskEventMessageHandler.__class__ = TMSSSubTaskEventMessageHandler,
+                 handler_type: TMSSEventMessageHandler.__class__ = TMSSEventMessageHandler,
                  handler_kwargs: dict = None,
                  exchange: str = DEFAULT_BUSNAME,
-                 routing_key: str = DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX+".#",
+                 routing_key: str = TMSS_ALL_EVENTS_FILTER,
                  num_threads: int = 1,
                  broker: str = DEFAULT_BROKER):
         """
-        TMSSSubTaskBusListener listens on the lofar notification message bus and calls on<SomeMessage> methods in the TMSSSubTaskEventMessageHandler when such a message is received.
-        Typical usage is to derive your own subclass from TMSSSubTaskEventMessageHandler and implement the specific on<SomeMessage> methods that you are interested in.
+        TMSSBusListener listens on the lofar notification message bus and calls on<SomeMessage> methods in the TMSSEventMessageHandler when such a message is received.
+        Typical usage is to derive your own subclass from TMSSEventMessageHandler and implement the specific on<SomeMessage> methods that you are interested in.
         """
-        if not issubclass(handler_type, TMSSSubTaskEventMessageHandler):
-            raise TypeError("handler_type should be a TMSSSubTaskEventMessageHandler subclass")
+        if not issubclass(handler_type, TMSSEventMessageHandler):
+            raise TypeError("handler_type should be a TMSSEventMessageHandler subclass")
 
         super().__init__(handler_type, handler_kwargs, exchange, routing_key, num_threads, broker)
 
@@ -209,10 +238,11 @@ class TMSSSubTaskBusListener(BusListener):
 if __name__ == '__main__':
     logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
 
-    class ExampleTMSSSubTaskEventMessageHandler(TMSSSubTaskEventMessageHandler):
-        def onSubTaskDefined(self, **kwargs):
-            logger.debug("MyTMSSSubTaskEventMessageHandler.onSubTaskDefined(%s)", kwargs)
+    class ExampleTMSSEventMessageHandler(TMSSEventMessageHandler):
+        def onSubTaskStatusChanged(self, id: int, status:str):
+            logger.debug("MyTMSSEventMessageHandler.onSubTaskStatusChanged(id=%s, status=%s)", id, status)
 
-    with TMSSSubTaskBusListener(handler_type=ExampleTMSSSubTaskEventMessageHandler):
+    from lofar.messaging.messagebus import BusListenerJanitor
+    with BusListenerJanitor(TMSSBusListener(handler_type=ExampleTMSSEventMessageHandler)):
         waitForInterrupt()
 
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js b/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js
index 9b4a1a0b038..a045bc5f47a 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js
@@ -572,7 +572,7 @@ export class CalendarTimeline extends Component {
         if (this.state.viewType===UIConstants.timeline.types.NORMAL) {
             const startTime = moment().utc().add(-24, 'hours');
             const endTime = moment().utc().add(24, 'hours');
-            let result = this.props.dateRangeCallback(startTime, endTime);
+            let result = await this.props.dateRangeCallback(startTime, endTime);
             let group = DEFAULT_GROUP.concat(result.group);
             this.setState({defaultStartTime: startTime, defaultEndTime: endTime, 
                             zoomLevel: DEFAULT_ZOOM_LEVEL, dayHeaderVisible: true, 
@@ -626,12 +626,7 @@ export class CalendarTimeline extends Component {
                 }
             }
             this.loadLSTDateHeaderMap(startTime, endTime, 'hour');
-            let result = {};
-            if (this.state.viewType===UIConstants.timeline.types.WEEKVIEW) {
-                result = await this.props.dateRangeCallback(startTime, endTime);
-            }   else {
-                result = this.props.dateRangeCallback(startTime, endTime);
-            }
+            let result = await this.props.dateRangeCallback(startTime, endTime);
             let group = DEFAULT_GROUP.concat(result.group);
             this.setState({zoomLevel: zoomLevel, defaultStartTime: startTime, defaultEndTime: endTime, 
                             isTimelineZoom: isTimelineZoom, zoomRange: null, 
@@ -650,17 +645,12 @@ export class CalendarTimeline extends Component {
         let secondsToMove = visibleTimeDiff / 1000 / 10 ;
         let newVisibleTimeStart = visibleTimeStart.clone().add(-1 * secondsToMove, 'seconds');
         let newVisibleTimeEnd = visibleTimeEnd.clone().add(-1 * secondsToMove, 'seconds');
-        let result = {};
         if (this.state.viewType === UIConstants.timeline.types.WEEKVIEW &&
             newVisibleTimeStart.isBefore(this.state.timelineStartDate)) {
             newVisibleTimeStart = this.state.timelineStartDate.clone().hours(0).minutes(0).seconds(0);
             newVisibleTimeEnd = newVisibleTimeStart.clone().add(visibleTimeDiff/1000, 'seconds');
         }
-        if (this.state.viewType === UIConstants.timeline.types.WEEKVIEW) {
-            result = await this.props.dateRangeCallback(newVisibleTimeStart, newVisibleTimeEnd);
-        }   else {
-            result = this.props.dateRangeCallback(newVisibleTimeStart, newVisibleTimeEnd);
-        }
+        let result = await this.props.dateRangeCallback(newVisibleTimeStart, newVisibleTimeEnd);
         this.loadLSTDateHeaderMap(newVisibleTimeStart, newVisibleTimeEnd, 'hour');
         let group = DEFAULT_GROUP.concat(result.group);
         this.setState({defaultStartTime: newVisibleTimeStart,
@@ -678,17 +668,12 @@ export class CalendarTimeline extends Component {
         const secondsToMove = visibleTimeDiff / 1000 / 10 ;
         let newVisibleTimeStart = visibleTimeStart.clone().add(1 * secondsToMove, 'seconds');
         let newVisibleTimeEnd = visibleTimeEnd.clone().add(1 * secondsToMove, 'seconds');
-        let result = {};
         if (this.state.viewType === UIConstants.timeline.types.WEEKVIEW &&
             newVisibleTimeEnd.isAfter(this.state.timelineEndDate)) {
             newVisibleTimeEnd = this.state.timelineEndDate.clone().hours(23).minutes(59).minutes(59);
             newVisibleTimeStart = newVisibleTimeEnd.clone().add((-1 * visibleTimeDiff/1000), 'seconds');
         }
-        if (this.state.viewType === UIConstants.timeline.types.WEEKVIEW) {
-           result = await this.props.dateRangeCallback(visibleTimeStart, visibleTimeEnd);
-        }   else {
-            result = this.props.dateRangeCallback(visibleTimeStart, visibleTimeEnd);
-        }
+        let result = await this.props.dateRangeCallback(visibleTimeStart, visibleTimeEnd);
         this.loadLSTDateHeaderMap(newVisibleTimeStart, newVisibleTimeEnd, 'hour');
         let group = DEFAULT_GROUP.concat(result.group);
         this.setState({defaultStartTime: newVisibleTimeStart,
@@ -726,13 +711,13 @@ export class CalendarTimeline extends Component {
      * calls back parent to get updated group and item records, LST date header values
      * @param {array} value - array of moment object
      */
-    setZoomRange(value){
+    async setZoomRange(value){
         let startDate, endDate = null;
         if (value) {
             // Set all values only when both range values available in the array else just set the value to reflect in the date selection component
             if (value[1]!==null) {
-                startDate = moment.utc(moment(value[0]).format("DD-MMM-YYYY"));
-                endDate = moment.utc(moment(value[1]).format("DD-MMM-YYYY 23:59:59"));
+                startDate = moment.utc(moment(value[0]).format("YYYY-MM-DD"));
+                endDate = moment.utc(moment(value[1]).format("YYYY-MM-DD 23:59:59"));
                 let dayHeaderVisible = this.state.dayHeaderVisible;
                 let weekHeaderVisible = this.state.weekHeaderVisible;
                 let lstDateHeaderUnit = this.state.lstDateHeaderUnit;
@@ -746,7 +731,7 @@ export class CalendarTimeline extends Component {
                                 dayHeaderVisible: dayHeaderVisible, weekHeaderVisible: weekHeaderVisible, 
                                 lstDateHeaderUnit: lstDateHeaderUnit
                                 });
-                const result = this.props.dateRangeCallback(startDate, endDate);
+                const result = await this.props.dateRangeCallback(startDate, endDate);
                 let group = DEFAULT_GROUP.concat(result.group);
                 this.setState({group: group, items: result.items});
                 this.loadLSTDateHeaderMap(startDate, endDate, lstDateHeaderUnit);
@@ -795,7 +780,7 @@ export class CalendarTimeline extends Component {
         return (
             <React.Fragment>
                 {/* Toolbar for the timeline */}
-                <div className="p-fluid p-grid timeline-toolbar">
+                <div className={`p-fluid p-grid timeline-toolbar ${this.props.className}`}>
                     {/* Clock Display */}
                     <div className="p-col-2" style={{padding: '0px 0px 0px 10px'}}>
                         <div style={{marginTop: "0px"}}>
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js b/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js
index b49c55c8f98..00f02fc49b3 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js
@@ -683,7 +683,7 @@ optionaldataheader.forEach(header => {
     columns.push({
       Header: isString ? optionalheader[0][header] : optionalheader[0][header].name,
       id: isString ? header : optionalheader[0][header].name,
-      accessor: header,
+      accessor: isString ? header : optionalheader[0][header].name, 
       filter: filtertype,
       Filter: filterFn,
       isVisible: false,
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss
index 045a70a2728..1c5e635be11 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss
+++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss
@@ -6,6 +6,20 @@
     background-color: #f0f0f0;
 }
 
+.timeline-view-toolbar {
+    margin-left: 10px;
+}
+
+.timeline-view-toolbar label {
+    margin-bottom: 0px;
+    vertical-align: top;
+    margin-right: 10px;
+}
+
+.timeline-toolbar-margin-top-0 {
+    margin-top: 0px !important;
+}
+
 .timeline-toolbar {
     margin-top: 25px;
     margin-bottom: 2px;
@@ -37,7 +51,7 @@
 }
 
 .timeline-filters button {
-    width: auto !important;
+    // width: auto !important;
 }
 
 .timeline-week-span {
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/view.js
index 1dce9c77522..d2dfd4708c5 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/view.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/view.js
@@ -179,7 +179,7 @@ export class ProjectView extends Component {
                             <div className="p-field p-grid resource-input-grid">
                                 <ResourceDisplayList projectQuota={this.state.projectQuota}  unitMap={this.resourceUnitMap} />
                             </div>
-                            {/* Show Schedule Unit blongest to Project */}
+                            {/* Show Scheduling Units belonging to the Project */}
                             <div className="p-fluid">
                                 <div className="p-field p-grid">
                                     <div className="col-lg-3 col-md-3 col-sm-12">
@@ -187,7 +187,7 @@ export class ProjectView extends Component {
                                     </div>
                                 </div>
                             </div>
-                            <SchedulingUnitList project={this.state.project.name}/>
+                            <SchedulingUnitList project={this.state.project.name} hideProjectColumn/>
                         </div>
                     </React.Fragment>
                 }
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js
index 52ebcb8b12c..a19914a42eb 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js
@@ -116,8 +116,12 @@ export default (props) => {
                 list.push('disable-field');
             }
             ref.editors['root.time.at'].container.className = list.join(' ');
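+            // also disable the input and button elements inside the 'root.time.at'
+            // editor control, in addition to styling the container as disabled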
+            Array.prototype.slice.call(ref.editors['root.time.at'].control.getElementsByTagName('input')).forEach(input => input.disabled = true);
+            Array.prototype.slice.call(ref.editors['root.time.at'].control.getElementsByTagName('button')).forEach(button => button.disabled = true);
         } else {
             ref.editors['root.time.at'].container.className = ref.editors['root.time.at'].container.className.replace('disable-field', '');
+            Array.prototype.slice.call(ref.editors['root.time.at'].control.getElementsByTagName('input')).forEach(input => input.disabled = false);
+            Array.prototype.slice.call(ref.editors['root.time.at'].control.getElementsByTagName('button')).forEach(button => button.disabled = false);
         }
         if (props.callback) {
             props.callback(jsonOutput, errors);
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js
index 9bb43e3f0c7..570ca6388bd 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js
@@ -9,37 +9,42 @@ import ScheduleService from '../../services/schedule.service';
 class SchedulingUnitList extends Component{
      
     constructor(props){
-        super(props)
+        super(props)
+        const defaultcolumns = {
+            type: {
+                name: "Type",
+                filter: "select"
+            },
+            name: "Name",
+            description: "Description",
+            project: "Project",
+            created_at: {
+                name: "Created At",
+                filter: "date"
+            },
+            updated_at: {
+                name: "Updated At",
+                filter: "date"
+            },
+            requirements_template_id: {
+                name: "Template",
+                filter: "select"
+            },
+            start_time: "Start Time",
+            stop_time: "End time",
+            duration: "Duration (HH:mm:ss)",
+            status: "Status"
+        };
+        if (props.hideProjectColumn) {
+            delete defaultcolumns['project'];
+        }
         this.state = {
             scheduleunit: [],
             paths: [{
                 "View": "/schedulingunit/view",
             }],
             isLoading: true,
-            defaultcolumns: [ {
-                type:{
-                    name:"Type",
-                    filter:"select"
-                },
-                name:"Name",
-                description:"Description",
-                created_at:{
-                    name:"Created At",
-                    filter: "date"
-                },
-                updated_at:{
-                    name:"Updated At",
-                    filter: "date"
-                },
-                requirements_template_id:{
-                    name: "Template",
-                    filter: "select"
-                },
-                start_time:"Start Time",
-                stop_time:"End time",
-                duration:"Duration (HH:mm:ss)",
-                status:"Status"
-                }],
+            defaultcolumns: [defaultcolumns],
             optionalcolumns:  [{
                 actionpath:"actionpath",
             }],
@@ -57,25 +62,30 @@ class SchedulingUnitList extends Component{
         //Get SU Draft/Blueprints for the Project ID. This request is coming from view Project page. Otherwise it will show all SU
         let project = this.props.project;
         if(project){
-            let scheduleunits = await ScheduleService.getSchedulingListByProject(project);
-            if(scheduleunits){
+            let scheduleunits = await ScheduleService.getSchedulingListByProject(project);
+            if(scheduleunits){
                 this.setState({
                     scheduleunit: scheduleunits, isLoading: false
                 });
             }
-        }else{
+        }else{
+            const schedulingSet = await ScheduleService.getSchedulingSets();
+            const projects = await ScheduleService.getProjectList();
             const bluePrint = await ScheduleService.getSchedulingUnitBlueprint();
             ScheduleService.getSchedulingUnitDraft().then(scheduleunit =>{
                 const output = [];
                 var scheduleunits = scheduleunit.data.results;
                 for( const scheduleunit  of scheduleunits){
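+                    // resolve the parent project via the scheduling set
+                    // (suSet.project_id holds the project name)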
+                    const suSet = schedulingSet.find((set) => scheduleunit.scheduling_set_id === set.id);
+                    const project = projects.find((proj) => suSet.project_id === proj.name);
                     const blueprintdata = bluePrint.data.results.filter(i => i.draft_id === scheduleunit.id);
                     blueprintdata.map(blueP => { 
-                        blueP.duration = moment.utc((blueP.duration || 0)*1000).format('HH:mm:ss'); 
+                        blueP.duration = moment.utc((blueP.duration || 0)*1000).format('HH:mm:ss');
                         blueP.type="Blueprint"; 
                         blueP['actionpath'] ='/schedulingunit/view/blueprint/'+blueP.id;
                         blueP['created_at'] = moment(blueP['created_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss");
                         blueP['updated_at'] = moment(blueP['updated_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss");
+                        blueP.project = project.name;
                         return blueP; 
                     });
                     output.push(...blueprintdata);
@@ -84,6 +94,7 @@ class SchedulingUnitList extends Component{
                     scheduleunit['duration'] = moment.utc((scheduleunit.duration || 0)*1000).format('HH:mm:ss');
                     scheduleunit['created_at'] = moment(scheduleunit['created_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss");
                     scheduleunit['updated_at'] = moment(scheduleunit['updated_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss");
+                    scheduleunit.project = project.name;
                     output.push(scheduleunit);
                 }
                 this.setState({
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js
index bb5635a0b0b..16524edc0f5 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js
@@ -4,6 +4,7 @@ import moment from 'moment';
 import _ from 'lodash';
 
 // import SplitPane, { Pane }  from 'react-split-pane';
+import {InputSwitch} from 'primereact/inputswitch';
 
 import AppLoader from '../../layout/components/AppLoader';
 import PageHeader from '../../layout/components/PageHeader';
@@ -44,6 +45,7 @@ export class TimelineView extends Component {
             suTaskList:[],
             isSummaryLoading: false
         }
+        this.STATUS_BEFORE_SCHEDULED = ['defining', 'defined', 'schedulable'];  // SU statuses before 'scheduled'; for these, stations are read from the task station_groups
 
         this.onItemClick = this.onItemClick.bind(this);
         this.closeSUDets = this.closeSUDets.bind(this);
@@ -59,11 +61,11 @@ export class TimelineView extends Component {
                             ScheduleService.getSchedulingUnitDraft(),
                             ScheduleService.getSchedulingSets(),
                             UtilService.getUTC()] ;
-        Promise.all(promises).then(responses => {
+        Promise.all(promises).then(async(responses) => {
             const projects = responses[0];
             const suBlueprints = _.sortBy(responses[1].data.results, 'name');
             const suDrafts = responses[2].data.results;
-            const suSets = responses[3]
+            const suSets = responses[3];
             const group = [], items = [];
             const currentUTC = moment.utc(responses[4]);
             const defaultStartTime = currentUTC.clone().add(-24, 'hours');      // Default start time, this should be updated if default view is changed.
@@ -81,15 +83,24 @@ export class TimelineView extends Component {
                         suBlueprint.suSet = suSet;
                         suBlueprint.durationInSec = suBlueprint.duration;
                         suBlueprint.duration = UnitConverter.getSecsToHHmmss(suBlueprint.duration);
+                        // Load subtasks (to get their stations) only when the SU status is 'scheduled' or later
+                        const loadSubtasks = this.STATUS_BEFORE_SCHEDULED.indexOf(suBlueprint.status.toLowerCase()) < 0 ;
                         // Select only blueprints with start_time and stop_time in the default time limit
                         if (suBlueprint.start_time && 
                             (moment.utc(suBlueprint.start_time).isBetween(defaultStartTime, defaultEndTime) ||
                              moment.utc(suBlueprint.stop_time).isBetween(defaultStartTime, defaultEndTime))) {
+                            suBlueprint.tasks = await ScheduleService.getTaskBlueprintsBySchedulingUnit(suBlueprint, true, loadSubtasks);
                             items.push(this.getTimelineItem(suBlueprint));
                             if (!_.find(group, {'id': suDraft.id})) {
                                 group.push({'id': suDraft.id, title: suDraft.name});
                             }
                             suList.push(suBlueprint);
+                        }   else if (suBlueprint.start_time) {  // For other SUs with start_time load details asynchronously
+                            ScheduleService.getTaskBlueprintsBySchedulingUnit(suBlueprint, true, loadSubtasks)
+                                .then(tasks => {
+                                    suBlueprint.tasks = tasks;
+                            })
                         }
                     }
                 }
@@ -108,7 +119,7 @@ export class TimelineView extends Component {
     getTimelineItem(suBlueprint) {
         // Temporary for testing
         const diffOfCurrAndStart = moment().diff(moment(suBlueprint.stop_time), 'seconds');
-        suBlueprint.status = diffOfCurrAndStart>=0?"FINISHED":"DEFINED";
+        // suBlueprint.status = diffOfCurrAndStart>=0?"FINISHED":"DEFINED";
         let item = { id: suBlueprint.id, 
             group: suBlueprint.suDraft.id,
             title: `${suBlueprint.project.name} - ${suBlueprint.suDraft.name} - ${(suBlueprint.durationInSec/3600).toFixed(2)}Hrs`,
@@ -136,7 +147,7 @@ export class TimelineView extends Component {
                 suTaskList: !fetchDetails?this.state.suTaskList:[],
                 canExtendSUList: false, canShrinkSUList:false});
             if (fetchDetails) {
-                const suBlueprint = _.find(this.state.suBlueprints, {id: item.id});
+                const suBlueprint = _.find(this.state.suBlueprints, {id: (this.state.stationView?parseInt(item.id.split('-')[0]):item.id)});
                 ScheduleService.getTaskBlueprintsBySchedulingUnit(suBlueprint, true)
                     .then(taskList => {
                         for (let task of taskList) {
@@ -163,17 +174,24 @@ export class TimelineView extends Component {
      * @param {moment} startTime 
      * @param {moment} endTime 
      */
-    dateRangeCallback(startTime, endTime) {
+    async dateRangeCallback(startTime, endTime) {
         let suBlueprintList = [], group=[], items = [];
         if (startTime && endTime) {
             for (const suBlueprint of this.state.suBlueprints) {
                 if (moment.utc(suBlueprint.start_time).isBetween(startTime, endTime) 
                         || moment.utc(suBlueprint.stop_time).isBetween(startTime, endTime)) {
-                    suBlueprintList.push(suBlueprint);
-                    items.push(this.getTimelineItem(suBlueprint));
-                    if (!_.find(group, {'id': suBlueprint.suDraft.id})) {
-                        group.push({'id': suBlueprint.suDraft.id, title: suBlueprint.suDraft.name});
+                    let timelineItem = this.getTimelineItem(suBlueprint);
+                    if (this.state.stationView) {
+                        const loadSubtasks = this.STATUS_BEFORE_SCHEDULED.indexOf(suBlueprint.status.toLowerCase()) < 0 ;
+                        suBlueprint.tasks = await ScheduleService.getTaskBlueprintsBySchedulingUnit(suBlueprint, true, loadSubtasks);
+                        this.getStationItemGroups(suBlueprint, timelineItem, group, items);
+                    }   else {
+                        items.push(timelineItem);
+                        if (!_.find(group, {'id': suBlueprint.suDraft.id})) {
+                            group.push({'id': suBlueprint.suDraft.id, title: suBlueprint.suDraft.name});
+                        }
                     }
+                    suBlueprintList.push(suBlueprint);
                 } 
             }
         }   else {
@@ -184,7 +202,48 @@ export class TimelineView extends Component {
         this.setState({suBlueprintList: _.filter(suBlueprintList, (suBlueprint) => {return suBlueprint.start_time!=null})});
         // On range change close the Details pane
         // this.closeSUDets();
-        return {group: group, items: items};
+        return {group: _.sortBy(group,'id'), items: items};
+    }
+
+    /**
+     * To get items and groups for station view
+     * @param {Object} suBlueprint 
+     * @param {Object} timelineItem 
+     * @param {Array} group 
+     * @param {Array} items 
+     */
+    getStationItemGroups(suBlueprint, timelineItem, group, items) {
+        /** Get all observation tasks */
+        const observationTasks = _.filter(suBlueprint.tasks, (task) => { return task.template.type_value.toLowerCase() === "observation"});
+        let stations = [];
+        for (const observationTask of observationTasks) {
+            /** If the SU status is before scheduled, get all stations from the station_groups in the task's specifications_doc */
+            if (this.STATUS_BEFORE_SCHEDULED.indexOf(suBlueprint.status.toLowerCase()) >= 0
+                && observationTask.specifications_doc.station_groups) {
+                for (const grpStations of _.map(observationTask.specifications_doc.station_groups, "stations")) {
+                    stations = _.concat(stations, grpStations);
+                }
+            }   else if (this.STATUS_BEFORE_SCHEDULED.indexOf(suBlueprint.status.toLowerCase()) < 0
+                            && observationTask.subTasks) {
+                /** If the SU status is scheduled or later, get the stations from the subtask specifications_doc */
+                for (const subtask of observationTask.subTasks) {
+                    if (subtask.specifications_doc.stations) {
+                        stations = _.concat(stations, subtask.specifications_doc.stations.station_list);
+                    }
+                }
+            }
+        }
+        stations = _.uniq(stations);
+        /** Group the items by station */
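+        /** Note: each station item gets a composite id '<suBlueprintId>-<station>',
+            which other parts of this view parse back via item.id.split('-')[0] */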
+        for (const station of stations) {
+            let stationItem = _.cloneDeep(timelineItem);
+            stationItem.id = `${stationItem.id}-${station}`;
+            stationItem.group = station;
+            items.push(stationItem);
+            if (!_.find(group, {'id': station})) {
+                group.push({'id': station, title: station});
+            }
+        }
     }
 
     /**
@@ -215,13 +274,18 @@ export class TimelineView extends Component {
         const suBlueprints = this.state.suBlueprints;
         for (const data of filteredData) {
             const suBlueprint = _.find(suBlueprints, {actionpath: data.actionpath});
-            items.push(this.getTimelineItem(suBlueprint));
-            if (!_.find(group, {'id': suBlueprint.suDraft.id})) {
-                group.push({'id': suBlueprint.suDraft.id, title: suBlueprint.suDraft.name});
+            let timelineItem = this.getTimelineItem(suBlueprint);
+            if (this.state.stationView) {
+                this.getStationItemGroups(suBlueprint, timelineItem, group, items);
+            }   else {
+                items.push(timelineItem);
+                if (!_.find(group, {'id': suBlueprint.suDraft.id})) {
+                    group.push({'id': suBlueprint.suDraft.id, title: suBlueprint.suDraft.name});
+                }
             }
         }
         if (this.timeline) {
-            this.timeline.updateTimeline({group: group, items: items});
+            this.timeline.updateTimeline({group: _.sortBy(group,"id"), items: items});
         }
     }
 
@@ -234,7 +298,7 @@ export class TimelineView extends Component {
         const canShrinkSUList = this.state.canShrinkSUList;
         let suBlueprint = null;
         if (isSUDetsVisible) {
-            suBlueprint = _.find(this.state.suBlueprints, {id: this.state.selectedItem.id});
+            suBlueprint = _.find(this.state.suBlueprints, {id:  this.state.stationView?parseInt(this.state.selectedItem.id.split('-')[0]):this.state.selectedItem.id});
         }
         return (
             <React.Fragment>
@@ -274,12 +338,17 @@ export class TimelineView extends Component {
                                         <i className="pi pi-step-forward"></i>
                                     </button>
                                 </div> 
+                                <div className="timeline-view-toolbar">
+                                    <label>Station View</label>
+                                    <InputSwitch checked={this.state.stationView} onChange={(e) => {this.closeSUDets();this.setState({stationView: e.value})}} />
+                                </div>
                                 <Timeline ref={(tl)=>{this.timeline=tl}} 
                                         group={this.state.group} 
                                         items={this.state.items}
                                         currentUTC={this.state.currentUTC}
                                         rowHeight={30} itemClickCallback={this.onItemClick}
-                                        dateRangeCallback={this.dateRangeCallback}></Timeline>
+                                        dateRangeCallback={this.dateRangeCallback}
+                                        className="timeline-toolbar-margin-top-0"></Timeline>
                             </div>
                             {/* Details Panel */}
                             {this.state.isSUDetsVisible &&
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js
index a8c7b525703..4e11e19eaca 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js
@@ -85,7 +85,7 @@ export class WeekTimelineView extends Component {
                         const suBlueprint = _.find(suBlueprints, {'id': suBlueprintId});
                         suBlueprint['actionpath'] = `/schedulingunit/view/blueprint/${suBlueprintId}`;
                         suBlueprint.suDraft = suDraft;
-                        suBlueprint.project = project;
+                        suBlueprint.project = project.name;
                         suBlueprint.suSet = suSet;
                         suBlueprint.durationInSec = suBlueprint.duration;
                         suBlueprint.duration = UnitConverter.getSecsToHHmmss(suBlueprint.duration);
@@ -144,8 +144,8 @@ export class WeekTimelineView extends Component {
             name: suBlueprint.suDraft.name,
             band: antennaSet,
             duration: suBlueprint.durationInSec?`${(suBlueprint.durationInSec/3600).toFixed(2)}Hrs`:"",
-            start_time: moment.utc(`${displayDate.format('MM-DD-YYYY')} ${suBlueprint.start_time.split('T')[1]}`),
-            end_time: moment.utc(`${displayDate.format('MM-DD-YYYY')} ${suBlueprint.stop_time.split('T')[1]}`),
+            start_time: moment.utc(`${displayDate.format('YYYY-MM-DD')} ${suBlueprint.start_time.split('T')[1]}`),
+            end_time: moment.utc(`${displayDate.format('YYYY-MM-DD')} ${suBlueprint.stop_time.split('T')[1]}`),
             bgColor: suBlueprint.status? STATUS_COLORS[suBlueprint.status.toUpperCase()]:"#2196f3",
             selectedBgColor: suBlueprint.status? STATUS_COLORS[suBlueprint.status.toUpperCase()]:"#2196f3"}; 
         return item;
@@ -313,7 +313,7 @@ export class WeekTimelineView extends Component {
                                     data={this.state.suBlueprintList} 
                                     defaultcolumns={[{name: "Name",
                                                         start_time:"Start Time", stop_time:"End Time"}]}
-                                    optionalcolumns={[{description: "Description", duration:"Duration (HH:mm:ss)", actionpath: "actionpath"}]}
+                                    optionalcolumns={[{project:"Project",description: "Description", duration:"Duration (HH:mm:ss)",actionpath: "actionpath"}]}
                                     columnclassname={[{"Name":"filter-input-100", "Start Time":"filter-input-50", "End Time":"filter-input-50",
                                                         "Duration (HH:mm:ss)" : "filter-input-50",}]}
                                     defaultSortColumn= {[{id: "Start Time", desc: false}]}
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js
index 15cac898691..6c73fd23652 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js
@@ -53,24 +53,31 @@ const ScheduleService = {
             return null;
         }
     },
-    getTaskBlueprintById: async function(id, loadTemplate){
+    getTaskBlueprintById: async function(id, loadTemplate, loadSubtasks){
         let result;
         try {
             result = await axios.get('/api/task_blueprint/'+id);
             if (result.data && loadTemplate) {
                 result.data.template = await TaskService.getTaskTemplate(result.data.specifications_template_id);
             }
+            if (result.data && loadSubtasks) {
+                let subTasks = [];
+                for (const subtaskId of result.data.subtasks_ids) {
+                    subTasks.push((await TaskService.getSubtaskDetails(subtaskId)));
+                }
+                result.data.subTasks = subTasks;
+            }
         }   catch(error) {
             console.error('[schedule.services.getTaskBlueprintById]',error);
         }
         return result;
     },
-    getTaskBlueprintsBySchedulingUnit: async function(scheduleunit, loadTemplate){
+    getTaskBlueprintsBySchedulingUnit: async function(scheduleunit, loadTemplate, loadSubtasks){
+        // there is no single API to fetch the associated task_blueprints, so iterate over the task_blueprint ids and fetch them one by one
         let taskblueprintsList = [];
         if(scheduleunit.task_blueprints_ids){
             for(const id of scheduleunit.task_blueprints_ids){
-               await this.getTaskBlueprintById(id, loadTemplate).then(response =>{
+               await this.getTaskBlueprintById(id, loadTemplate, loadSubtasks).then(response =>{
                     let taskblueprint = response.data;
                     taskblueprint['tasktype'] = 'Blueprint';
                     taskblueprint['actionpath'] = '/task/view/blueprint/'+taskblueprint['id'];
@@ -360,6 +367,14 @@ const ScheduleService = {
             return [];
         }
     },
+    getProjectList: async function() {
+        try {
+            const response = await axios.get('/api/project/');
+            return response.data.results;
+        } catch (error) {
+            console.error('[schedule.services.getProjectList]', error);
+            return [];
+        }
+    }
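+    // Illustrative usage sketch (assuming a scheduling set object 'suSet' as used in
+    // SchedulingUnitList, whose project_id holds the project name):
+    //   const projects = await ScheduleService.getProjectList();
+    //   const project = projects.find(p => p.name === suSet.project_id);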
 }
 
 export default ScheduleService;
\ No newline at end of file
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/task.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/task.service.js
index a6044b01419..34a1e75b3d0 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/services/task.service.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/services/task.service.js
@@ -169,7 +169,7 @@ const TaskService = {
         let subtaskTemplates = {};
         const taskDetails = (await axios.get(`/api/task_blueprint/${taskId}`)).data;
         for (const subtaskId of taskDetails.subtasks_ids) {
-          const subtaskDetails = (await axios.get(`/api/subtask/${subtaskId}`)).data;
+          const subtaskDetails = await this.getSubtaskDetails(subtaskId);
           const subtaskLogs = await this.getSubtaskStatusLogs(subtaskId);
           let template = subtaskTemplates[subtaskDetails.specifications_template_id];
           if (!template) {
@@ -194,7 +194,13 @@ const TaskService = {
         console.error(error);
       }
     },
-    
+    getSubtaskDetails: async function(subtaskId) {
+      try {
+        return (await axios.get(`/api/subtask/${subtaskId}`)).data;
+      } catch(error) {
+        console.error(error);
+      }
+    }
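+    // Illustrative usage sketch ('task' is an assumed task_blueprint object with a
+    // subtasks_ids array):
+    //   const subtasks = await Promise.all(
+    //       task.subtasks_ids.map(id => TaskService.getSubtaskDetails(id)));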
 }
 
 export default TaskService;
diff --git a/SAS/TMSS/services/CMakeLists.txt b/SAS/TMSS/services/CMakeLists.txt
index 5e89c9c4e37..b1cdad1bc89 100644
--- a/SAS/TMSS/services/CMakeLists.txt
+++ b/SAS/TMSS/services/CMakeLists.txt
@@ -1,3 +1,4 @@
 lofar_add_package(TMSSSubtaskSchedulingService subtask_scheduling)
 lofar_add_package(TMSSFeedbackHandlingService feedback_handling)
+lofar_add_package(TMSSPostgresListenerService tmss_postgres_listener)
 
diff --git a/SAS/TMSS/services/feedback_handling/test/t_feedback_handling_service.py b/SAS/TMSS/services/feedback_handling/test/t_feedback_handling_service.py
index b3fe3a83615..4a414858756 100755
--- a/SAS/TMSS/services/feedback_handling/test/t_feedback_handling_service.py
+++ b/SAS/TMSS/services/feedback_handling/test/t_feedback_handling_service.py
@@ -25,7 +25,6 @@ import logging
 logger = logging.getLogger(__name__)
 
 from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment
-from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import *
 from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
 
 from lofar.sas.tmss.services.feedback_handling import TMSSFeedbackListener
diff --git a/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py b/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py
index 09fb330ef2e..524a616a86f 100644
--- a/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py
+++ b/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py
@@ -36,7 +36,7 @@ logger = logging.getLogger(__name__)
 from lofar.sas.tmss.client.tmssbuslistener import *
 from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession
 
-class TMSSSubTaskSchedulingEventMessageHandler(TMSSSubTaskEventMessageHandler):
+class TMSSSubTaskSchedulingEventMessageHandler(TMSSEventMessageHandler):
     '''
     '''
     def __init__(self, tmss_client_credentials_id: str=None):
@@ -51,39 +51,34 @@ class TMSSSubTaskSchedulingEventMessageHandler(TMSSSubTaskEventMessageHandler):
         super().stop_handling()
         self.tmss_client.close()
 
-    def onSubTaskFinished(self, subtask_id: int, old_state: str, new_state:str):
-        '''onSubTaskFinished is called upon received a SubTaskFinished message, which is sent when a SubTasks changes state to "Finished".
-        :param subtask_id: the TMSS id of the SubTask
-        :param old_state: the previous state of the SubTask
-        :param new_state: the new state of the SubTask
-        '''
-        logger.info("subtask %s finished. Trying to schedule defined successor subtasks...", subtask_id)
-
-        successors = self.tmss_client.get_subtask_successors(subtask_id, state="defined")
-        successor_ids = [s['id'] for s in successors]
-
-        logger.info("subtask %s finished. trying to schedule defined successors: %s",
-                    subtask_id,
-                    ', '.join(str(id) for id in successor_ids) or 'None')
-
-        for successor in successors:
-            try:
-                suc_subtask_id = successor['id']
-                suc_subtask_state = successor['state_value']
-
-                if suc_subtask_state == "defined":
-                    logger.info("trying to schedule successor subtask %s for finished subtask %s", suc_subtask_id, subtask_id)
-                    scheduled_successor = self.tmss_client.schedule_subtask(suc_subtask_id)
-                    suc_subtask_state = scheduled_successor['state_value']
-                    logger.info("successor subtask %s for finished subtask %s now has state '%s', see %s", suc_subtask_id, subtask_id, suc_subtask_state, scheduled_successor['url'])
-                else:
-                    logger.warning("skipping scheduling of successor subtask %s for finished subtask %s because its state is '%s'", suc_subtask_id, subtask_id, suc_subtask_state)
-
-            except Exception as e:
-                logger.error(e)
+    def onSubTaskStatusChanged(self, id: int, status: str):
+        super().onSubTaskStatusChanged(id, status)
+
+        if status == "finished":
+            successors = self.tmss_client.get_subtask_successors(id, state="defined")
+            successor_ids = sorted([s['id'] for s in successors])
+
+            logger.info("subtask %s finished. trying to schedule defined successors: %s",
+                        id, ', '.join(str(id) for id in successor_ids) or 'None')
+
+            for successor in successors:
+                try:
+                    suc_subtask_id = successor['id']
+                    suc_subtask_state = successor['state_value']
+
+                    if suc_subtask_state == "defined":
+                        logger.info("trying to schedule successor subtask %s for finished subtask %s", suc_subtask_id, id)
+                        scheduled_successor = self.tmss_client.schedule_subtask(suc_subtask_id)
+                        suc_subtask_state = scheduled_successor['state_value']
+                        logger.info("successor subtask %s for finished subtask %s now has state '%s', see %s", suc_subtask_id, id, suc_subtask_state, scheduled_successor['url'])
+                    else:
+                        logger.warning("skipping scheduling of successor subtask %s for finished subtask %s because its state is '%s'", suc_subtask_id, id, suc_subtask_state)
+
+                except Exception as e:
+                    logger.error(e)
 
 def create_service(exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER, tmss_client_credentials_id: str=None):
-    return TMSSSubTaskBusListener(handler_type=TMSSSubTaskSchedulingEventMessageHandler,
+    return TMSSBusListener(handler_type=TMSSSubTaskSchedulingEventMessageHandler,
                                   handler_kwargs={'tmss_client_credentials_id': tmss_client_credentials_id},
                                   exchange=exchange,
                                   broker=broker)
diff --git a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py b/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py
index 1027b12e91d..84d85d87901 100755
--- a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py
+++ b/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py
@@ -22,9 +22,9 @@ import uuid
 
 import logging
 logger = logging.getLogger(__name__)
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
 
 from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment
-from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import *
 from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
 
 from lofar.messaging.messagebus import TemporaryExchange, BusListenerJanitor
@@ -49,7 +49,7 @@ class TestSubtaskSchedulingService(unittest.TestCase):
         cls.tmss_test_env.start()
 
         cls.test_data_creator = TMSSRESTTestDataCreator(cls.tmss_test_env.django_server.url,
-                                                        (tmss_test_env.ldap_server.dbcreds.user, tmss_test_env.ldap_server.dbcreds.password))
+                                                        (cls.tmss_test_env.ldap_server.dbcreds.user, cls.tmss_test_env.ldap_server.dbcreds.password))
 
     @classmethod
     def tearDownClass(cls) -> None:
@@ -109,8 +109,6 @@ class TestSubtaskSchedulingService(unittest.TestCase):
                 # subtask2 should now be scheduled
                 self.assertEqual(subtask2['state_value'], 'scheduled')
 
-logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
-
 if __name__ == '__main__':
     #run the unit tests
     unittest.main()
diff --git a/SAS/TMSS/services/tmss_postgres_listener/CMakeLists.txt b/SAS/TMSS/services/tmss_postgres_listener/CMakeLists.txt
new file mode 100644
index 00000000000..c500272b175
--- /dev/null
+++ b/SAS/TMSS/services/tmss_postgres_listener/CMakeLists.txt
@@ -0,0 +1,8 @@
+lofar_package(TMSSPostgresListenerService 0.1 DEPENDS TMSSClient PyCommon PyMessaging)
+
+lofar_find_package(PythonInterp 3.4 REQUIRED)
+
+add_subdirectory(lib)
+add_subdirectory(bin)
+add_subdirectory(test)
+
diff --git a/SAS/TMSS/services/tmss_postgres_listener/bin/CMakeLists.txt b/SAS/TMSS/services/tmss_postgres_listener/bin/CMakeLists.txt
new file mode 100644
index 00000000000..5bd20e76ba6
--- /dev/null
+++ b/SAS/TMSS/services/tmss_postgres_listener/bin/CMakeLists.txt
@@ -0,0 +1,4 @@
+lofar_add_bin_scripts(tmss_postgres_listener_service)
+
+# supervisord config files
+lofar_add_sysconf_files(tmss_postgres_listener_service.ini DESTINATION supervisord.d)
diff --git a/SAS/TMSS/services/tmss_postgres_listener/bin/tmss_postgres_listener_service b/SAS/TMSS/services/tmss_postgres_listener/bin/tmss_postgres_listener_service
new file mode 100644
index 00000000000..6f4ba61d805
--- /dev/null
+++ b/SAS/TMSS/services/tmss_postgres_listener/bin/tmss_postgres_listener_service
@@ -0,0 +1,24 @@
+#!/usr/bin/python3
+
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+
+from lofar.sas.tmss.services.tmss_postgres_listener import main
+
+if __name__ == "__main__":
+    main()
diff --git a/SAS/TMSS/services/tmss_postgres_listener/bin/tmss_postgres_listener_service.ini b/SAS/TMSS/services/tmss_postgres_listener/bin/tmss_postgres_listener_service.ini
new file mode 100644
index 00000000000..3564a30c1f8
--- /dev/null
+++ b/SAS/TMSS/services/tmss_postgres_listener/bin/tmss_postgres_listener_service.ini
@@ -0,0 +1,9 @@
+[program:tmss_pglistener_service]
+command=/bin/bash -c 'source $LOFARROOT/lofarinit.sh;exec tmss_postgres_listener_service'
+user=lofarsys
+stopsignal=INT ; KeyboardInterrupt
+stopasgroup=true ; bash does not propagate signals
+stdout_logfile=%(program_name)s.log
+redirect_stderr=true
+stderr_logfile=NONE
+stdout_logfile_maxbytes=0
diff --git a/SAS/TMSS/services/tmss_postgres_listener/lib/CMakeLists.txt b/SAS/TMSS/services/tmss_postgres_listener/lib/CMakeLists.txt
new file mode 100644
index 00000000000..c438ae81c5d
--- /dev/null
+++ b/SAS/TMSS/services/tmss_postgres_listener/lib/CMakeLists.txt
@@ -0,0 +1,10 @@
+lofar_find_package(PythonInterp 3.4 REQUIRED)
+include(PythonInstall)
+
+set(_py_files
+    tmss_postgres_listener.py
+    )
+
+python_install(${_py_files}
+    DESTINATION lofar/sas/tmss/services)
+
diff --git a/SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py b/SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py
new file mode 100644
index 00000000000..3cf20c24ec7
--- /dev/null
+++ b/SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py
@@ -0,0 +1,237 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+import logging
+logger = logging.getLogger(__name__)
+
+from optparse import OptionParser, OptionGroup
+import json
+
+from lofar.common.postgres import PostgresListener, makePostgresNotificationQueries
+from lofar.messaging.messagebus import ToBus
+from lofar.sas.tmss.client.tmssbuslistener import *
+from lofar.common import dbcredentials
+from lofar.common.util import single_line_with_single_spaces
+
+class TMSSPGListener(PostgresListener):
+    '''This class subscribes to the Subtask, TaskDraft/Blueprint & SchedulingUnitDraft/Blueprint tables in the TMSS database
+    and sends an EventMessage upon each table row action (*.Created, *.Updated, *.Deleted), and upon each subtask status update.
+    See lofar.sas.tmss.client.tmssbuslistener.TMSSBusListener for the receiving BusListener.'''
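+    #
+    # A minimal sketch of the receiving side (assuming the TMSSEventMessageHandler handler API
+    # from lofar.sas.tmss.client.tmssbuslistener and waitForInterrupt from lofar.common.util;
+    # see those modules for the authoritative definitions):
+    #
+    #   class MyHandler(TMSSEventMessageHandler):
+    #       def onSubTaskStatusChanged(self, id: int, status: str):
+    #           logger.info("subtask id=%s now has status '%s'", id, status)
+    #
+    #   with TMSSBusListener(handler_type=MyHandler, exchange=DEFAULT_BUSNAME):
+    #       waitForInterrupt()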
+    def __init__(self,
+                 dbcreds,
+                 exchange=DEFAULT_BUSNAME,
+                 broker=DEFAULT_BROKER):
+        super().__init__(dbcreds=dbcreds)
+        self.event_bus = ToBus(exchange=exchange, broker=broker)
+
+    def start(self):
+        logger.info("Starting to listen for TMSS database changes and publishing EventMessages on %s  db: %s", self.event_bus.exchange, self._dbcreds.stringWithHiddenPassword())
+        self.event_bus.open()
+
+        # SubTask
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_subtask', 'insert'))
+        self.subscribe('tmssapp_subtask_insert', self.onSubTaskInserted)
+
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_subtask', 'update'))
+        self.subscribe('tmssapp_subtask_update', self.onSubTaskUpdated)
+
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_subtask', 'delete'))
+        self.subscribe('tmssapp_subtask_delete', self.onSubTaskDeleted)
+
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_subtask', 'update', 'state_id'))
+        self.subscribe('tmssapp_subtask_update_column_state_id', self.onSubTaskStateUpdated)
+
+
+        # TaskBlueprint
+        # please note that the status property in the TaskBlueprint model is derived from its subtasks,
+        # hence we cannot create a postgres notification for this "column".
+        # Instead, onSubTaskStateUpdated also sends a TaskBlueprint status changed event for the subtask's parent task.
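+        # For example: when a subtask (hypothetical id=1) of a task blueprint (id=2) in a scheduling
+        # unit blueprint (id=3) gets status 'scheduled', onSubTaskStateUpdated publishes:
+        #   <TMSS_SUBTASK_STATUS_EVENT_PREFIX>.Scheduled                  {'id': 1, 'status': 'scheduled'}
+        #   <TMSS_TASKBLUEPRINT_OBJECT_EVENT_PREFIX>.Updated              {'id': 2}
+        #   <TMSS_TASKBLUEPRINT_STATUS_EVENT_PREFIX>.Scheduled            {'id': 2, 'status': 'scheduled'}
+        #   <TMSS_SCHEDULINGUNITBLUEPRINT_OBJECT_EVENT_PREFIX>.Updated    {'id': 3}
+        #   <TMSS_SCHEDULINGUNITBLUEPRINT_STATUS_EVENT_PREFIX>.Scheduled  {'id': 3, 'status': 'scheduled'}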
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_taskblueprint', 'insert'))
+        self.subscribe('tmssapp_taskblueprint_insert', self.onTaskBlueprintInserted)
+
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_taskblueprint', 'update'))
+        self.subscribe('tmssapp_taskblueprint_update', self.onTaskBlueprintUpdated)
+
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_taskblueprint', 'delete'))
+        self.subscribe('tmssapp_taskblueprint_delete', self.onTaskBlueprintDeleted)
+
+
+        # TaskDraft
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_taskdraft', 'insert'))
+        self.subscribe('tmssapp_taskdraft_insert', self.onTaskDraftInserted)
+
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_taskdraft', 'update'))
+        self.subscribe('tmssapp_taskdraft_update', self.onTaskDraftUpdated)
+
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_taskdraft', 'delete'))
+        self.subscribe('tmssapp_taskdraft_delete', self.onTaskDraftDeleted)
+
+
+        # SchedulingUnitBlueprint
+        # please note that the status property in the SchedulingUnitBlueprint model is derived from its tasks,
+        # whose statuses are in turn derived from their subtasks, hence we cannot create a postgres notification for this "column".
+        # Instead, onSubTaskStateUpdated also sends a SchedulingUnitBlueprint status changed event for the subtask's parent scheduling unit (see the example above).
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_schedulingunitblueprint', 'insert'))
+        self.subscribe('tmssapp_schedulingunitblueprint_insert', self.onSchedulingUnitBlueprintInserted)
+
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_schedulingunitblueprint', 'update'))
+        self.subscribe('tmssapp_schedulingunitblueprint_update', self.onSchedulingUnitBlueprintUpdated)
+
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_schedulingunitblueprint', 'delete'))
+        self.subscribe('tmssapp_schedulingunitblueprint_delete', self.onSchedulingUnitBlueprintDeleted)
+
+
+        # SchedulingUnitDraft
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_schedulingunitdraft', 'insert'))
+        self.subscribe('tmssapp_schedulingunitdraft_insert', self.onSchedulingUnitDraftInserted)
+
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_schedulingunitdraft', 'update'))
+        self.subscribe('tmssapp_schedulingunitdraft_update', self.onSchedulingUnitDraftUpdated)
+
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_schedulingunitdraft', 'delete'))
+        self.subscribe('tmssapp_schedulingunitdraft_delete', self.onSchedulingUnitDraftDeleted)
+
+        return super().start()
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        super().stop()
+        self.event_bus.close()
+        logger.info("Stopped listening for TMSS database changes and publishing EventMessages on %s broker=%s db: %s",
+                    self.event_bus.exchange, self.event_bus.broker, self._dbcreds.stringWithHiddenPassword())
+
+    def _sendNotification(self, subject, contentDict):
+        try:
+            if isinstance(contentDict, str):
+                contentDict = json.loads(contentDict)
+
+            msg = EventMessage(subject=subject, content=contentDict)
+            logger.info('Sending %s to %s: %s',
+                        subject, self.event_bus.exchange, single_line_with_single_spaces(contentDict))
+            self.event_bus.send(msg)
+        except Exception as e:
+            logger.error("Could not send %s notification to exchange %s: %s", subject, self.event_bus.exchange, e)
+
+    def onSubTaskInserted(self, payload = None):
+        self._sendNotification(TMSS_SUBTASK_OBJECT_EVENT_PREFIX+'.Created', payload)
+
+    def onSubTaskUpdated(self, payload = None):
+        self._sendNotification(TMSS_SUBTASK_OBJECT_EVENT_PREFIX+'.Updated', payload)
+
+    def onSubTaskDeleted(self, payload = None):
+        self._sendNotification(TMSS_SUBTASK_OBJECT_EVENT_PREFIX+'.Deleted', payload)
+
+    def onSubTaskStateUpdated(self, payload = None):
+        payload_dict = json.loads(payload)
+        # send notification for this subtask...
+        from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Subtask
+        subtask = Subtask.objects.get(id=payload_dict['id'])
+        self._sendNotification(TMSS_SUBTASK_STATUS_EVENT_PREFIX+'.'+subtask.state.value.capitalize(),
+                               {'id': subtask.id, 'status': subtask.state.value})
+
+        # ... and also send status change and object update events for the parent task, and schedulingunit,
+        # because their status is implicitly derived from their subtask(s)
+        # send both object.updated and status change events
+        self.onTaskBlueprintUpdated( {'id': subtask.task_blueprint.id})
+        self._sendNotification(TMSS_TASKBLUEPRINT_STATUS_EVENT_PREFIX+'.'+subtask.task_blueprint.status.capitalize(),
+                               {'id': subtask.task_blueprint.id, 'status': subtask.task_blueprint.status})
+
+        self.onSchedulingUnitBlueprintUpdated( {'id': subtask.task_blueprint.scheduling_unit_blueprint.id})
+        self._sendNotification(TMSS_SCHEDULINGUNITBLUEPRINT_STATUS_EVENT_PREFIX+'.'+subtask.task_blueprint.scheduling_unit_blueprint.status.capitalize(),
+                               {'id': subtask.task_blueprint.scheduling_unit_blueprint.id, 'status': subtask.task_blueprint.scheduling_unit_blueprint.status})
+
+    def onTaskBlueprintInserted(self, payload = None):
+        self._sendNotification(TMSS_TASKBLUEPRINT_OBJECT_EVENT_PREFIX+'.Created', payload)
+
+    def onTaskBlueprintUpdated(self, payload = None):
+        self._sendNotification(TMSS_TASKBLUEPRINT_OBJECT_EVENT_PREFIX+'.Updated', payload)
+
+    def onTaskBlueprintDeleted(self, payload = None):
+        self._sendNotification(TMSS_TASKBLUEPRINT_OBJECT_EVENT_PREFIX+'.Deleted', payload)
+
+    def onTaskDraftInserted(self, payload = None):
+        self._sendNotification(TMSS_TASKDRAFT_OBJECT_EVENT_PREFIX+'.Created', payload)
+
+    def onTaskDraftUpdated(self, payload = None):
+        self._sendNotification(TMSS_TASKDRAFT_OBJECT_EVENT_PREFIX+'.Updated', payload)
+
+    def onTaskDraftDeleted(self, payload = None):
+        self._sendNotification(TMSS_TASKDRAFT_OBJECT_EVENT_PREFIX+'.Deleted', payload)
+
+    def onSchedulingUnitBlueprintInserted(self, payload = None):
+        self._sendNotification(TMSS_SCHEDULINGUNITBLUEPRINT_OBJECT_EVENT_PREFIX+'.Created', payload)
+
+    def onSchedulingUnitBlueprintUpdated(self, payload = None):
+        self._sendNotification(TMSS_SCHEDULINGUNITBLUEPRINT_OBJECT_EVENT_PREFIX+'.Updated', payload)
+
+    def onSchedulingUnitBlueprintDeleted(self, payload = None):
+        self._sendNotification(TMSS_SCHEDULINGUNITBLUEPRINT_OBJECT_EVENT_PREFIX+'.Deleted', payload)
+
+    def onSchedulingUnitDraftInserted(self, payload = None):
+        self._sendNotification(TMSS_SCHEDULINGUNITDRAFT_OBJECT_EVENT_PREFIX+'.Created', payload)
+
+    def onSchedulingUnitDraftUpdated(self, payload = None):
+        self._sendNotification(TMSS_SCHEDULINGUNITDRAFT_OBJECT_EVENT_PREFIX+'.Updated', payload)
+
+    def onSchedulingUnitDraftDeleted(self, payload = None):
+        self._sendNotification(TMSS_SCHEDULINGUNITDRAFT_OBJECT_EVENT_PREFIX+'.Deleted', payload)
+
+
+def create_service(dbcreds, exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER):
+    '''create a TMSSPGListener instance'''
+    return TMSSPGListener(dbcreds=dbcreds, exchange=exchange, broker=broker)
+
+
+def main():
+    # make sure we run in UTC timezone
+    import os
+    os.environ['TZ'] = 'UTC'
+
+    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+
+    # Check the invocation arguments
+    parser = OptionParser("%prog [options]",
+                          description='run the TMSS postgres database listener, which listens to changes on some tables in the TMSS database and publishes those changes as EventMessages on the messagebus.')
+
+    group = OptionGroup(parser, 'Messaging options')
+    group.add_option('-b', '--broker', dest='broker', type='string', default=DEFAULT_BROKER,
+                     help='Address of the message broker, default: %default')
+    group.add_option('-e', "--exchange", dest="exchange", type="string", default=DEFAULT_BUSNAME,
+                     help="Bus or queue where the TMSS messages are published. [default: %default]")
+    parser.add_option_group(group)
+
+    parser.add_option_group(dbcredentials.options_group(parser))
+    parser.set_defaults(dbcredentials=os.environ.get('TMSS_CLIENT_DBCREDENTIALS', 'TMSS'))
+    (options, args) = parser.parse_args()
+
+    dbcreds = dbcredentials.parse_options(options)
+    logger.info("Using dbcreds: %s" % dbcreds.stringWithHiddenPassword())
+
+    # setup django
+    os.environ["TMSS_DBCREDENTIALS"] = options.dbcredentials
+    os.environ["DJANGO_SETTINGS_MODULE"] = "lofar.sas.tmss.tmss.settings"
+    import django
+    django.setup()
+
+    with create_service(dbcreds=dbcreds,
+                        exchange=options.exchange,
+                        broker=options.broker) as listener:
+        listener.waitWhileListening()
+
+if __name__ == '__main__':
+    main()
diff --git a/SAS/TMSS/services/tmss_postgres_listener/test/CMakeLists.txt b/SAS/TMSS/services/tmss_postgres_listener/test/CMakeLists.txt
new file mode 100644
index 00000000000..36dfcc36e0a
--- /dev/null
+++ b/SAS/TMSS/services/tmss_postgres_listener/test/CMakeLists.txt
@@ -0,0 +1,7 @@
+# $Id: CMakeLists.txt 32679 2015-10-26 09:31:56Z schaap $
+
+if(BUILD_TESTING)
+    include(LofarCTest)
+
+    lofar_add_test(t_tmss_postgres_listener_service)
+endif()
diff --git a/SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.py b/SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.py
new file mode 100644
index 00000000000..b0b847668bf
--- /dev/null
+++ b/SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+import unittest
+import uuid
+
+import logging
+logger = logging.getLogger(__name__)
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+
+from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment
+from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
+
+from lofar.messaging.messagebus import TemporaryExchange
+from lofar.sas.tmss.services.tmss_postgres_listener import *
+from lofar.common.test_utils import integration_test
+from threading import Lock
+from time import sleep
+import requests
+import json
+from collections import deque
+from datetime import datetime, timedelta
+
+@integration_test
+class TestTMSSPGListenerService(unittest.TestCase):
+    '''
+    Tests for the TMSSPGListener service
+    '''
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.TEST_UUID = uuid.uuid1()
+
+        cls.tmp_exchange = TemporaryExchange("%s_%s" % (cls.__name__, cls.TEST_UUID))
+        cls.tmp_exchange.open()
+
+        cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address, start_postgres_listener=False, populate_schemas=False, populate_test_data=False)
+        cls.tmss_test_env.start()
+
+        cls.test_data_creator = TMSSRESTTestDataCreator(cls.tmss_test_env.django_server.url,
+                                                        (cls.tmss_test_env.ldap_server.dbcreds.user,
+                                                         cls.tmss_test_env.ldap_server.dbcreds.password))
+
+    @classmethod
+    def tearDownClass(cls) -> None:
+        cls.tmss_test_env.stop()
+        cls.tmp_exchange.close()
+
+    def test_01_for_expected_behaviour(self):
+        '''
+        This test starts a TMSSPGListener service and TMSS, creates/updates/deletes subtasks/tasks/schedulingunits, and checks if the correct events are sent.
+        '''
+        logger.info(' -- test_01_for_expected_behaviour -- ')
+
+        class TestTMSSPGListener(TMSSPGListener):
+            '''Helper TMSSPGListener for this test, recording the sent subjects and contents, with a Lock for synchronized access'''
+            def __init__(self, dbcreds, exchange=self.tmp_exchange.address):  # the default is evaluated here, inside the test method, so 'self' is the enclosing TestCase
+                super().__init__(dbcreds, exchange)
+                self.subjects = deque()
+                self.contentDicts = deque()
+                self.lock = Lock()
+
+            def _sendNotification(self, subject, contentDict):
+                # instead of sending a notification to the messagebus, record the subject and content in queues
+                # so we can check in the test if the correct subjects are recorded
+                with self.lock:
+                    logger.info("detected db change: %s %s", subject, single_line_with_single_spaces(contentDict))
+                    self.subjects.append(subject)
+                    self.contentDicts.append(json.loads(contentDict) if isinstance(contentDict, str) else contentDict)
+
+        # create and start the service (the object under test)
+        with TestTMSSPGListener(exchange=self.tmp_exchange.address, dbcreds=self.tmss_test_env.database.dbcreds) as service:
+            # create a SchedulingUnitDraft
+            su_draft = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.SchedulingUnitDraft(), '/scheduling_unit_draft/')
+
+            # sync and check
+            with service.lock:
+                self.assertEqual(TMSS_SCHEDULINGUNITDRAFT_OBJECT_EVENT_PREFIX+'.Created', service.subjects.popleft())
+                self.assertEqual({"id": su_draft['id']}, service.contentDicts.popleft())
+
+
+            # create a TaskDraft
+            task_draft = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.TaskDraft(scheduling_unit_draft_url=su_draft['url']), '/task_draft/')
+
+            # sync and check
+            with service.lock:
+                self.assertEqual(TMSS_TASKDRAFT_OBJECT_EVENT_PREFIX+'.Created', service.subjects.popleft())
+                self.assertEqual({"id": task_draft['id']}, service.contentDicts.popleft())
+
+
+            # create a SchedulingUnitBlueprint
+            su_blueprint = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.SchedulingUnitBlueprint(scheduling_unit_draft_url=su_draft['url']), '/scheduling_unit_blueprint/')
+
+            # sync and check
+            with service.lock:
+                self.assertEqual(TMSS_SCHEDULINGUNITBLUEPRINT_OBJECT_EVENT_PREFIX+'.Created', service.subjects.popleft())
+                self.assertEqual({"id": su_blueprint['id']}, service.contentDicts.popleft())
+
+
+            # create a TaskBlueprint
+            task_blueprint = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.TaskBlueprint(scheduling_unit_blueprint_url=su_blueprint['url'],
+                                                                                                                                   draft_url=task_draft['url']), '/task_blueprint/')
+
+            # sync and check
+            with service.lock:
+                self.assertEqual(TMSS_TASKBLUEPRINT_OBJECT_EVENT_PREFIX+'.Created', service.subjects.popleft())
+                self.assertEqual({"id": task_blueprint['id']}, service.contentDicts.popleft())
+
+
+            # create a SubTask
+            subtask = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.Subtask(task_blueprint_url=task_blueprint['url']), '/subtask/')
+
+            # sync and check
+            with service.lock:
+                self.assertEqual(TMSS_SUBTASK_OBJECT_EVENT_PREFIX+'.Created', service.subjects.popleft())
+                self.assertEqual({"id": subtask['id']}, service.contentDicts.popleft())
+
+            # update subtask status, use a nice tmss_client and the rest api.
+            with self.tmss_test_env.create_tmss_client() as client:
+                client.set_subtask_status(subtask['id'], 'scheduled')
+
+                # poll until all expected events arrived: one object-updated and one status-changed event
+                # for each of the three object types (subtask, taskblueprint, schedulingunitblueprint) => 6 events in total.
+                start_wait = datetime.utcnow()
+                while True:
+                    with service.lock:
+                        if len(service.subjects) == 6:
+                            break
+                    if datetime.utcnow() - start_wait > timedelta(seconds=5):
+                        raise TimeoutError("timeout while waiting for status/object updates")
+                    sleep(0.1)  # don't busy-spin while polling
+
+            # sync and check
+            with service.lock:
+                self.assertEqual(TMSS_SUBTASK_OBJECT_EVENT_PREFIX + '.Updated', service.subjects.popleft())
+                self.assertEqual({'id': subtask['id']}, service.contentDicts.popleft())
+
+                self.assertEqual(TMSS_SUBTASK_STATUS_EVENT_PREFIX+'.Scheduled', service.subjects.popleft())
+                self.assertEqual({'id': subtask['id'], 'status': 'scheduled'}, service.contentDicts.popleft())
+
+                self.assertEqual(TMSS_TASKBLUEPRINT_OBJECT_EVENT_PREFIX+'.Updated', service.subjects.popleft())
+                self.assertEqual({'id': task_blueprint['id']}, service.contentDicts.popleft())
+
+                self.assertEqual(TMSS_TASKBLUEPRINT_STATUS_EVENT_PREFIX+'.Scheduled', service.subjects.popleft())
+                self.assertEqual({'id': task_blueprint['id'], 'status': 'scheduled'}, service.contentDicts.popleft())
+
+                self.assertEqual(TMSS_SCHEDULINGUNITBLUEPRINT_OBJECT_EVENT_PREFIX+'.Updated', service.subjects.popleft())
+                self.assertEqual({'id': su_blueprint['id']}, service.contentDicts.popleft())
+
+                self.assertEqual(TMSS_SCHEDULINGUNITBLUEPRINT_STATUS_EVENT_PREFIX+'.Scheduled', service.subjects.popleft())
+                self.assertEqual({'id': su_blueprint['id'], 'status': 'scheduled'}, service.contentDicts.popleft())
+
+            # delete subtask, use direct http delete request on rest api
+            requests.delete(subtask['url'], auth=self.test_data_creator.auth)
+
+            # sync and check subtask deleted
+            with service.lock:
+                self.assertEqual(TMSS_SUBTASK_OBJECT_EVENT_PREFIX+'.Deleted', service.subjects.popleft())
+                self.assertEqual({'id': subtask['id']}, service.contentDicts.popleft())
+
+
+if __name__ == '__main__':
+    #run the unit tests
+    unittest.main()
diff --git a/SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.run b/SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.run
new file mode 100644
index 00000000000..b3b8a825859
--- /dev/null
+++ b/SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.run
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+# Run the unit test
+source python-coverage.sh
+python_coverage_test "*tmss*" t_tmss_postgres_listener_service.py
+
diff --git a/SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.sh b/SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.sh
new file mode 100644
index 00000000000..600f72e660b
--- /dev/null
+++ b/SAS/TMSS/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+./runctest.sh t_tmss_postgres_listener_service
\ No newline at end of file
diff --git a/SAS/TMSS/src/migrate_momdb_to_tmss.py b/SAS/TMSS/src/migrate_momdb_to_tmss.py
index 07fa5d2ffcc..13efa43bbc7 100755
--- a/SAS/TMSS/src/migrate_momdb_to_tmss.py
+++ b/SAS/TMSS/src/migrate_momdb_to_tmss.py
@@ -1,6 +1,4 @@
 #!/usr/bin/env python3
-from tmss.wsgi import application  # required to set up django, even though not explicitly used
-from tmss.tmssapp import models
 
 from lofar.common import dbcredentials
 
@@ -8,8 +6,24 @@ import logging
 import datetime
 import pymysql
 from optparse import OptionParser
+import os
+import django
+import sys
+import re
 
 logger = logging.getLogger(__file__)
+handler = logging.StreamHandler(sys.stdout)
+handler.setLevel(logging.DEBUG)
+formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+handler.setFormatter(formatter)
+logger.addHandler(handler)
+logger.setLevel(logging.INFO)  # without an explicit level the logger inherits WARNING from the root, silencing the INFO messages below
+
+
+# 'mom2id' -> 'tmss object' mapping
+# (so we know what has been created already and refer to that)
+mom2id_to_tmss_representation = {}
+stats = {"projects_skipped": 0, "projects_updated": 0, "projects_created":0,
+         "subtasks_skipped": 0, "subtasks_updated": 0, "subtasks_created": 0}
 
 def _execute_query(query, data=None):
     try:
@@ -27,74 +41,77 @@ def query_project_details_from_momdb():
     Queries MoM database for project details and returns the list of results
     :return: list of details as dict
     """
-    logger.info("Querying MoM database for projects")
+    logger.info("...querying MoM database for projects")
     query = """SELECT project.priority,
                       project.allowtriggers,
                       mom2object.name,
                       mom2object.description,
-                      mom2object.mom2id
+                      mom2object.mom2id,
+                      resourcetype.name AS resourcetypename,
+                      resource.projectpath
                 FROM project
                 JOIN mom2object ON project.mom2objectid=mom2object.id
+                LEFT JOIN resource ON projectid=project.id AND resource.resourcetypeid IN (2,3,4,5,6,7,8,9,10,12)
+                LEFT JOIN resourcetype ON resource.resourcetypeid=resourcetype.id
                 ORDER BY mom2id;
                 """
 
     results = _execute_query(query)
 
-    # dummy data:
-    # results = [{"mom2id": 42,
-    #             "name": "dummyproject",
-    #             "description": "fake description",
-    #             "priority": 1234,
-    #             "allowtriggers": True}]
+    # MoM resourcetypes
+    #
+    # mysql> SELECT * FROM lofar_mom_test_lsmr.resourcetype;
+    # +----+----------------------------------+--------------------------------------------------------------------------------+----------------+
+    # | id | name                             | hosturi                                                                        | type           |
+    # +----+----------------------------------+--------------------------------------------------------------------------------+----------------+
+    # |  1 | Lofar Observing Time             | NULL                                                                           | OBSERVING_TIME |
+    # |  2 | Lofar Storage (SARA)             | srm://srm.grid.sara.nl:8443/pnfs/grid.sara.nl/data/lofar/ops/projects/         | LTA_STORAGE    |
+    # |  3 | Lofar Test Storage (SARA)        | srm://srm.grid.sara.nl:8443/pnfs/grid.sara.nl/data/lofar/ops/test/projects/    | LTA_STORAGE    |
+    # |  4 | Lofar Storage (TARGET) old       | gsiftp://lotar1.staging.lofar/target/gpfs2/lofar/home/lofarops/ops/projects/   | LTA_STORAGE    |
+    # |  5 | Lofar Storage (Jülich)           | srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/ | LTA_STORAGE    |
+    # |  6 | Lofar User Disk Storage (SARA)   | srm://srm.grid.sara.nl/pnfs/grid.sara.nl/data/lofar/user/disk/projects/        | LTA_STORAGE    |
+    # |  7 | Lofar Tape Storage (Target)      | srm://srm.target.rug.nl:8444/lofar/ops/projects/                               | LTA_STORAGE    |
+    # |  8 | Lofar Tape Test Storage (Target) | srm://srm.target.rug.nl:8444/lofar/ops/test/projects/                          | LTA_STORAGE    |
+    # |  9 | Lofar Disk Storage (Target)      | srm://srm.target.rug.nl:8444/lofar/ops/disk/projects/                          | LTA_STORAGE    |
+    # | 10 | Lofar Disk Test Storage (Target) | srm://srm.target.rug.nl:8444/lofar/ops/disk/test/projects/                     | LTA_STORAGE    |
+    # | 11 | Lofar Processing Time            | NULL                                                                           | OBSERVING_TIME |
+    # | 12 | Lofar Storage (Poznan)           | srm://lta-head.lofar.psnc.pl:8443/lofar/ops/projects/                          | LTA_STORAGE    |
+    # | 13 | Lofar Triggers                   | NULL                                                                           | LOFAR_TRIGGERS |
+    # +----+----------------------------------+--------------------------------------------------------------------------------+----------------+
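+    #
+    # note: the LEFT JOIN on resource above filters on resourcetypeid IN (2,3,4,5,6,7,8,9,10,12),
+    # i.e. exactly the LTA_STORAGE rows in this table, so resourcetypename/projectpath describe
+    # the project's LTA archive location.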
 
     return results
 
 
-def get_project_details_from_momdb():
+def query_subtask_details_for_project_from_momdb(project_mom2id):
     """
-    Obtains project details from MoM database and translates it into details as understood by the tmss data model.
-    :return: dict mom2id -> project details as dict
+    Obtains details of observations and pipelines of the given project from the MoM database.
+    Note: some of these details still need to be converted to something that is understood by the tmss data model for subtasks.
+    :param project_mom2id: the mom2id of the project
+    :return: list of details as dict
     """
-    logger.info("Getting project details from MoM database")
-    mom_results = query_project_details_from_momdb()
-    results = {}
-
-    for mom_details in mom_results:
-
-        # create new tmss details based on MoM details
-        details = {"name": mom_details['name'],
-                   "description": mom_details['description'],
-                   "tags": ["migrated_from_MoM"],
-                   "priority": mom_details['priority'],
-                   "can_trigger": mom_details['allowtriggers'],
-                   "private_data": True  # todo: check project.releasedate and compare to now or how to determine???
-                   }
-
-        # alterations to comply with constraints:
-        if details['description'] is None:
-            details['description'] = ''
-
-        # add to return dict
-        results[mom_details['mom2id']] = details
-
-    return results
-
-
-def query_subtask_details_for_project_from_momdb(project_mom2id):
-    logger.info("Querying MoM database for tasks of project %s" % project_mom2id)
+    logger.info("...querying MoM database for subtasks of project %s" % project_mom2id)
     # todo: double-check the correct use of ids. What refers to a mom2id and what refers to a database entry pk does not seem systematic and is very confusing.
+    # todo: clarify: Measurements correspond to subtask and Observations correspond to task level?
+    #  We have lofar_observation and lofar_pipeline tables, but lofar_observation seems to refer to a 'task level mom2object'
+    #  with measurement and pipeline children, where the latter are further described on the lofar_pipeline table.
+    #  So I'm assuming that type '%%MEASUREMENT%%' here correspond to observation subtasks, which also means that some
+    #  info is apparently squashed together with that of other subtasks of the same task in the lofar_observation entry
+    #  of the parent/'task'.
+    # todo: clarify: Is there info on template/start/stop on Measurement level somewhere? Using the parent task/observation now.
     query = '''SELECT mom2object.mom2id, mom2object.name, mom2object.description, mom2object.mom2objecttype, status.code,
-                      lofar_pipeline.template, lofar_observation.default_template, lofar_pipeline.starttime, lofar_pipeline.endtime,
-                      lofar_observation_specification.starttime AS obs_starttime, lofar_observation_specification.endtime AS obs_endtime
+                      lofar_pipeline.template AS template, lofar_observation.default_template as obs_template, lofar_pipeline.starttime, lofar_pipeline.endtime,
+                      lofar_observation_specification.starttime AS obs_starttime, lofar_observation_specification.endtime AS obs_endtime,
+                      parent_mom2object.mom2id as parent_mom2id
                       FROM mom2object
                       INNER JOIN mom2object AS ownerproject_mom2object ON mom2object.ownerprojectid = ownerproject_mom2object.id
                       INNER JOIN mom2objectstatus ON mom2object.currentstatusid = mom2objectstatus.id
                       INNER JOIN status ON mom2objectstatus.statusid = status.id
+                      LEFT JOIN mom2object AS parent_mom2object ON mom2object.parentid = parent_mom2object.id
                       LEFT JOIN lofar_pipeline ON mom2object.id = lofar_pipeline.mom2objectid
-                      LEFT JOIN lofar_observation ON mom2object.id = lofar_observation.mom2objectid
+                      LEFT JOIN lofar_observation ON mom2object.parentid = lofar_observation.mom2objectid
                       LEFT JOIN lofar_observation_specification ON lofar_observation.user_specification_id = lofar_observation_specification.id
                       WHERE ownerproject_mom2object.mom2id = %s
-                      AND (mom2object.mom2objecttype = 'LOFAR_OBSERVATION' OR mom2object.mom2objecttype LIKE '%%PIPELINE%%');
+                      AND (mom2object.mom2objecttype LIKE '%%MEASUREMENT%%' OR mom2object.mom2objecttype LIKE '%%PIPELINE%%');  
                       '''
 
     parameters = (project_mom2id,)
@@ -104,242 +121,497 @@ def query_subtask_details_for_project_from_momdb(project_mom2id):
     return results
 
 
-def _dummy_subtask_template():
-    try:
-        return models.SubtaskTemplate.objects.get(name='dummy')
-    except:
-        dummy_template_details = {"name": "dummy",
-                                  "description": 'Dummy Template',
-                                  "version": '1',
-                                  "schema": {},
-                                  "realtime": False,
-                                  "queue": False,
-                                  "tags": ["DUMMY"]}
+def query_task_details_for_subtask_from_momdb(subtask_mom2id):
+    """
+    Obtains details of the parent task (MoM observation) of the given subtask from the MoM database.
+    Note: some of these details still need to be converted to something that is understood by the tmss data model.
+    :param subtask_mom2id: the mom2id of the subtask
+    :return: list of details as dict
+    """
+    logger.info("...querying MoM database for parent task of subtask mom2id=%s" % subtask_mom2id)
+    query = '''SELECT parent_mom2object.mom2id, parent_mom2object.name, parent_mom2object.description, parent_mom2object.mom2objecttype, status.code,
+                      lofar_observation.template, lofar_observation_specification.starttime AS starttime, lofar_observation_specification.endtime AS endtime
+                      FROM mom2object
+                      INNER JOIN mom2object AS parent_mom2object ON mom2object.parentid = parent_mom2object.id
+                      INNER JOIN mom2objectstatus ON parent_mom2object.currentstatusid = mom2objectstatus.id
+                      INNER JOIN status ON mom2objectstatus.statusid = status.id
+                      LEFT JOIN lofar_observation ON parent_mom2object.id = lofar_observation.mom2objectid
+                      LEFT JOIN lofar_observation_specification ON lofar_observation.user_specification_id = lofar_observation_specification.id
+                      WHERE mom2object.mom2id = %s                      
+                      '''
+    parameters = (subtask_mom2id,)
 
-        return models.SubtaskTemplate.objects.create(**dummy_template_details)
+    results = _execute_query(query, parameters)
 
+    return results
 
-def _dummy_scheduling_set(project):
-    dummy_scheduling_set_details = {"name": 'dummy',
-                            "description": "Dummy scheduling unit set",
-                            "tags": ["DUMMY"],
-                            "generator_doc": "{}",
-                            "project": project,
-                            "generator_template": None}
+def query_related_tasks_from_momdb(task_mom2id):
+    """
+    Obtains details of tasks that share the same parent (i.e. are in the same MoM folder) as the given task from the MoM database.
+    Note: some of these details still need to be converted to something that is understood by the tmss data model.
+    :param task_mom2id: the mom2id of the task
+    :return: list of details as dict
+    """
+    logger.info("...querying MoM database for tasks related to task mom2id=%s" % task_mom2id)
+    query = '''SELECT TIMEDIFF(related_lofar_observation_specification.starttime, lofar_observation_specification.endtime) AS startdiff, 
+               TIMEDIFF(lofar_observation_specification.starttime, related_lofar_observation_specification.endtime) AS enddiff, 
+               related_mom2object.mom2id, related_mom2object.name, related_mom2object.description, related_mom2object.mom2objecttype, status.code,
+               related_lofar_observation.template, related_lofar_observation_specification.starttime AS starttime, related_lofar_observation_specification.endtime AS endtime
+               FROM mom2object
+               INNER JOIN mom2object AS related_mom2object ON mom2object.parentid = related_mom2object.parentid
+               INNER JOIN mom2objectstatus ON related_mom2object.currentstatusid = mom2objectstatus.id
+               INNER JOIN status ON mom2objectstatus.statusid = status.id
+               LEFT JOIN lofar_observation AS related_lofar_observation ON related_mom2object.id = related_lofar_observation.mom2objectid
+               LEFT JOIN lofar_observation_specification AS related_lofar_observation_specification ON related_lofar_observation.user_specification_id = related_lofar_observation_specification.id 
+               LEFT JOIN lofar_observation ON mom2object.id = lofar_observation.mom2objectid
+               LEFT JOIN lofar_observation_specification ON lofar_observation.user_specification_id = lofar_observation_specification.id
+               WHERE mom2object.mom2id = %s
+               '''
+
+    parameters = (task_mom2id,)
+
+    results = _execute_query(query, parameters)
 
+    return results
 
-    return models.SchedulingSet.objects.create(**dummy_scheduling_set_details)
 
+def get_project_details_from_momdb():
+    """
+    Obtains project details from MoM database and translates it into details as understood by the tmss data model.
+    :return: dict mom2id -> project details as dict
+    """
+    logger.info("Getting project details from MoM database")
+    mom_results = query_project_details_from_momdb()
+    results = {}
 
-def _dummy_scheduling_unit_template():
-    dummy_scheduling_unit_template_details = {"name": "dummy",
-                                  "description": 'Dummy scheduling unit template',
-                                  "version": 'v0.314159265359',
-                                  "schema": {"mykey": "my value"},
-                                  "tags": ["DUMMY"]}
+    for mom_details in mom_results:
+
+        # derive values for TMSS:
 
-    return models.RunTemplate.objects.create(**dummy_scheduling_unit_template_details)
+        #   filesystem   todo: how to deal with Target locations?
+        if mom_details['resourcetypename']:
+            try:
+                archive_location = models.Filesystem.objects.get(name=mom_details['resourcetypename'])
+            except:
+                logger.error("No Filesystem matching '%(resourcetypename)s' in tmss database! Skipping project name=%(name)s" % mom_details)
+                continue
+        else:
+            logger.warning("Missing archive info in MoM details, using None! name=%(name)s" % mom_details)
+            archive_location = None
+            mom_details['projectpath'] = ""
 
+        # create new tmss details
+        details = {"name": mom_details['name'],
+                   "description":  "" if not mom_details['description'] else mom_details['description'],
+                   "tags": ["migrated_from_MoM", "migration_incomplete"],
+                   "priority_rank": mom_details['priority'],
+                   "trigger_priority": 1000,
+                   "can_trigger": mom_details['allowtriggers'],
+                   "private_data": True,  # todo: check project.releasedate and compare to now or how to determine???
+                   "archive_subdirectory": mom_details['projectpath'],
+                   # optional:
+                   # "project_category":,
+                   # "period_category":,
+                   "archive_location": archive_location
+                   }
 
-def _dummy_scheduling_unit_draft(scheduling_set, template):
-    dummy_scheduling_unit_draft_details = {"name": 'dummy',
-                            "description": "Dummy scheduling_unit draft",
-                            "tags": ["DUMMY"],
-                            "requirements_doc": "{}",
-                            "copy_reason": models.CopyReason.objects.get(value='template'),
-                            "generator_instance_doc": "para",
-                            "copies": None,
-                            "scheduling_set": scheduling_set,
-                            "generator_source": None,
-                            "template": template}
+        # add to return dict
+        results[mom_details['mom2id']] = details
 
-    return models.RunDraft.objects.create(**dummy_scheduling_unit_draft_details)
+    return results
 
 
-def _dummy_scheduling_unit_blueprint(draft, template):
+def get_or_create_scheduling_set_for_project(project):
+    """
+    Returns the common scheduling set for all scheduling units of the given project or creates a new one if not found in TMSS.
+    """
+    try:
+        scheduling_set = models.SchedulingSet.objects.get(name=project.name)
+    except:
+        dummy_scheduling_set_details = {"name": project.name,
+                            "description": "Common scheduling set for all scheduling units in this project (created during MoM migration)",
+                            "tags": ["migrated_from_MoM", "migration_incomplete"],
+                            "generator_doc": {},
+                            "project": project,
+                            "generator_template": None}
+        scheduling_set = models.SchedulingSet.objects.create(**dummy_scheduling_set_details)
 
-    dummy_scheduling_unit_blueprint_details = {"name": 'dummy',
-                                   "description": "Dummy scheduling_unit blueprint",
-                                   "tags": ["DUMMY"],
-                                   "requirements_doc": "{}",
-                                   "do_cancel": False,
-                                   "draft": draft,
-                                   "template": template}
+    return scheduling_set
 
 
-    return  models.RunBlueprint.objects.create(**dummy_scheduling_unit_blueprint_details)
+def get_or_create_scheduling_unit_for_subtask(scheduling_set, scheduling_unit_template, task_mom2id, max_time_distance=900):
+    """
+    Returns a scheduling unit for the given task mom2id. It is either newly created or an existing scheduling unit
+    of related tasks. Tasks are considered related when they are within the same folder and one task does not start
+    more than max_time_distance seconds before or after the other task.
 
+    # todo: we have groups/topologies as well, need some guidance here...
+    # todo: where to get the specification time from?
 
-def _dummy_task_template():
+    :returns tuple(scheduling_unit_draft, scheduling_unit_blueprint)
+    """
 
-    dummy_task_template_details = {"name": "dummy",
-                                           "description": 'Dummy work request template',
-                                           "validation_code_js": "",
-                                           "version": 'v0.314159265359',
-                                           "schema": {"mykey": "my value"},
-                                           "tags": ["DUMMY"]}
+    related_task_details = query_related_tasks_from_momdb(task_mom2id)
+    if related_task_details:
+        for details in related_task_details:
+            if details["mom2id"] in mom2id_to_tmss_representation.keys():
+                related_task = mom2id_to_tmss_representation[details["mom2id"]]  # we kept a blueprint reference
+                if details['startdiff'] and details['enddiff']:
+                    time_distance = min(abs(details['startdiff'].total_seconds()), abs(details['enddiff'].total_seconds()))
+                    if time_distance < max_time_distance:
+                        blueprint = related_task.scheduling_unit_blueprint
+                        draft = blueprint.draft
+                        logger.info("...using scheduling unit draft_id=%s blueprint_id=%s from related task mom2id=%s for task mom2id=%s" % (draft.id, blueprint.id, details["mom2id"], task_mom2id))
+                        return draft, blueprint
+                    else:
+                        logger.info("...related task mom2id=%s starts too far apart (seconds=%s threshold=%s)" % (details["mom2id"], time_distance, max_time_distance))
+                        continue
+                else:
+                    logger.warning("Cannot compare times, assuming task mom2id=%s is not related to %s" % (task_mom2id, details))  # todo: Investigate... is this because sometimes user sometimes system specified?
+                    continue
+
+    scheduling_unit_draft_details = {"name": 'dummy',
+                                     "description": "Scheduling unit draft (created during MoM migration for task mom2id=%s)" % task_mom2id,
+                                     "tags": ["migrated_from_MoM", "migration_incomplete"],
+                                     "requirements_doc": {},
+                                     "scheduling_set": scheduling_set,
+                                     "requirements_template": scheduling_unit_template
+                                     # optional:
+                                     # "copy_reason": models.CopyReason.objects.get(value='template'),
+                                     # "copies": None,
+                                     # "generator_instance_doc" : {},
+                                     # "scheduling_constraints_doc": {},
+                                     # "scheduling_constraints_template": None,
+                                     # "observation_strategy_template": None,
+                                     }
+
+    draft = models.SchedulingUnitDraft.objects.create(**scheduling_unit_draft_details)
+
+    scheduling_unit_blueprint_details = {"name": 'dummy',
+                                         "description": "Scheduling unit blueprint (created during MoM migration for task mom2id=%s)" % task_mom2id,
+                                         "tags": ["migrated_from_MoM", "migration_incomplete"],
+                                         "requirements_doc": {},
+                                         "do_cancel": False,
+                                         "draft": draft,
+                                         "requirements_template": scheduling_unit_template}
+
+    blueprint = models.SchedulingUnitBlueprint.objects.create(**scheduling_unit_blueprint_details)
+    logger.info("Created new scheduling unit draft_id=%s blueprint_id=%s for task mom2id=%s" % (draft.id, blueprint.id, task_mom2id))
+    return draft, blueprint
+
+
+def get_or_create_task_for_subtask(scheduling_unit_draft, scheduling_unit_blueprint, subtask_mom2id):
+    """
+    Returns a TMSS task for the given subtask.
+    It is either newly created or an existing task of related subtasks. Subtasks are considered related when they have
+    the same parentid in MoM database.
+    :returns tuple(task_draft, task_blueprint)
+    """
 
-    return models.TaskTemplate.objects.create(**dummy_task_template_details)
+    task_details = query_task_details_for_subtask_from_momdb(subtask_mom2id)
+    if task_details:
+        for details in task_details:   # there should be exactly one, actually
+            if details["mom2id"] in mom2id_to_tmss_representation.keys():
+                blueprint = mom2id_to_tmss_representation[details["mom2id"]]
+                draft = blueprint.draft
+                logger.info("...using existing task draft_id=%s blueprint_id=%s for subtask mom2id=%s" % (draft.id, blueprint.id, subtask_mom2id))
+                return draft, blueprint
+            else:
+                try:
+                    task_template = models.TaskTemplate.objects.get(name=details['template'])
+                except:
+                    task_template = _dummy_task_template(details['template'])
+
+                task_draft_details = {"name": details["name"],
+                                            "description": "" if not details['description'] else details['description'],
+                                            "tags": ["migrated_from_MoM", "migration_incomplete"],
+                                            "specifications_doc": {},
+                                            # "copy_reason": models.CopyReason.objects.get(value='template'),
+                                            # "copies": None,
+                                            "scheduling_unit_draft": scheduling_unit_draft,
+                                            "specifications_template": task_template}
+
+                task_draft = models.TaskDraft.objects.create(**task_draft_details)
+
+                task_blueprint_details = {"name": details["name"],
+                                                "description": "" if not details['description'] else details['description'],
+                                                "tags": ["migrated_from_MoM", "migration_incomplete"],
+                                                "specifications_doc": {},
+                                                "do_cancel": False,
+                                                "draft": task_draft,
+                                                "specifications_template": task_template,
+                                                "scheduling_unit_blueprint": scheduling_unit_blueprint}
+
+                task_blueprint = models.TaskBlueprint.objects.create(**task_blueprint_details)
+
+                mom2id_to_tmss_representation[details["mom2id"]] = task_blueprint
+                logger.info("...created new task draft_id=%s blueprint_id=%s for subtask mom2id=%s" % (task_draft.id, task_blueprint.id, subtask_mom2id))
+                return task_draft, task_blueprint
+
+
+def _dummy_subtask_template(name):
+    template_name = "%s_dummy" % name
+    try:
+        return models.SubtaskTemplate.objects.get(name=template_name)
+    except:
+        dummy_template_details = {"name": template_name,
+                                  "description": "Dummy subtask template for MoM migration, when no matching template in TMSS",
+                                  "version": '1',
+                                  "schema": {"$id":"http://tmss.lofar.org/api/schemas/subtasktemplate/empty/1#",
+                                             "$schema": "http://json-schema.org/draft-06/schema#"},
+                                  "realtime": False,
+                                  "queue": False,
+                                  "tags": ["DUMMY"],
+                                  "type": models.SubtaskType.objects.get(value='other')}
 
+        return models.SubtaskTemplate.objects.create(**dummy_template_details)
 
-def _dummy_task_draft(scheduling_unit_draft, template):
 
-    dummy_task_draft_details = {"name": 'dummy',
-                                        "description": "Dummy work request draft",
-                                        "tags": ["DUMMY"],
-                                        "requirements_doc": "{}",
-                                        "copy_reason": models.CopyReason.objects.get(value='template'),
-                                        "copies": None,
-                                        "scheduling_unit_draft": scheduling_unit_draft,
-                                        "template": template}
+def _dummy_scheduling_unit_template(name):
+    template_name = "%s_dummy" % name
+    try:
+        return models.SchedulingUnitTemplate.objects.get(name=template_name)
+    except:
+        dummy_scheduling_unit_template_details = {"name": template_name,
+                                  "description": "Dummy scheduling unit template for MoM migration, when no matching template in TMSS",
+                                  "version": 'v0.314159265359',
+                                  "schema": {"$id":"http://tmss.lofar.org/api/schemas/schedulingunittemplate/empty/1#",
+                                             "$schema": "http://json-schema.org/draft-06/schema#"},
+                                  "tags": ["DUMMY"]}
 
-    return models.TaskDraft.objects.create(**dummy_task_draft_details)
+        return models.SchedulingUnitTemplate.objects.create(**dummy_scheduling_unit_template_details)
 
 
-def _dummy_task_blueprint(draft, template, scheduling_unit_blueprint):
+def _dummy_task_template(name):
+    template_name = "%s_dummy" % name
+    try:
+        return models.TaskTemplate.objects.get(name=template_name)
+    except:
+        dummy_task_template_details = {"name": template_name,
+                                       "description": 'Dummy task template for MoM migration, when no matching template in TMSS',
+                                       "validation_code_js": "",
+                                       "version": 'v0.314159265359',
+                                       "schema": {"$id":"http://tmss.lofar.org/api/schemas/tasktemplate/empty/1#",
+                                                  "$schema": "http://json-schema.org/draft-06/schema#"},
+                                       "tags": ["DUMMY"],
+                                       "type": models.TaskType.objects.get(value='other')}
 
-    dummy_task_blueprint_details = {"name": 'dummy',
-                                            "description": "Dummy work request blueprint",
-                                            "tags": ["DUMMY"],
-                                            "requirements_doc": "{}",
-                                            "do_cancel": False,
-                                            "draft": draft,
-                                            "template": template,
-                                            "scheduling_unit_blueprint": scheduling_unit_blueprint}
+        return models.TaskTemplate.objects.create(**dummy_task_template_details)
 
-    return models.TaskBlueprint.objects.create(**dummy_task_blueprint_details)
 
+def create_subtask_trees_for_project_in_momdb(project_mom2id, project):
+    """
+    Migrates all observations and pipelines that belong to the given project as Subtasks to TMSS.
+    This also creates associated Task and SchedulingUnit drafts and blueprints in order to link the subtask to its project.
+    :param project_mom2id: The mom id of the project to migrate
+    :param project: The TMSS project object to refer to
+    """
 
-def get_subtask_details_from_momdb(project_mom2id, project):
+    global stats
+    global mom2id_to_tmss_representation
 
     logger.info("Getting subtask details from MoM database")
     mom_results = query_subtask_details_for_project_from_momdb(project_mom2id)
-    results = {}
+    logger.info("There are %s subtasks to migrate in project name=%s" % (len(mom_results), project.name))
 
     for mom_details in mom_results:
 
-        # different types have some info in different spots, so they end up in different columns.
-        # put same information in same spot to keep following code same for all tasks.
-        # (maybe we want to instead separate these into different queries instead or union them in SQL?)
+        logger.info("...now migrating subtask mom2id=%s mom2objecttype=%s" % (mom_details['mom2id'], mom_details['mom2objecttype']))
+
+        # derive values for TMSS
+
+        #   type and start/end times
 
-        if 'OBSERVATION' in mom_details['mom2objecttype']:
-            type = models.SubtaskType.objects.get(value='observation')
-            template_name = mom_details['default_template']
+        if 'MEASUREMENT' in mom_details['mom2objecttype']:
+            template_name = mom_details['obs_template']
             start_time = mom_details['obs_starttime']
-            end_time = mom_details['obs_endtime']
+            stop_time = mom_details['obs_endtime']
         elif 'PIPELINE' in mom_details['mom2objecttype']:
-            type = models.SubtaskType.objects.get(value='pipeline')
             template_name = mom_details['template']
             start_time = mom_details['starttime']
-            end_time = mom_details['endtime']
+            stop_time = mom_details['endtime']
         else:
-            logger.warning('Unknown type %(mom2objecttype)s' % mom_details)
-            logger.warning('Skipping %s' % mom_details)
+            logger.error('Unknown type %(mom2objecttype)s - Skipping subtask mom2id=%(mom2id)s' % mom_details)
+            stats['subtasks_skipped'] += 1
             continue
 
-        # create new tmss details (leave out stuff that might go wrong)
+        #   timestamps
 
-        details = {"type": type,
-                   "start_time": None,              # mandatory
-                   "stop_time": None,               # mandatory
-                   "state": None,                   # mandatory
-                   "requested_state": None,         # mandatory
-                   "specification": "{}",
-                   "task_blueprint": None,  # optional, but required for project reference
-                   "template": None,                # mandatory
-                   "tags": ["migrated_from_MoM"]}
+        if start_time is None:
+            start_time = datetime.datetime.utcfromtimestamp(0).isoformat()  # not-null constraint
 
-        # timestamps
+        if stop_time is None:
+            stop_time = datetime.datetime.utcfromtimestamp(0).isoformat()  # not-null constraint
 
-        if start_time is not None:
-            details['start_time'] = start_time
-        else:
-            details['start_time'] = datetime.datetime.utcfromtimestamp(0).isoformat() # not-null constraint
-
-        if end_time is not None:
-            details['stop_time'] = end_time
-        else:
-            details['stop_time'] = datetime.datetime.utcfromtimestamp(0).isoformat() # not-null constraint
+        #   state
 
-        # state
+        #   todo: check that this mapping is correct and complete.
+        #    It currently only covers the states encountered during testing, mapped to what intuitively felt right (i.e. probably wrong).
+        #   Note: status codes with a verbatim counterpart in TMSS do not need to be mapped here.
+        #   Valid TMSS values are: "defining", "defined", "scheduling", "scheduled", "queueing", "queued", "starting", "started", "finishing", "finished", "cancelling", "cancelled", "error"
+        mom_state_to_subtask_state = {"opened": models.SubtaskState.objects.get(value="defining"),
+                                      "described": models.SubtaskState.objects.get(value="defined"),
+                                      "suspended": models.SubtaskState.objects.get(value="cancelled"),
+                                      "prepared": models.SubtaskState.objects.get(value="scheduling"),
+                                      "aborted": models.SubtaskState.objects.get(value="cancelled"),
+                                      "hold": models.SubtaskState.objects.get(value="cancelled"),
+                                      "approved": models.SubtaskState.objects.get(value="queued"),
+                                      "failed": models.SubtaskState.objects.get(value="error"),
+                                      "successful": models.SubtaskState.objects.get(value="finished"),}
 
-        try:
-            state = models.SubtaskState.objects.get(value=mom_details['code'])
-            details['state'] = state
-            details['requested_state'] = state
-        except Exception as e:
-            logger.error("No state choice matching '%s' in tmss database! %s" % (mom_details['code'], e))
-            logger.warning('Skipping %s' % mom_details)
-            continue
+        if mom_details['code'] in mom_state_to_subtask_state:
+            state = mom_state_to_subtask_state[mom_details['code']]
+        else:
+            try:
+                state = models.SubtaskState.objects.get(value=mom_details['code'])
+            except models.SubtaskState.DoesNotExist:
+                logger.error("No SubtaskState choice matching '%(code)s' in tmss database! - Skipping subtask mom2id=%(mom2id)s" % mom_details)
+                stats['subtasks_skipped'] += 1
+                continue
 
-        # template
+        #     template
 
         if template_name is not None:
             try:
-                details['template'] = models.SubtaskTemplate.objects.get(name=template_name)
-            except Exception as e:
-                logger.warning("No task template matching '%s' in tmss database! Using dummy instead! %s" % (template_name, e))
-
+                specifications_template = models.SubtaskTemplate.objects.get(name=template_name)
+                logger.info('...found SubtaskTemplate id=%s for subtask mom2id=%s templatename=%s' % (specifications_template.id, mom_details["mom2id"], template_name))
+            except models.SubtaskTemplate.DoesNotExist:
                 # todo: create a lot of templates to reflect what was used for the actual task?
-                # todo: raise Exception (or continue) once we have proper templates for everything.
-                details["template"] = _dummy_subtask_template()
+                #  Then raise Exception once we have proper templates for everything?
+                specifications_template = _dummy_subtask_template(template_name)
+                logger.warning("No SubtaskTemplate matching '%s' in tmss database! Using dummy id=%s instead for subtask mom2id=%s" % (template_name, specifications_template.id, mom_details['mom2id']))
 
         else:
-            logger.warning('Missing template name in MoM details!')
-            logger.warning('Skipping %s' % mom_details)
+            logger.error('Missing template name in MoM details! - Skipping subtask mom2id=%(mom2id)s' % mom_details)
+            stats['subtasks_skipped'] += 1
             continue
 
-        # ----------------
-        # todo: the following entries are needed to relate a task to it's project.
-        # todo: we should substitute the dummy items by items that reflect the actual task details
-
         # scheduling set
-        scheduling_set = _dummy_scheduling_set(project)
+        scheduling_set = get_or_create_scheduling_set_for_project(project)
 
         # scheduling unit template
-        scheduling_unit_template = _dummy_scheduling_unit_template()
+        try:
+            scheduling_unit_template = models.SchedulingUnitTemplate.objects.get(name=template_name)
+            logger.info('...found SchedulingUnitTemplate id=%s for subtask mom2id=%s templatename=%s' % (scheduling_unit_template.id, mom_details["mom2id"], template_name))
+        except models.SchedulingUnitTemplate.DoesNotExist:
+            scheduling_unit_template = _dummy_scheduling_unit_template(template_name)
+            logger.warning('No SchedulingUnitTemplate was found for subtask mom2id=%s templatename=%s. Using dummy template id=%s' % (mom_details["mom2id"], template_name, scheduling_unit_template.id))
+
+        # scheduling unit draft + blueprint
+        scheduling_unit_draft, scheduling_unit_blueprint = get_or_create_scheduling_unit_for_subtask(scheduling_set, scheduling_unit_template, mom_details["parent_mom2id"])
+
+        # task draft + blueprint
+        task_draft, task_blueprint = get_or_create_task_for_subtask(scheduling_unit_draft, scheduling_unit_blueprint, mom_details["mom2id"])
+
+        details = {"id": mom_details['mom2id'],
+                   "state": state,
+                   "specifications_doc": {},   # todo: where from? We have user_specification_id (for task?) and system_specification_id (for subtask?) on lofar_observation (I guess referring to lofar_observation_specification). Shall we piece things together from that, or is there a text blob to use? Also: pipeline info lives in obs_spec too?
+                   "task_blueprint": task_blueprint,
+                   "specifications_template": specifications_template,
+                   "tags": ["migrated_from_MoM", "migration_incomplete"],   # todo: set complete once it is verified that all info is present
+                   "priority": project.priority_rank,  # todo: correct to derive from project?
+                   # optional:
+                   "start_time": start_time,
+                   "stop_time": stop_time,
+                   "schedule_method": models.ScheduleMethod.objects.get(value="manual"),  # todo: correct? Or leave None?
+                   # "created_or_updated_by_user" = None,
+                   # "raw_feedback" = None,
+                   # "do_cancel": None,
+                   #"cluster": None  # I guess from lofar_observation.storage_cluster_id
+                   }
 
-        # scheduling unit draft
-        scheduling_unit_draft = _dummy_scheduling_unit_draft(scheduling_set, scheduling_unit_template)
+        subtask_qs = models.Subtask.objects.filter(id=details["id"])
+        if subtask_qs.count():
+            # todo: this will update the subtask, but other TMSS objects do not share id with MoM and get recreated with every migration run. Can we clean this up somehow?
+            subtask_qs.update(**details)
+            subtask = subtask_qs.first()
+            logger.info("...updated existing subtask tmss id=%s" % subtask.id)
+            stats['subtasks_updated'] += 1
+        else:
+            subtask = models.Subtask.objects.create(**details)
+            logger.info("...created new subtask tmss id=%s" % subtask.id)
+            stats['subtasks_created'] += 1
 
-        # scheduling unit blueprint
-        scheduling_unit_blueprint = _dummy_scheduling_unit_blueprint(scheduling_unit_draft, scheduling_unit_template)
+        mom2id_to_tmss_representation[mom_details['mom2id']] = subtask
 
-        # work request template
-        task_template = _dummy_task_template()
+        logger.info("...handled %s TMSS objects so far | %s" % (len(mom2id_to_tmss_representation), stats))
 
-        # work request draft
-        task_draft = _dummy_task_draft(scheduling_unit_draft, task_template)
 
-        # work request blueprint
-        details['task_blueprint'] = _dummy_task_blueprint(task_draft,
-                                                                          task_template,
-                                                                          scheduling_unit_blueprint)
-        # ----------------
+def get_or_create_cycle_for_project(project):
+    """
+    Returns a cycle for the given project. Since cycles don't seem to be a thing in MoM, the cycle is derived from the
+    project name. Returns None if that fails.
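+    Example (illustrative, hypothetical project name): "LC9_042" yields the cycle named "Cycle 09".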
+    """
+    name = project.name
+    if name.lower().startswith('lc'):
+        cycle_no = re.search(r'\d+', name.lower()).group()
+        cycle_name = "Cycle %02d" % int(cycle_no)
+        try:
+            cycle = models.Cycle.objects.get(name=cycle_name)
+            logger.info("...found existing cycle name=%s for project name=%s" % (cycle.name, project.name))
+            return cycle
+        except models.Cycle.DoesNotExist:
+            details = {"name": cycle_name,
+                       "description": "Cycle %s (created during MoM migration)" % cycle_no,
+                       "tags": ["migrated_from_MoM", "migration_incomplete"],
+                       "start": "1970-01-01T00:00:00",  # todo, where from?
+                       "stop": "1970-01-01T00:00:00",   # todo, where from?
+                       }
+            cycle = models.Cycle.objects.create(**details)
+            logger.info("...created new cycle name=% for project name=%s" % (cycle.name, project.name))
+            return cycle
 
-        # add task mom2id and its details to return dict
-        results[mom_details['mom2id']] = details
+    logger.warning("Could not determine cycle for project name=%s. Using None." % (project.name))
 
-    return results
 
 
 def main():
+    """
+    Migrates data from a MoM database to a TMSS database.
+    Existing TMSS objects with the same name or id are updated; otherwise new objects are created.
+    """
+
+    global mom2id_to_tmss_representation
+    global stats
+
+    # query details of all projects in MoM database
     project_details = get_project_details_from_momdb()
+    logger.info("There are %s projects to migrate" % len(project_details))
 
+    # iterate projects
     for p_id, p_details in project_details.items():
 
-        logger.info("---\nNow migrating project %s..." % p_details['name'])
-        project = models.Project.objects.create(**p_details)
-        logger.info("...created new project with tmss id %s" % project.id)
+        try:
+            logger.info("Now migrating project mom_name=%s mom2id=%s" % (p_details['name'], p_id))
+
+            # create or update project
+            project_qs = models.Project.objects.filter(name=p_details["name"])
+            if project_qs.count():
+                project_qs.update(**p_details)
+                project = project_qs.first()
+                logger.info("...updated existing project tmss_name=%s" % project.name)
+                stats["projects_updated"] += 1
+            else:
+                project = models.Project.objects.create(**p_details)
+                logger.info("...created new project tmss_name=%s" % project.name)
+                stats["projects_created"] += 1
 
-        task_details = get_subtask_details_from_momdb(p_id, project)
-        for t_id, t_details in task_details.items():
+            # create all subtasks and related objects for the project
+            create_subtask_trees_for_project_in_momdb(p_id, project)
 
-            logger.info("...creating new task mom2id %s" % t_id)
-            task = models.Subtask.objects.create(**t_details)
-            logger.info("...created new task with tmss id %s" % task.id)
+            # add project to existing or new cycle
+            cycle = get_or_create_cycle_for_project(project)
+            if cycle:
+                project.cycles.set([cycle])
 
-        logger.info("...done migrating project %s." % p_details['name'])
+            logger.info("...done migrating project mom_name=%s mom2id=%s tmss_name=%s." % (p_details['name'], p_id, project.name))
+
+        except Exception as ex:
+            logger.error(ex, exc_info=True)
+            logger.error("Skipping migration of project mom_name=%s mom2id=%s details=%s." % (p_details['name'], p_id, p_details))
+            stats["projects_skipped"] += 1
+
+    logger.info("Done. Handled %s TMSS objects in total | %s" % (len(mom2id_to_tmss_representation), stats))
 
 
 if __name__ == "__main__":
@@ -354,7 +626,7 @@ if __name__ == "__main__":
 
     global dbcreds
     dbcreds = dbcredentials.parse_options(options)
-    logger.info("Using dbcreds: %s", dbcreds.stringWithHiddenPassword())
+    logger.info("Using MoM dbcreds: %s", dbcreds.stringWithHiddenPassword())
 
     # note: this requires a config file .lofar/dbcredentials/mom.ini, with contents as such (adapt as needed):
     #
@@ -366,5 +638,14 @@ if __name__ == "__main__":
     # password = mompass
     # database = lofar_mom_test_tmss
 
+    # set up Django
+    creds_name = os.environ.get('TMSS_DBCREDENTIALS', 'tmss')
+    os.environ['TMSS_DBCREDENTIALS'] = creds_name
+    tmss_dbcreds = dbcredentials.DBCredentials().get(creds_name)
+    logger.info("Using TMSS dbcreds: %s", tmss_dbcreds.stringWithHiddenPassword())
+
+    os.environ.setdefault("DJANGO_SETTINGS_MODULE", 'lofar.sas.tmss.tmss.settings')
+    django.setup()
+    from lofar.sas.tmss.tmss.tmssapp import models  # has to happen after Django setup
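+
+    # Usage sketch (illustrative): run with MoM and TMSS credential files in place, e.g.
+    #   TMSS_DBCREDENTIALS=tmss migrate_momdb_to_tmss.py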
 
     main()
diff --git a/SAS/TMSS/src/tmss/settings.py b/SAS/TMSS/src/tmss/settings.py
index d5a948d2878..7f160668b40 100644
--- a/SAS/TMSS/src/tmss/settings.py
+++ b/SAS/TMSS/src/tmss/settings.py
@@ -155,7 +155,7 @@ ROOT_URLCONF = 'lofar.sas.tmss.tmss.urls'
 TEMPLATES = [
     {
         'BACKEND': 'django.template.backends.django.DjangoTemplates',
-        'DIRS': [BASE_DIR, os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR, '../frontend','tmss_webapp')],
+        'DIRS': [BASE_DIR, os.path.join(BASE_DIR, 'templates'), os.path.join(os.environ.get('LOFARROOT'), 'SAS/TMSS/frontend','tmss_webapp')],
         'APP_DIRS': True,
         'OPTIONS': {
             'context_processors': [
@@ -169,7 +169,7 @@ TEMPLATES = [
 ]
 
 STATICFILES_DIRS = [
-    os.path.join(BASE_DIR, '../frontend','tmss_webapp/build/static')
+    os.path.join(os.environ.get('LOFARROOT'), 'SAS/TMSS/frontend','tmss_webapp/build/static')
 ]
 
 WSGI_APPLICATION = 'lofar.sas.tmss.tmss.wsgi.application'
@@ -354,9 +354,3 @@ SWAGGER_SETTINGS = {
     },
 
 }
-
-# TODO Do I need distinguish more between Test and Production Environment??
-# maybe a local file in Development environment for test purposes
-SCU = "http://scu199" if isDevelopmentEnvironment() or isTestEnvironment() else "http://scu001"
-PIPELINE_SUBTASK_LOG_URL = SCU + ".control.lofar:7412/tasks/%s/log.html"
-OBSERVATION_SUBTASK_LOG_URL = "https://proxy.lofar.eu/inspect/%s/rtcp-%s.errors"
diff --git a/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py b/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py
index 49e555448b9..28c0e733214 100644
--- a/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py
+++ b/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py
@@ -1,5 +1,5 @@
 from lofar.sas.tmss.tmss.exceptions import *
-from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Dataproduct, SubtaskType, Subtask, SubtaskOutput
+from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Dataproduct, SubtaskType, Subtask, SubtaskOutput, SIPidentifier
 from lofar.sas.tmss.tmss.tmssapp.models.specification import Datatype, Dataformat
 from lofar.lta.sip import siplib, ltasip, validator, constants
 
@@ -77,28 +77,29 @@ def get_siplib_stations_list(subtask):
     return siplib_station_list
 
 
-# todo: how do we handle IDs? ...use ID service for new dataproducts? ...determine IDs of already ingested dataproducts?
-# todo: replace the following hack after a conclusion is reached on how to handle IDs.
-#   Either use the default constructor of siplib.Identifier to have the ID service generate unique IDs
-#   Or add a class method to create IDs from within TMSS, by implementing a logic that can generate LTA-wide unique integer identifiers.
-def create_fake_identifier_for_testing(unique_id: int=None) -> siplib.Identifier:
+def get_siplib_identifier(sipid_obj: SIPidentifier, context_str="") -> siplib.Identifier:
     """
-    Create an Identifier object for testing.
-    While under deleopment, work around the ID service and create fake identifiers that we are not gonna use anyway:
-    ! Important: Note that the created identifiers will be unknown to the LTA and must not be used for an actual ingest!
+    Retrieve an Identifier object. Get the unique_identifier and source of the given SIP object and convert them to
+    a siplib object.
+    :param sipid_obj: SIP Identifier object
+    :param context_str: context info to add to the error message in case of failure
     :return: A lofar.lta.sip.siplib.Identifier object
     """
     identifier = siplib.Identifier.__new__(siplib.Identifier)
+    unique_id = sipid_obj.unique_identifier
     if unique_id is None:
-        unique_id = uuid.uuid4().int
+        raise TMSSException("Can not create SIP Identifier, the value of 'unique_id' is None for %s" % context_str)
+    source = sipid_obj.source
+    if source is None:
+        raise TMSSException("Can not create SIP Identifier, the value 'source' is None for %s" % context_str)
+
     identifier._set_pyxb_identifier(
         ltasip.IdentifierType(
-            source='tmss_test',
+            source=source,
             identifier=unique_id,
             name=None,
             label=None),
         suppress_warning=True)
-    logger.warning("Created fake Identifier %s - do not use for an actual ingest!" % unique_id)
     return identifier
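+
+# Usage sketch (illustrative), matching the calls below for subtasks and dataproducts:
+#   identifier = get_siplib_identifier(subtask.global_identifier, "Subtask id=%s" % subtask.id)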
 
 
@@ -130,7 +131,7 @@ def create_sip_representation_for_subtask(subtask: Subtask):
     :return: A siplib.Observation object or one of the various siplib pipeline object flavors
     """
     # determine common properties
-    subtask_sip_identifier = create_fake_identifier_for_testing(unique_id=subtask.id)   # todo: use correct id
+    subtask_sip_identifier = get_siplib_identifier(subtask.global_identifier, "Subtask id=%s" % subtask.id)
     name = str(subtask.id)
     process_map = siplib.ProcessMap(strategyname=subtask.specifications_template.name,
                                     strategydescription=subtask.specifications_template.description,
@@ -170,7 +171,7 @@ def create_sip_representation_for_subtask(subtask: Subtask):
     elif subtask.specifications_template.type.value == SubtaskType.Choices.PIPELINE.value:
         sourcedata_identifiers = []
         for input in subtask.inputs.all():
-            sourcedata_identifiers += [create_fake_identifier_for_testing(unique_id=dp.id) for dp in input.dataproducts.all()]     # todo: use correct id, lookup based on TMSS reference or so, tbd
+            sourcedata_identifiers += [get_siplib_identifier(dp.global_identifier, "Dataproduct id=%s" % dp.id) for dp in input.dataproducts.all()]     # todo: use correct id, lookup based on TMSS reference or so, tbd
         if not sourcedata_identifiers:
             raise TMSSException("There seems to be no subtask input associated to your pipeline subtask id %s. Please define what data the pipeline processed." % subtask.id)
 
@@ -282,18 +283,18 @@ def create_sip_representation_for_dataproduct(dataproduct: Dataproduct):
         logger.warning("Could not determine the type of dataproduct id %s (%s). Falling back to %s" % (dataproduct.id, err, dataproduct_fileformat))
 
     dataproduct_map = siplib.DataProductMap(type=dataproduct_type,
-                                            identifier=create_fake_identifier_for_testing(unique_id=dataproduct.id), # todo: use correct id
+                                            identifier=get_siplib_identifier(dataproduct.global_identifier, "Dataproduct %s" % dataproduct.id),
                                             size=dataproduct.size if dataproduct.size else 0,
                                             filename=dataproduct.filename,
                                             fileformat=dataproduct_fileformat,
                                             storage_writer=storage_writer_map[dataproduct.feedback_doc["samples"]["writer"] if 'samples' in dataproduct.feedback_doc else 'unknown'], # todo: verify we can use the feedback_doc here and remove the old method | storage_writer_map[dataproduct.producer.subtask.task_blueprint.specifications_doc.get("storagemanager", 'unknown')],
                                             storage_writer_version=dataproduct.feedback_doc["samples"]["writer_version"] if 'samples' in dataproduct.feedback_doc else 'unknown',
-                                            process_identifier=create_fake_identifier_for_testing(unique_id=dataproduct.producer.subtask.id))
+                                            process_identifier=get_siplib_identifier(dataproduct.producer.subtask.global_identifier, "Producer Subtask %s" % dataproduct.producer.subtask.id))
 
     if dataproduct.dataformat.value == Dataformat.Choices.MEASUREMENTSET.value:  # <- This is the only one we currently need for UC1
         sip_dataproduct = siplib.CorrelatedDataProduct(
             dataproduct_map,
-            subarraypointing_identifier=create_fake_identifier_for_testing(), # todo, from dataproduct.specifications_doc, Jan David checks how to translate int -> Identifier object
+            subarraypointing_identifier=get_siplib_identifier(dataproduct.sap.global_identifier, "SAP %s" % dataproduct.sap.id),
             subband=dataproduct.feedback_doc['frequency']['subbands'][0],
             starttime=dataproduct.feedback_doc['time']['start_time'],
             duration=isodate.duration_isoformat(datetime.timedelta(seconds=dataproduct.feedback_doc['time']['duration'])),
diff --git a/SAS/TMSS/src/tmss/tmssapp/conversions.py b/SAS/TMSS/src/tmss/tmssapp/conversions.py
index ee8f35b3770..ce112f7b30b 100644
--- a/SAS/TMSS/src/tmss/tmssapp/conversions.py
+++ b/SAS/TMSS/src/tmss/tmssapp/conversions.py
@@ -3,6 +3,60 @@ import astropy.units
 from lofar.lta.sip import station_coordinates
 from datetime import datetime
 from astropy.coordinates.earth import EarthLocation
+from astropy.coordinates import Angle
+from astropy.time import Time
+from astroplan.observer import Observer
+
+
+def create_astroplan_observer_for_station(station: str) -> Observer:
+    '''
+    returns an astroplan Observer object for the given station, located at the LBA center of that station
+    :param station: a station name, e.g. "CS002"
+    :return: astroplan.observer.Observer object
+    '''
+
+    coords = station_coordinates.parse_station_coordinates()["%s_LBA" % station.upper()]
+    location = EarthLocation.from_geocentric(x=coords['x'], y=coords['y'], z=coords['z'],  unit=astropy.units.m)
+    observer = Observer(location, name="LOFAR", timezone="UTC")
+    return observer
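+
+# Usage sketch (illustrative): observer = create_astroplan_observer_for_station("CS002")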
+
+# default angle to the horizon at which the sunset/sunrise starts and ends, as per LOFAR definition.
+SUN_SET_RISE_ANGLE_TO_HORIZON = Angle(10, unit=astropy.units.deg)
+
+def timestamps_and_stations_to_sun_rise_and_set(timestamps: [datetime], stations: [str], angle_to_horizon: Angle=SUN_SET_RISE_ANGLE_TO_HORIZON) -> dict:
+    """
+    compute sunrise, sunset, day and night of the given stations at the given timestamps
+    :param timestamps: list of datetimes, e.g. [datetime(2020, 1, 1), datetime(2020, 1, 2)]
+    :param stations: list of station names, e.g. ["CS001"]
+    :param angle_to_horizon: the angle to the horizon at which sunrise/sunset is considered to start and end
+    :return: A dict that maps station names to a nested dict that contains lists of start and end times for sunrise, sunset, day and night on each requested date.
+        E.g.
+        {"CS001":
+            {   "sunrise": [{"start": datetime(2020, 1, 1, 6, 0, 0), "end": datetime(2020, 1, 1, 6, 30, 0)},
+                            {"start": datetime(2020, 1, 2, 6, 0, 0), "end": datetime(2020, 1, 2, 6, 30, 0)}],
+                "sunset": [{"start": datetime(2020, 1, 1, 18, 0, 0), "end": datetime(2020, 1, 1, 18, 30, 0)},
+                           {"start": datetime(2020, 1, 2, 18, 0, 0), "end": datetime(2020, 1, 2, 18, 30, 0)}],
+                "day": [{"start": datetime(2020, 1, 1, 6, 30, 0), "end": datetime(2020, 1, 1, 18, 0, 0)},
+                        {"start": datetime(2020, 1, 2, 6, 30, 0), "end": datetime(2020, 1, 2, 18, 0, 0)}],
+                "night": [{"start": datetime(2020, 1, 1, 18, 30, 0), "end": datetime(2020, 1, 2, 6, 0, 0)},
+                          {"start": datetime(2020, 1, 2, 18, 30, 0), "end": datetime(2020, 1, 3, 6, 0, 0)}],
+            }
+        }
+    """
+    return_dict = {}
+    for station in stations:
+        for timestamp in timestamps:
+            observer = create_astroplan_observer_for_station(station)
+            sunrise_start = observer.sun_rise_time(time=Time(timestamp), horizon=-angle_to_horizon, which='previous')
+            if sunrise_start.to_datetime().date() < timestamp.date():
+                sunrise_start = observer.sun_rise_time(time=Time(timestamp), horizon=-angle_to_horizon, which='next')
+            sunrise_end = observer.sun_rise_time(time=Time(timestamp), horizon=angle_to_horizon, which='next')
+            sunset_start = observer.sun_set_time(time=sunrise_end, horizon=angle_to_horizon, which='next')
+            sunset_end = observer.sun_set_time(time=sunrise_end, horizon=-angle_to_horizon, which='next')
+            sunrise_next_start = observer.sun_rise_time(time=sunset_end, horizon=-angle_to_horizon, which='next')
+            return_dict.setdefault(station, {}).setdefault("sunrise", []).append({"start": sunrise_start.to_datetime(), "end": sunrise_end.to_datetime()})
+            return_dict[station].setdefault("sunset", []).append({"start": sunset_start.to_datetime(), "end": sunset_end.to_datetime()})
+            return_dict[station].setdefault("day", []).append({"start": sunrise_end.to_datetime(), "end": sunset_start.to_datetime()})
+            return_dict[station].setdefault("night", []).append({"start": sunset_end.to_datetime(), "end": sunrise_next_start.to_datetime()})
+    return return_dict
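+
+# Usage sketch (illustrative):
+#   events = timestamps_and_stations_to_sun_rise_and_set([datetime(2020, 1, 1)], ["CS002"])
+#   night = events["CS002"]["night"][0]   # {"start": <datetime>, "end": <datetime>}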
 
 
 def local_sidereal_time_for_utc_and_station(timestamp: datetime = None,
diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py
index f19b35a76f0..7faae82d9b3 100644
--- a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py
+++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py
@@ -1,4 +1,4 @@
-# Generated by Django 3.0.9 on 2020-10-27 16:12
+# Generated by Django 3.0.7 on 2020-10-29 16:37
 
 from django.conf import settings
 import django.contrib.postgres.fields
@@ -597,6 +597,19 @@ class Migration(migrations.Migration):
                 'abstract': False,
             },
         ),
+        migrations.CreateModel(
+            name='SIPidentifier',
+            fields=[
+                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)),
+                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
+                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
+                ('source', models.CharField(help_text='Source name', max_length=128)),
+                ('unique_identifier', models.BigAutoField(help_text='Unique global identifier.', primary_key=True, serialize=False)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
         migrations.CreateModel(
             name='StationType',
             fields=[
@@ -1057,6 +1070,11 @@ class Migration(migrations.Migration):
             name='created_or_updated_by_user',
             field=models.ForeignKey(editable=False, help_text='The user who created / updated the subtask.', null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL),
         ),
+        migrations.AddField(
+            model_name='subtask',
+            name='global_identifier',
+            field=models.ForeignKey(editable=False, help_text='The global unique identifier for LTA SIP.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SIPidentifier'),
+        ),
         migrations.AddField(
             model_name='subtask',
             name='schedule_method',
@@ -1077,6 +1095,10 @@ class Migration(migrations.Migration):
             name='task_blueprint',
             field=models.ForeignKey(help_text='Task Blueprint to which this Subtask belongs.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='subtasks', to='tmssapp.TaskBlueprint'),
         ),
+        migrations.AddIndex(
+            model_name='sipidentifier',
+            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sip_tags_bbce92_gin'),
+        ),
         migrations.AddConstraint(
             model_name='schedulingunittemplate',
             constraint=models.UniqueConstraint(fields=('name', 'version'), name='schedulingunittemplate_unique_name_version'),
@@ -1149,6 +1171,11 @@ class Migration(migrations.Migration):
             model_name='saptemplate',
             constraint=models.UniqueConstraint(fields=('name', 'version'), name='saptemplate_unique_name_version'),
         ),
+        migrations.AddField(
+            model_name='sap',
+            name='global_identifier',
+            field=models.ForeignKey(editable=False, help_text='The global unique identifier for LTA SIP.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SIPidentifier'),
+        ),
         migrations.AddField(
             model_name='sap',
             name='specifications_template',
@@ -1281,6 +1308,11 @@ class Migration(migrations.Migration):
             name='feedback_template',
             field=models.ForeignKey(help_text='Schema used for feedback_doc.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.DataproductFeedbackTemplate'),
         ),
+        migrations.AddField(
+            model_name='dataproduct',
+            name='global_identifier',
+            field=models.ForeignKey(editable=False, help_text='The global unique identifier for LTA SIP.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SIPidentifier'),
+        ),
         migrations.AddField(
             model_name='dataproduct',
             name='producer',
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
index b10dc6daf59..6e58f28a9dc 100644
--- a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
 from datetime import datetime, timedelta
 
 from django.db.models import ForeignKey, CharField, DateTimeField, BooleanField, IntegerField, BigIntegerField, \
-    ManyToManyField, CASCADE, SET_NULL, PROTECT, QuerySet
+    ManyToManyField, CASCADE, SET_NULL, PROTECT, QuerySet, BigAutoField
 from django.contrib.postgres.fields import ArrayField, JSONField
 from django.contrib.auth.models import User
 from .specification import AbstractChoice, BasicCommon, Template, NamedCommon, annotate_validate_add_defaults_to_doc_using_template
@@ -18,12 +18,9 @@ from django.db.models.expressions import RawSQL
 from django.core.exceptions import ValidationError
 
 from lofar.sas.tmss.tmss.exceptions import SubtaskSchedulingException
-from lofar.messaging.messagebus import ToBus, DEFAULT_BROKER, DEFAULT_BUSNAME
-from lofar.messaging.messages import EventMessage
-from lofar.sas.tmss.client.tmssbuslistener import DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX
-from lofar.common.util import single_line_with_single_spaces
 from django.conf import settings
 from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC
+import uuid
 #
 # I/O
 #
@@ -32,6 +29,16 @@ from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC
 # Choices
 #
 
+
+def generate_unique_identifier_for_SIP_when_needed(model):
+    """
+    Create a unique identifier for the given model instance if it does not have one yet (None).
+    We just use an auto-increment ID, which is 64 bit.
+    """
+    if model.id is not None and model.global_identifier is None:
+        model.global_identifier = SIPidentifier.objects.create(source="TMSS")
+
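+# Note: this is called from the save() methods of Subtask, SAP and Dataproduct below. A freshly
+# created object only gets its global_identifier on a second save(), once it has an id.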
+
 class SubtaskState(AbstractChoice):
     """Defines the model and predefined list of possible SubtaskStatusChoice's for Subtask.
     The items in the Choices class below are automagically populated into the database via a data migration."""
@@ -151,6 +158,7 @@ class Subtask(BasicCommon):
     # resource_claim = ForeignKey("ResourceClaim", null=False, on_delete=PROTECT) # todo <-- how is this external reference supposed to work?
     created_or_updated_by_user = ForeignKey(User, null=True, editable=False, on_delete=PROTECT, help_text='The user who created / updated the subtask.')
     raw_feedback = CharField(null=True, max_length=1048576, help_text='The raw feedback for this Subtask')
+    global_identifier = ForeignKey('SIPidentifier', null=True, editable=False, on_delete=PROTECT, help_text='The global unique identifier for LTA SIP.')
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
@@ -158,17 +166,6 @@ class Subtask(BasicCommon):
         # keep original state for logging
         self.__original_state_id = self.state_id
 
-    @staticmethod
-    def _send_state_change_event_message(subtask_id:int, old_state: str, new_state: str):
-        with ToBus(exchange=os.environ.get("TMSS_EXCHANGE", DEFAULT_BUSNAME),
-                   broker=os.environ.get("TMSS_BROKER", DEFAULT_BROKER)) as tobus: #TODO: do we want to connect to the bus for each new message, or have some global tobus?
-            msg = EventMessage(subject="%s.%s" % (DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX, new_state.capitalize()),
-                               content={'subtask_id': subtask_id, 'old_state': old_state, 'new_state': new_state})
-            address = tobus.remote_address
-            logger.info("Sending message with subject '%s' to exchange='%s' on broker=%s:%s content: %s",
-                        msg.subject, tobus.exchange, address[0], address[1], single_line_with_single_spaces(msg.content))
-            tobus.send(msg)
-
     @property
     def successors(self) -> QuerySet:
         '''return the connect successor subtask(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets)
@@ -195,6 +192,7 @@ class Subtask(BasicCommon):
         creating = self._state.adding  # True on create, False on update
 
         annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template')
+        generate_unique_identifier_for_SIP_when_needed(self)
 
         # check for uniqueness of SAP names:
         # todo: this is a very specific check, that depends on the template. On the task level, we have a javascript
@@ -229,39 +227,9 @@ class Subtask(BasicCommon):
                                            user=self.created_or_updated_by_user, user_identifier=identifier)
             log_entry.save()
 
-            try:
-                self._send_state_change_event_message(self.id, log_entry.old_state.value, log_entry.new_state.value)
-            except Exception as e:
-                logger.error("Could not send state change to messagebus: %s", e)
-
             # update the previous state value
             self.__original_state_id = self.state_id
 
-    @property
-    def log_url(self):
-        """
-        Return the link to the pipeline log in case of pipeline or
-        link to COBALT error log in case of an observation
-        otherwise just an empty string
-        """
-        if self.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value:
-            url = settings.OBSERVATION_SUBTASK_LOG_URL % (self.id, self.id)
-        elif self.specifications_template.type.value == SubtaskType.Choices.PIPELINE.value:
-            # Get RADBID, subtask must be at least 'scheduled' to exist in radb
-            # If RA is not started don't wait longer than 10 seconds
-            with RADBRPC.create(timeout=10) as radbrpc:
-                try:
-                    radb_id = radbrpc.getTask(tmss_id=self.id)
-                except:
-                    radb_id = None
-            if radb_id is None:
-               url = "not available (missing radbid)"
-            else:
-               url = settings.PIPELINE_SUBTASK_LOG_URL % radb_id['id']
-        else:
-            url = ""
-        return url
-
 
 class SubtaskStateLog(BasicCommon):
     """
@@ -300,12 +268,15 @@ class SubtaskOutput(BasicCommon):
 class SAP(BasicCommon):
     specifications_doc = JSONField(help_text='SAP properties.')
     specifications_template = ForeignKey('SAPTemplate', null=False, on_delete=CASCADE, help_text='Schema used for specifications_doc.')
+    global_identifier = ForeignKey('SIPidentifier', null=True, editable=False, on_delete=PROTECT, help_text='The global unique identifier for LTA SIP.')
 
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
         annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template')
+        generate_unique_identifier_for_SIP_when_needed(self)
 
         super().save(force_insert, force_update, using, update_fields)
 
+
 class Dataproduct(BasicCommon):
     """
     A data product represents an atomic dataset, produced and consumed by subtasks. The consumed dataproducts are those
@@ -328,13 +299,16 @@ class Dataproduct(BasicCommon):
     feedback_doc = JSONField(help_text='Dataproduct properties, as reported by the producing process.')
     feedback_template = ForeignKey('DataproductFeedbackTemplate', on_delete=PROTECT, help_text='Schema used for feedback_doc.')
     sap = ForeignKey('SAP', on_delete=PROTECT, null=True, related_name="dataproducts", help_text='SAP this dataproduct was generated out of (NULLable).')
+    global_identifier = ForeignKey('SIPidentifier', editable=False, null=True, on_delete=PROTECT, help_text='The global unique identifier for LTA SIP.')
 
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
         annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template')
         annotate_validate_add_defaults_to_doc_using_template(self, 'feedback_doc', 'feedback_template')
+        generate_unique_identifier_for_SIP_when_needed(self)
 
         super().save(force_insert, force_update, using, update_fields)
 
+
 class AntennaSet(NamedCommon):
     station_type = ForeignKey('StationType', null=False, on_delete=PROTECT)
     rcus = ArrayField(IntegerField(), size=128, blank=False)
@@ -380,3 +354,7 @@ class DataproductHash(BasicCommon):
     algorithm = ForeignKey('Algorithm', null=False, on_delete=PROTECT, help_text='Algorithm used (MD5, AES256).')
     hash = CharField(max_length=128, help_text='Hash value.')
 
+
+class SIPidentifier(BasicCommon):
+    source = CharField(max_length=128, help_text='Source name')
+    unique_identifier = BigAutoField(primary_key=True, help_text='Unique global identifier.')
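+
+# Example (illustrative): SIPidentifier.objects.create(source="TMSS") inserts a row whose
+# auto-incremented unique_identifier is used as the LTA-wide unique id (see the helper above).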
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/src/tmss/tmssapp/models/specification.py
index 353f7a16ea0..7ec6f980a09 100644
--- a/SAS/TMSS/src/tmss/tmssapp/models/specification.py
+++ b/SAS/TMSS/src/tmss/tmssapp/models/specification.py
@@ -660,6 +660,21 @@ class SchedulingUnitBlueprint(NamedCommon):
         else:
             return None
 
+    @property
+    def observed_end_time(self) -> datetime or None:
+        """
+        return the latest stop time of all (observation) tasks of this scheduling unit with the status observed/finished
+        """
+        observed_tasks = []
+        for task in self.task_blueprints.all():
+            if task.specifications_template.type.value == TaskType.Choices.OBSERVATION.value and\
+                    (task.status == "observed" or task.status == "finished") and task.stop_time is not None:
+                observed_tasks.append(task)
+        if observed_tasks:
+            return max(observed_tasks, key=lambda x: x.stop_time).stop_time
+        else:
+            return None
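+
+    # Example (illustrative): with observed/finished observation tasks stopping at 12:00 and 13:00,
+    # observed_end_time is the 13:00 stop_time; it is None when nothing has been observed yet.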
+
     @property
     def status(self):
         """
@@ -800,14 +815,19 @@ class TaskDraft(NamedCommon):
         '''
         scheduling_relations = list(self.first_to_connect.all()) + list(self.second_to_connect.all())
         for scheduling_relation in scheduling_relations:
-            if scheduling_relation.first.id == self._id and scheduling_relation.placement_id == "after":
+            # sometimes self._id does not exist, so use self.id instead to avoid an AttributeError
+            if hasattr(self, '_id'):
+                id = self._id
+            else:
+                id = self.id
+            if scheduling_relation.first.id == id and scheduling_relation.placement_id == "after":
                 previous_related_task_draft = TaskDraft.objects.get(id=scheduling_relation.second.id)
                 time_offset = scheduling_relation.time_offset
                 # todo: max of several relations
                 if previous_related_task_draft.relative_stop_time:
                     return previous_related_task_draft.relative_stop_time + datetime.timedelta(seconds=time_offset)
 
-            if scheduling_relation.second.id == self._id and scheduling_relation.placement_id == "before":
+            if scheduling_relation.second.id == id and scheduling_relation.placement_id == "before":
                 previous_related_task_draft = TaskDraft.objects.get(id=scheduling_relation.first.id)
                 time_offset = scheduling_relation.time_offset
                 # todo: max of several relations
@@ -872,6 +892,7 @@ class TaskDraft(NamedCommon):
 
 
 class TaskBlueprint(NamedCommon):
+
     specifications_doc = JSONField(help_text='Schedulings for this task (IMMUTABLE).')
     do_cancel = BooleanField(help_text='Cancel this task.')
     specifications_template = ForeignKey('TaskTemplate', on_delete=CASCADE, help_text='Schema used for specifications_doc (IMMUTABLE).')
@@ -917,20 +938,25 @@ class TaskBlueprint(NamedCommon):
         '''
         scheduling_relations = list(self.first_to_connect.all()) + list(self.second_to_connect.all())
         for scheduling_relation in scheduling_relations:
-            if scheduling_relation.first.id == self._id and scheduling_relation.placement_id == "after":   # self.id and placement.value will hit the db, this does not
+            # sometimes self._id does not exist, so use self.id instead to avoid an AttributeError
+            if hasattr(self, '_id'):
+                id = self._id
+            else:
+                id = self.id
+            if scheduling_relation.first.id == id and scheduling_relation.placement_id == "after":   # self.id and placement.value will hit the db, this does not
                     previous_related_task_blueprint = TaskBlueprint.objects.get(id=scheduling_relation.second.id)
                     time_offset = scheduling_relation.time_offset
                     # todo: max of several relations
                     if previous_related_task_blueprint.relative_stop_time:
                         return previous_related_task_blueprint.relative_stop_time + datetime.timedelta(seconds=time_offset)
 
-            if scheduling_relation.second.id == self._id and scheduling_relation.placement_id == "before":   # self.id and placement.value will hit the db, this does not
+            if scheduling_relation.second.id == id and scheduling_relation.placement_id == "before":   # self.id and placement.value will hit the db, this does not
                     previous_related_task_blueprint = TaskBlueprint.objects.get(id=scheduling_relation.first.id)
                     time_offset = scheduling_relation.time_offset
                     # todo: max of several relations
                     if previous_related_task_blueprint.relative_stop_time:
                         return previous_related_task_blueprint.relative_stop_time + datetime.timedelta(seconds=time_offset)
-        return datetime.timedelta(seconds=666660)
+        return datetime.timedelta(seconds=0)
 
     @cached_property
     def relative_stop_time(self) -> datetime.timedelta:
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/sap_template-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/sap_template-1.json
index b4f6eb64f1e..e80661a829d 100644
--- a/SAS/TMSS/src/tmss/tmssapp/schemas/sap_template-1.json
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/sap_template-1.json
@@ -5,17 +5,6 @@
   "type": "object",
   "default": {},
   "properties": {
-    "identifiers": {
-      "type": "object",
-      "properties": {
-        "SIP": {
-          "type": "string",
-          "default": ""
-        }
-      },
-      "additionalProperties": false,
-      "default": {}
-    },
     "measurement_type": {
       "type": "string",
       "enum": ["calibrator", "target"],
@@ -59,7 +48,6 @@
     }
   },
   "required": [
-    "identifiers",
     "name",
     "pointing",
     "time",
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/subtask_template-ingest-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/subtask_template-ingest-1.json
new file mode 100644
index 00000000000..d2727dbeaa1
--- /dev/null
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/subtask_template-ingest-1.json
@@ -0,0 +1,12 @@
+{
+  "$id":"http://tmss.lofar.org/api/schemas/subtasktemplate/ingest control/1#",
+  "$schema": "http://json-schema.org/draft-06/schema#",
+  "title":"ingest control",
+  "description":"This schema defines the parameters to setup and control an ingest subtask.",
+  "version":1,
+  "type": "object",
+  "properties": {
+  },
+  "required": [
+  ]
+}
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/task_template-ingest-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/task_template-ingest-1.json
new file mode 100644
index 00000000000..9877e438a72
--- /dev/null
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/task_template-ingest-1.json
@@ -0,0 +1,12 @@
+{
+  "$id": "http://tmss.lofar.org/api/schemas/tasktemplate/ingest/1#",
+  "$schema": "http://json-schema.org/draft-06/schema#",
+  "title": "ingest",
+  "description": "This schema defines the parameters to setup an ingest task.",
+  "version": 1,
+  "type": "object",
+  "properties": {
+  },
+  "required": [
+  ]
+}
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json b/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json
index b181f8ea1ed..27f52ee1913 100644
--- a/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json
@@ -112,5 +112,15 @@
   {
     "file_name": "sap_template-1.json",
     "template": "sap_template"
+  },
+  {
+    "file_name": "subtask_template-ingest-1.json",
+    "template": "subtask_template",
+    "type": "copy"
+  },
+  {
+    "file_name": "task_template-ingest-1.json",
+    "template": "task_template",
+    "type": "ingest"
   }
 ]
\ No newline at end of file
diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
index 091a2352b42..85d7bd21c54 100644
--- a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
@@ -85,7 +85,7 @@ class SubtaskSerializer(RelationalHyperlinkedModelSerializer):
     class Meta:
         model = models.Subtask
         fields = '__all__'
-        extra_fields = ['cluster_value', 'log_url']
+        extra_fields = ['cluster_value']
 
 
 class SubtaskInputSerializer(RelationalHyperlinkedModelSerializer):
@@ -160,3 +160,17 @@ class SAPTemplateSerializer(AbstractTemplateSerializer):
     class Meta:
         model = models.SAPTemplate
         fields = '__all__'
+
+
+class SAPSerializer(RelationalHyperlinkedModelSerializer):
+    specifications_doc = JSONEditorField(schema_source='specifications_template.schema')
+
+    class Meta:
+        model = models.SAP
+        fields = '__all__'
+
+
+class SIPidentifierSerializer(RelationalHyperlinkedModelSerializer):
+    class Meta:
+        model = models.SIPidentifier
+        fields = '__all__'
diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py
index bf250c5a51a..279d0ae7621 100644
--- a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py
+++ b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py
@@ -314,7 +314,7 @@ class SchedulingUnitBlueprintSerializer(RelationalHyperlinkedModelSerializer):
     class Meta:
         model = models.SchedulingUnitBlueprint
         fields = '__all__'
-        extra_fields = ['task_blueprints', 'duration', 'start_time', 'stop_time', 'status']
+        extra_fields = ['task_blueprints', 'duration', 'start_time', 'stop_time', 'status', 'observed_end_time']
 
 class SchedulingUnitBlueprintCopyToSchedulingUnitDraftSerializer(SchedulingUnitBlueprintSerializer):
     class Meta(SchedulingUnitDraftSerializer.Meta):
diff --git a/SAS/TMSS/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/src/tmss/tmssapp/subtasks.py
index 6502b5187e3..d200d964073 100644
--- a/SAS/TMSS/src/tmss/tmssapp/subtasks.py
+++ b/SAS/TMSS/src/tmss/tmssapp/subtasks.py
@@ -49,7 +49,8 @@ def create_subtasks_from_task_blueprint(task_blueprint: TaskBlueprint) -> [Subta
     generators_mapping = {'target observation': [create_observation_control_subtask_from_task_blueprint,
                                                  create_qafile_subtask_from_task_blueprint,
                                                  create_qaplots_subtask_from_task_blueprint],
-                          'preprocessing pipeline': [create_preprocessing_subtask_from_task_blueprint]}
+                          'preprocessing pipeline': [create_preprocessing_subtask_from_task_blueprint],
+                          'ingest': [create_ingest_subtask_from_task_blueprint]}
     generators_mapping['calibrator observation'] = generators_mapping['target observation']
 
     template_name = task_blueprint.specifications_template.name
@@ -450,6 +451,50 @@ def create_preprocessing_subtask_from_task_blueprint(task_blueprint: TaskBluepri
     return subtask
 
 
+def create_ingest_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> Subtask:
+    ''' Create a subtask for an ingest job.
+    This method implements "Instantiate subtasks" step from the "Specification Flow"
+    https://support.astron.nl/confluence/display/TMSS/Specification+Flow
+    '''
+    # step 0: check pre-requisites
+    check_prerequities_for_subtask_creation(task_blueprint)
+
+    # step 1: create subtask in defining state, with filled-in subtask_template
+    subtask_template = SubtaskTemplate.objects.get(name='ingest control')
+    default_subtask_specs = get_default_json_object_for_schema(subtask_template.schema)
+    subtask_specs = default_subtask_specs  # todo: translate specs from task to subtask once we have non-empty templates
+    cluster_name = task_blueprint.specifications_doc.get("storage_cluster", "CEP4")
+    subtask_data = {"start_time": None,
+                    "stop_time": None,
+                    "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value),
+                    "task_blueprint": task_blueprint,
+                    "specifications_template": subtask_template,
+                    "specifications_doc": subtask_specs,
+                    "priority": 1,
+                    "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value),
+                    "cluster": Cluster.objects.get(name=cluster_name)}
+    subtask = Subtask.objects.create(**subtask_data)
+
+    # step 2: create and link subtask input
+    for task_relation_blueprint in task_blueprint.produced_by.all():
+        producing_task_blueprint = task_relation_blueprint.producer
+
+        predecessor_subtasks = [st for st in producing_task_blueprint.subtasks.order_by('id').all()]
+        for predecessor_subtask in predecessor_subtasks:
+            for predecessor_subtask_output in predecessor_subtask.outputs.all():
+                SubtaskInput.objects.create(subtask=subtask,
+                                            producer=predecessor_subtask_output,
+                                            selection_doc=task_relation_blueprint.selection_doc,
+                                            selection_template=task_relation_blueprint.selection_template)
+
+    # step 3: set state to DEFINED
+    subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value)
+    subtask.save()
+
+    # done, now return the subtask, and allow the system to wait for the predecessors to be finished before we schedule this ingest
+    return subtask
+
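+# Usage sketch (illustrative): invoked via the generators_mapping above when a task blueprint
+# uses the 'ingest' task template, e.g.
+#   subtask = create_ingest_subtask_from_task_blueprint(ingest_task_blueprint)
+#   # where 'ingest_task_blueprint' is a hypothetical TaskBlueprint whose template is named 'ingest'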
+
 # ==== various schedule* methods to schedule a Subtasks (if possible) ====
 
 def schedule_subtask(subtask: Subtask) -> Subtask:
@@ -469,6 +514,9 @@ def schedule_subtask(subtask: Subtask) -> Subtask:
         if subtask.specifications_template.type.value == SubtaskType.Choices.QA_PLOTS.value:
             return schedule_qaplots_subtask(subtask)
 
+        if subtask.specifications_template.type.value == SubtaskType.Choices.COPY.value:
+            return schedule_copy_subtask(subtask)
+
         raise SubtaskSchedulingException("Cannot schedule subtask id=%d because there is no schedule-method known for this subtasktype=%s." %
                                          (subtask.pk, subtask.specifications_template.type.value))
     except Exception as e:
@@ -574,6 +622,7 @@ def schedule_qafile_subtask(qafile_subtask: Subtask):
                                                                 feedback_template=DataproductFeedbackTemplate.objects.get(name="empty"),
                                                                 sap=None  # todo: do we need to point to a SAP here? Of which dataproduct then?
                                                                 )
+        qafile_subtask_dataproduct.save()  # second save: the object now has an id, so its global SIP identifier gets attached
 
     # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it)
     qafile_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value)
@@ -626,6 +675,7 @@ def schedule_qaplots_subtask(qaplots_subtask: Subtask):
                                                              feedback_template=DataproductFeedbackTemplate.objects.get(name="empty"),
                                                              sap=None  # todo: do we need to point to a SAP here? Of which dataproduct then?
                                                              )
+    qaplots_subtask_dataproduct.save()
 
     # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it)
     qaplots_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value)
@@ -743,7 +793,6 @@ def schedule_observation_subtask(observation_subtask: Subtask):
             antennafields += [{"station": station, "field": field, "type": antennaset.split('_')[0]} for field in fields]
 
         sap = SAP.objects.create(specifications_doc={ "name": "%s_%s" % (observation_subtask.id, pointing['name']),
-                                                      "identifiers": {},  # todo: TMSS-324
                                                       "pointing": pointing['pointing'],
                                                       "time": {"start_time": observation_subtask.start_time.isoformat(),
                                                                "duration": (observation_subtask.stop_time - observation_subtask.start_time).total_seconds()},
@@ -753,9 +802,9 @@ def schedule_observation_subtask(observation_subtask: Subtask):
                                                       }
                                                     },
                                  specifications_template=SAPTemplate.objects.get(name="SAP"))
-
+        sap.save()
         for sb_nr in pointing['subbands']:
-            Dataproduct.objects.create(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr),
+            dp = Dataproduct.objects.create(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr),
                                        directory=directory,
                                        dataformat=Dataformat.objects.get(value="MeasurementSet"),
                                        datatype=Datatype.objects.get(value="visibilities"),  # todo: is this correct?
@@ -767,7 +816,7 @@ def schedule_observation_subtask(observation_subtask: Subtask):
                                        size=0 if sb_nr%10==0 else 1024*1024*1024*sb_nr,
                                        expected_size=1024*1024*1024*sb_nr,
                                        sap=sap)
-
+            dp.save()
     # step 4: resource assigner (if possible)
     _assign_resources(observation_subtask)
 
@@ -849,6 +898,7 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask):
                                                    feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
                                                    feedback_template=dataproduct_feedback_template,
                                                    sap=input_dp.sap)
+            output_dp.save()
             DataproductTransform.objects.create(input=input_dp, output=output_dp, identity=False)
             output_dps.append(output_dp)
 
@@ -863,6 +913,59 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask):
 
     return pipeline_subtask
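
The DataproductTransform created above records the input/output lineage of each pipeline dataproduct; a small sketch of querying it back (a hypothetical helper, assuming only the input/output fields used in this hunk):

    def input_dataproducts_for(output_dp: Dataproduct) -> list:
        # walk the transform back from a pipeline output to its input dataproducts
        return [transform.input for transform in DataproductTransform.objects.filter(output=output_dp)]
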
 
+def schedule_copy_subtask(copy_subtask: Subtask):
+    ''' Schedule the given copy_subtask
+    This method should typically be called upon the event of a predecessor (pipeline or observation) subtask being finished.
+    This method implements the "Scheduling subtasks" step from the "Specification Flow"
+    https://support.astron.nl/confluence/display/TMSS/Specification+Flow
+    '''
+    # step 0: check pre-requisites
+    check_prerequities_for_scheduling(copy_subtask)
+
+    if copy_subtask.specifications_template.type.value != SubtaskType.Choices.COPY.value:
+        raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s but type should be %s" % (copy_subtask.pk,
+                                                                                                          copy_subtask.specifications_template.type,
+                                                                                                          SubtaskType.Choices.COPY.value))
+
+    # step 1: set state to SCHEDULING
+    copy_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULING.value)
+    copy_subtask.save()
+
+    # step 1a: check start/stop times
+    # not very relevant for tmss/dynamic scheduling, but the resource assigner demands it.
+    if copy_subtask.start_time is None:
+        now = datetime.utcnow()
+        logger.info("copy id=%s has no starttime. assigned default: %s", copy_subtask.pk, formatDatetime(now))
+        copy_subtask.start_time = now
+
+    if copy_subtask.stop_time is None:
+        stop_time = copy_subtask.start_time + timedelta(hours=1)
+        logger.info("copy id=%s has no stop_time. assigned default: %s", copy_subtask.pk, formatDatetime(stop_time))
+        copy_subtask.stop_time = stop_time
+
+    # step 2: link input dataproducts
+    if copy_subtask.inputs.count() == 0:
+        raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s because it has no input(s)" % (copy_subtask.pk,
+                                                                                                               copy_subtask.specifications_template.type))
+
+    # iterate over all inputs
+    for copy_subtask_input in copy_subtask.inputs.all():
+
+        # select and set input dataproducts that meet the filter defined in selection_doc
+        dataproducts = [dataproduct for dataproduct in copy_subtask_input.producer.dataproducts.all()
+                        if specifications_doc_meets_selection_doc(dataproduct.specifications_doc, copy_subtask_input.selection_doc)]
+        copy_subtask_input.dataproducts.set(dataproducts)
+
+    # todo: I assume that there is no RA involvement here? If there is, what does a copy parset look like?
+    # step 3: resource assigner (if possible)
+    #_assign_resources(copy_subtask)
+
+    # step 4: set state to SCHEDULED (resulting in the copy service picking this subtask up and running it)
+    copy_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value)
+    copy_subtask.save()
+
+    return copy_subtask
+
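
To illustrate the selection step above: specifications_doc_meets_selection_doc compares a dataproduct's specifications against the input's selection_doc. A hypothetical pair of docs, invented for illustration in the style of SubtaskInputSelectionFilteringTest below:

    specifications = {'sap': ['target0'], 'is_relevant': True}   # dataproduct.specifications_doc
    selection = {'sap': ['target0']}                             # copy_subtask_input.selection_doc
    # under the matching semantics tested below, this dataproduct would be selected
    matches = specifications_doc_meets_selection_doc(specifications, selection)
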
 # === Misc ===
 
 def create_and_schedule_subtasks_from_task_blueprint(task_blueprint: TaskBlueprint) -> [Subtask]:
diff --git a/SAS/TMSS/src/tmss/tmssapp/views.py b/SAS/TMSS/src/tmss/tmssapp/views.py
index 58a389fd6e3..851a6251977 100644
--- a/SAS/TMSS/src/tmss/tmssapp/views.py
+++ b/SAS/TMSS/src/tmss/tmssapp/views.py
@@ -4,15 +4,17 @@ from django.http import HttpResponse, JsonResponse, Http404
 from django.shortcuts import get_object_or_404, render
 from lofar.sas.tmss.tmss.tmssapp import models
 from lofar.common.json_utils import get_default_json_object_for_schema
+from lofar.common.datetimeutils import formatDatetime
 from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset
 from drf_yasg.utils import swagger_auto_schema
+from drf_yasg.openapi import Parameter
 from rest_framework.permissions import AllowAny
 from rest_framework.decorators import authentication_classes, permission_classes
 from django.apps import apps
 
 from datetime import datetime
 import dateutil.parser
-from lofar.sas.tmss.tmss.tmssapp.conversions import local_sidereal_time_for_utc_and_station, local_sidereal_time_for_utc_and_longitude
+from lofar.sas.tmss.tmss.tmssapp.conversions import local_sidereal_time_for_utc_and_station, local_sidereal_time_for_utc_and_longitude, timestamps_and_stations_to_sun_rise_and_set
 
 def subtask_template_default_specification(request, subtask_template_pk:int):
     subtask_template = get_object_or_404(models.SubtaskTemplate, pk=subtask_template_pk)
@@ -33,7 +35,7 @@ def subtask_parset(request, subtask_pk:int):
 
 
 def index(request):
-    return render(request, os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), '../../frontend','tmss_webapp/build/index.html'))
+    return render(request, os.path.join(os.environ.get('LOFARROOT'), 'SAS/TMSS/frontend','tmss_webapp/build/index.html'))
     #return render(request, "../../../frontend/frontend_poc/build/index.html")
 
 
@@ -84,10 +86,24 @@ def get_stations_in_group(request, template_name:str, template_version:str, stat
                          'stations': stations})
 
 
+@permission_classes([AllowAny])
+@authentication_classes([AllowAny])
+@swagger_auto_schema(responses={200: 'An isoformat timestamp of the current UTC clock of the system'},
+                     operation_description="Get the current system time in UTC")
 def utc(request):
     return HttpResponse(datetime.utcnow().isoformat(), content_type='text/plain')
 
-
+@permission_classes([AllowAny])
+@authentication_classes([AllowAny])
+@swagger_auto_schema(responses={200: 'The LST time in hms format at the given UTC time and station or longitude'},
+                     operation_description="Get LST time for UTC time and station or longitude",
+                     manual_parameters=[Parameter(name='station', required=False, type='string', in_='query',
+                                                  description="A station names (defaults to CS002)"),
+                                        Parameter(name='timestamp', required=False, type='string', in_='query',
+                                                  description="A timestamp in isoformat (defaults to utcnow)"),
+                                        Parameter(name='longitude', required=False, type='float', in_='query',
+                                                  description="A longitude")
+                                        ])
 def lst(request):
     # Handling optional parameters via django paths in urls.py is a pain, we access them on the request directly instead.
     timestamp = request.GET.get('timestamp', None)
@@ -109,4 +125,33 @@ def lst(request):
         lst_lon = local_sidereal_time_for_utc_and_station(timestamp)
 
     # todo: do we want to return a dict, so users can make sure their parameters were parsed correctly instead?
-    return HttpResponse(str(lst_lon), content_type='text/plain')
\ No newline at end of file
+    return HttpResponse(str(lst_lon), content_type='text/plain')
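
A client-side sketch for the decorated endpoint above (host and URL prefix are assumptions; the parameter names come from the swagger decorator):

    import requests
    # LST at CS002 for a given UTC timestamp; the response is a plain-text hms string
    r = requests.get('http://localhost:8000/api/util/lst',
                     params={'station': 'CS002', 'timestamp': '2020-01-01T12:00:00'})
    print(r.text)
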
+
+
+@permission_classes([AllowAny])
+@authentication_classes([AllowAny])
+@swagger_auto_schema(responses={200: 'A JSON object with sunrise, sunset, day and night of the given stations at the given timestamps'},
+                     operation_description="Get sunrise, sunset, day and night for stations and timestamps",
+                     manual_parameters=[Parameter(name='stations', required=False, type='string', in_='query',
+                                                  description="comma-separated list of station names"),
+                                        Parameter(name='timestamps', required=False, type='string', in_='query',
+                                                  description="comma-separated list of isoformat timestamps")])
+def get_sun_rise_and_set(request):
+    """
+    returns sunrise and sunset at the given stations and timestamps, or today at LOFAR core if none specified.
+    example request: /api/util/sun_rise_and_set?stations=CS002,CS005&timestamps=2020-05-01,2020-09-09T11-11-00
+    """
+    timestamps = request.GET.get('timestamps', None)
+    stations = request.GET.get('stations', None)
+    if timestamps is None:
+        timestamps = [datetime.utcnow()]
+    else:
+        timestamps = timestamps.split(',')
+        timestamps = [dateutil.parser.parse(timestamp) for timestamp in timestamps]  # isot to datetime
+    if stations is None:
+        stations = ['CS002']
+    else:
+        stations = stations.split(',')
+
+    return JsonResponse(timestamps_and_stations_to_sun_rise_and_set(timestamps, stations))
+
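
A matching client-side sketch (hypothetical host; the response layout follows the assertions in t_conversions.py below):

    import requests
    r = requests.get('http://localhost:8000/api/util/sun_rise_and_set',
                     params={'stations': 'CS002,CS005', 'timestamps': '2020-05-01'})
    per_station = r.json()
    print(per_station['CS002']['sunrise'][0]['start'])  # isoformat start of the first sunrise window
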
diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
index 601321cf92d..2bc7b1814e5 100644
--- a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
@@ -16,9 +16,10 @@ from drf_yasg.utils import swagger_auto_schema
 from drf_yasg.inspectors import SwaggerAutoSchema
 
 from rest_framework.decorators import action
-from django.http import HttpResponse, JsonResponse
+from django.http import HttpResponse, JsonResponse, HttpResponseRedirect, HttpResponseNotFound
 from rest_framework.response import Response as RestResponse
 
+from lofar.common import isProductionEnvironment, isTestEnvironment
 from lofar.sas.tmss.tmss.tmssapp.viewsets.lofar_viewset import LOFARViewSet
 from lofar.sas.tmss.tmss.tmssapp import models
 from lofar.sas.tmss.tmss.tmssapp import serializers
@@ -209,6 +210,49 @@ class SubtaskViewSet(LOFARViewSet):
         return RestResponse(serializer.data)
 
 
+    @swagger_auto_schema(responses={302: 'A redirect url to the task log for this Subtask.',
+                                    403: 'forbidden'},
+                         operation_description="Get the task log for this Subtask.")
+    @action(methods=['get'], detail=True)
+    def task_log(self, request, pk=None):
+        """
+        Return a redirect to the pipeline log in case of a pipeline, or
+        to the COBALT error log in case of an observation.
+        """
+        subtask = get_object_or_404(models.Subtask, pk=pk)
+
+        # redirect to cobalt log served at proxy.lofar.eu
+        if subtask.specifications_template.type.value == models.SubtaskType.Choices.OBSERVATION.value:
+            url = "https://proxy.lofar.eu/inspect/%s/rtcp-%s.errors" % (subtask.id, subtask.id)
+            return HttpResponseRedirect(redirect_to=url)
+
+        # redirect to pipeline log served via webscheduler
+        if subtask.specifications_template.type.value == models.SubtaskType.Choices.PIPELINE.value:
+            # import here rather than at the top of the module to loosen the dependency on external packages, in this case the RADB RPC.
+            from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC
+            # Get RADBID, subtask must be at least 'scheduled' to exist in radb
+            try:
+                with RADBRPC.create(timeout=2) as radbrpc:
+                    radb_id = radbrpc.getTask(tmss_id=subtask.id)
+
+                    if radb_id is None:
+                        return HttpResponseNotFound(
+                            content='No RADB task found for subtask id=%s type="%s" status=%s. Cannot redirect to pipeline log.' % (
+                                subtask.id, subtask.specifications_template.type.value, subtask.state))
+
+                    WEBSCHEDULER_URL = "http://scu001.control.lofar:7412" if isProductionEnvironment() else \
+                                       "http://scu199.control.lofar:7412" if isTestEnvironment() else \
+                                       "http://localhost:7412"
+
+                    url = "%s/tasks/%s/log.html" % (WEBSCHEDULER_URL, radb_id)
+                    return HttpResponseRedirect(redirect_to=url)
+            except Exception as e:
+                return HttpResponseNotFound(content='No RADB task found for subtask id=%s type="%s". Cannot redirect to pipeline log.' % (subtask.id, subtask.specifications_template.type.value))
+
+        # unknown log
+        return HttpResponseNotFound(content='No log (url) available for subtask id=%s type="%s"' % (subtask.id, subtask.specifications_template.type.value) )
+
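
Callers can inspect the redirect target without following it, as the subtask tests below do. A brief sketch ('client' stands for a TMSS REST client as created in those tests; subtask_id is a placeholder):

    response = client.session.get(client.get_full_url_for_path('/subtask/%s/task_log' % subtask_id),
                                  allow_redirects=False)
    if response.is_redirect:
        print(response.headers['Location'])  # COBALT log on proxy.lofar.eu, or the webscheduler pipeline log
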
+
     @swagger_auto_schema(responses={200: 'The input dataproducts of this subtask.',
                                     403: 'forbidden'},
                          operation_description="Get the input dataproducts of this subtask.")
@@ -328,6 +372,12 @@ class SAPViewSet(LOFARViewSet):
         serializer = serializers.DataproductSerializer(sap.dataproducts, many=True, context={'request': request})
         return RestResponse(serializer.data)
 
+
 class SAPTemplateViewSet(AbstractTemplateViewSet):
     queryset = models.SAPTemplate.objects.all()
     serializer_class = serializers.SAPTemplateSerializer
+
+
+class SIPidentifierViewSet(LOFARViewSet):
+    queryset = models.SIPidentifier.objects.all()
+    serializer_class = serializers.SIPidentifierSerializer
\ No newline at end of file
diff --git a/SAS/TMSS/src/tmss/urls.py b/SAS/TMSS/src/tmss/urls.py
index b66e916301c..781e6af696a 100644
--- a/SAS/TMSS/src/tmss/urls.py
+++ b/SAS/TMSS/src/tmss/urls.py
@@ -65,6 +65,7 @@ urlpatterns = [
     path('schemas/<str:template>/<str:name>/<str:version>/', views.get_template_json_schema, name='get_template_json_schema'),
     path('station_groups/<str:template_name>/<str:template_version>/<str:station_group>', views.get_stations_in_group, name='get_stations_in_group'), #TODO: how to make trailing slash optional?
     path('station_groups/<str:template_name>/<str:template_version>/<str:station_group>/', views.get_stations_in_group, name='get_stations_in_group'),
+    path('util/sun_rise_and_set', views.get_sun_rise_and_set, name='get_sun_rise_and_set'),
     path(r'util/utc', views.utc, name="system-utc"),
     path(r'util/lst', views.lst, name="conversion-lst"),
 ]
@@ -199,6 +200,7 @@ router.register(r'dataproduct_hash', viewsets.DataproductHashViewSet)
 router.register(r'subtask_state_log', viewsets.SubtaskStateLogViewSet)
 router.register(r'user', viewsets.UserViewSet)
 router.register(r'sap', viewsets.SAPViewSet)
+router.register(r'sip_identifier', viewsets.SIPidentifierViewSet)
 
 # ---
 
diff --git a/SAS/TMSS/test/t_adapter.py b/SAS/TMSS/test/t_adapter.py
index 379f3a37a4b..4b11c380b6f 100755
--- a/SAS/TMSS/test/t_adapter.py
+++ b/SAS/TMSS/test/t_adapter.py
@@ -46,6 +46,7 @@ from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduc
 from lofar.sas.tmss.tmss.tmssapp.adapters.feedback import generate_dataproduct_feedback_from_subtask_feedback_and_set_finished
 from lofar.lta.sip import constants
 
+
 class ParsetAdapterTest(unittest.TestCase):
     def test_01(self):
         subtask_template = models.SubtaskTemplate.objects.get(name='observation control')
@@ -60,8 +61,17 @@ class ParsetAdapterTest(unittest.TestCase):
         parset = convert_to_parset(subtask)
 
 
-class SIPdapterTest(unittest.TestCase):
+class SIPadapterTest(unittest.TestCase):
     def test_simple_sip_generate_from_dataproduct(self):
+        """
+        Test if SIP is generated successfully when subtask, dataproduct and SAP objects are created
+        Check some value in the SIP (xml) output
+        Check that the SIP identifiers are in SIP (xml) output
+        Check the number of SIP identifiers are increased with 3
+        Check that all SIP identifiers are unique
+        """
+        nbr_expected_sip_identifiers_before_setup = len(models.SIPidentifier.objects.all())
+
         subtask_template = models.SubtaskTemplate.objects.get(name='observation control')
         specifications_doc = get_default_json_object_for_schema(subtask_template.schema)
         specifications_doc['stations']['filter'] = "HBA_210_250"
@@ -70,12 +80,22 @@ class SIPdapterTest(unittest.TestCase):
         feedback_doc = {'percentage_written': 100, 'frequency': {'subbands': [156], 'central_frequencies': [33593750.0], 'channel_width': 6103.515625, 'channels_per_subband': 32}, 'time': {'start_time': '2013-02-16T17:00:00', 'duration': 5.02732992172, 'sample_width': 2.00278016}, 'antennas': {'set': 'HBA_DUAL', 'fields': [{'type': 'HBA', 'field': 'HBA0', 'station': 'CS001'}, {'type': 'HBA', 'field': 'HBA1', 'station': 'CS001'}]}, 'target': {'pointing': {'angle1': 0, 'angle2': 0, 'angle3': 0, 'direction_type': 'J2000'}}, 'samples': {'polarisations': ['XX', 'XY', 'YX', 'YY'], 'type': 'float', 'bits': 32, 'writer': 'standard', 'writer_version': '2.2.0', 'complex': True}, '$schema': 'http://127.0.0.1:8001/api/schemas/dataproductfeedbacktemplate/feedback/1#'}
         for dp in specifications_doc['stations']['digital_pointings']:
             dp['subbands'] = list(range(8))
+        # Create SubTask(output)
         subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
         subtask:models.Subtask = models.Subtask.objects.create(**subtask_data)
+        subtask.save()
         subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask))
-        dataproduct:models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(feedback_doc=feedback_doc, producer=subtask_output))
+        # Create Dataproduct
+        dataproduct: models.Dataproduct = models.Dataproduct.objects.create(
+            **Dataproduct_test_data(feedback_doc=feedback_doc, producer=subtask_output))
+        dataproduct.save()
+        # Create SAP
+        sap_template = models.SAPTemplate.objects.get(name="SAP")
+        specifications_doc = get_default_json_object_for_schema(sap_template.schema)
+        sap = models.SAP.objects.create(specifications_doc=specifications_doc, specifications_template=sap_template)
+        dataproduct.sap = sap
+        sap.save()
 
-        # make sure we can create a valid SIP
         sip = generate_sip_for_dataproduct(dataproduct)
 
         # double-check that SIP contains values from feedback and specifications docs
@@ -83,6 +103,15 @@ class SIPdapterTest(unittest.TestCase):
         self.assertIn(str(feedback_doc['time']['start_time']), sip.get_prettyxml())
         self.assertIn(constants.FILTERSELECTIONTYPE_210_250_MHZ, sip.get_prettyxml()) # specifications_doc: "HBA_210_250"
 
+        self.assertIn(str(subtask.global_identifier.unique_identifier), sip.get_prettyxml())
+        self.assertIn(str(dataproduct.global_identifier.unique_identifier), sip.get_prettyxml())
+        self.assertIn(str(sap.global_identifier.unique_identifier), sip.get_prettyxml())
+
+        all_sip_ids = list(models.SIPidentifier.objects.all())
+        self.assertEqual(nbr_expected_sip_identifiers_before_setup+3, len(all_sip_ids))
+        for sip_id in all_sip_ids:
+            self.assertEqual(models.SIPidentifier.objects.filter(unique_identifier=sip_id.unique_identifier).count(), 1)
+
 
 class FeedbackAdapterTest(unittest.TestCase):
 
diff --git a/SAS/TMSS/test/t_conversions.py b/SAS/TMSS/test/t_conversions.py
index ccd4025f6c4..14231c4f091 100755
--- a/SAS/TMSS/test/t_conversions.py
+++ b/SAS/TMSS/test/t_conversions.py
@@ -26,6 +26,7 @@ import logging
 import requests
 import dateutil.parser
 import astropy.coordinates
+import json
 
 logger = logging.getLogger(__name__)
 logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
@@ -127,6 +128,43 @@ class UtilREST(unittest.TestCase):
         lon_str2 = r2.content.decode('utf8')
         self.assertNotEqual(lon_str1, lon_str2)
 
+    def test_util_sun_rise_and_set_returns_json_structure_with_defaults(self):
+        r = requests.get(BASE_URL + '/util/sun_rise_and_set', auth=AUTH)
+        self.assertEqual(r.status_code, 200)
+        r_dict = json.loads(r.content.decode('utf-8'))
+
+        # assert defaults to core and today
+        self.assertIn('CS002', r_dict.keys())
+        sunrise_start = dateutil.parser.parse(r_dict['CS002']['sunrise'][0]['start'])
+        self.assertEqual(datetime.date.today(), sunrise_start.date())
+
+    def test_util_sun_rise_and_set_considers_stations(self):
+        stations = ['CS005', 'RS305', 'DE609']
+        r = requests.get(BASE_URL + '/util/sun_rise_and_set?stations=%s' % ','.join(stations), auth=AUTH)
+        self.assertEqual(r.status_code, 200)
+        r_dict = json.loads(r.content.decode('utf-8'))
+
+        # assert station is included in response and timestamps differ
+        sunset_start_last = None
+        for station in stations:
+            self.assertIn(station, r_dict.keys())
+            sunset_start = dateutil.parser.parse(r_dict[station]['sunset'][0]['start'])
+            if sunset_start_last:
+                self.assertNotEqual(sunset_start, sunset_start_last)
+            sunset_start_last = sunset_start
+
+    def test_util_sun_rise_and_set_considers_timestamps(self):
+        timestamps = ['2020-01-01', '2020-02-22T16-00-00', '2020-3-11', '2020-01-01']
+        r = requests.get(BASE_URL + '/util/sun_rise_and_set?timestamps=%s' % ','.join(timestamps), auth=AUTH)
+        self.assertEqual(r.status_code, 200)
+        r_dict = json.loads(r.content.decode('utf-8'))
+
+        # assert all requested timestamps are included in response (sunrise on same day)
+        for i in range(len(timestamps)):
+            expected_date = dateutil.parser.parse(timestamps[i]).date()
+            response_date = dateutil.parser.parse(r_dict['CS002']['sunrise'][i]['start']).date()
+            self.assertEqual(expected_date, response_date)
+
 
 if __name__ == "__main__":
     os.environ['TZ'] = 'UTC'
diff --git a/SAS/TMSS/test/t_scheduling.py b/SAS/TMSS/test/t_scheduling.py
index 5c078a44c37..ec9de7be402 100755
--- a/SAS/TMSS/test/t_scheduling.py
+++ b/SAS/TMSS/test/t_scheduling.py
@@ -189,6 +189,49 @@ class SchedulingTest(unittest.TestCase):
             self.assertEqual('scheduled', subtask['state_value'])
             self.assertEqual('scheduled', ra_test_env.radb.getTask(tmss_id=pipe_subtask['id'])['status'])
 
+    def test_schedule_ingest_subtask(self):
+        with tmss_test_env.create_tmss_client() as client:
+            cluster_url = client.get_path_as_json_object('/cluster/1')['url']
+
+            # setup: first create an observation, so the ingest can have input.
+            obs_subtask_template = client.get_subtask_template("observation control")
+            obs_spec = get_default_json_object_for_schema(obs_subtask_template['schema'])
+            obs_spec['stations']['digital_pointings'][0]['subbands'] = [0]
+
+            obs_subtask_data = test_data_creator.Subtask(specifications_template_url=obs_subtask_template['url'],
+                                                         specifications_doc=obs_spec,
+                                                         cluster_url=cluster_url,
+                                                         task_blueprint_url=test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/'))
+            obs_subtask = test_data_creator.post_data_and_get_response_as_json_object(obs_subtask_data, '/subtask/')
+            obs_subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url']), '/subtask_output/')
+            test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(filename="L%s_SB000.MS"%obs_subtask['id'],
+                                                    subtask_output_url=obs_subtask_output_url), '/dataproduct/')
+
+            # now create the ingest...
+            ingest_subtask_template = client.get_subtask_template("ingest control")
+            ingest_spec = get_default_json_object_for_schema(ingest_subtask_template['schema'])
+
+            ingest_subtask_data = test_data_creator.Subtask(specifications_template_url=ingest_subtask_template['url'],
+                                                          specifications_doc=ingest_spec,
+                                                          task_blueprint_url=obs_subtask['task_blueprint'],
+                                                          cluster_url=cluster_url)
+            ingest_subtask = test_data_creator.post_data_and_get_response_as_json_object(ingest_subtask_data, '/subtask/')
+
+            # ...and connect it to the observation
+            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskInput(subtask_url=ingest_subtask['url'], subtask_output_url=obs_subtask_output_url), '/subtask_input/')
+            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=ingest_subtask['url']), '/subtask_output/')
+
+            for predecessor in client.get_subtask_predecessors(ingest_subtask['id']):
+                client.set_subtask_status(predecessor['id'], 'finished')
+            client.set_subtask_status(ingest_subtask['id'], 'defined')
+
+            # trigger
+            subtask = client.schedule_subtask(ingest_subtask['id'])
+
+            # assert
+            self.assertEqual('scheduled', subtask['state_value'])
+            self.assertEqual(models.Subtask.objects.get(id=ingest_subtask['id']).inputs.first().dataproducts.count(), 1)
+
 
     def test_schedule_schedulingunit_enough_resources_available(self):
         '''similar test as test_schedule_pipeline_subtask_with_enough_resources_available, but now created from a scheduling_unit'''
@@ -357,27 +400,37 @@ class SAPTest(unittest.TestCase):
         self.assertEqual(dp2_in.sap, dp2_out.sap)
 
 
-
-class CreationFromSchedulingUnitDraft(unittest.TestCase):
+class TestWithUC1Specifications(unittest.TestCase):
     """
-    From scheduling_unit_draft test:
-     create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> models.SchedulingUnitBlueprint:
-    This requires Resource Assigner testenvironment being alive
+    The setup will create a Scheduling Unit Draft with the UC1 strategy template.
+    It uses the function 'create_task_blueprints_and_subtasks_from_scheduling_unit_draft', which is thereby
+    implicitly tested.
+    Create Task Blueprints and Subtasks:
+        Observation Task 'Calibration 1'
+            SubTask Observation Control
+            SubTask QA File
+            SubTask QA Plots
+        Pipeline Task 'Pipeline 1'
+            SubTask Pipeline Control
+        Observation Task 'Target Observation'
+            SubTask Observation Control
+            SubTask QA File
+            SubTask QA Plots
+        Pipeline Task 'Pipeline target1'
+            SubTask Pipeline Control
+        Pipeline Task 'Pipeline target2'
+            SubTask Pipeline Control
+        Observation Task 'Calibration 2'
+            SubTask Observation Control
+            SubTask QA File
+            SubTask QA Plots
+        Pipeline Task 'Pipeline 2'
+            SubTask Pipeline Control
+
+    Note that this test requires the Resource Assigner test environment to be running
     """
-
-    def test_create_task_blueprints_and_subtasks_from_scheduling_unit_draft_with_UC1_requirements(self):
-        """
-        Create Scheduling Unit Draft with requirements_doc (read from file)
-        Create Task Blueprints and Subtasks
-        Check if tasks (7) are created:
-           Calibration 1     : 1 Observation and 1 Pipeline task
-           Target Observation: 1 Observation and 2 Pipeline tasks
-           Calibration 2     : 1 Observation and 1 Pipeline task
-        Check if subtasks (13) are created:
-           Every Observation Task: 3 subtasks (1 control, 2 QA)
-           Every Pipeline Task:    1 subtasks (1 control)
-           makes 3x3 + 4x1 = 13
-        """
+    @classmethod
+    def setUpClass(cls) -> None:
         strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines")
 
         scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(
@@ -393,20 +446,81 @@ class CreationFromSchedulingUnitDraft(unittest.TestCase):
         create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
 
         scheduling_unit_draft.refresh_from_db()
-        task_drafts = scheduling_unit_draft.task_drafts.all()
-        self.assertEqual(7, len(task_drafts))
-
-        scheduling_unit_blueprints = scheduling_unit_draft.scheduling_unit_blueprints.all()
-        self.assertEqual(1, len(scheduling_unit_blueprints))
+        cls.task_drafts = scheduling_unit_draft.task_drafts.all()
+        cls.scheduling_unit_blueprints = scheduling_unit_draft.scheduling_unit_blueprints.all()
+        cls.scheduling_unit_blueprint = cls.scheduling_unit_blueprints[0]
+        cls.task_blueprints = cls.scheduling_unit_blueprint.task_blueprints.all()
 
-        scheduling_unit_blueprint = scheduling_unit_blueprints[0]
-        task_blueprints = scheduling_unit_blueprint.task_blueprints.all()
-        self.assertEqual(7, len(task_blueprints))
+    def test_create_task_blueprints_and_subtasks_from_scheduling_unit_draft(self):
+        """
+        Create Task Blueprints and Subtasks (class setup)
+        Check if tasks (7) are created:
+           Calibration 1     : 1 Observation and 1 Pipeline task
+           Target Observation: 1 Observation and 2 Pipeline tasks
+           Calibration 2     : 1 Observation and 1 Pipeline task
+        Check if subtasks (13) are created:
+           Every Observation Task: 3 subtasks (1 control, 2 QA)
+           Every Pipeline Task:    1 subtask (1 control)
+           makes 3x3 + 4x1 = 13
+        """
+        self.assertEqual(7, len(self.task_drafts))
+        self.assertEqual(1, len(self.scheduling_unit_blueprints))
+        self.assertEqual(7, len(self.task_blueprints))
         total_subtasks = 0
-        for task_blueprint in task_blueprints:
+        for task_blueprint in self.task_blueprints:
             total_subtasks += task_blueprint.subtasks.count()
         self.assertEqual(13, total_subtasks)
 
+    def test_relative_times(self):
+        """
+        Create Task Blueprints and Subtasks (class setup)
+        Set start and stop times of taskBlueprint
+        Set the subtask start/stop time equal to its taskBlueprint
+        Set all subtask states to 'finished'
+        Check the observed_end_time of the SchedulingUnitBlueprint
+        Check the relative_start/stop_time of the SchedulingUnitBlueprint
+           start = 0
+           stop = 8 hours (target) + 2 x 10 min (calibrators) + 2 x 1 min (offset between observations) = 8h22m
+        """
+        DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
+        test_timeschedule = {
+            # name of taskBlueprint       start_time             stop_time
+            "Calibrator Observation 1": ["2020-11-01 08:00:00", "2020-11-01 08:10:00"],
+            "Pipeline 1":               ["2020-11-01 08:20:00", "2020-11-01 08:22:00"],
+            "Target Observation":       ["2020-11-01 08:30:00", "2020-11-01 18:00:00"],
+            "Pipeline target1":         ["2020-11-01 18:30:00", "2020-11-01 18:35:00"],
+            "Pipeline target2":         ["2020-11-01 18:40:00", "2020-11-01 18:45:00"],
+            "Calibrator Observation 2": ["2020-11-01 19:00:00", "2020-11-01 19:20:00"],
+            "Pipeline 2":               ["2020-11-01 19:30:00", "2020-11-01 19:40:00"]
+        }
+        # Set the time schedule
+        for name, times in test_timeschedule.items():
+            task_blueprint = list(filter(lambda x: x.name == name, self.task_blueprints))[0]
+            for subtask in task_blueprint.subtasks.all():
+                subtask.state = models.SubtaskState.objects.get(value="finished")
+                subtask.stop_time = datetime.strptime(times[1], DATETIME_FORMAT)
+                subtask.start_time = datetime.strptime(times[0], DATETIME_FORMAT)
+                subtask.save()
+
+        # Check times
+        self.assertEqual("2020-11-01 19:20:00", self.scheduling_unit_blueprint.observed_end_time.strftime("%Y-%m-%d %H:%M:%S"))
+        self.assertEqual(timedelta(0), self.scheduling_unit_blueprint.relative_start_time)
+        self.assertEqual(timedelta(hours=8, minutes=22), self.scheduling_unit_blueprint.relative_stop_time)
+
+        for task_blueprint in self.task_blueprints:
+            if task_blueprint.name == "Calibrator Observation 1":
+                self.assertEqual(timedelta(0), task_blueprint.relative_start_time)
+                self.assertEqual(timedelta(minutes=10), task_blueprint.relative_stop_time)
+            elif task_blueprint.name == "Target Observation":
+                self.assertEqual(timedelta(minutes=11), task_blueprint.relative_start_time)
+                self.assertEqual(timedelta(hours=8, minutes=11), task_blueprint.relative_stop_time)
+            elif task_blueprint.name == "Calibrator Observation 2":
+                self.assertEqual(timedelta(hours=8, minutes=12), task_blueprint.relative_start_time)
+                self.assertEqual(timedelta(hours=8, minutes=22), task_blueprint.relative_stop_time)
+            else:
+                self.assertEqual(timedelta(0), task_blueprint.relative_start_time)
+                self.assertEqual(timedelta(0), task_blueprint.relative_stop_time)
+
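
The 8h22m expectation in the docstring above can be reproduced from its own terms (a worked check, not part of the test):

    from datetime import timedelta
    # 10 min calibrator 1 + 1 min offset + 8 h target + 1 min offset + 10 min calibrator 2
    expected_stop = (timedelta(minutes=10) + timedelta(minutes=1) + timedelta(hours=8)
                     + timedelta(minutes=1) + timedelta(minutes=10))
    assert expected_stop == timedelta(hours=8, minutes=22)
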
 
 if __name__ == "__main__":
     os.environ['TZ'] = 'UTC'
diff --git a/SAS/TMSS/test/t_subtasks.py b/SAS/TMSS/test/t_subtasks.py
index cc14049e65a..b9021a86f94 100755
--- a/SAS/TMSS/test/t_subtasks.py
+++ b/SAS/TMSS/test/t_subtasks.py
@@ -34,12 +34,7 @@ tmss_test_env.populate_schemas()
 
 
 from lofar.sas.tmss.test.tmss_test_data_django_models import *
-
-# import and setup rest test data creator
-from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
-
 from lofar.sas.tmss.tmss.tmssapp import models
-
 from lofar.sas.tmss.tmss.tmssapp.subtasks import *
 
 
@@ -283,6 +278,25 @@ class SubTasksCreationFromTaskBluePrintCalibrator(unittest.TestCase):
         self.assertEqual(2.222, subtask.specifications_doc['stations']['analog_pointing']['angle2'])
 
 
+class SubTaskCreationFromTaskBlueprintIngest(unittest.TestCase):
+
+    def test_create_subtask_from_task_blueprint_ingest(self):
+        """
+        Test that an ingest task blueprint can be turned into an ingest control subtask
+        """
+
+        # setup
+        ingest_task_blueprint = create_task_blueprint_object_for_testing(task_template_name="ingest")
+
+        # trigger
+        subtask = create_ingest_subtask_from_task_blueprint(ingest_task_blueprint)
+
+        # assert
+        self.assertEqual("defined", str(subtask.state))
+        self.assertEqual("ingest control", str(subtask.specifications_template.name))
+        self.assertEqual("copy", str(subtask.specifications_template.type))
+
+
 class SubtaskInputSelectionFilteringTest(unittest.TestCase):
 
     def setUp(self) -> None:
@@ -328,6 +342,31 @@ class SubtaskInputSelectionFilteringTest(unittest.TestCase):
         selection = {'sap': ['target0'], 'is_relevant': True}
         self.assertFalse(specifications_doc_meets_selection_doc(specs, selection))
 
+    def test_links_to_log_files(self):
+        """
+        Test redirect urls to subtask logfiles.
+        """
+
+        # the link to log files is a 'view' on the subtask, and NOT part of the subtask model.
+        # the link is served as an action on the REST API, redirecting to externally served log files.
+        # check/test the redirect urls.
+        with tmss_test_env.create_tmss_client() as client:
+            # observation
+            subtask_observation = create_subtask_object_for_testing("observation", "defined")
+            response = client.session.get(url=client.get_full_url_for_path('/subtask/%s/task_log' % (subtask_observation.id,)), allow_redirects=False)
+            self.assertTrue(response.is_redirect)
+            self.assertIn("proxy.lofar.eu", response.headers['Location'])
+            self.assertIn("rtcp-%s.errors" % subtask_observation.id, response.headers['Location'])
+
+            # pipeline
+            subtask_pipeline = create_subtask_object_for_testing("pipeline", "defined")
+            response = client.session.get(url=client.get_full_url_for_path('/subtask/%s/task_log' % (subtask_pipeline.id,)), allow_redirects=False)
+            self.assertEqual(404, response.status_code) # no log (yet) for unscheduled pipeline
+
+            # other (qa_plots)
+            subtask_qa_plots = create_subtask_object_for_testing("qa_plots", "defined")
+            response = client.session.get(url=client.get_full_url_for_path('/subtask/%s/task_log' % (subtask_qa_plots.id,)), allow_redirects=False)
+            self.assertEqual(404, response.status_code) # no log for other subtasktypes
+
 
 class SettingTest(unittest.TestCase):
 
@@ -340,22 +379,6 @@ class SettingTest(unittest.TestCase):
         with self.assertRaises(SubtaskSchedulingException):
             schedule_observation_subtask(obs_st)
 
-    def test_links_to_log_files(self):
-        """
-        Test if the links to logging of a subtasks is correct:
-        For an observation the subtaskid is in the logging url
-        For a pipeline the radbid of the subtaskid is in the link, BUT because RA is not started is should
-        return "not available"
-        All other subtask types (like qa) should have an empty string (no logging)
-        """
-        subtask_pipeline = create_subtask_object_for_testing("pipeline", "defined")
-        subtask_qa_plots = create_subtask_object_for_testing("qa_plots", "defined")
-        subtask_observation = create_subtask_object_for_testing("observation", "defined")
-
-        self.assertIn("proxy.lofar.eu", subtask_observation.log_url)
-        self.assertIn("rtcp-%s.errors" % subtask_observation.id, subtask_observation.log_url)
-        self.assertIn("not available", subtask_pipeline.log_url)
-        self.assertEqual("", subtask_qa_plots.log_url)
 
 
 if __name__ == "__main__":
diff --git a/SAS/TMSS/test/test_utils.py b/SAS/TMSS/test/test_utils.py
index 2edeaae66b2..7d559bb9800 100644
--- a/SAS/TMSS/test/test_utils.py
+++ b/SAS/TMSS/test/test_utils.py
@@ -142,8 +142,6 @@ class TMSSDjangoServerInstance():
         self.port = port
         self.public_host = public_host or host
         self._server_process = None
-        self._exchange = exchange
-        self._broker = broker
 
     @property
     def host_address(self):
@@ -190,8 +188,6 @@ class TMSSDjangoServerInstance():
         # set these here, run django setup, and start the server
         os.environ["TMSS_LDAPCREDENTIALS"] = self.ldap_dbcreds_id
         os.environ["TMSS_DBCREDENTIALS"] = self.database_dbcreds_id
-        os.environ["TMSS_EXCHANGE"] = self._exchange
-        os.environ["TMSS_BROKER"] = self._broker
         os.environ["DJANGO_SETTINGS_MODULE"] = "lofar.sas.tmss.tmss.settings"
         django.setup()
 
@@ -273,7 +269,10 @@ class TMSSTestEnvironment:
     '''Create and run a test django TMSS server against a newly created test database and a test ldap server (and cleanup automagically)'''
     def __init__(self, host: str='127.0.0.1', preferred_django_port: int=8000, public_host: str=None,
                  exchange: str=os.environ.get("TMSS_EXCHANGE", DEFAULT_BUSNAME), broker: str=os.environ.get("TMSS_BROKER", DEFAULT_BROKER),
-                 populate_schemas:bool=False, populate_test_data:bool=False):
+                 populate_schemas:bool=False, populate_test_data:bool=False,
+                 start_postgres_listener: bool=True):
+        self._exchange = exchange
+        self._broker = broker
         self._populate_schemas = populate_schemas
         self._populate_test_data = populate_test_data
         self.ldap_server = TestLDAPServer(user='test', password='test')
@@ -282,11 +281,12 @@ class TMSSTestEnvironment:
                                                       ldap_dbcreds_id=self.ldap_server.dbcreds_id,
                                                       host=host,
                                                       port=find_free_port(preferred_django_port),
-                                                      public_host=public_host,
-                                                      exchange=exchange,
-                                                      broker=broker)
+                                                      public_host=public_host)
         self.client_credentials = TemporaryCredentials(user=self.ldap_server.dbcreds.user,
                                                        password=self.ldap_server.dbcreds.password)
+        self._start_postgres_listener = start_postgres_listener
+        self.postgres_listener = None
+
         # Check for correct Django version, should be at least 3.0
         if django.VERSION[0] < 3:
             print("\nWARNING: YOU ARE USING DJANGO VERSION '%s', WHICH WILL NOT SUPPORT ALL FEATURES IN TMSS!\n" %
@@ -318,13 +318,24 @@ class TMSSTestEnvironment:
         user.is_superuser = True
         user.save()
 
+        if self._start_postgres_listener:
+            # start the TMSSPGListener, so the changes in the database are posted as EventMessages on the bus
+            from lofar.sas.tmss.services.tmss_postgres_listener import TMSSPGListener
+            self.postgres_listener = TMSSPGListener(exchange=self._exchange, broker=self._broker, dbcreds=self.database.dbcreds)
+            self.postgres_listener.start()
+
         if self._populate_schemas or self._populate_test_data:
             self.populate_schemas()
 
         if self._populate_test_data:
             self.populate_test_data()
 
+
     def stop(self):
+        if self.postgres_listener is not None:
+            self.postgres_listener.stop()
+            self.postgres_listener = None
+
         self.django_server.stop()
         self.ldap_server.stop()
         self.database.destroy()
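
A usage sketch for the new start_postgres_listener flag (mirroring the start/stop pattern in tmss_test_environment_unittest_setup.py below; populate_schemas is optional):

    from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment

    # run a test environment without the postgres listener, i.e. without database-change events on the bus
    env = TMSSTestEnvironment(populate_schemas=True, start_postgres_listener=False)
    try:
        env.start()
        # ... exercise env.django_server here ...
    finally:
        env.stop()
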
diff --git a/SAS/TMSS/test/tmss_test_environment_unittest_setup.py b/SAS/TMSS/test/tmss_test_environment_unittest_setup.py
index 04b98824548..45d148eb375 100644
--- a/SAS/TMSS/test/tmss_test_environment_unittest_setup.py
+++ b/SAS/TMSS/test/tmss_test_environment_unittest_setup.py
@@ -32,7 +32,8 @@ from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment
 tmss_test_env = TMSSTestEnvironment()
 try:
     tmss_test_env.start()
-except:
+except Exception as e:
+    logger.exception(str(e))
     tmss_test_env.stop()
     exit(1)
 
-- 
GitLab