diff --git a/SAS/TMSS/backend/services/CMakeLists.txt b/SAS/TMSS/backend/services/CMakeLists.txt
index 7fd95ed1ef96a8e6b19abda7ab1135ca9f748e1d..ee220bcd39d6774fb61053b7b7a58d956fefd6b8 100644
--- a/SAS/TMSS/backend/services/CMakeLists.txt
+++ b/SAS/TMSS/backend/services/CMakeLists.txt
@@ -7,3 +7,4 @@ lofar_add_package(TMSSWebSocketService websocket)
 lofar_add_package(TMSSWorkflowService workflow_service)
 lofar_add_package(TMSSLTAAdapter tmss_lta_adapter)
 lofar_add_package(TMSSSlackWebhookService slackwebhook)
+lofar_add_package(TMSSPreCalculationsService precalculations_service)
diff --git a/SAS/TMSS/backend/services/precalculations_service/CMakeLists.txt b/SAS/TMSS/backend/services/precalculations_service/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..1c52667c78f120c0b6340e71f67b45febdee919c
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/CMakeLists.txt
@@ -0,0 +1,10 @@
+lofar_package(TMSSPreCalculationsService 0.1)
+
+lofar_find_package(PythonInterp 3.4 REQUIRED)
+
+IF(NOT SKIP_TMSS_BUILD)
+    add_subdirectory(lib)
+    add_subdirectory(test)
+ENDIF(NOT SKIP_TMSS_BUILD)
+
+add_subdirectory(bin)
\ No newline at end of file
diff --git a/SAS/TMSS/backend/services/precalculations_service/bin/CMakeLists.txt b/SAS/TMSS/backend/services/precalculations_service/bin/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..80db184789d8880d2bbb2c7f3792208d49512a69
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/bin/CMakeLists.txt
@@ -0,0 +1,4 @@
+lofar_add_bin_scripts(tmss_precalculations_service)
+
+# supervisord config files
+lofar_add_sysconf_files(tmss_precalculations_service.ini DESTINATION supervisord.d)
diff --git a/SAS/TMSS/backend/services/precalculations_service/bin/tmss_precalculations_service b/SAS/TMSS/backend/services/precalculations_service/bin/tmss_precalculations_service
new file mode 100755
index 0000000000000000000000000000000000000000..2bcfee690f143ad791012bf25e6f5b7aff5223db
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/bin/tmss_precalculations_service
@@ -0,0 +1,24 @@
+#!/usr/bin/python3
+
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+
+from lofar.sas.tmss.services.precalculations_service import main
+
+if __name__ == "__main__":
+    main()
diff --git a/SAS/TMSS/backend/services/precalculations_service/bin/tmss_precalculations_service.ini b/SAS/TMSS/backend/services/precalculations_service/bin/tmss_precalculations_service.ini
new file mode 100644
index 0000000000000000000000000000000000000000..924ce072404b15d8f96bf70b102844af673fbcdc
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/bin/tmss_precalculations_service.ini
@@ -0,0 +1,9 @@
+[program:tmss_precalculations_service]
+command=docker run --rm --net=host -u 7149:7149 -v /opt/lofar/var/log:/opt/lofar/var/log -v /tmp/tmp -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro -v /localhome/lofarsys:/localhome/lofarsys -e HOME=/localhome/lofarsys -e USER=lofarsys nexus.cep4.control.lofar:18080/tmss_django:latest /bin/bash -c 'source ~/.lofar/.lofar_env;source $LOFARROOT/lofarinit.sh;exec tmss_precalculations_service'
+user=lofarsys
+stopsignal=INT ; KeyboardInterrupt
+stopasgroup=true ; bash does not propagate signals
+stdout_logfile=%(program_name)s.log
+redirect_stderr=true
+stderr_logfile=NONE
+stdout_logfile_maxbytes=0
diff --git a/SAS/TMSS/backend/services/precalculations_service/lib/CMakeLists.txt b/SAS/TMSS/backend/services/precalculations_service/lib/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..31845d5064326785365cd0932d3090b5e4fd137f
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/lib/CMakeLists.txt
@@ -0,0 +1,10 @@
+lofar_find_package(PythonInterp 3.4 REQUIRED)
+include(PythonInstall)
+
+set(_py_files
+    precalculations_service.py
+    )
+
+python_install(${_py_files}
+    DESTINATION lofar/sas/tmss/services)
+
diff --git a/SAS/TMSS/backend/services/precalculations_service/lib/precalculations_service.py b/SAS/TMSS/backend/services/precalculations_service/lib/precalculations_service.py
new file mode 100644
index 0000000000000000000000000000000000000000..87442a866d5c2d7c496de393fa6a00e8c56c2a1f
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/lib/precalculations_service.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+import logging
+logger = logging.getLogger(__name__)
+
+import os
+import threading
+import datetime
+from datetime import timedelta
+import time
+from lofar.common.util import waitForInterrupt
+
+# Default values of parameters
+INTERVAL_TIME_SECONDS = 24 * 60 * 60  # 24 hours (calculate one day ahead, every day)
+NBR_DAYS_CALCULATE_AHEAD = 365    # 1 year
+NBR_DAYS_BEFORE_TODAY = 1
+
+
+def execute_populate_sunrise_and_sunset_for_all_stations(nbr_days_calculate_ahead, start_date):
+    """
+    Populate the calculations (sunrise/sunset) for the given number of days, starting at the given date
+    :param nbr_days_calculate_ahead: Number of days to calculate
+    :param start_date: The date to start calculating from
+    :return next_date: The next date to process
+    """
+    logger.info("execute_populate_sunrise_and_sunset_for_all_stations %s for %d days" % (start_date, nbr_days_calculate_ahead))
+    # Import here otherwise you get
+    # "django.core.exceptions.ImproperlyConfigured: Requested setting INSTALLED_APPS, but settings are not configured. You must either define the environment variable DJANGO_SETTINGS_MODULE or call settings.configure() before accessing settings."
+    from lofar.sas.tmss.tmss.tmssapp.populate import populate_sunrise_and_sunset_for_all_stations
+
+    populate_sunrise_and_sunset_for_all_stations(nbr_days=nbr_days_calculate_ahead, start_date=start_date)
+    # Return the next_date to process
+    next_date = start_date + datetime.timedelta(days=nbr_days_calculate_ahead)
+    return next_date
+
+
+class TMSSPreCalculationsServiceJob(threading.Thread):
+    def __init__(self, interval, execute, *args, **kwargs):
+        threading.Thread.__init__(self)
+        self.daemon = False
+        self.stopped = threading.Event()
+        self.interval = interval
+        self.execute = execute
+        self.args = args
+        self.kwargs = kwargs
+
+    def __enter__(self):
+        pass
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
+
+    def stop(self):
+        self.stopped.set()
+        self.join()
+
+    def run(self):
+        start_time = time.time()
+        next_date = self.execute(*self.args, **self.kwargs)
+        # determine the remaining wait time so the runs keep an exact heartbeat on the interval
+        remaining_wait_time_in_sec = self.interval.total_seconds() - (time.time() - start_time)
+        while not self.stopped.wait(remaining_wait_time_in_sec):
+            self.kwargs["nbr_days_calculate_ahead"] = 1
+            self.kwargs["start_date"] = next_date
+            start_time = time.time()
+            next_date = self.execute(*self.args, **self.kwargs)
+            remaining_wait_time_in_sec = self.interval.total_seconds() - (time.time() - start_time)
+
+
+def create_service_job_for_sunrise_and_sunset_calculations(interval_time, nbr_days_calculate_ahead, nbr_days_before_today):
+    start_date = datetime.date.today() - datetime.timedelta(days=nbr_days_before_today)
+    return TMSSPreCalculationsServiceJob(interval=timedelta(seconds=interval_time),
+                                         execute=execute_populate_sunrise_and_sunset_for_all_stations,
+                                         nbr_days_calculate_ahead=nbr_days_calculate_ahead, start_date=start_date)
+
+
+def main():
+    # make sure we run in UTC timezone
+    os.environ['TZ'] = 'UTC'
+
+    from optparse import OptionParser, OptionGroup
+    from lofar.common import dbcredentials
+
+    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+
+    # Check the invocation arguments
+    parser = OptionParser('%prog [options]',
+                          description='run the tmss_precalculations_service which pre-calculates and stores sunrise/sunset data for the stations.')
+
+    parser.add_option('-i', '--interval_time', dest='interval_time', type='int', default=INTERVAL_TIME_SECONDS,
+                      help='The time in seconds between consecutive calculations, default: %default')
+    parser.add_option('-d', '--nbr_days_calculate_ahead', dest='nbr_days_calculate_ahead', type='int', default=NBR_DAYS_CALCULATE_AHEAD,
+                      help='The number of days to calculate the sunset/sunrise ahead, default: %default')
+    parser.add_option('-b', '--nbr_days_before_today', dest='nbr_days_before_today', type='int', default=NBR_DAYS_BEFORE_TODAY,
+                      help='The number of days to calculate the sunset/sunrise before today (so yesterday=1), default: %default')
+
+    group = OptionGroup(parser, 'Django options')
+    parser.add_option_group(group)
+    group.add_option('-C', '--credentials', dest='dbcredentials', type='string', default=os.environ.get('TMSS_DBCREDENTIALS', 'TMSS'), help='django dbcredentials name, default: %default')
+
+    (options, args) = parser.parse_args()
+    from lofar.sas.tmss.tmss import setup_and_check_tmss_django_database_connection_and_exit_on_error
+    setup_and_check_tmss_django_database_connection_and_exit_on_error(options.dbcredentials)
+
+    job = create_service_job_for_sunrise_and_sunset_calculations(options.interval_time, options.nbr_days_calculate_ahead, options.nbr_days_before_today)
+    job.start()
+    waitForInterrupt()
+    job.stop()
+
+
+if __name__ == '__main__':
+    main()
+
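The `TMSSPreCalculationsServiceJob` above runs its work once immediately and then keeps an exact heartbeat by subtracting the work's duration from the interval before waiting; `Event.wait` doubles as both the sleep and the stop signal. A minimal standalone sketch of that pattern (illustrative names, not part of this patch):

```python
import threading
import time

class IntervalJob(threading.Thread):
    """Run `work` once immediately, then every `interval_seconds` (a sketch)."""
    def __init__(self, interval_seconds: float, work):
        super().__init__()
        self._stopped = threading.Event()
        self._interval = interval_seconds
        self._work = work

    def run(self):
        while True:
            started = time.time()
            self._work()
            # subtract the work's duration so each run starts exactly one
            # interval after the previous one started; if the work takes
            # longer than the interval, wait() gets a non-positive timeout
            # and returns immediately, so the next run starts right away
            remaining = self._interval - (time.time() - started)
            if self._stopped.wait(remaining):  # True as soon as stop() is called
                return

    def stop(self):
        self._stopped.set()
        self.join()

if __name__ == '__main__':
    job = IntervalJob(2.0, lambda: print("tick"))
    job.start()
    time.sleep(5)   # observe a few ticks
    job.stop()
```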
diff --git a/SAS/TMSS/backend/services/precalculations_service/test/CMakeLists.txt b/SAS/TMSS/backend/services/precalculations_service/test/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..a3f0060bad5c5f9adfbbceb9c07b138a08675378
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/test/CMakeLists.txt
@@ -0,0 +1,10 @@
+# $Id: CMakeLists.txt 32679 2015-10-26 09:31:56Z schaap $
+
+if(BUILD_TESTING)
+    include(LofarCTest)
+
+    lofar_add_test(t_precalculations_service)
+
+    set_tests_properties(t_precalculations_service PROPERTIES TIMEOUT 300)
+
+endif()
diff --git a/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.py b/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.py
new file mode 100755
index 0000000000000000000000000000000000000000..f230691e85c92f1b64742cf731a6b1058bd7f188
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.py
@@ -0,0 +1,229 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+import unittest
+import time
+import datetime
+import logging
+logger = logging.getLogger('lofar.' + __name__)
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+
+
+from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment
+
+from lofar.sas.tmss.services.precalculations_service import create_service_job_for_sunrise_and_sunset_calculations
+from lofar.common.test_utils import integration_test
+
+
+@integration_test
+class TestPreCalculationService(unittest.TestCase):
+    """
+    Tests for the TMSSPreCalculationsServiceJob
+    It checks the number of StationTimeline items created, based on the parameters the service is started with.
+    It does not check the content of the sunrise/sunset data in the StationTimeline model itself.
+    Note that a one-day calculation takes about 6 seconds on a local development environment. The assumption
+    was that a calculation takes about 6 sec, BUT the build environment turned out to be even slower: 11 to
+    14 seconds! Some timing parameters in this testcase were adjusted accordingly, but may not be robust enough.
+    On the other hand, if the build system gets even slower than this, there should really be doubts about
+    the build system.
+    """
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        """
+        Populate schema to be able to retrieve all stations
+        """
+        cls.tmss_test_env = TMSSTestEnvironment(populate_schemas=True)
+        cls.tmss_test_env.start()
+        cls.test_data_creator = cls.tmss_test_env.create_test_data_creator()
+
+    @classmethod
+    def tearDownClass(cls) -> None:
+        cls.tmss_test_env.stop()
+
+    def setUp(self) -> None:
+        """
+        Start every testcase with a 'clean' StationTimeline model
+        """
+        from lofar.sas.tmss.tmss.tmssapp.models.calculations import StationTimeline
+        StationTimeline.objects.all().delete()
+
+    def test_all_stations_calculated_for_one_day(self):
+        """
+        Test that creating, starting and then stopping the (pre)calculation service results in 'one day'
+        of StationTimeline data for all stations
+        Note that a one-day calculation takes about 6 seconds
+        """
+        # Import here otherwise you get
+        # "django.core.exceptions.ImproperlyConfigured: Requested setting INSTALLED_APPS, but settings are not configured. You must either define the environment variable DJANGO_SETTINGS_MODULE or call settings.configure() before accessing settings."
+        from lofar.sas.tmss.tmss.tmssapp.conversions import get_all_stations
+        from lofar.sas.tmss.tmss.tmssapp.models.calculations import StationTimeline
+
+        nbr_stations = len(get_all_stations())
+        # Initially there should be no data
+        self.assertEqual(len(StationTimeline.objects.all()), 0)
+        # Now we are going to create and start the calculation service with a wait time of 60 sec,
+        # nbr days to calculate ahead is 1 and nbr days before today 1 ->  so only 'yesterday' should be created
+        job = create_service_job_for_sunrise_and_sunset_calculations(60, 1, 1)
+        job.start()
+        job.stop()
+        # Check what has been created
+        st_objects = StationTimeline.objects.all()
+        self.assertEqual(len(st_objects), nbr_stations)
+        # let's check with the timestamp of today; that should be zero
+        st_objects = StationTimeline.objects.filter(timestamp=datetime.date.today())
+        self.assertEqual(len(st_objects), 0)
+        # let's check with a timestamp in the future; that should be zero
+        st_objects = StationTimeline.objects.filter(timestamp__gt=datetime.date.today())
+        self.assertEqual(len(st_objects), 0)
+        # let's check with the timestamp of yesterday; that should equal the number of all stations
+        st_objects = StationTimeline.objects.filter(timestamp=datetime.date.today()-datetime.timedelta(days=1))
+        self.assertEqual(len(st_objects), nbr_stations)
+
+    def test_all_stations_calculated_for_multiple_days_with_one_trigger(self):
+        """
+        Test that creating, starting and then stopping the (pre)calculation service results in 'multiple days'
+        of StationTimeline data for all stations
+        Note that a four-day calculation takes about 30 seconds
+        """
+        # Import here otherwise you get
+        # "django.core.exceptions.ImproperlyConfigured: Requested setting INSTALLED_APPS, but settings are not configured. You must either define the environment variable DJANGO_SETTINGS_MODULE or call settings.configure() before accessing settings."
+        from lofar.sas.tmss.tmss.tmssapp.conversions import get_all_stations
+        from lofar.sas.tmss.tmss.tmssapp.models.calculations import StationTimeline
+
+        nbr_stations = len(get_all_stations())
+        # Initially there should be no data
+        self.assertEqual(len(StationTimeline.objects.all()), 0)
+        # Now we are going to create and start the calculation service with an interval of 120 sec,
+        # nbr days to calculate ahead is 4 and nbr days before today 2 ->  so 'day before yesterday', 'yesterday',
+        # 'today' and 'tomorrow' should be created
+        job = create_service_job_for_sunrise_and_sunset_calculations(120, 4, 2)
+        job.start()
+        job.stop()
+        # Check what has been created
+        st_objects = StationTimeline.objects.all()
+        self.assertEqual(len(st_objects), 4*nbr_stations)
+        # let's check with the timestamp of today; that should equal the number of all stations
+        st_objects = StationTimeline.objects.filter(timestamp=datetime.date.today())
+        self.assertEqual(len(st_objects), nbr_stations)
+        # let's check with a timestamp in the future; that should equal the number of all stations
+        st_objects = StationTimeline.objects.filter(timestamp__gt=datetime.date.today())
+        self.assertEqual(len(st_objects), nbr_stations)
+        # let's check with a timestamp in the past; that should equal 2 times the number of all stations
+        st_objects = StationTimeline.objects.filter(timestamp__lt=datetime.date.today())
+        self.assertEqual(len(st_objects), 2*nbr_stations)
+
+    def test_all_stations_calculated_after_interval(self):
+        """
+        Test that creating and starting, waiting for a period (25 seconds), and then stopping the (pre)calculation service results
+        in 'multiple days' of StationTimeline data for all stations.
+        It tests the scheduler with an interval of 20 seconds, so two days should be calculated
+        """
+        # Import here otherwise you get
+        # "django.core.exceptions.ImproperlyConfigured: Requested setting INSTALLED_APPS, but settings are not configured. You must either define the environment variable DJANGO_SETTINGS_MODULE or call settings.configure() before accessing settings."
+        from lofar.sas.tmss.tmss.tmssapp.conversions import get_all_stations
+        from lofar.sas.tmss.tmss.tmssapp.models.calculations import StationTimeline
+
+        nbr_stations = len(get_all_stations())
+        # Initially there should be no data
+        self.assertEqual(len(StationTimeline.objects.all()), 0)
+        # Now we are going to create and start the calculation service with an interval of 20 sec (smaller will not make sense),
+        # nbr days to calculate ahead is 1 and nbr days before today 0 ->  so it starts with 'today' and after 20 seconds
+        # 'tomorrow', etc.
+        job = create_service_job_for_sunrise_and_sunset_calculations(20, 1, 0)
+        job.start()
+        time.sleep(25)
+        job.stop()
+        # Check what has been created; with an interval of 20 seconds we should have two days
+        st_objects = StationTimeline.objects.all()
+        self.assertEqual(len(st_objects), 2*nbr_stations)
+        # let's check with the timestamp of today; that should equal the number of all stations
+        st_objects = StationTimeline.objects.filter(timestamp=datetime.date.today())
+        self.assertEqual(len(st_objects), nbr_stations)
+        # let's check with a timestamp in the future; that should equal the number of all stations
+        st_objects = StationTimeline.objects.filter(timestamp__gt=datetime.date.today())
+        self.assertEqual(len(st_objects), nbr_stations)
+        # let's check with a timestamp in the past; that should be zero
+        st_objects = StationTimeline.objects.filter(timestamp__lt=datetime.date.today())
+        self.assertEqual(len(st_objects), 0)
+
+    def test_all_stations_calculated_for_when_interval_time_is_too_small(self):
+        """
+        Check that an interval time smaller than the calculation time does not lead to an exception
+        Test that creating and starting, waiting for a period (20 seconds), and then stopping the (pre)calculation service results
+        in 'multiple days' of StationTimeline data for all stations.
+        It tests the scheduler with an interval of 2 seconds, which is smaller than the ~6 seconds a calculation takes
+        Stopping after 20 seconds should result in at least 2 days being calculated
+        """
+        # Import here otherwise you get
+        # "django.core.exceptions.ImproperlyConfigured: Requested setting INSTALLED_APPS, but settings are not configured. You must either define the environment variable DJANGO_SETTINGS_MODULE or call settings.configure() before accessing settings."
+        from lofar.sas.tmss.tmss.tmssapp.conversions import get_all_stations
+        from lofar.sas.tmss.tmss.tmssapp.models.calculations import StationTimeline
+
+        nbr_stations = len(get_all_stations())
+        # Initially there should be no data
+        self.assertEqual(len(StationTimeline.objects.all()), 0)
+        # Now we are going to create and start the calculation service with an interval of 2 sec
+        # nbr days to calculate ahead is 1 and nbr days before today 0 ->  so it starts with 'today' and after ~6 seconds
+        # 'tomorrow', etc.
+        job = create_service_job_for_sunrise_and_sunset_calculations(2, 1, 0)
+        job.start()
+        time.sleep(20)
+        job.stop()
+        # Check what has been created; with an interval of 2 seconds we should have at least two days
+        st_objects = StationTimeline.objects.all()
+        self.assertGreaterEqual(len(st_objects), 2 * nbr_stations)
+        # let's check with the timestamp of today; that should equal the number of all stations
+        st_objects = StationTimeline.objects.filter(timestamp=datetime.date.today())
+        self.assertEqual(len(st_objects), nbr_stations)
+
+    @unittest.skip("TODO: fix blinking test due to incorrect synchronization issues.")
+    def test_all_stations_calculated_with_two_jobs_started(self):
+        """
+        Test that starting two jobs of the (pre)calculation service results in no Exception and that no
+        duplicate data is stored (covered by the constraints in the model)
+        It tests the scheduler with an interval of 20 seconds, to make sure one interval after the start has passed
+        """
+        # Import here otherwise you get
+        # "django.core.exceptions.ImproperlyConfigured: Requested setting INSTALLED_APPS, but settings are not configured. You must either define the environment variable DJANGO_SETTINGS_MODULE or call settings.configure() before accessing settings."
+        from lofar.sas.tmss.tmss.tmssapp.conversions import get_all_stations
+        from lofar.sas.tmss.tmss.tmssapp.models.calculations import StationTimeline
+
+        nbr_stations = len(get_all_stations())
+        # Initially there should be no data
+        self.assertEqual(len(StationTimeline.objects.all()), 0)
+        # Now we are going to create and start the calculation service with an interval of 20 sec
+        # nbr days to calculate ahead is 1 and nbr days before today 0 ->  so it starts with 'today' and after ~20 seconds
+        # 'tomorrow', etc.
+        job = create_service_job_for_sunrise_and_sunset_calculations(20, 1, 0)
+        job2 = create_service_job_for_sunrise_and_sunset_calculations(20, 1, 0)
+
+        job.start()
+        job2.start()
+        time.sleep(22)
+        job.stop()
+        job2.stop()
+        # Check what has been created; it should only be today and tomorrow
+        st_objects = StationTimeline.objects.all()
+        self.assertGreaterEqual(len(st_objects), 2 * nbr_stations)
+
+
+if __name__ == '__main__':
+    # run the unit tests
+    unittest.main()
\ No newline at end of file
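The run counts asserted in these tests follow from simple arithmetic: one run starts at t=0, and each cycle lasts max(interval, calculation time), because a calculation that overruns the interval makes the next wait time non-positive. A rough model, assuming the ~6 s calculation time quoted in the docstrings (the helper is illustrative, not part of the test suite):

```python
import math

def expected_runs(sleep_s: float, interval_s: float, calc_s: float = 6.0) -> int:
    """Rough model of how many calculations complete before job.stop()."""
    # the first run starts at t=0; subsequent runs start every max(interval, calc)
    return max(1, math.ceil(sleep_s / max(interval_s, calc_s)))

assert expected_runs(25, 20) == 2   # test_all_stations_calculated_after_interval
assert expected_runs(20, 2) >= 2    # ..._when_interval_time_is_too_small
```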
diff --git a/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.run b/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.run
new file mode 100755
index 0000000000000000000000000000000000000000..187c3bf1e7ba9d481b31f00104a57b7904d56c15
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.run
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+# Run the unit test
+source python-coverage.sh
+python_coverage_test "*tmss*" t_precalculations_service.py
+
diff --git a/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.sh b/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.sh
new file mode 100755
index 0000000000000000000000000000000000000000..cfa3c84d44a360c48d4e92ba2de791a0c0755362
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+./runctest.sh t_precalculations_service
diff --git a/SAS/TMSS/backend/services/scheduling/lib/constraints/__init__.py b/SAS/TMSS/backend/services/scheduling/lib/constraints/__init__.py
index d8e10bc63e80383aed0eff6e64ff3c7b880921f0..b8831d7759b9433108322e26254abd5b5586f317 100644
--- a/SAS/TMSS/backend/services/scheduling/lib/constraints/__init__.py
+++ b/SAS/TMSS/backend/services/scheduling/lib/constraints/__init__.py
@@ -86,7 +86,7 @@ def filter_scheduling_units_using_constraints(scheduling_units: [models.Scheduli
             # For example, the user can choose a different template,
             # or submit a feature request to implement constraint solvers for this new template.
             logger.warning(e)
-            for subtask in models.Subtask.independent_subtasks().filter(task_blueprint__scheduling_unit_blueprint_id=scheduling_unit.id).all():
+            for subtask in models.Subtask.independent_subtasks().filter(task_blueprints__scheduling_unit_blueprint_id=scheduling_unit.id).all():
                 subtask.status = models.SubtaskState.objects.get(value=models.SubtaskState.Choices.ERROR.value)
                 subtask.save()
 
@@ -151,7 +151,7 @@ def sort_scheduling_units_scored_by_constraints(scheduling_units: [models.Schedu
             # For example, the user can choose a different template,
             # or submit a feature request to implement constraint solvers for this new template.
             logger.warning(e)
-            for subtask in models.Subtask.independent_subtasks().filter(task_blueprint__scheduling_unit_blueprint_id=scheduling_unit.id).all():
+            for subtask in models.Subtask.independent_subtasks().filter(task_blueprints__scheduling_unit_blueprint_id=scheduling_unit.id).all():
                 subtask.status = models.SubtaskState.objects.get(value=models.SubtaskState.Choices.ERROR.value)
                 subtask.save()
 
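The `task_blueprint` to `task_blueprints` renames throughout this patch follow from the Subtask-TaskBlueprint relation changing from a ForeignKey to a ManyToManyField: the filter keyword tracks the new field name, and forward access becomes a related manager instead of a single attribute. A sketch of the difference (assumes a configured Django/TMSS environment; the surrounding code is illustrative):

```python
# Before (ForeignKey): exactly one task blueprint per subtask
#   blueprint = subtask.task_blueprint
#   models.Subtask.objects.filter(task_blueprint__scheduling_unit_blueprint_id=su_id)

# After (ManyToManyField): a subtask can belong to several task blueprints
for blueprint in subtask.task_blueprints.all():
    print(blueprint.scheduling_unit_blueprint.id)

# filters traverse the relation under its new, plural name
subtasks = models.Subtask.objects.filter(
    task_blueprints__scheduling_unit_blueprint_id=scheduling_unit.id)
```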
diff --git a/SAS/TMSS/backend/services/scheduling/lib/dynamic_scheduling.py b/SAS/TMSS/backend/services/scheduling/lib/dynamic_scheduling.py
index d1e77384b1a55546f10c5dd86b8628dd45719c8b..c8ff0c4c2a7a0517243aa4ef9444db39161c9f51 100644
--- a/SAS/TMSS/backend/services/scheduling/lib/dynamic_scheduling.py
+++ b/SAS/TMSS/backend/services/scheduling/lib/dynamic_scheduling.py
@@ -262,7 +262,7 @@ class TMSSDynamicSchedulingMessageHandler(TMSSEventMessageHandler):
         self._do_schedule_event.set()
     
     def onSettingUpdated(self, name: str, value: bool):
-        if name == models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value and value:
+        if name == models.SystemSettingFlag.Choices.DYNAMIC_SCHEDULING_ENABLED.value and value:
             logger.info("%s was set to %s: triggering update of dynamic schedule...", name, value)
             self._do_schedule_event.set()
 
@@ -272,10 +272,10 @@ class TMSSDynamicSchedulingMessageHandler(TMSSEventMessageHandler):
             if self._do_schedule_event.wait(timeout=10):
                 self._do_schedule_event.clear()
                 try:
-                    if models.Setting.objects.get(name=models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value).value:
+                    if models.Setting.objects.get(name=models.SystemSettingFlag.Choices.DYNAMIC_SCHEDULING_ENABLED.value).value:
                         do_dynamic_schedule()
                     else:
-                        logger.warning("Skipping update of dynamic schedule because the setting %s=%s", models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value, models.Setting.objects.get(name=models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value).value)
+                        logger.warning("Skipping update of dynamic schedule because the setting %s=%s", models.SystemSettingFlag.Choices.DYNAMIC_SCHEDULING_ENABLED.value, models.Setting.objects.get(name=models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value).value)
                 except Exception as e:
                     logger.exception(str(e))
                     # just continue processing events. better luck next time...
@@ -296,7 +296,7 @@ def create_dynamic_scheduling_service(exchange: str=DEFAULT_BUSNAME, broker: str
 def get_dynamically_schedulable_scheduling_units() -> [models.SchedulingUnitBlueprint]:
     '''get a list of all dynamically schedulable scheduling_units'''
     defined_independend_subtasks = models.Subtask.independent_subtasks().filter(state__value='defined')
-    defined_independend_subtask_ids = defined_independend_subtasks.values('task_blueprint__scheduling_unit_blueprint_id').distinct().all()
+    defined_independend_subtask_ids = defined_independend_subtasks.values('task_blueprints__scheduling_unit_blueprint_id').distinct().all()
     scheduling_units = models.SchedulingUnitBlueprint.objects.filter(id__in=defined_independend_subtask_ids) \
                                                              .filter(draft__scheduling_constraints_template__isnull=False) \
                                                              .select_related('draft', 'draft__scheduling_constraints_template').all()
@@ -310,7 +310,7 @@ def get_scheduled_scheduling_units(lower:datetime=None, upper:datetime=None) ->
         scheduled_subtasks = scheduled_subtasks.filter(stop_time__gte=lower)
     if upper is not None:
         scheduled_subtasks = scheduled_subtasks.filter(start_time__lte=upper)
-    return list(models.SchedulingUnitBlueprint.objects.filter(id__in=scheduled_subtasks.values('task_blueprint__scheduling_unit_blueprint_id').distinct()).all())
+    return list(models.SchedulingUnitBlueprint.objects.filter(id__in=scheduled_subtasks.values('task_blueprints__scheduling_unit_blueprint_id').distinct()).all())
 
 
 def unschededule_blocking_scheduled_units_if_needed_and_possible(candidate: ScoredSchedulingUnit) -> bool:
diff --git a/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py b/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py
index 8f44e71ccb5b1d9a2a4a5b9d671abfb0ec0e8865..3ac9f0476dfece6dd41a722e5c35049fe1e5fcb5 100755
--- a/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py
+++ b/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py
@@ -236,11 +236,11 @@ class TestDynamicScheduling(TestCase):  # Note: we use django.test.TestCase inst
 
         # check the scheduled subtask
         upcoming_scheduled_subtasks = models.Subtask.objects.filter(state__value='scheduled',
-                                                                    task_blueprint__scheduling_unit_blueprint__in=(scheduling_unit_blueprint_low,
+                                                                    task_blueprints__scheduling_unit_blueprint__in=(scheduling_unit_blueprint_low,
                                                                                                                    scheduling_unit_blueprint_medium,
                                                                                                                    scheduling_unit_blueprint_high)).all()
         self.assertEqual(1, upcoming_scheduled_subtasks.count())
-        self.assertEqual(scheduling_unit_blueprint_high.id, upcoming_scheduled_subtasks[0].task_blueprint.scheduling_unit_blueprint.id)
+        self.assertEqual(scheduling_unit_blueprint_high.id, upcoming_scheduled_subtasks[0].task_blueprints.first().scheduling_unit_blueprint.id)  # all task blueprints share the same SU, so it does not matter which one we check
 
         # check scheduling_unit_blueprint_low starts after the scheduled scheduling_unit_blueprint_high
         self.assertGreater(scheduling_unit_blueprint_low.start_time, scheduling_unit_blueprint_medium.start_time)
diff --git a/SAS/TMSS/backend/services/scheduling/test/t_subtask_scheduling_service.py b/SAS/TMSS/backend/services/scheduling/test/t_subtask_scheduling_service.py
index 57d3ca6f86bbc6ab3b9e5d5a7de7c051e75e2650..a8b86bb1cd4b7249a8aa89ef9c4ab96c5a386452 100755
--- a/SAS/TMSS/backend/services/scheduling/test/t_subtask_scheduling_service.py
+++ b/SAS/TMSS/backend/services/scheduling/test/t_subtask_scheduling_service.py
@@ -106,7 +106,7 @@ class TestSubtaskSchedulingService(unittest.TestCase):
 
             # create two subtasks
             subtask1 = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url), '/subtask/')
-            subtask2 = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url, task_blueprint_url=subtask1['task_blueprint']), '/subtask/')
+            subtask2 = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url, task_blueprint_urls=subtask1['task_blueprints']), '/subtask/')
 
             # connect them
             output_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskOutput(subtask1['url']), '/subtask_output/')
diff --git a/SAS/TMSS/backend/services/tmss_postgres_listener/lib/tmss_postgres_listener.py b/SAS/TMSS/backend/services/tmss_postgres_listener/lib/tmss_postgres_listener.py
index 31a3ce09cba1f51d98d3015b1c27159863b307aa..2d2584dc517318c13e15f600103a04d42ca9ac75 100644
--- a/SAS/TMSS/backend/services/tmss_postgres_listener/lib/tmss_postgres_listener.py
+++ b/SAS/TMSS/backend/services/tmss_postgres_listener/lib/tmss_postgres_listener.py
@@ -195,28 +195,29 @@ class TMSSPGListener(PostgresListener):
         # send both object.updated and status change events
 
         # check if task status is new or changed... If so, send event.
-        task_id = subtask.task_blueprint.id
-        task_status = subtask.task_blueprint.status
-        if task_id not in self._task_status_cache or self._task_status_cache[task_id][1] != task_status:
-            # update cache for this task
-            self._task_status_cache[task_id] = (datetime.utcnow(), task_status)
-
-            # send event(s)
-            self.onTaskBlueprintUpdated( {'id': task_id})
-            self._sendNotification(TMSS_TASKBLUEPRINT_STATUS_EVENT_PREFIX+'.'+task_status.capitalize(),
-                                   {'id': task_id, 'status': task_status})
-
-        # check if scheduling_unit status is new or changed... If so, send event.
-        scheduling_unit_id = subtask.task_blueprint.scheduling_unit_blueprint.id
-        scheduling_unit_status = subtask.task_blueprint.scheduling_unit_blueprint.status
-        if scheduling_unit_id not in self._scheduling_unit_status_cache or self._scheduling_unit_status_cache[scheduling_unit_id][1] != scheduling_unit_status:
-            # update cache for this task
-            self._scheduling_unit_status_cache[scheduling_unit_id] = (datetime.utcnow(), scheduling_unit_status)
-
-            # send event(s)
-            self.onSchedulingUnitBlueprintUpdated( {'id': scheduling_unit_id})
-            self._sendNotification(TMSS_SCHEDULINGUNITBLUEPRINT_STATUS_EVENT_PREFIX+'.'+scheduling_unit_status.capitalize(),
-                                   {'id': scheduling_unit_id, 'status': scheduling_unit_status})
+        for task_blueprint in subtask.task_blueprints.all():
+            task_id = task_blueprint.id
+            task_status = task_blueprint.status
+            if task_id not in self._task_status_cache or self._task_status_cache[task_id][1] != task_status:
+                # update cache for this task
+                self._task_status_cache[task_id] = (datetime.utcnow(), task_status)
+
+                # send event(s)
+                self.onTaskBlueprintUpdated( {'id': task_id})
+                self._sendNotification(TMSS_TASKBLUEPRINT_STATUS_EVENT_PREFIX+'.'+task_status.capitalize(),
+                                       {'id': task_id, 'status': task_status})
+
+            # check if scheduling_unit status is new or changed... If so, send event.
+            scheduling_unit_id = task_blueprint.scheduling_unit_blueprint.id
+            scheduling_unit_status = task_blueprint.scheduling_unit_blueprint.status
+            if scheduling_unit_id not in self._scheduling_unit_status_cache or self._scheduling_unit_status_cache[scheduling_unit_id][1] != scheduling_unit_status:
+                # update cache for this scheduling unit
+                self._scheduling_unit_status_cache[scheduling_unit_id] = (datetime.utcnow(), scheduling_unit_status)
+
+                # send event(s)
+                self.onSchedulingUnitBlueprintUpdated( {'id': scheduling_unit_id})
+                self._sendNotification(TMSS_SCHEDULINGUNITBLUEPRINT_STATUS_EVENT_PREFIX+'.'+scheduling_unit_status.capitalize(),
+                                       {'id': scheduling_unit_id, 'status': scheduling_unit_status})
 
         try:
             # wipe old entries from cache.
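The loop above fans a single subtask change out to one event per related task blueprint, and the per-id caches suppress repeat notifications for unchanged statuses. The dedupe pattern in isolation (illustrative names, not the listener's actual API):

```python
from datetime import datetime

_status_cache = {}  # object id -> (last update time, last seen status)

def notify_if_changed(obj_id: int, status: str, send) -> None:
    """Call `send` only when the status for `obj_id` is new or has changed."""
    cached = _status_cache.get(obj_id)
    if cached is None or cached[1] != status:
        _status_cache[obj_id] = (datetime.utcnow(), status)
        send(obj_id, status)

notify_if_changed(1, 'scheduled', print)  # sends (1, 'scheduled')
notify_if_changed(1, 'scheduled', print)  # suppressed: status unchanged
notify_if_changed(1, 'finished', print)   # sends (1, 'finished')
```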
diff --git a/SAS/TMSS/backend/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.py b/SAS/TMSS/backend/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.py
index 37fbe82b303bd9f2a3e8246c7f98daf29273e33d..5037782678f81bc525984af256babd3b35cb24df 100755
--- a/SAS/TMSS/backend/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.py
+++ b/SAS/TMSS/backend/services/tmss_postgres_listener/test/t_tmss_postgres_listener_service.py
@@ -135,7 +135,7 @@ class TestSubtaskSchedulingService(unittest.TestCase):
 
 
             # create a SubTask
-            subtask = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.Subtask(task_blueprint_url=task_blueprint['url']), '/subtask/')
+            subtask = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.Subtask(task_blueprint_urls=[task_blueprint['url']]), '/subtask/')
 
             # sync and check
             with service.lock:
diff --git a/SAS/TMSS/backend/services/websocket/test/t_websocket_service.py b/SAS/TMSS/backend/services/websocket/test/t_websocket_service.py
index f3f8388cb9b361665964ba3660f926b2653bbfc0..37eeddf21e5fabb2b3008e3a3efb272674ddbfa3 100755
--- a/SAS/TMSS/backend/services/websocket/test/t_websocket_service.py
+++ b/SAS/TMSS/backend/services/websocket/test/t_websocket_service.py
@@ -152,7 +152,7 @@ class TestSubtaskSchedulingService(unittest.TestCase):
 
             # Test subtask create
             subtask = self.test_data_creator.post_data_and_get_response_as_json_object(
-                self.test_data_creator.Subtask(task_blueprint_url=task_blueprint['url']), '/subtask/')
+                self.test_data_creator.Subtask(task_blueprint_urls=[task_blueprint['url']]), '/subtask/')
             test_object(subtask, self.ObjTypes.SUBTASK, self.ObjActions.CREATE)
 
             # Test updates
diff --git a/SAS/TMSS/backend/src/migrate_momdb_to_tmss.py b/SAS/TMSS/backend/src/migrate_momdb_to_tmss.py
index 4d38aff956611eb3b0f0d5044fbffa32994c9742..a77af99efa8693c76fcaee0f43537d65bdea0848 100755
--- a/SAS/TMSS/backend/src/migrate_momdb_to_tmss.py
+++ b/SAS/TMSS/backend/src/migrate_momdb_to_tmss.py
@@ -506,7 +506,7 @@ def create_subtask_trees_for_project_in_momdb(project_mom2id, project):
         details = {"id": mom_details['mom2id'],
                    "state": state,
                    "specifications_doc": {},   # todo: where from? We have user_specification_id (for task?) and system_specification_id (for subtask?) on lofar_observation (I guess referring to lofar_observation_specification). Shall we piece things together from that, or is there a text blob to use? Also: pipeline info lives in obs_spec too?
-                   "task_blueprint": task_blueprint,
+                   #"task_blueprint": task_blueprint,  # ManyToMany, use set()
                    "specifications_template": specifications_template,
                    "tags": ["migrated_from_MoM", "migration_incomplete"],   # todo: set complete once it is verified that all info is present
                    "priority": project.priority_rank,  # todo: correct to derive from project?
@@ -523,11 +523,13 @@ def create_subtask_trees_for_project_in_momdb(project_mom2id, project):
         if subtask_qs.count():
             # todo: this will update the subtask, but other TMSS objects do not share id with MoM and get recreated with every migration run. Can we clean this up somehow?
             subtask_qs.update(**details)
             subtask = subtask_qs.first()
+            subtask.task_blueprints.set([task_blueprint])
             logger.info("...updated existing subtask tmss id=%s" % subtask.id)
             stats['subtasks_updated'] += 1
         else:
             subtask = models.Subtask.objects.create(**details)
+            subtask.task_blueprints.set([task_blueprint])
             logger.info("...created new subtask tmss id=%s" % subtask.id)
             stats['subtasks_created'] += 1
 
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/CMakeLists.txt b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/CMakeLists.txt
index 457bdbabeb7c04db158abe1c7a6a6a9b0f5dd90e..d3438271ca516b706d2d6f687b7ec6db2db2253d 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/CMakeLists.txt
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/CMakeLists.txt
@@ -5,6 +5,7 @@ set(_py_files
     parset.py
     sip.py
     feedback.py
+    reports.py
     )
 
 python_install(${_py_files}
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py
index b3d630d18a33e76379b593a4cbd6e5a613160ad2..ae0eeacbd3d103a67c633c4b73233a37d18de23e 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py
@@ -309,7 +309,10 @@ def _convert_to_parset_dict_for_observationcontrol_schema(subtask: models.Subtas
     parset["Observation.tmssID"] = subtask.pk
     parset["Observation.processType"] = subtask.specifications_template.type.value.capitalize()
     parset["Observation.processSubtype"] = "Beam Observation"
-    parset["Observation.Campaign.name"] = subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project.name
+    project_set = set([tb.scheduling_unit_blueprint.draft.scheduling_set.project.name for tb in subtask.task_blueprints.all()])
+    if len(project_set) != 1:
+        raise ConversionException('Subtask id=%s cannot be converted to a parset because it references task blueprints that belong to different projects=%s' % (subtask.id, project_set))
+    parset["Observation.Campaign.name"] = list(project_set)[0]
     parset["Observation.startTime"] = formatDatetime(subtask.start_time) if isinstance(subtask.start_time, datetime) else subtask.start_time
     parset["Observation.stopTime"] = formatDatetime(subtask.stop_time) if isinstance(subtask.stop_time, datetime) else subtask.stop_time
     parset["Observation.strategy"] = "default"  # maybe not mandatory?
@@ -429,8 +432,11 @@ def _convert_to_parset_dict_for_pipelinecontrol_schema(subtask: models.Subtask)
     parset["Observation.processSubtype"] = "Averaging Pipeline"
     parset["Observation.ObservationControl.PythonControl.pythonProgram"] = "preprocessing_pipeline.py"
     parset["Observation.ObservationControl.PythonControl.softwareVersion"] = ""
-    parset["Observation.Campaign.name"] = subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project.name
-    parset["Observation.Scheduler.taskName"] = subtask.task_blueprint.name
+    project_set = set([tb.scheduling_unit_blueprint.draft.scheduling_set.project.name for tb in subtask.task_blueprints.all()])
+    if len(project_set) != 1:
+        raise ConversionException('Subtask pk=%s cannot be converted to a parset because it references task blueprints that belong to different projects (names=%s)' % (subtask.pk, project_set))
+    parset["Observation.Campaign.name"] = list(project_set)[0]
+    parset["Observation.Scheduler.taskName"] = subtask.task_blueprints.first().name   # Scheduler keys are artefacts of an older time. Their content is deprecated, so we don't care whch task we take this from
     parset["Observation.Scheduler.predecessors"] = []
     parset["Observation.Cluster.ProcessingCluster.clusterName"] = subtask.cluster.name
     parset["Observation.Cluster.ProcessingCluster.clusterPartition"] = 'cpu'
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/reports.py b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/reports.py
new file mode 100644
index 0000000000000000000000000000000000000000..43a699ad5c8e4529d3e711cc46248132dba3cb13
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/reports.py
@@ -0,0 +1,76 @@
+from django.db.models import Sum
+from lofar.sas.tmss.tmss.tmssapp import models
+from lofar.sas.tmss.tmss.tmssapp import serializers
+
+from rest_framework.request import Request
+from datetime import timedelta
+
+
+def create_project_report(request: Request, project: models.Project) -> {}:
+    """
+    Create a project report as a JSON object.
+    """
+    result = {'project': project.pk}
+    result['quota'] = _get_quotas_from_project(request, project.pk)
+    result['durations'] = _get_subs_and_durations_from_project(project.pk)
+    result['LTA dataproducts'] = _get_lta_dataproducts(project.name)
+    result['SAPs'] = _get_saps(project.pk)
+
+    return result
+
+
+def _get_quotas_from_project(request: Request, project_pk: int) -> []:
+    """
+    Helper function to retrieve quotas.
+    """
+    project_quotas = models.ProjectQuota.objects.filter(project=project_pk)
+    project_quotas_data = [serializers.ProjectQuotaSerializer(pq, context={'request': request}).data for pq in project_quotas]
+    quotas = [{k: pqd[k] for k in ('id', 'resource_type_id', 'value')} for pqd in project_quotas_data]
+    return quotas
+
+
+def _get_subs_and_durations_from_project(project_pk: int) -> {}:
+    """
+    Helper function to retrieve durations and scheduling units, distinguished by success/fail.
+    """
+    # Get SUBs related to the project
+    scheduling_unit_blueprints = models.SchedulingUnitBlueprint.objects.filter(draft__scheduling_set__project__pk=project_pk)
+    # TODO: Split into total, prio A, prio B? See TMSS-592.
+    total_duration, total_succeeded_duration, total_failed_duration = timedelta(), timedelta(), timedelta()
+    subs_succeeded, subs_failed = [], []
+
+    # NOTE: This might be optimised later with the use of Django's ORM as done for LTA dataproducts.
+    for sub in scheduling_unit_blueprints:  # Distinguish between succeeded and failed observations
+        # TODO: Use QA workflow flag instead of the finished status? See TMSS-592.
+        if sub.status == 'finished':        # Succeeded observations
+            total_succeeded_duration += sub.duration
+            subs_succeeded.append({'id': sub.pk, 'name': sub.name, 'duration': sub.duration.total_seconds()})
+        elif sub.status == 'cancelled':     # Failed observations
+            total_failed_duration += sub.duration
+            subs_failed.append({'id': sub.pk, 'name': sub.name, 'duration': sub.duration.total_seconds()})
+        total_duration += sub.duration      # Total duration without considering the status of the obs.
+
+    total_not_cancelled = total_duration - total_failed_duration  # Calculate not_cancelled duration
+    durations = {'total': total_duration.total_seconds(), 'total_succeeded': total_succeeded_duration.total_seconds(),
+                 'total_not_cancelled': total_not_cancelled.total_seconds(), 'total_failed': total_failed_duration.total_seconds(),
+                 'scheduling_unit_blueprints_finished': subs_succeeded, 'scheduling_unit_blueprints_failed': subs_failed}
+    return durations
+
+
+def _get_lta_dataproducts(project_name: str) -> {}:
+    """
+    Helper function to retrieve the sum of the LTA dataproduct sizes.
+    """
+    # Query dataproducts from subtasks of type 'ingest' with 'finished' status
+    return models.Dataproduct.objects.filter(producer__subtask__specifications_template__type='ingest') \
+        .filter(producer__subtask__state__value='finished') \
+        .filter(producer__subtask__task_blueprints__draft__scheduling_unit_draft__scheduling_set__project__name=project_name) \
+        .aggregate(Sum('size'))
+
+
+def _get_saps(project_pk: int) -> []:
+    """
+    Helper function to retrieve SAPs.
+    """
+    # TODO: For each unique target (SAP name) get the sum of target observation durations from the tasks.
+    return [{'sap_name': 'placeholder', 'total_exposure': 0}, ]
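`create_project_report` takes the DRF request only so the quota serializer can build hyperlinked output. A hedged sketch of how it might be exposed, assuming a DRF viewset for Project (the action name and wiring are illustrative, not part of this patch):

```python
from rest_framework import viewsets
from rest_framework.decorators import action
from rest_framework.response import Response

from lofar.sas.tmss.tmss.tmssapp import models
from lofar.sas.tmss.tmss.tmssapp.adapters.reports import create_project_report

class ProjectViewSet(viewsets.ReadOnlyModelViewSet):  # hypothetical wiring
    queryset = models.Project.objects.all()

    @action(methods=['get'], detail=True)
    def report(self, request, pk=None):
        # e.g. GET /project/<pk>/report/ returns the JSON report
        return Response(create_project_report(request, self.get_object()))
```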
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py
index 30a2d4029769070ebf204aeda4fada4565e59f1b..570e8b9c7663a7c5878bdaf89f154235cf04e1a9 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py
@@ -1,5 +1,5 @@
 from lofar.sas.tmss.tmss.exceptions import *
-from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Dataproduct, SubtaskType, Subtask, SubtaskOutput, SIPidentifier, Algorithm
+from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Dataproduct, SubtaskType, Subtask, SubtaskOutput, SIPidentifier, HashAlgorithm
 from lofar.sas.tmss.tmss.tmssapp.models.specification import Datatype, Dataformat
 from lofar.lta.sip import siplib, ltasip, validator, constants
 from lofar.common.json_utils import add_defaults_to_json_object_for_schema
@@ -144,7 +144,8 @@ def create_sip_representation_for_subtask(subtask: Subtask):
 
     # determine subtask specific properties and add subtask representation to Sip object
     if subtask.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value:
-        subarraypointings=None  # todo, subtask.specifications_doc, probably more complex than it looks -> RGOE yes complex type for later -> JK: assuming this is done in TMSS-308?
+        subarraypointings = None  # todo, subtask.specifications_doc, probably more complex than it looks -> RGOE yes complex type for later -> JK: assuming this is done in TMSS-308?
+        concatenated_task_descriptions = "\n".join([tb.description for tb in subtask.task_blueprints.order_by("specifications_template__name").all()])   # we could also order by "specifications_template__type__value"?
         observation = siplib.Observation(observingmode=constants.OBSERVINGMODETYPE_BEAM_OBSERVATION,  # can be hardcoded for an observation
                                          instrumentfilter=mapping_filterset_type_TMSS_2_SIP[subtask.specifications_doc['stations']['filter']],
                                          clock_frequency="200",  # fixed,
@@ -162,7 +163,7 @@ def create_sip_representation_for_subtask(subtask: Subtask):
                                          process_map=process_map,
                                          channelwidth_frequency=None, # NA any more ('BlueGene compatibility' see comment in LTA-SIP.xsd)
                                          channelwidth_frequencyunit=constants.FREQUENCYUNIT_HZ,  # fixed
-                                         observationdescription=subtask.task_blueprint.description,
+                                         observationdescription=concatenated_task_descriptions,
                                          channelspersubband=0,  # NA any more ('BlueGene compatibility' see comment in LTA-SIP.xsd)
                                          subarraypointings=subarraypointings,
                                          transientbufferboardevents=None  # fixed
@@ -175,9 +176,11 @@ def create_sip_representation_for_subtask(subtask: Subtask):
             sourcedata_identifiers += [get_siplib_identifier(dp.global_identifier, "Dataproduct id=%s" % dp.id) for dp in input.dataproducts.all()]     # todo: use correct id, lookup based on TMSS reference or so, tbd
         if not sourcedata_identifiers:
             raise TMSSException("There seems to be no subtask input associated to your pipeline subtask id %s. Please define what data the pipeline processed." % subtask.id)
+        if subtask.task_blueprints.count() > 1:
+            raise TMSSException("There are several task blueprints pk=%s associated to subtask pk=%s, but for pipelines, only a single task is supported." % ([tb.pk for tb in subtask.task_blueprints.all()], subtask.pk))
 
         pipeline_map = siplib.PipelineMap(
-                name=subtask.task_blueprint.name,
+                name=subtask.task_blueprints.first().name,  # there is only one
                 version='unknown',  # todo from subtask.specifications_doc? from feedback (we have feedback and storagewriter versions there, not pipeline version or sth)?
                 sourcedata_identifiers=sourcedata_identifiers,
                 process_map=process_map)
@@ -280,7 +283,7 @@ def create_sip_representation_for_dataproduct(dataproduct: Dataproduct):
         logger.warning("Could not determine the type of dataproduct id %s (%s). Falling back to %s" % (dataproduct.id, err, dataproduct_type))
 
     try:
-        dataproduct_fileformat = fileformat_map[dataproduct.producer.subtask.task_blueprint.consumed_by.first().dataformat.value] # todo same as with type? Why is this not with the data? Why is this so different from the LTA datamodel?
+        dataproduct_fileformat = fileformat_map[dataproduct.dataformat.value]  # todo same as with type? Why is this not with the data? Why is this so different from the LTA datamodel?
     except Exception as err:
         dataproduct_fileformat = constants.FILEFORMATTYPE_UNDOCUMENTED
         logger.warning("Could not determine the type of dataproduct id %s (%s). Falling back to %s" % (dataproduct.id, err, dataproduct_fileformat))
@@ -449,12 +452,12 @@ def generate_sip_for_dataproduct(dataproduct: Dataproduct) -> siplib.Sip:
         if dataproduct.hashes:
             from django.core.exceptions import ObjectDoesNotExist
             try:
-                sip_dataproduct.set_checksum_md5(dataproduct.hashes.get(algorithm=Algorithm.Choices.MD5.value).hash)
+                sip_dataproduct.set_checksum_md5(dataproduct.hashes.get(hash_algorithm=HashAlgorithm.Choices.MD5.value).hash)
             except ObjectDoesNotExist:
                 pass
 
             try:
-                sip_dataproduct.set_checksum_adler32(dataproduct.hashes.get(algorithm=Algorithm.Choices.ADLER32.value).hash)
+                sip_dataproduct.set_checksum_adler32(dataproduct.hashes.get(hash_algorithm=HashAlgorithm.Choices.ADLER32.value).hash)
             except ObjectDoesNotExist:
                 pass
 
@@ -465,7 +468,11 @@ def generate_sip_for_dataproduct(dataproduct: Dataproduct) -> siplib.Sip:
         sip_dataproduct = create_sip_representation_for_dataproduct(dataproduct)
 
     # Gather project details
-    project = dataproduct.producer.subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project
+    project_set = {tb.scheduling_unit_blueprint.draft.scheduling_set.project.name for tb in dataproduct.producer.subtask.task_blueprints.all()}
+    if len(project_set) != 1:
+        # todo: support for multiple projects needs to be picked up in TMSS-689
+        raise TMSSException('Dataproduct pk=%s references task blueprints that belong to different projects (names=%s). This cannot currently be represented in SIP format.' % (dataproduct.pk, project_set))
+    project = dataproduct.producer.subtask.task_blueprints.first().scheduling_unit_blueprint.draft.scheduling_set.project   # there must be only one task blueprint
     project_code = project.name
     project_primaryinvestigator = 'project_primaryinvestigator'
     project_contactauthor = 'project_contactauthor'
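The project lookup above tolerates multiple linked task blueprints only when they all resolve to the same project. A small sketch of that set-based uniqueness pattern in isolation (the helper name is illustrative; the relations follow the models in this change):

```python
def get_single_project_for_dataproduct(dataproduct):
    # Collect the distinct project names across all task blueprints feeding the
    # producing subtask; exactly one distinct project is required for a SIP.
    names = {tb.scheduling_unit_blueprint.draft.scheduling_set.project.name
             for tb in dataproduct.producer.subtask.task_blueprints.all()}
    if len(names) != 1:
        raise TMSSException("Dataproduct pk=%s spans projects %s" % (dataproduct.pk, names))
    return dataproduct.producer.subtask.task_blueprints.first().scheduling_unit_blueprint.draft.scheduling_set.project
```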
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/conversions.py b/SAS/TMSS/backend/src/tmss/tmssapp/conversions.py
index 3c0e184ce79ac8e697043dcf8ced5dceba3bf1eb..14b0a38e566666fda10ba8292bb9d4f91525afef 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/conversions.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/conversions.py
@@ -5,10 +5,16 @@ from astropy.coordinates.earth import EarthLocation
 from astropy.coordinates import Angle, get_body
 import astropy.time
 from functools import lru_cache
+from lofar.sas.tmss.tmss.tmssapp.models.calculations import StationTimeline
+from lofar.sas.tmss.tmss.tmssapp.models.specification import CommonSchemaTemplate
+from django.db.utils import IntegrityError
+
+from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
 
 import logging
 logger = logging.getLogger(__name__)
 
+
 def create_astroplan_observer_for_station(station: str) -> 'Observer':
     '''
     returns an astroplan observer for object for a given station, located in the LBA center of the given station
@@ -26,18 +32,28 @@ def create_astroplan_observer_for_station(station: str) -> 'Observer':
 # default angle to the horizon at which the sunset/sunrise starts and ends, as per LOFAR definition.
 SUN_SET_RISE_ANGLE_TO_HORIZON = Angle(10, unit=astropy.units.deg)
 # default n_grid_points; higher is more precise but very costly; astropy defaults to 150, errors now can be in the minutes, increase if this is not good enough
+# TODO: Consider increasing the number of grid points, now that we store the sunset/sunrise data in advance.
 SUN_SET_RISE_PRECISION = 30
 
-@lru_cache(maxsize=256, typed=False)  # does not like lists, so use tuples to allow caching
-def timestamps_and_stations_to_sun_rise_and_set(timestamps: tuple, stations: tuple, angle_to_horizon: Angle=SUN_SET_RISE_ANGLE_TO_HORIZON) -> dict:
+
+def timestamps_and_stations_to_sun_rise_and_set(timestamps: tuple, stations: tuple, angle_to_horizon: Angle=SUN_SET_RISE_ANGLE_TO_HORIZON,
+                                                create_when_not_found=False) -> dict:
     """
-    Compute sunrise, sunset, day and night of the given stations at the given timestamps.
+    Retrieve the sunrise/sunset/day/night data for the given stations and timestamps as a dictionary.
+    If the data for a station/timestamp combination has already been calculated, it is retrieved from the
+    database; otherwise it is calculated, and optionally (create_when_not_found=True) stored in the
+    database for future retrieval. Storing the pre-calculated data in the database makes retrieval faster.
+
     The day/sunrise/sunset is always on the date of the timestamp.
-    The night is usually the one _starting_ on the date of the time stamp, unless the given timestamp falls before sunrise, in which case it is the night _ending_ on the timestamp date.
-    :param timestamps: tuple of datetimes, e.g. (datetime(2020, 1, 1), datetime(2020, 1, 2))
-    :param stations: tuple of station names, e.g. ("CS002",)
+    The night is usually the one _starting_ on the date of the time stamp, unless the given timestamp falls before
+    sunrise, in which case it is the night _ending_ on the timestamp date.
+
+    :param timestamps: tuple of datetimes, e.g. (datetime(2020, 1, 1), datetime(2020, 1, 2))
+    :param stations: tuple of station names, e.g. ("CS002",)
     :param angle_to_horizon: the angle between horizon and given coordinates for which rise and set times are returned
-    :return A dict that maps station names to a nested dict that contains lists of start and end times for sunrise, sunset, etc, on each requested date.
+    :param create_when_not_found: if True, newly calculated data is added to the database for future retrieval
+    :return A dict that maps station names to a nested dict that contains lists of start and end times for sunrise,
+            sunset, day and night, on each requested date.
         E.g.
         {"CS002":
             {   "sunrise": [{"start": datetime(2020, 1, 1, 6, 0, 0)), "end": datetime(2020, 1, 1, 6, 30, 0)},
@@ -53,27 +69,112 @@ def timestamps_and_stations_to_sun_rise_and_set(timestamps: tuple, stations: tup
     """
     return_dict = {}
     for station in stations:
+        observer = create_astroplan_observer_for_station(station)
         for timestamp in timestamps:
-            # todo: this can probably be made faster by moving the following logic to an own function with single station/timestamp as input and putting the lru_cache on there.
-            #  This also means that we have to strip the time from the datetime. Can this be safely done?
-            observer = create_astroplan_observer_for_station(station)
-            sunrise_start = observer.sun_rise_time(time=Time(datetime.combine(timestamp.date(), dtime(12,0,0))), horizon=-angle_to_horizon, which='previous', n_grid_points=SUN_SET_RISE_PRECISION)
-            sunrise_end = observer.sun_rise_time(time=Time(sunrise_start), horizon=angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION)
-            sunset_start = observer.sun_set_time(time=sunrise_end, horizon=angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION)
-            sunset_end = observer.sun_set_time(time=sunset_start, horizon=-angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION)
-            return_dict.setdefault(station, {}).setdefault("sunrise", []).append({"start": sunrise_start.to_datetime(), "end": sunrise_end.to_datetime()})
-            return_dict[station].setdefault("sunset", []).append({"start": sunset_start.to_datetime(), "end": sunset_end.to_datetime()})
-            return_dict[station].setdefault("day", []).append({"start": sunrise_end.to_datetime(), "end": sunset_start.to_datetime()})
-            if timestamp >= sunrise_start:
-                sunrise_next_start = observer.sun_rise_time(time=sunset_end, horizon=-angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION)
-                return_dict[station].setdefault("night", []).append({"start": sunset_end.to_datetime(), "end": sunrise_next_start.to_datetime()})
+            # We could check whether ALL station/timestamp combinations are in the database with a single
+            # query, but do it in a loop per station/timestamp so that no combination is missed.
+            try:
+                obj = StationTimeline.objects.get(station_name=station, timestamp=timestamp.date())
+                station_timestamp_found = True
+            except ObjectDoesNotExist:
+                station_timestamp_found = False
+
+            if station_timestamp_found:
+                logger.debug("StationTimeline data found in DB for station=%s, timestamp=%s" % (station,timestamp))
+                sunrise_dict = {"start": obj.sunrise_start, "end": obj.sunrise_end}
+                sunset_dict = {"start": obj.sunset_start, "end": obj.sunset_end}
             else:
-                sunset_previous_end = observer.sun_set_time(time=sunrise_start, horizon=-angle_to_horizon, which='previous', n_grid_points=SUN_SET_RISE_PRECISION)
-                return_dict[station].setdefault("night", []).append({"start": sunset_previous_end.to_datetime(), "end": sunrise_start.to_datetime()})
+                # Not found in database so calculate it
+                try:
+                    sunrise_dict, sunset_dict = calculate_and_get_sunrise_and_sunset_of_observer_day(observer, timestamp, angle_to_horizon)
+                except Exception:
+                    logger.warning("Cannot calculate sunrise/sunset for station=%s, timestamp=%s", station, timestamp)
+                    # Don't let it crash for now; skip the remaining timestamps for this station.
+                    # Stations SE607 and LV614 have known calculation problems on 2021-07-01,
+                    # and SE607 also on 2021-06-04.
+                    break
+                # Add to database
+                if create_when_not_found:
+                    try:
+                        station_timeline = StationTimeline.objects.create(
+                                                    station_name=station,
+                                                    timestamp=timestamp,
+                                                    sunrise_start=sunrise_dict['start'],
+                                                    sunrise_end=sunrise_dict['end'],
+                                                    sunset_start=sunset_dict['start'],
+                                                    sunset_end=sunset_dict['end'])
+                        logger.debug("StationTimeline %s calculated and created for station=%s, timestamp=%s" %
+                                    (station_timeline, station, timestamp))
+                    except IntegrityError as e:
+                        if 'unique_station_time_line' in str(e):
+                            logger.info("StationTimeline with station=%s and timestamp=%s already exists, "
+                                        "so not added to database",  station, timestamp)
+                        else:
+                            raise
+
+            # Derive day/night from sunset/sunrise
+            day_dict = {"start": sunrise_dict["end"], "end": sunset_dict["start"]}
+
+            if timestamp >= sunrise_dict["start"]:
+                # Determine next sunrise start
+                try:
+                    obj_next = StationTimeline.objects.get(station_name=station,
+                                                           timestamp=datetime.date(timestamp + timedelta(days=1)))
+                    sunrise_next_start = obj_next.sunrise_start
+                except ObjectDoesNotExist:
+                    sunrise_next_start = observer.sun_rise_time(time=Time(sunrise_dict["end"]), horizon=-angle_to_horizon,
+                                                                which='next',
+                                                                n_grid_points=SUN_SET_RISE_PRECISION).to_datetime()
+                night_dict = {"start": sunset_dict["end"], "end": sunrise_next_start}
+            else:
+                # Determine previous sunset end
+                try:
+                    obj_prev = StationTimeline.objects.get(station_name=station,
+                                                           timestamp=datetime.date(timestamp - timedelta(days=1)))
+                    sunset_previous_end = obj_prev.sunset_end
+                except ObjectDoesNotExist:
+                    sunset_previous_end = observer.sun_set_time(time=Time(sunrise_dict["start"]), horizon=-angle_to_horizon,
+                                                                which='previous',
+                                                                n_grid_points=SUN_SET_RISE_PRECISION).to_datetime()
+                night_dict = {"start": sunset_previous_end, "end": sunrise_dict["start"]}
+
+            # Create overall result
+            return_dict.setdefault(station, {})
+            return_dict[station].setdefault("sunrise", []).append(sunrise_dict)
+            return_dict[station].setdefault("sunset", []).append(sunset_dict)
+            return_dict[station].setdefault("day", []).append(day_dict)
+            return_dict[station].setdefault("night", []).append(night_dict)
 
     return return_dict
 
 
+@lru_cache(maxsize=256, typed=False)
+def calculate_and_get_sunrise_and_sunset_of_observer_day(observer, timestamp: datetime, angle_to_horizon: Angle) -> tuple:
+    """
+    Compute the sunrise and sunset of the given observer object (station) on the day of the given timestamp.
+    :param observer: astroplan observer object for a station
+    :param timestamp: a datetime on the day of interest, e.g. datetime(2020, 1, 1)
+    :param angle_to_horizon: the angle between horizon and given coordinates for which rise and set times are returned
+    :return: two dicts (each with 'start' and 'end' keys), for sunrise and sunset respectively
+    """
+    sunrise_start = observer.sun_rise_time(time=Time(datetime.combine(timestamp.date(), dtime(12, 0, 0))),
+                                           horizon=-angle_to_horizon, which='previous',
+                                           n_grid_points=SUN_SET_RISE_PRECISION)
+    sunrise_end = observer.sun_rise_time(time=Time(sunrise_start), horizon=angle_to_horizon, which='next',
+                                         n_grid_points=SUN_SET_RISE_PRECISION)
+    sunset_start = observer.sun_set_time(time=sunrise_end, horizon=angle_to_horizon, which='next',
+                                         n_grid_points=SUN_SET_RISE_PRECISION)
+    sunset_end = observer.sun_set_time(time=sunset_start, horizon=-angle_to_horizon, which='next',
+                                       n_grid_points=SUN_SET_RISE_PRECISION)
+
+    sunrise_dict = {"start": sunrise_start.to_datetime(), "end": sunrise_end.to_datetime()}
+    sunset_dict = {"start": sunset_start.to_datetime(), "end": sunset_end.to_datetime()}
+
+    return sunrise_dict, sunset_dict
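A quick illustration of calling the extracted calculation helper directly for one station and day (assuming the conversions module from this change is importable; the date is an example):

```python
from datetime import datetime
from lofar.sas.tmss.tmss.tmssapp.conversions import (create_astroplan_observer_for_station,
    calculate_and_get_sunrise_and_sunset_of_observer_day, SUN_SET_RISE_ANGLE_TO_HORIZON)

# Compute sunrise/sunset for CS002 on 2021-04-01; results are lru_cached per observer/timestamp.
observer = create_astroplan_observer_for_station("CS002")
sunrise, sunset = calculate_and_get_sunrise_and_sunset_of_observer_day(
    observer, datetime(2021, 4, 1), SUN_SET_RISE_ANGLE_TO_HORIZON)
print(sunrise["start"], sunset["end"])  # sunrise start and sunset end on that day
```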
+
+
 # todo: Depending on usage patterns, we should consider refactoring this a little so that we cache on a function with a single timestamp as input. Requests with similar (but not identical) timestamps or bodies currently make no use of cached results for the subset computed in previous requests.
 @lru_cache(maxsize=256, typed=False)  # does not like lists, so use tuples to allow caching
 def coordinates_and_timestamps_to_separation_from_bodies(angle1: float, angle2: float, direction_type: str, timestamps: tuple, bodies: tuple) -> dict:
@@ -228,3 +329,20 @@ def antennafields_for_antennaset_and_station(antennaset:str, station:str) -> lis
 
     return fields
 
+
+def get_all_stations():
+    """
+    returns all possible stations.
+    Retrieve station names from station template by getting the Dutch and International stations,
+    then you should have it all.
+    """
+    lst_stations = []
+    for station_group in ["Dutch", "International"]:
+        try:
+            station_schema_template = CommonSchemaTemplate.objects.get(name="stations", version=1)
+            groups = station_schema_template.schema['definitions']['station_group']['anyOf']
+            selected_group = next(g for g in groups if g['title'].lower() == station_group.lower())
+            lst_stations.extend(selected_group['properties']['stations']['enum'][0])
+        except Exception:
+            logger.warning("No stations schema found, sorry can not determine station list, return empty list")
+    return lst_stations
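Taken together, the reworked conversions module gives callers a cache-through API: look up pre-calculated sunrise/sunset rows, and compute (and optionally store) them when absent. A usage sketch based on the signatures above:

```python
from datetime import datetime
from lofar.sas.tmss.tmss.tmssapp.conversions import timestamps_and_stations_to_sun_rise_and_set

# The first call per station/date calculates and stores; later calls read from the database.
result = timestamps_and_stations_to_sun_rise_and_set(
    timestamps=(datetime(2021, 4, 1), datetime(2021, 4, 2)),
    stations=("CS002",),
    create_when_not_found=True)
print(result["CS002"]["night"][0])  # {'start': ..., 'end': ...} for the night starting 2021-04-01
```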
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py
index 2fffaacce2860830ce8cf931ccb535535ae69121..569127f8d49da529f086ddbeafbe1d8bfe54943e 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py
@@ -1,4 +1,4 @@
-# Generated by Django 3.0.9 on 2021-03-23 17:08
+# Generated by Django 3.0.9 on 2021-04-07 08:59
 
 from django.conf import settings
 import django.contrib.postgres.fields
@@ -6,6 +6,7 @@ import django.contrib.postgres.fields.jsonb
 import django.contrib.postgres.indexes
 from django.db import migrations, models
 import django.db.models.deletion
+import lofar.sas.tmss.tmss.tmssapp.models.common
 import lofar.sas.tmss.tmss.tmssapp.models.specification
 
 
@@ -18,15 +19,6 @@ class Migration(migrations.Migration):
     ]
 
     operations = [
-        migrations.CreateModel(
-            name='Algorithm',
-            fields=[
-                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
         migrations.CreateModel(
             name='AntennaSet',
             fields=[
@@ -98,6 +90,7 @@ class Migration(migrations.Migration):
             options={
                 'abstract': False,
             },
+            bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, models.Model),
         ),
         migrations.CreateModel(
             name='CycleQuota',
@@ -334,15 +327,6 @@ class Migration(migrations.Migration):
                 'abstract': False,
             },
         ),
-        migrations.CreateModel(
-            name='Flag',
-            fields=[
-                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
-            ],
-            options={
-                'abstract': False,
-            },
-        ),
         migrations.CreateModel(
             name='GeneratorTemplate',
             fields=[
@@ -360,6 +344,15 @@ class Migration(migrations.Migration):
                 'abstract': False,
             },
         ),
+        migrations.CreateModel(
+            name='HashAlgorithm',
+            fields=[
+                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
         migrations.CreateModel(
             name='IOType',
             fields=[
@@ -378,6 +371,15 @@ class Migration(migrations.Migration):
                 'abstract': False,
             },
         ),
+        migrations.CreateModel(
+            name='PriorityQueueType',
+            fields=[
+                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
         migrations.CreateModel(
             name='Project',
             fields=[
@@ -398,6 +400,7 @@ class Migration(migrations.Migration):
             options={
                 'abstract': False,
             },
+            bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, models.Model),
         ),
         migrations.CreateModel(
             name='ProjectCategory',
@@ -433,6 +436,7 @@ class Migration(migrations.Migration):
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
             ],
+            bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, models.Model),
         ),
         migrations.CreateModel(
             name='ProjectRole',
@@ -608,10 +612,12 @@ class Migration(migrations.Migration):
                 ('output_data_allowed_to_be_ingested', models.BooleanField(default=False, help_text='boolean (default FALSE), which blocks Ingest Tasks from starting if OFF. When toggled ON, backend must scan for startable Ingest Tasks.')),
                 ('output_pinned', models.BooleanField(default=False, help_text='boolean (default FALSE), which blocks deleting unpinned dataproducts. When toggled ON, backend must pick SUB up for deletion. It also must when dataproducts are unpinned.')),
                 ('results_accepted', models.BooleanField(default=False, help_text='boolean (default NULL), which records whether the results were accepted, allowing the higher-level accounting to be adjusted.')),
+                ('priority_rank', models.FloatField(default=0.0, help_text='Priority of this scheduling unit w.r.t. other scheduling units within the same queue and project.')),
             ],
             options={
                 'abstract': False,
             },
+            bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, models.Model),
         ),
         migrations.CreateModel(
             name='SchedulingUnitDraft',
@@ -626,10 +632,12 @@ class Migration(migrations.Migration):
                 ('generator_instance_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Parameter value that generated this run draft (NULLable).', null=True)),
                 ('scheduling_constraints_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Scheduling Constraints for this run.', null=True)),
                 ('ingest_permission_required', models.BooleanField(default=False, help_text='Explicit permission is needed before the task.')),
+                ('priority_rank', models.FloatField(default=0.0, help_text='Priority of this scheduling unit w.r.t. other scheduling units within the same queue and project.')),
             ],
             options={
                 'abstract': False,
             },
+            bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, models.Model),
         ),
         migrations.CreateModel(
             name='SchedulingUnitObservingStrategyTemplate',
@@ -770,6 +778,15 @@ class Migration(migrations.Migration):
                 'abstract': False,
             },
         ),
+        migrations.CreateModel(
+            name='SystemSettingFlag',
+            fields=[
+                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
         migrations.CreateModel(
             name='Tags',
             fields=[
@@ -791,6 +808,7 @@ class Migration(migrations.Migration):
                 ('do_cancel', models.BooleanField(help_text='Cancel this task.')),
                 ('output_pinned', models.BooleanField(default=False, help_text='True if the output of this task is pinned to disk, that is, forbidden to be removed.')),
             ],
+            bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, models.Model),
         ),
         migrations.CreateModel(
             name='TaskConnectorType',
@@ -869,7 +887,7 @@ class Migration(migrations.Migration):
                 ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)),
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('name', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, primary_key=True, serialize=False, to='tmssapp.Flag', unique=True)),
+                ('name', models.OneToOneField(on_delete=django.db.models.deletion.PROTECT, primary_key=True, serialize=False, to='tmssapp.SystemSettingFlag')),
                 ('value', models.BooleanField()),
             ],
             options={
@@ -920,6 +938,17 @@ class Migration(migrations.Migration):
                 ('second', models.ForeignKey(help_text='Second Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_scheduling_relation', to='tmssapp.TaskBlueprint')),
             ],
         ),
+        migrations.CreateModel(
+            name='StationTimeline',
+            fields=[
+                ('station_name', models.CharField(max_length=16, null=False, editable=False, help_text='The LOFAR station name.')),
+                ('timestamp', models.DateField(editable=False, null=True, help_text='The date (YYYYMMDD).')),
+                ('sunrise_start', models.DateTimeField(null=True, help_text='Start time of the sunrise.')),
+                ('sunrise_end', models.DateTimeField(null=True, help_text='End time of the sunrise.')),
+                ('sunset_start', models.DateTimeField(null=True, help_text='Start time of the sunset.')),
+                ('sunset_end', models.DateTimeField(null=True, help_text='End time of the sunset.')),
+            ],
+        ),
         migrations.AddConstraint(
             model_name='taskrelationselectiontemplate',
             constraint=models.UniqueConstraint(fields=('name', 'version'), name='taskrelationselectiontemplate_unique_name_version'),
@@ -1079,6 +1108,11 @@ class Migration(migrations.Migration):
             name='subtask',
             field=models.ForeignKey(help_text='Subtask to which this output specification refers.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs', to='tmssapp.Subtask'),
         ),
+        migrations.AddField(
+            model_name='subtaskoutput',
+            name='task_blueprint',
+            field=models.ForeignKey(help_text='Task to which this output specification refers.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs', to='tmssapp.TaskBlueprint'),
+        ),
         migrations.AddField(
             model_name='subtaskinput',
             name='dataproducts',
@@ -1131,8 +1165,12 @@ class Migration(migrations.Migration):
         ),
         migrations.AddField(
             model_name='subtask',
-            name='task_blueprint',
-            field=models.ForeignKey(help_text='Task Blueprint to which this Subtask belongs.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='subtasks', to='tmssapp.TaskBlueprint'),
+            name='task_blueprints',
+            field=models.ManyToManyField(blank=True, help_text='Task Blueprints to which this Subtask belongs.', related_name='subtasks', to='tmssapp.TaskBlueprint'),
+        ),
+        migrations.AddConstraint(
+            model_name='stationtimeline',
+            constraint=models.UniqueConstraint(fields=('station_name', 'timestamp'),  name='unique_station_time_line'),
         ),
         migrations.AddConstraint(
             model_name='schedulingunittemplate',
@@ -1158,6 +1196,11 @@ class Migration(migrations.Migration):
             name='observation_strategy_template',
             field=models.ForeignKey(help_text='Observation Strategy Template used to create the requirements_doc.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingUnitObservingStrategyTemplate'),
         ),
+        migrations.AddField(
+            model_name='schedulingunitdraft',
+            name='priority_queue',
+            field=models.ForeignKey(default='A', help_text='Priority queue of this scheduling unit. Queues provide a strict ordering between scheduling units.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.PriorityQueueType'),
+        ),
         migrations.AddField(
             model_name='schedulingunitdraft',
             name='requirements_template',
@@ -1178,6 +1221,11 @@ class Migration(migrations.Migration):
             name='draft',
             field=models.ForeignKey(help_text='Scheduling Unit Draft which this run instantiates.', on_delete=django.db.models.deletion.PROTECT, related_name='scheduling_unit_blueprints', to='tmssapp.SchedulingUnitDraft'),
         ),
+        migrations.AddField(
+            model_name='schedulingunitblueprint',
+            name='priority_queue',
+            field=models.ForeignKey(default='A', help_text='Priority queue of this scheduling unit. Queues provide a strict ordering between scheduling units.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.PriorityQueueType'),
+        ),
         migrations.AddField(
             model_name='schedulingunitblueprint',
             name='requirements_template',
@@ -1293,12 +1341,12 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='project',
             name='period_category',
-            field=models.ForeignKey(help_text='Period category.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.PeriodCategory'),
+            field=models.ForeignKey(help_text='Policy for managing the lifetime of this project.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.PeriodCategory'),
         ),
         migrations.AddField(
             model_name='project',
             name='project_category',
-            field=models.ForeignKey(help_text='Project category.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ProjectCategory'),
+            field=models.ForeignKey(help_text='Category this project falls under.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ProjectCategory'),
         ),
         migrations.AddConstraint(
             model_name='generatortemplate',
@@ -1365,13 +1413,13 @@ class Migration(migrations.Migration):
         ),
         migrations.AddField(
             model_name='dataproducthash',
-            name='algorithm',
-            field=models.ForeignKey(help_text='Algorithm used (MD5, AES256).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Algorithm'),
+            name='dataproduct',
+            field=models.ForeignKey(help_text='The dataproduct to which this hash refers.', on_delete=django.db.models.deletion.PROTECT, related_name='hashes', to='tmssapp.Dataproduct'),
         ),
         migrations.AddField(
             model_name='dataproducthash',
-            name='dataproduct',
-            field=models.ForeignKey(help_text='The dataproduct to which this hash refers.', on_delete=django.db.models.deletion.PROTECT, related_name='hashes', to='tmssapp.Dataproduct'),
+            name='hash_algorithm',
+            field=models.ForeignKey(help_text='Algorithm used for hashing (MD5, AES256).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.HashAlgorithm'),
         ),
         migrations.AddConstraint(
             model_name='dataproductfeedbacktemplate',
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/CMakeLists.txt b/SAS/TMSS/backend/src/tmss/tmssapp/models/CMakeLists.txt
index 3496efd57358ab186b665fe2dc3bd40264d4deaa..f6e74f93da044cdb42d2144d32a96fad0ed10097 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/CMakeLists.txt
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/CMakeLists.txt
@@ -8,6 +8,7 @@ set(_py_files
     scheduling.py
     common.py
     permissions.py
+    calculations.py
     )
 
 python_install(${_py_files}
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/__init__.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/__init__.py
index 0b0546b8d4bb175b9d8b5f9d98727aab73191c6b..3eb788371d97e4e3b1e62cbb5636014ceffc88bd 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/__init__.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/__init__.py
@@ -1,4 +1,5 @@
 from .specification import *
 from .scheduling import *
 from .common import *
-from .permissions import *
\ No newline at end of file
+from .permissions import *
+from .calculations import *
\ No newline at end of file
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/calculations.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/calculations.py
new file mode 100644
index 0000000000000000000000000000000000000000..e0f361589f577b47d3bedd8b5072b294fe7c409f
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/calculations.py
@@ -0,0 +1,30 @@
+"""
+This file contains the database models for calculations
+"""
+
+import os
+import logging
+logger = logging.getLogger(__name__)
+
+from django.db.models import Model, CharField, DateTimeField, DateField, UniqueConstraint
+
+
+class StationTimeline(Model):
+    """
+    Represents computations of sunrise, sunset of the given stations at the given timestamps.
+    Day and night are derived from sunset/sunrise data.
+    The day/sunrise/sunset is always on the date of the timestamp.
+    The night is usually the one _starting_ on the date of the time stamp, unless the given timestamp falls
+    before sunrise, in which case it is the night _ending_ on the timestamp date.
+    """
+    station_name = CharField(max_length=16, null=False, editable=False, help_text='The LOFAR station name.')
+    timestamp = DateField(editable=False, null=True, help_text='The date (YYYYMMDD).')
+
+    sunrise_start = DateTimeField(null=True, help_text='Start time of the sunrise.')
+    sunrise_end = DateTimeField(null=True, help_text='End time of the sunrise.')
+    sunset_start = DateTimeField(null=True, help_text='Start time of the sunset.')
+    sunset_end = DateTimeField(null=True, help_text='End time of the sunset.')
+
+    class Meta:
+        # ensure there are no duplicate station-timestamp combinations
+        constraints = [UniqueConstraint(fields=['station_name', 'timestamp'], name='unique_station_time_line')]
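Because (station_name, timestamp) is unique, idempotent writes can also be expressed with Django's get_or_create instead of the create()-plus-IntegrityError handling used in conversions.py; a sketch (the sunrise/sunset dicts are placeholders standing in for the calculation helper's output):

```python
from datetime import date
from lofar.sas.tmss.tmss.tmssapp.models.calculations import StationTimeline

sunrise = {"start": None, "end": None}  # placeholder; normally from calculate_and_get_sunrise_and_sunset_of_observer_day
sunset = {"start": None, "end": None}

# get_or_create leans on the unique constraint, so concurrent populators do not duplicate rows.
obj, created = StationTimeline.objects.get_or_create(
    station_name="CS002", timestamp=date(2021, 4, 1),
    defaults=dict(sunrise_start=sunrise["start"], sunrise_end=sunrise["end"],
                  sunset_start=sunset["start"], sunset_end=sunset["end"]))
```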
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py
index 9535b3c3d9732a2ae062b3186717697a5f853cd4..9e9a1cbf634f75d353d148c6efc0ed924ed3e619 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py
@@ -76,8 +76,8 @@ class StationType(AbstractChoice):
             INTERNATIONAL = "international"
 
 
-class Algorithm(AbstractChoice):
-    """Defines the model and predefined list of possible Algorithm's for DataproductHash.
+class HashAlgorithm(AbstractChoice):
+    """Defines the model and predefined list of possible HashAlgorithm's for DataproductHash.
     The items in the Choices class below are automagically populated into the database via a data migration."""
 
     class Choices(Enum):
@@ -147,7 +147,7 @@ class Subtask(BasicCommon):
     stop_time = DateTimeField(null=True, help_text='Stop this subtask at the specified time (NULLable).')
     state = ForeignKey('SubtaskState', null=False, on_delete=PROTECT, related_name='task_states', help_text='Subtask state (see Subtask State Machine).')
     specifications_doc = JSONField(help_text='Final specifications, as input for the controller.')
-    task_blueprint = ForeignKey('TaskBlueprint', related_name='subtasks', null=True, on_delete=SET_NULL, help_text='Task Blueprint to which this Subtask belongs.')
+    task_blueprints = ManyToManyField('TaskBlueprint', related_name='subtasks', blank=True, help_text='Task Blueprints to which this Subtask belongs.')
     specifications_template = ForeignKey('SubtaskTemplate', null=False, on_delete=PROTECT, help_text='Schema used for specifications_doc.')
     do_cancel = DateTimeField(null=True, help_text='Timestamp when the subtask has been ordered to cancel (NULLable).')
     cluster = ForeignKey('Cluster', null=True, on_delete=PROTECT, help_text='Where the Subtask is scheduled to run (NULLable).')
@@ -174,11 +174,13 @@ class Subtask(BasicCommon):
         '''get the specified (or estimated) duration of this subtask based on the specified task duration and the subtask type'''
         if self.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value:
             # observations have a specified duration, so grab it from the spec.
-            return timedelta(seconds=self.task_blueprint.specifications_doc.get('duration', 0))
+            # In case we have several associated tasks: use the longest duration, since we assume the tasks run in parallel (otherwise there would be no reason to combine them into one subtask).
+            return timedelta(seconds=max((tb.specifications_doc.get('duration', 0) for tb in self.task_blueprints.all()), default=0))
 
         if self.specifications_template.type.value == SubtaskType.Choices.PIPELINE.value:
             # pipelines usually do not have a specified duration, so make a guess (half the obs duration?).
-            return timedelta(seconds=self.task_blueprint.specifications_doc.get('duration', max(p.specified_duration.total_seconds() for p in self.predecessors)/2))
+            # In case we have several associated tasks: this guess is rough anyway, so it does not really matter which task blueprint we refer to here
+            return timedelta(seconds=self.task_blueprints.first().specifications_doc.get('duration', max(p.specified_duration.total_seconds() for p in self.predecessors)/2))
 
         # other subtasktypes usually depend on cpu/data/network etc. So, make a guess (for now)
         return timedelta(minutes=5)
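To see the observation-duration rule above in isolation: missing durations default to 0 and the longest one wins, as in this plain-data illustration (no ORM involved):

```python
from datetime import timedelta

# Toy stand-ins for the specifications_doc of three parallel task blueprints.
specifications_docs = [{"duration": 600}, {"duration": 3600}, {}]

# Same reduction as Subtask.specified_duration for observations.
duration = timedelta(seconds=max((doc.get("duration", 0) for doc in specifications_docs), default=0))
assert duration == timedelta(hours=1)
```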
@@ -340,6 +342,7 @@ class SubtaskInput(BasicCommon):
 
 class SubtaskOutput(BasicCommon):
     subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, related_name='outputs', help_text='Subtask to which this output specification refers.')
+    task_blueprint = ForeignKey('TaskBlueprint', null=False, on_delete=CASCADE, related_name='outputs', help_text='Task to which this output specification refers.')
 
 
 class SAP(BasicCommon):
@@ -434,6 +437,6 @@ class DataproductArchiveInfo(BasicCommon):
 
 class DataproductHash(BasicCommon):
     dataproduct = ForeignKey('Dataproduct', related_name='hashes', on_delete=PROTECT, help_text='The dataproduct to which this hash refers.')
-    algorithm = ForeignKey('Algorithm', null=False, on_delete=PROTECT, help_text='Algorithm used (MD5, AES256).')
+    hash_algorithm = ForeignKey('HashAlgorithm', null=False, on_delete=PROTECT, help_text='Algorithm used for hashing (MD5, AES256).')
     hash = CharField(max_length=128, help_text='Hash value.')
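Callers that previously filtered on `algorithm` now filter on `hash_algorithm`, as the SIP adapter change earlier in this diff already shows. For reference (assuming a `dataproduct` instance with stored hashes):

```python
from lofar.sas.tmss.tmss.tmssapp.models.scheduling import HashAlgorithm

# 'dataproduct' is assumed to be an existing Dataproduct with related hashes.
md5 = dataproduct.hashes.get(hash_algorithm=HashAlgorithm.Choices.MD5.value).hash
```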
 
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
index 140b298db576485d9f3d8f23cb49f20daf15cd37..222653d1fe21c9d6b064ba5525b885efc0477155 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
@@ -5,7 +5,7 @@ This file contains the database models
 import logging
 logger = logging.getLogger(__name__)
 
-from django.db.models import Model, CharField, DateTimeField, BooleanField, ForeignKey, CASCADE, IntegerField, FloatField, SET_NULL, PROTECT, ManyToManyField, UniqueConstraint, QuerySet
+from django.db.models import Model, CharField, DateTimeField, BooleanField, ForeignKey, CASCADE, IntegerField, FloatField, SET_NULL, PROTECT, ManyToManyField, UniqueConstraint, QuerySet, OneToOneField
 from django.contrib.postgres.fields import JSONField
 from enum import Enum
 from django.db.models.expressions import RawSQL
@@ -101,7 +101,7 @@ class SchedulingRelationPlacement(AbstractChoice):
         BEFORE = "before"
         PARALLEL = "parallel"
 
-class Flag(AbstractChoice):
+class SystemSettingFlag(AbstractChoice):
     """Defines the model and predefined list of possible Flags to be used in Setting.
     The items in the Choises class below are automagically populated into the database via a data migration."""
     class Choices(Enum):
@@ -124,7 +124,7 @@ class Quantity(AbstractChoice):
 
 
 class PeriodCategory(AbstractChoice):
-    """Defines the model and predefined list of possible period categories to be used in Project.
+    """Defines the model and predefined list of possible period categories to be used in Project as a policy for managing the project's lifetime.
         The items in the Choices class below are automagically populated into the database via a data migration."""
 
     class Choices(Enum):
@@ -156,10 +156,17 @@ class TaskType(AbstractChoice):
         OTHER = 'other'
 
 
+class PriorityQueueType(AbstractChoice):
+    """Defines the possible priority queues for SchedulingUnits.
+    The items in the Choices class below are automagically populated into the database via a data migration."""
+    class Choices(Enum):
+        A = "A"
+        B = "B"
+
 # concrete models
 
 class Setting(BasicCommon):
-    name = ForeignKey('Flag', null=False, on_delete=PROTECT, unique=True, primary_key=True)
+    name = OneToOneField('SystemSettingFlag', null=False, on_delete=PROTECT, primary_key=True)
     value = BooleanField(null=False)
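With the OneToOneField, each SystemSettingFlag can back at most one Setting, and lookups read naturally; a sketch based on the populate code later in this diff:

```python
from lofar.sas.tmss.tmss.tmssapp.models.specification import Setting, SystemSettingFlag

flag = SystemSettingFlag.objects.get(value='dynamic_scheduling_enabled')
setting = Setting.objects.get(name=flag)
if not setting.value:
    pass  # dynamic scheduling is switched off
```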
 
 
@@ -317,8 +324,8 @@ class Project(RefreshFromDbInvalidatesCachedPropertiesMixin, NamedCommonPK):
     private_data = BooleanField(default=True, help_text='True if data of this project is sensitive. Sensitive data is not made public.')
     expert = BooleanField(default=False, help_text='Expert projects put more responsibility on the PI.')
     filler = BooleanField(default=False, help_text='Use this project to fill up idle telescope time.')
-    project_category = ForeignKey('ProjectCategory', null=True, on_delete=PROTECT, help_text='Project category.')
-    period_category = ForeignKey('PeriodCategory', null=True, on_delete=PROTECT, help_text='Period category.')
+    project_category = ForeignKey('ProjectCategory', help_text='Category this project falls under.', null=True, on_delete=PROTECT)
+    period_category = ForeignKey('PeriodCategory', help_text='Policy for managing the lifetime of this project.', null=True, on_delete=PROTECT)
     auto_pin = BooleanField(default=False, help_text='True if the output_pinned flag of tasks in this project should be set True on creation.')
     path_to_project = "project"
 
@@ -387,6 +394,8 @@ class SchedulingUnitDraft(RefreshFromDbInvalidatesCachedPropertiesMixin, NamedCo
     scheduling_constraints_doc = JSONField(help_text='Scheduling Constraints for this run.', null=True)
     scheduling_constraints_template = ForeignKey('SchedulingConstraintsTemplate', on_delete=CASCADE, null=True, help_text='Schema used for scheduling_constraints_doc.')
     ingest_permission_required = BooleanField(default=False, help_text='Explicit permission is needed before the task.')
+    priority_rank = FloatField(null=False, default=0.0, help_text='Priority of this scheduling unit w.r.t. other scheduling units within the same queue and project.')
+    priority_queue = ForeignKey('PriorityQueueType', null=False, on_delete=PROTECT, default="A", help_text='Priority queue of this scheduling unit. Queues provide a strict ordering between scheduling units.')
 
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
         if self.requirements_doc is not None and self.requirements_template_id and self.requirements_template.schema is not None:
@@ -463,6 +472,8 @@ class SchedulingUnitBlueprint(RefreshFromDbInvalidatesCachedPropertiesMixin, Nam
     output_data_allowed_to_be_ingested = BooleanField(default=False, help_text='boolean (default FALSE), which blocks Ingest Tasks from starting if OFF. When toggled ON, backend must scan for startable Ingest Tasks.')
     output_pinned = BooleanField(default=False, help_text='boolean (default FALSE), which blocks deleting unpinned dataproducts. When toggled ON, backend must pick SUB up for deletion. It also must when dataproducts are unpinned.')
     results_accepted = BooleanField(default=False, help_text='boolean (default NULL), which records whether the results were accepted, allowing the higher-level accounting to be adjusted.')
+    priority_rank = FloatField(null=False, default=0.0, help_text='Priority of this scheduling unit w.r.t. other scheduling units within the same queue and project.')
+    priority_queue = ForeignKey('PriorityQueueType', null=False, on_delete=PROTECT, default="A", help_text='Priority queue of this scheduling unit. Queues provide a strict ordering between scheduling units.')
 
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
         annotate_validate_add_defaults_to_doc_using_template(self, 'requirements_doc', 'requirements_template')
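The queue/rank pair turns scheduling-unit priority into a two-level sort key. A sketch of how a scheduler could order units (whether a higher rank means higher priority is an assumption here, not something this diff pins down):

```python
from lofar.sas.tmss.tmss.tmssapp.models.specification import SchedulingUnitBlueprint

# Queue 'A' sorts strictly before 'B'; within a queue, order by rank (direction assumed).
ordered = SchedulingUnitBlueprint.objects.order_by('priority_queue', '-priority_rank')
```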
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/populate.py b/SAS/TMSS/backend/src/tmss/tmssapp/populate.py
index 1768345692a3b519bac2c555834231455ebe611d..02575ebfc44cfa7d5261791d741ae95438bfab94 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/populate.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/populate.py
@@ -19,12 +19,13 @@ logger = logging.getLogger(__name__)
 
 import inspect
 import re
-from datetime import datetime, timezone
+from datetime import timezone, datetime, date, timedelta
 from lofar.sas.tmss.tmss.tmssapp import models
 from lofar.sas.tmss.tmss.tmssapp import viewsets
 from lofar.sas.tmss.tmss.tmssapp.models.specification import *
 from lofar.sas.tmss.tmss.tmssapp.models.scheduling import *
 from lofar.sas.tmss.tmss.tmssapp.models.permissions import *
+from lofar.sas.tmss.tmss.tmssapp.conversions import timestamps_and_stations_to_sun_rise_and_set, get_all_stations
 from lofar.common import isTestEnvironment, isDevelopmentEnvironment
 from concurrent.futures import ThreadPoolExecutor
 from django.contrib.auth.models import User, Group, Permission
@@ -41,8 +42,8 @@ def populate_choices(apps, schema_editor):
     :return: None
     '''
     choice_classes = [Role, IOType, Datatype, Dataformat, CopyReason,
-                      SubtaskState, SubtaskType, StationType, Algorithm, SchedulingRelationPlacement,
-                      Flag, ProjectCategory, PeriodCategory, Quantity, TaskType, ProjectRole]
+                      SubtaskState, SubtaskType, StationType, HashAlgorithm, SchedulingRelationPlacement,
+                      SystemSettingFlag, ProjectCategory, PeriodCategory, Quantity, TaskType, ProjectRole, PriorityQueueType]
 
     # upload choices in parallel
     with ThreadPoolExecutor() as executor:
@@ -50,7 +51,7 @@ def populate_choices(apps, schema_editor):
                      choice_classes)
 
 def populate_settings(apps, schema_editor):
-    Setting.objects.create(name=Flag.objects.get(value='dynamic_scheduling_enabled'), value=False)
+    Setting.objects.create(name=SystemSettingFlag.objects.get(value='dynamic_scheduling_enabled'), value=False)
 
 def populate_test_data():
     """
@@ -622,3 +623,19 @@ def populate_system_test_users():
     guest_user.groups.add(Group.objects.get(name='Guest'))
     lta_user, _ = User.objects.get_or_create(username='lta_user', password='lta_user')
     lta_user.groups.add(Group.objects.get(name='LTA User'))
+
+
+def populate_sunrise_and_sunset_for_all_stations(nbr_days=3, start_date=None):
+    """
+    Populate the station timeline data of all stations for the given number of days, starting at the given date (defaults to today).
+    Note: if the data is not in the database yet, it takes about 6 seconds to calculate it for all (51) stations.
+    """
+    start_date = start_date if start_date is not None else date.today()  # avoid a default argument evaluated once at import time
+    starttime_for_logging = datetime.utcnow()
+    logger.info("Populating sunrise and sunset for ALL known stations for %d day(s) starting at %s" % (nbr_days, start_date))
+    lst_timestamps = [datetime.combine(start_date, datetime.min.time()) + timedelta(days=i) for i in range(nbr_days)]
+
+    timestamps_and_stations_to_sun_rise_and_set(tuple(lst_timestamps), tuple(get_all_stations()), create_when_not_found=True)
+    logger.info("Populating sunrise and sunset done in %.1fs", (datetime.utcnow()-starttime_for_logging).total_seconds())
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json
index 25cf213d72b42f3bdc2194df0d9587a7f738e9fa..e3afa001749c54992e3de0cc6938a24ac4ed2867 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json
@@ -35,7 +35,6 @@
           "CS302",
           "CS401",
           "CS501",
-          "RS104",
           "RS106",
           "RS205",
           "RS208",
@@ -47,7 +46,6 @@
           "RS406",
           "RS407",
           "RS409",
-          "RS410",
           "RS503",
           "RS508",
           "RS509",
@@ -116,8 +114,8 @@
           "properties":{
             "stations":{
               "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
-              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501"]],
-              "default": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501"],
+              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501"]],
+              "default": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501"],
               "uniqueItems": false
             },
             "max_nr_missing":{
@@ -154,8 +152,8 @@
           "properties":{
             "stations":{
               "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
-              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"]],
-              "default": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"],
+              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"]],
+              "default": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"],
               "uniqueItems": false
             },
             "max_nr_missing":{
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/CMakeLists.txt b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/CMakeLists.txt
index 83a8174527b6f67a614c62aa26739e4e38377af7..f5f6fe3833689eb59d13bca4ad0b66af0517d805 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/CMakeLists.txt
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/CMakeLists.txt
@@ -8,6 +8,7 @@ set(_py_files
     widgets.py
     common.py
     permissions.py
+    calculations.py
     )
 
 python_install(${_py_files}
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/__init__.py b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/__init__.py
index 0b0546b8d4bb175b9d8b5f9d98727aab73191c6b..3eb788371d97e4e3b1e62cbb5636014ceffc88bd 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/__init__.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/__init__.py
@@ -1,4 +1,5 @@
 from .specification import *
 from .scheduling import *
 from .common import *
-from .permissions import *
\ No newline at end of file
+from .permissions import *
+from .calculations import *
\ No newline at end of file
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/calculations.py b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/calculations.py
new file mode 100644
index 0000000000000000000000000000000000000000..8584228204e5737e659fec51df69363b25ae5673
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/calculations.py
@@ -0,0 +1,15 @@
+"""
+This file contains the serializers for conversion models
+"""
+
+import logging
+logger = logging.getLogger(__name__)
+
+from rest_framework import serializers
+from .. import models
+
+
+class StationTimelineSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = models.StationTimeline
+        fields = '__all__'
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py
index 518c11f39915df327667e46af3f32526c2cbf93d..f498b0f20dd0b6b62528fc294ba014593f65b38d 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py
@@ -34,9 +34,9 @@ class StationTypeSerializer(DynamicRelationalHyperlinkedModelSerializer):
         fields = '__all__'
 
 
-class AlgorithmSerializer(DynamicRelationalHyperlinkedModelSerializer):
+class HashAlgorithmSerializer(DynamicRelationalHyperlinkedModelSerializer):
     class Meta:
-        model = models.Algorithm
+        model = models.HashAlgorithm
         fields = '__all__'
 
 
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py
index f709c770e71cf6ebc0bb1eb4e492ca20b8c50c18..9cea775af716487a03fd1f9e6c6c8c845f2e0b19 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py
@@ -203,9 +203,9 @@ class ResourceTypeSerializer(DynamicRelationalHyperlinkedModelSerializer):
         extra_fields = ['name']
 
 
-class FlagSerializer(DynamicRelationalHyperlinkedModelSerializer):
+class SystemSettingFlagSerializer(DynamicRelationalHyperlinkedModelSerializer):
     class Meta:
-        model = models.Flag
+        model = models.SystemSettingFlag
         fields = '__all__'
 
 
@@ -371,6 +371,12 @@ class TaskTypeSerializer(DynamicRelationalHyperlinkedModelSerializer):
         fields = '__all__'
 
 
+class PriorityQueueTypeSerializer(DynamicRelationalHyperlinkedModelSerializer):
+    class Meta:
+        model = models.PriorityQueueType
+        fields = '__all__'
+
+
 class ReservationStrategyTemplateSerializer(DynamicRelationalHyperlinkedModelSerializer):
     template = JSONEditorField(schema_source="reservation_template.schema")
 
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py
index 1360c3e1df96e2a692e05e3b9a652b196cbdbcf3..21f0705c51b6ffb9beb388d1a238479a4027b45b 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py
@@ -166,7 +166,7 @@ def create_observation_subtask_specifications_from_observation_task_blueprint(ta
     # So... copy the calibrator specs first, then loop over the shared target/calibrator specs...
     if 'calibrator' in task_blueprint.specifications_template.name.lower():
         # Calibrator requires related Target Task Observation for some specifications
-        target_task_blueprint = get_related_target_observation_task_blueprint(task_blueprint)
+        target_task_blueprint, _ = get_related_target_observation_task_blueprint(task_blueprint)
         if target_task_blueprint is None:
             raise SubtaskCreationException("Cannot create calibrator observation subtask specifications from task_blueprint id=%s with template name='%s' because no related target observation task_blueprint is found" % (task_blueprint.id, task_blueprint.specifications_template.name))
         target_task_spec = target_task_blueprint.specifications_doc
@@ -391,9 +391,21 @@ def get_stations_in_group(station_group_name: str) -> []:
         return sorted(list(station_names))
 
 
-def get_related_target_observation_task_blueprint(calibrator_or_beamformer_task_blueprint: TaskBlueprint) -> TaskBlueprint:
+def get_related_calibrator_observation_task_blueprint(target_task_blueprint: TaskBlueprint) -> (TaskBlueprint, SchedulingRelationPlacement):
     """
-    get the related target observation task_blueprint for the given calibrator or beamformer task_blueprint
+    get the related calibrator observation task_blueprint and the relative placement for the given target task_blueprint.
+    Returns (None, None) if no related calibrator observation is found.
+    """
+    if 'target' not in target_task_blueprint.specifications_template.name.lower():
+        raise ValueError("Cannot get a related calibrator observation task_blueprint for non-target task_blueprint id=%s template_name='%s'",
+                        target_task_blueprint.id, target_task_blueprint.specifications_template.name)
+
+    return _get_related_observation_task_blueprint(target_task_blueprint, 'calibrator observation')
+
+
+def get_related_target_observation_task_blueprint(calibrator_or_beamformer_task_blueprint: TaskBlueprint) -> (TaskBlueprint, SchedulingRelationPlacement):
+    """
-    if nothing found return None
+    get the related target observation task_blueprint and the relative placement for the given calibrator or beamformer task_blueprint.
+    Returns (None, None) if no related target observation is found.
     """
     if 'calibrator' not in calibrator_or_beamformer_task_blueprint.specifications_template.name.lower() and \
@@ -401,17 +413,21 @@ def get_related_target_observation_task_blueprint(calibrator_or_beamformer_task_
         raise ValueError("Cannot get a related target observation task_blueprint for non-calibrator/beamformer task_blueprint id=%s template_name='%s'",
                         calibrator_or_beamformer_task_blueprint.id, calibrator_or_beamformer_task_blueprint.specifications_template.name)
 
+    return _get_related_observation_task_blueprint(calibrator_or_beamformer_task_blueprint, 'target observation')
+
+
+def _get_related_observation_task_blueprint(task_blueprint: TaskBlueprint, related_template_name: str) -> (TaskBlueprint, SchedulingRelationPlacement):
     try:
-        return next(relation.second for relation in TaskSchedulingRelationBlueprint.objects.filter(first=calibrator_or_beamformer_task_blueprint).all()
-                    if relation.second is not None and relation.second.specifications_template.name.lower() == 'target observation')
+        return next((relation.second, relation.placement) for relation in TaskSchedulingRelationBlueprint.objects.filter(first=task_blueprint).all()
+                    if relation.second is not None and relation.second.specifications_template.name.lower() == related_template_name)
     except StopIteration:
         try:
-            return next(relation.first for relation in TaskSchedulingRelationBlueprint.objects.filter(second=calibrator_or_beamformer_task_blueprint).all()
-                        if relation.first is not None and relation.first.specifications_template.name.lower() == 'target observation')
+            return next((relation.first, relation.placement) for relation in TaskSchedulingRelationBlueprint.objects.filter(second=task_blueprint).all()
+                        if relation.first is not None and relation.first.specifications_template.name.lower() == related_template_name)
         except StopIteration:
-            logger.info("No related target observation task_blueprint found for calibrator/beamformer observation task_blueprint id=%d", calibrator_or_beamformer_task_blueprint.id)
+            logger.info("No related %s task_blueprint found for task_blueprint id=%d", related_template_name, task_blueprint.id)
 
-    return None
+    return None, None
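+
+# Illustrative usage of the (blueprint, placement) contract returned by the helpers above
+# (variable names are hypothetical):
+#   calibrator_tb, placement = get_related_calibrator_observation_task_blueprint(target_tb)
+#   if placement is not None and placement.value == 'parallel':
+#       ...  # the calibrator runs in parallel to the target observation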
 
 
 def create_observation_control_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> Subtask:
@@ -436,19 +452,45 @@ def create_observation_control_subtask_from_task_blueprint(task_blueprint: TaskB
                      "stop_time": None,
                      "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value),
                      "specifications_doc": specifications_doc,
-                     "task_blueprint": task_blueprint,
+                     #"task_blueprint": task_blueprint,  # ManyToMany, so use set()!
                      "specifications_template": subtask_template,
                      "tags": [],
                      "cluster": Cluster.objects.get(name=cluster_name)
                      }
-    subtask = Subtask.objects.create(**subtask_data)
+
+    # If we deal with a calibrator obs that runs in parallel to a target observation, add the calibrator beam to the
+    # existing target obs subtask.
+    subtask = None
+    if 'calibrator' in task_blueprint.specifications_template.name.lower():
+        related_task_blueprint, relation = get_related_target_observation_task_blueprint(task_blueprint)
+        if relation and relation.value == 'parallel':
+            # add calibrator beam
+            subtask = related_task_blueprint.subtasks.filter(specifications_template__type__value=SubtaskType.Choices.OBSERVATION.value).first()
+            if not subtask:
+                raise SubtaskCreationException('Calibrator observation cannot be added to the target subtask, because it does not exist. Make sure to create a subtask from the target observation task id=%s first.' % related_task_blueprint.id)
+            subtask.specifications_doc['stations']['digital_pointings'] += subtask_data['specifications_doc']['stations']['digital_pointings']
+            # check that the additional beam fits into the spec (observation must not result in >488 subbands across all beams)
+            total_subbands = sum([len(digital_pointing['subbands']) for digital_pointing in subtask.specifications_doc['stations']['digital_pointings']])
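+            # Worked example (illustrative): a target observation with two beams of 200
+            # subbands each uses 400 subbands, leaving at most 88 for the calibrator beam.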
+            if total_subbands > 488:  # todo: should this be better handled in JSON?
+                raise SubtaskCreationException('Calibrator beam does not fit into the spec (results in %s total subbands, but only 488 are possible)' % total_subbands)
+
+    if not subtask:
+        subtask = Subtask.objects.create(**subtask_data)
+    subtask.task_blueprints.set(list(subtask.task_blueprints.all()) + [task_blueprint])
 
     # step 2: create and link subtask input/output
     # an observation has no input, it just produces output data
-    subtask_output = SubtaskOutput.objects.create(subtask=subtask)
-
-    # step 3: set state to DEFINED
-    subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value)
+    subtask_output = SubtaskOutput.objects.create(subtask=subtask,
+                                                  task_blueprint=task_blueprint)
+
+    # step 3: set state to DEFINED, unless we have a target obs with a related parallel calibrator obs
+    defined = True
+    if 'target' in task_blueprint.specifications_template.name.lower():
+        _, relation = get_related_calibrator_observation_task_blueprint(task_blueprint)
+        if relation and relation.value == 'parallel':
+            defined = False
+    if defined:
+        subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value)
     subtask.save()
     return subtask
 
@@ -469,7 +511,8 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask)
     https://support.astron.nl/confluence/display/TMSS/Specification+Flow
     '''
     # step 0: check pre-requisites
-    check_prerequities_for_subtask_creation(observation_subtask.task_blueprint)
+    for tb in observation_subtask.task_blueprints.all():
+        check_prerequities_for_subtask_creation(tb)
 
     if observation_subtask.specifications_template.type.value != SubtaskType.Choices.OBSERVATION.value:
         raise ValueError("Cannot create %s subtask for subtask id=%d type=%s because it is not an %s" % (
@@ -497,20 +540,26 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask)
     qafile_subtask_data = { "start_time": None,
                             "stop_time": None,
                             "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value),
-                            "task_blueprint": observation_subtask.task_blueprint,
+                            #"task_blueprint": observation_subtask.task_blueprint,   # ManyToMany, use set()
                             "specifications_template": qafile_subtask_template,
                             "specifications_doc": qafile_subtask_spec,
                             "cluster": observation_subtask.cluster}
     qafile_subtask = Subtask.objects.create(**qafile_subtask_data)
+    qafile_subtask.task_blueprints.set(observation_subtask.task_blueprints.all())
 
     # step 2: create and link subtask input/output
     selection_template = TaskRelationSelectionTemplate.objects.get(name="all")
     selection_doc = get_default_json_object_for_schema(selection_template.schema)
-    qafile_subtask_input = SubtaskInput.objects.create(subtask=qafile_subtask,
-                                                       producer=observation_subtask.outputs.first(), # TODO: determine proper producer based on spec in task_relation_blueprint
-                                                       selection_doc=selection_doc,
-                                                       selection_template=selection_template)
-    qafile_subtask_output = SubtaskOutput.objects.create(subtask=qafile_subtask)
+
+    for obs_out in observation_subtask.outputs.all():
+        qafile_subtask_input = SubtaskInput.objects.create(subtask=qafile_subtask,
+                                                           producer=obs_out,  # TODO: determine proper producer based on spec in task_relation_blueprint
+                                                           selection_doc=selection_doc,
+                                                           selection_template=selection_template)
+
+    for tb in observation_subtask.task_blueprints.all():
+        qafile_subtask_output = SubtaskOutput.objects.create(subtask=qafile_subtask,
+                                                             task_blueprint=tb)
 
     # step 3: set state to DEFINED
     qafile_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value)
@@ -523,7 +572,7 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask)
 def create_qaplots_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> Subtask:
     if 'calibrator' in task_blueprint.specifications_template.name.lower():
         # Calibrator requires related Target Task Observation for some specifications
-        target_task_blueprint = get_related_target_observation_task_blueprint(task_blueprint)
+        target_task_blueprint, _ = get_related_target_observation_task_blueprint(task_blueprint)
         if target_task_blueprint is None:
             raise SubtaskCreationException("Cannot retrieve specifications for task id=%d because no related target observation is found " % task.pk)
     else:
@@ -547,7 +596,8 @@ def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subta
     https://support.astron.nl/confluence/display/TMSS/Specification+Flow
     '''
     # step 0: check pre-requisites
-    check_prerequities_for_subtask_creation(qafile_subtask.task_blueprint)
+    for tb in qafile_subtask.task_blueprints.all():
+        check_prerequities_for_subtask_creation(tb)
 
     if qafile_subtask.specifications_template.type.value != SubtaskType.Choices.QA_FILES.value:
         raise ValueError("Cannot create %s subtask for subtask id=%d type=%s because it is not an %s" % (
@@ -571,11 +621,12 @@ def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subta
     qaplots_subtask_data = { "start_time": None,
                              "stop_time": None,
                              "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value),
-                             "task_blueprint": qafile_subtask.task_blueprint,
+                             #"task_blueprint": qafile_subtask.task_blueprint,
                              "specifications_template": qaplots_subtask_template,
                              "specifications_doc": qaplots_subtask_spec_doc,
                              "cluster": qafile_subtask.cluster}
     qaplots_subtask = Subtask.objects.create(**qaplots_subtask_data)
+    qaplots_subtask.task_blueprints.set(qafile_subtask.task_blueprints.all())
 
     # step 2: create and link subtask input/output
     selection_template = TaskRelationSelectionTemplate.objects.get(name="all")
@@ -584,7 +635,10 @@ def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subta
                                                         producer=qafile_subtask.outputs.first(),
                                                         selection_doc=selection_doc,
                                                         selection_template=selection_template)
-    qaplots_subtask_output = SubtaskOutput.objects.create(subtask=qaplots_subtask)
+
+    for tb in qafile_subtask.task_blueprints.all():
+        qaplots_subtask_output = SubtaskOutput.objects.create(subtask=qaplots_subtask,
+                                                              task_blueprint=tb)
 
     # step 3: set state to DEFINED
     qaplots_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value)
@@ -617,11 +671,12 @@ def create_preprocessing_subtask_from_task_blueprint(task_blueprint: TaskBluepri
     subtask_data = { "start_time": None,
                      "stop_time": None,
                      "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value),
-                     "task_blueprint": task_blueprint,
+                     #"task_blueprint": task_blueprint,  # ManyToMany, so use set()!
                      "specifications_template": subtask_template,
                      "specifications_doc": subtask_specs,
                      "cluster": Cluster.objects.get(name=cluster_name) }
     subtask = Subtask.objects.create(**subtask_data)
+    subtask.task_blueprints.set([task_blueprint])
 
     # step 2: create and link subtask input/output
     for task_relation_blueprint in task_blueprint.produced_by.all():
@@ -635,7 +690,8 @@ def create_preprocessing_subtask_from_task_blueprint(task_blueprint: TaskBluepri
                                                             producer=predecessor_subtask_output,
                                                             selection_doc=task_relation_blueprint.selection_doc,
                                                             selection_template=task_relation_blueprint.selection_template)
-    subtask_output = SubtaskOutput.objects.create(subtask=subtask)
+    subtask_output = SubtaskOutput.objects.create(subtask=subtask,
+                                                  task_blueprint=task_blueprint)
 
     # step 3: set state to DEFINED
     subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value)
@@ -661,11 +717,12 @@ def create_ingest_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) ->
     subtask_data = {"start_time": None,
                     "stop_time": None,
                     "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value),
-                    "task_blueprint": task_blueprint,
+                    #"task_blueprint": task_blueprint,  # ManyToMany, so use set()!
                     "specifications_template": subtask_template,
                     "specifications_doc": subtask_specs,
                     "cluster": Cluster.objects.get(name=cluster_name)}
     subtask = Subtask.objects.create(**subtask_data)
+    subtask.task_blueprints.set([task_blueprint])
 
     # step 2: create and link subtask input
     for task_relation_blueprint in task_blueprint.produced_by.all():
@@ -784,7 +841,7 @@ def update_subtasks_start_times_for_scheduling_unit(scheduling_unit: SchedulingU
     for task_blueprint in scheduling_unit.task_blueprints.all():
         defined_independend_subtasks = task_blueprint.subtasks.filter(state__value='defined').filter(inputs=None).all()
         for subtask in defined_independend_subtasks:
-            update_start_time_and_shift_successors_until_after_stop_time(subtask, start_time + subtask.task_blueprint.relative_start_time)
+            update_start_time_and_shift_successors_until_after_stop_time(subtask, start_time + min([tb.relative_start_time for tb in subtask.task_blueprints.all()]))  # todo: is min() correct here?
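+            # Illustrative: for parent task blueprints with relative start times of
+            # [0:00:00, 0:10:00], the earliest one (0:00:00) is added to start_time.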
 
 
 def update_start_time_and_shift_successors_until_after_stop_time(subtask: Subtask, start_time: datetime):
@@ -802,13 +859,22 @@ def shift_successors_until_after_stop_time(subtask: Subtask):
 
         # ... but adjust it if there is a scheduling_relation with an offset.
         # so, check if these successive subtasks have different task_blueprint parents
-        if subtask.task_blueprint.id != successor.task_blueprint.id:
-            relations = (TaskSchedulingRelationBlueprint.objects.filter(first=subtask.task_blueprint, second=successor.task_blueprint) |
-                         TaskSchedulingRelationBlueprint.objects.filter(first=successor.task_blueprint, second=subtask.task_blueprint)).all()
-            if relations:
-                # there should be only one scheduling relation between the tasks
-                relation = relations[0]
-                successor_start_time += timedelta(seconds=relation.time_offset)
+        # Note: subtasks either share the same parent task(s) or have entirely different ones; there is no partial overlap.
+        #  We now need to look up all combinations of subtask and successor blueprints
+        #  to find whether there's a relation with a time offset between the tasks...
+        time_offsets = []
+        for tb in subtask.task_blueprints.all():
+            for successor_tb in successor.task_blueprints.all():
+                if tb.id != successor_tb.id:
+                    relations = (TaskSchedulingRelationBlueprint.objects.filter(first=tb, second=successor_tb) |
+                                 TaskSchedulingRelationBlueprint.objects.filter(first=successor_tb, second=tb)).all()
+
+                    if relations:
+                        # there should be only one scheduling relation between the tasks
+                        time_offsets += [relations[0].time_offset]
+
+        if len(time_offsets) > 0:
+            successor_start_time += timedelta(seconds=max(time_offsets))
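+            # Illustrative: scheduling relations with offsets of [60, 120] seconds shift the
+            # successor by the largest one, i.e. 120 seconds past the predecessor's stop time.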
 
         # update the starttime and recurse to shift the successor successors as well
         update_start_time_and_shift_successors_until_after_stop_time(successor, successor_start_time)
@@ -966,17 +1032,18 @@ def get_station_groups(subtask):
     :return: station_groups which is a list of dict. { station_list, max_nr_missing }
     """
     station_groups = []
-    if 'calibrator' in subtask.task_blueprint.specifications_template.name.lower():
-        # Calibrator requires related Target Task Observation for some specifications
-        target_task_blueprint = get_related_target_observation_task_blueprint(subtask.task_blueprint)
-        if target_task_blueprint is None:
-            raise SubtaskException("Cannot retrieve related target observation of task_blueprint %d (subtask %d)" %
-                                   (subtask.task_blueprint.id, subtask.id))
-        if "station_groups" in target_task_blueprint.specifications_doc.keys():
-            station_groups = target_task_blueprint.specifications_doc["station_groups"]
-    else:
-        if "station_groups" in subtask.task_blueprint.specifications_doc.keys():
-            station_groups = subtask.task_blueprint.specifications_doc["station_groups"]
+    for task_blueprint in subtask.task_blueprints.all():
+        if 'calibrator' in task_blueprint.specifications_template.name.lower():
+            # Calibrator requires related Target Task Observation for some specifications
+            target_task_blueprint, _ = get_related_target_observation_task_blueprint(task_blueprint)
+            if target_task_blueprint is None:
+                raise SubtaskException("Cannot retrieve related target observation of task_blueprint %d (subtask %d)" %
+                                       (task_blueprint.id, subtask.id))
+            if "station_groups" in target_task_blueprint.specifications_doc.keys():
+                station_groups = target_task_blueprint.specifications_doc["station_groups"]
+        else:
+            if "station_groups" in task_blueprint.specifications_doc.keys():
+                station_groups = task_blueprint.specifications_doc["station_groups"]
     return station_groups
 
 
@@ -1168,7 +1235,24 @@ def schedule_observation_subtask(observation_subtask: Subtask):
     dataproduct_feedback_template = DataproductFeedbackTemplate.objects.get(name="empty")
     dataproduct_feedback_doc = get_default_json_object_for_schema(dataproduct_feedback_template.schema)
 
-    subtask_output = observation_subtask.outputs.first() # TODO: make proper selection, not default first()
+
+    # select correct output for each pointing based on name
+    subtask_output_dict = {}
+
+    for task_blueprint in observation_subtask.task_blueprints.all():
+        output = observation_subtask.outputs.filter(task_blueprint=task_blueprint).first()
+        if not output:
+            raise SubtaskSchedulingException('Cannot schedule subtask id=%s because it is missing the output for '
+                                             'task_blueprint id=%s (subtask has associated task_blueprints=%s, but '
+                                             'has outputs for task_blueprints=%s)' % (observation_subtask.id,
+                                                                                      task_blueprint.id,
+                                                                                      [(tb.id, tb.specifications_template.type) for tb in observation_subtask.task_blueprints.all()],
+                                                                                      [(out.task_blueprint.id, out.task_blueprint.specifications_template.type) for out in observation_subtask.outputs.all()]))
+        if 'SAPs' in task_blueprint.specifications_doc:  # target
+            for sap in task_blueprint.specifications_doc['SAPs']:
+                subtask_output_dict[sap['name']] = output
+        if 'pointing' in task_blueprint.specifications_doc:  # calibrator
+            subtask_output_dict[task_blueprint.specifications_doc['name']] = output
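+    # Illustrative shape of the mapping built above (pointing names are hypothetical):
+    #   {'target1': <SubtaskOutput of the target blueprint>,
+    #    'cal1': <SubtaskOutput of the calibrator blueprint>}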
 
     # create SAP objects, as observations create new beams
     antennaset = specifications_doc['stations']['antenna_set']
@@ -1189,9 +1273,13 @@ def schedule_observation_subtask(observation_subtask: Subtask):
                                specifications_template=SAPTemplate.objects.get(name="SAP")) for sap_nr, pointing in enumerate(specifications_doc['stations']['digital_pointings'])]
 
     # store everything below this directory
+    # Support for several projects will be added in TMSS-689; for now, guard against it.
+    project_set = set([tb.scheduling_unit_blueprint.draft.scheduling_set.project.name for tb in observation_subtask.task_blueprints.all()])
+    if len(project_set) != 1:
+        raise SubtaskSchedulingException('Cannot schedule subtask id=%s because it references task blueprints that belong to different projects=%s' % (observation_subtask.id, project_set))
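+    # Illustrative: task blueprints from two projects, e.g. {'LC1_001', 'LT2_002'}, would be
+    # ambiguous for the single data directory below, hence the guard until TMSS-689.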
     directory = "/data/%s/%s/L%s" % ("projects" if isProductionEnvironment() else "test-projects",
-                                     observation_subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project.name,
-                                     observation_subtask.id)
+                                     list(project_set)[0],  # TMSS-689: use correct project name for each dataproduct
+                                     observation_subtask.id)
 
     # create correlated dataproducts
     if specifications_doc['COBALT']['correlator']['enabled']:
@@ -1201,6 +1289,10 @@ def schedule_observation_subtask(observation_subtask: Subtask):
         sb_nr_offset = 0 # subband numbers run from 0 to (nr_subbands-1), increasing across SAPs
 
         for sap_nr, pointing in enumerate(specifications_doc['stations']['digital_pointings']):
+            if pointing['name'] in subtask_output_dict:
+                subtask_output = subtask_output_dict[pointing['name']]
+            else:
+                raise SubtaskSchedulingException('Cannot schedule subtask id=%s because the output for pointing name=%s cannot be determined.' % (observation_subtask.id, pointing['name']))
             for sb_nr, subband in enumerate(pointing['subbands'], start=sb_nr_offset):
                 dataproducts.append(Dataproduct(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr),
                                                          directory=directory+"/uv",
@@ -1241,7 +1333,7 @@ def schedule_observation_subtask(observation_subtask: Subtask):
                                          directory=directory+("/cs" if coherent else "/is"),
                                          dataformat=Dataformat.objects.get(value="Beamformed"),
                                          datatype=Datatype.objects.get(value="time series"),
-                                         producer=subtask_output,
+                                         producer=observation_subtask.outputs.first(),  # todo: select correct output. I tried "subtask_output_dict[sap['name']]" but tests fail because the sap's name is not in the task blueprint. Maybe it's just test setup and this should work?
                                          specifications_doc={"sap": specifications_doc['stations']['digital_pointings'][sap_nr]["name"], "coherent": coherent, "identifiers": {"pipeline_index": pipeline_nr, "tab_index": tab_nr, "stokes_index": stokes_nr, "part_index": part_nr}},
                                          specifications_template=dataproduct_specifications_template_timeseries,
                                          feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
@@ -1409,7 +1501,7 @@ def schedule_ingest_subtask(ingest_subtask: Subtask):
     ingest_subtask.save()
 
     # check permission pre-requisites
-    scheduling_unit_blueprint = ingest_subtask.task_blueprint.scheduling_unit_blueprint
+    scheduling_unit_blueprint = ingest_subtask.task_blueprints.first().scheduling_unit_blueprint    # first() is fine because we assume an ingest subtask does not serve tasks across SU boundaries
     if scheduling_unit_blueprint.ingest_permission_required:
         if scheduling_unit_blueprint.ingest_permission_granted_since is None or scheduling_unit_blueprint.ingest_permission_granted_since > datetime.utcnow():
             raise SubtaskSchedulingException("Cannot schedule ingest subtask id=%d because it requires explicit permission and the permission has not been granted (yet)" % (ingest_subtask.pk,))
@@ -1439,7 +1531,9 @@ def schedule_ingest_subtask(ingest_subtask: Subtask):
         ingest_subtask_input.dataproducts.set(input_dataproducts)
 
         # define output and create output dataproducts.
-        ingest_subtask_output = SubtaskOutput.objects.create(subtask=ingest_subtask)
+        tb = ingest_subtask_input.producer.task_blueprint  # output dataproducts are linked to the same task as their input dataproducts
+        ingest_subtask_output = SubtaskOutput.objects.create(subtask=ingest_subtask,
+                                                             task_blueprint=tb)
 
         # prepare identifiers in bulk for each output_dataproduct
         dp_gids = [SIPidentifier(source="TMSS") for _ in input_dataproducts]
@@ -1537,7 +1631,7 @@ def create_and_schedule_subtasks_from_task_blueprint(task_blueprint: TaskBluepri
 
 def schedule_independent_subtasks_in_task_blueprint(task_blueprint: TaskBlueprint, start_time: datetime=None) -> [Subtask]:
     '''Convenience method: Schedule (and return) the subtasks in the task_blueprint that are not dependend on any predecessors'''
-    independent_subtasks = list(Subtask.independent_subtasks().filter(task_blueprint_id=task_blueprint.id, state__value=SubtaskState.Choices.DEFINED.value).all())
+    independent_subtasks = list(Subtask.independent_subtasks().filter(task_blueprints__id=task_blueprint.id, state__value=SubtaskState.Choices.DEFINED.value).all())
 
     for subtask in independent_subtasks:
         if start_time is not None:
@@ -1629,14 +1723,15 @@ def get_observation_task_specification_with_check_for_calibrator(subtask):
     :param: subtask object
-    :return: task_spec: the specifications_doc of the blue print task which is allways a target observation
+    :return: task_spec: the specifications_doc of the blueprint task, which is always a target observation
     """
-    if 'calibrator' in subtask.task_blueprint.specifications_template.name.lower():
-        # Calibrator requires related Target Task Observation for some specifications
-        target_task_blueprint = get_related_target_observation_task_blueprint(subtask.task_blueprint)
-        if target_task_blueprint is None:
-            raise SubtaskCreationException("Cannot retrieve specifications for subtask id=%d because no related target observation is found " % subtask.pk)
-        task_spec = target_task_blueprint.specifications_doc
-        logger.info("Using specifications for calibrator observation (id=%s) from target observation task_blueprint id=%s",
-                    subtask.task_blueprint.id, target_task_blueprint.id)
-    else:
-        task_spec = subtask.task_blueprint.specifications_doc
-    return task_spec
+    for task_blueprint in subtask.task_blueprints.all():
+        if 'calibrator' in task_blueprint.specifications_template.name.lower():
+            # Calibrator requires related Target Task Observation for some specifications
+            target_task_blueprint, _ = get_related_target_observation_task_blueprint(task_blueprint)
+            if target_task_blueprint is None:
+                raise SubtaskCreationException("Cannot retrieve specifications for subtask id=%d because no related target observation is found " % subtask.pk)
+            task_spec = target_task_blueprint.specifications_doc
+            logger.info("Using specifications for calibrator observation (id=%s) from target observation task_blueprint id=%s",
+                        task_blueprint.id, target_task_blueprint.id)
+        else:
+            task_spec = task_blueprint.specifications_doc
+        return task_spec  # note: only the first task_blueprint is considered here
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/views.py b/SAS/TMSS/backend/src/tmss/tmssapp/views.py
index e22c15b14c59cb62b64199e4ae8ae5181b0d409a..85bdfe0de03a90428f85f01fb51264e4b4082b49 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/views.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/views.py
@@ -186,7 +186,7 @@ def get_sun_rise_and_set(request):
 @swagger_auto_schema(method='GET',
                      responses={200: 'A JSON object with angular distances of the given sky coordinates from the given solar system bodies at the given timestamps (seen from LOFAR core)'},
                      operation_description="Get angular distances of the given sky coordinates from the given solar system bodies at all given timestamps. \n\n"
-                                           "Example request: /api/util/angular_separation_from_bodies?angle1=1&angle2=1&timestamps=2020-01-01T15,2020-01-01T16",
+                                           "Example request: /api/util/angular_separation?angle1=1&angle2=1&timestamps=2020-01-01T15,2020-01-01T16",
                      manual_parameters=[Parameter(name='angle1', required=True, type='string', in_='query',
                                                   description="first angle of celectial coordinates as float, e.g. RA"),
                                         Parameter(name='angle2', required=True, type='string', in_='query',
@@ -198,7 +198,7 @@ def get_sun_rise_and_set(request):
                                         Parameter(name='bodies', required=False, type='string', in_='query',
                                                   description="comma-separated list of solar system bodies")])
 @api_view(['GET'])
-def get_angular_separation_from_bodies(request):
+def get_angular_separation(request):
     '''
     returns angular distances of the given sky coordinates from the given astronomical objects at the given timestamps and stations
     '''
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/CMakeLists.txt b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/CMakeLists.txt
index 186d29924f2c1706f57804848474f1a74bfeebb8..ab71ce95fb8cbf05bcc2533b2cec8bdd42956243 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/CMakeLists.txt
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/CMakeLists.txt
@@ -8,7 +8,8 @@ set(_py_files
     scheduling.py
     permissions.py
     project_permissions.py
-    )
+    calculations.py
+    )
 
 python_install(${_py_files}
     DESTINATION lofar/sas/tmss/tmss/tmssapp/viewsets)
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/__init__.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/__init__.py
index 0f7980fabfd9022b1389bf2ac72a975f9d2fb1e8..6f585af0a1c4a3ffd3a879a663fcef1cf4840d32 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/__init__.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/__init__.py
@@ -1,4 +1,5 @@
 from .specification import *
 from .scheduling import *
 from .permissions import *
-from .project_permissions import *
\ No newline at end of file
+from .project_permissions import *
+from .calculations import *
\ No newline at end of file
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/calculations.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/calculations.py
new file mode 100644
index 0000000000000000000000000000000000000000..fd7eb3fbfeab476afe094fc8de92c3b0876b09b4
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/calculations.py
@@ -0,0 +1,13 @@
+from .. import models
+from .. import serializers
+from .lofar_viewset import LOFARViewSet
+
+
+#
+# Conversions ViewSets
+#
+
+class StationTimelineViewSet(LOFARViewSet):
+    queryset = models.StationTimeline.objects.all()
+    serializer_class = serializers.StationTimelineSerializer
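+    # This viewset is registered as 'station_timeline' in urls.py; illustrative request:
+    #   GET /api/station_timeline/  ->  list of StationTimeline entries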
+
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/lofar_viewset.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/lofar_viewset.py
index fa74b76b1a3291235d865b4b9914b15324f36871..86631f7c703cddeff73e07c64400ef21a4b2963a 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/lofar_viewset.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/lofar_viewset.py
@@ -19,50 +19,27 @@ from rest_framework.decorators import action
 from lofar.common import json_utils
 from lofar.sas.tmss.tmss.tmssapp.viewsets.permissions import TMSSPermissions, IsProjectMemberFilterBackend
 from lofar.sas.tmss.tmss.tmssapp.models import permissions
-from django_filters.rest_framework import DjangoFilterBackend
+from django_filters.rest_framework import DjangoFilterBackend, FilterSet, CharFilter
+from django_filters import filterset
 from rest_framework.filters import OrderingFilter
+from django.contrib.postgres.fields import JSONField, ArrayField
+from copy import deepcopy
 
-#class TMSSPermissionsMixin:
-
-    # def __init__(self, *args, **kwargs):
-    #     self.permission_classes = (TMSSPermissions,)
-    #     self.filter_backends = (IsProjectMemberFilterBackend,)
-    #     self.extra_action_permission_classes = self._create_extra_action_permission_classes()
-    #     super(TMSSPermissionsMixin, self).__init__(*args, **kwargs)
-    #
-    # # TODO: Cache this method to avoid redundancy and overhead.
-    # def _create_extra_action_permission_classes(self):
-    #     extra_action_permission_classes = []
-    #     extra_actions = [a.__name__ for a in self.get_extra_actions()]
-    #     for ea in extra_actions:  # Create permission classes
-    #         permission_name = f'{ea}_{self.serializer_class.Meta.model.__name__.lower()}'
-    #         permission_class_name = f'Can {ea} {self.serializer_class.Meta.model.__name__.lower()}'
-    #         new_permission_class = type(f'{permission_class_name}', (permissions.TMSSBasePermissions,), {
-    #             # TODO: Is it necessary to have both permissions and object permissions?
-    #             # TODO: Find a way to use the "%(app_label)s." syntax.
-    #             'permission_name': permission_name,
-    #             'has_permission': lambda self, request, view: request.user.has_perm(f'tmssapp.{self.permission_name}'),
-    #             'has_object_permission': lambda self, request, view, obj: request.user.has_perm(f'tmssapp.{self.permission_name}'),
-    #         })
-    #         new_permission_class.__setattr__(self, 'permission_name', permission_name)
-    #         extra_action_permission_classes.append({ea: new_permission_class},)
-    #     return extra_action_permission_classes
-    #
-    # # TODO: Refactoring.
-    # def get_model_permissions(self):
-    #     extra_actions = [a.__name__ for a in self.get_extra_actions()]
-    #     if self.action in extra_actions:
-    #         for ea_permission_class in self.extra_action_permission_classes:
-    #             if ea_permission_class.get(self.action):
-    #                 return [permissions.TMSSBasePermissions, ea_permission_class.get(self.action),]
-    #             else:
-    #                 return [permissions.TMSSBasePermissions,]
-    #     else:
-    #         return [permissions.TMSSBasePermissions, ]
-
-    #def get_permissions(self):
-    #    self.get_extra_action_permission_classes()
-    #    return super(TMSSPermissionsMixin, self).get_permissions()
+class LOFARDefaultFilterSet(FilterSet):
+    FILTER_DEFAULTS = deepcopy(filterset.FILTER_FOR_DBFIELD_DEFAULTS)
+    FILTER_DEFAULTS.update({
+        JSONField: {
+            'filter_class': CharFilter
+        },
+        ArrayField: {
+            'filter_class': CharFilter,
+            'extra': lambda f: {'lookup_expr': 'icontains'}
+        },
+    })
+
+
+class LOFARFilterBackend(DjangoFilterBackend):
+    default_filter_set = LOFARDefaultFilterSet
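+
+# With filter_fields = '__all__' on LOFARViewSet below, every model field becomes a query
+# parameter, while JSONFields and ArrayFields fall back to the text-based CharFilters above.
+# Illustrative request (endpoint and field names assumed): GET /api/subtask/?tags=test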
 
 
 class LOFARViewSet(viewsets.ModelViewSet):
@@ -71,7 +48,8 @@ class LOFARViewSet(viewsets.ModelViewSet):
     the `format=None` keyword argument for each action.
     """
     permission_classes = (TMSSPermissions,)
-    filter_backends = (DjangoFilterBackend, OrderingFilter, IsProjectMemberFilterBackend,)
+    filter_backends = (LOFARFilterBackend, OrderingFilter, IsProjectMemberFilterBackend,)
+    filter_fields = '__all__'
 
     @swagger_auto_schema(responses={403: 'forbidden'})
     def list(self, request, **kwargs):
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py
index 841106cf440ecff536c32a22fc10d76adeed3ed6..94d00d005457ccabeea9d604c1762e983edfc077 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py
@@ -77,9 +77,9 @@ class StationTypeViewSet(LOFARViewSet):
     queryset = models.StationType.objects.all()
     serializer_class = serializers.StationTypeSerializer
 
-class AlgorithmViewSet(LOFARViewSet):
-    queryset = models.Algorithm.objects.all()
-    serializer_class = serializers.AlgorithmSerializer
+class HashAlgorithmViewSet(LOFARViewSet):
+    queryset = models.HashAlgorithm.objects.all()
+    serializer_class = serializers.HashAlgorithmSerializer
 
 class SubtaskTemplateFilter(filters.FilterSet):
     class Meta:
@@ -412,12 +412,12 @@ class DataproductViewSet(LOFARViewSet):
 
         if 'md5_checksum' in json_doc:
             models.DataproductHash.objects.create(dataproduct=dataproduct,
-                                                  algorithm=models.Algorithm.objects.get(value=models.Algorithm.Choices.MD5.value),
+                                                  hash_algorithm=models.HashAlgorithm.objects.get(value=models.HashAlgorithm.Choices.MD5.value),
                                                   hash=json_doc['md5_checksum'])
 
         if 'adler32_checksum' in json_doc:
             models.DataproductHash.objects.create(dataproduct=dataproduct,
-                                                  algorithm=models.Algorithm.objects.get(value=models.Algorithm.Choices.ADLER32.value),
+                                                  hash_algorithm=models.HashAlgorithm.objects.get(value=models.HashAlgorithm.Choices.ADLER32.value),
                                                   hash=json_doc['adler32_checksum'])
 
         # create empty feedback. Apart from the archive info above, ingest does not create feedback like observations/pipelines do.
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py
index 620742eaa77f9aedd8400e88f862121fcb2e2dbf..b8eefe04f4f4a96b6f8969059b58fa2806f27708 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py
@@ -19,9 +19,10 @@ from rest_framework.decorators import action
 from drf_yasg.utils import swagger_auto_schema
 from drf_yasg.openapi import Parameter
 
-from lofar.sas.tmss.tmss.tmssapp.viewsets.lofar_viewset import LOFARViewSet, LOFARNestedViewSet, AbstractTemplateViewSet, LOFARCopyViewSet
+from lofar.sas.tmss.tmss.tmssapp.viewsets.lofar_viewset import LOFARViewSet, LOFARNestedViewSet, AbstractTemplateViewSet, LOFARCopyViewSet, LOFARFilterBackend
 from lofar.sas.tmss.tmss.tmssapp import models
 from lofar.sas.tmss.tmss.tmssapp import serializers
+from lofar.sas.tmss.tmss.tmssapp.adapters.reports import create_project_report
 from django.http import JsonResponse
 
 from datetime import datetime
@@ -32,7 +33,6 @@ from lofar.sas.tmss.tmss.tmssapp.subtasks import *
 from lofar.sas.tmss.tmss.tmssapp.viewsets.permissions import TMSSDjangoModelPermissions
 
 from django.urls  import resolve, get_script_prefix,Resolver404
-from django_filters.rest_framework import DjangoFilterBackend
 from rest_framework.filters import OrderingFilter
 
 import json
@@ -312,7 +312,7 @@ class TaskConnectorTypeViewSet(LOFARViewSet):
 
 class CycleViewSet(LOFARViewSet):
     permission_classes = (TMSSDjangoModelPermissions,)      # override default project permission
-    filter_backends = (DjangoFilterBackend, OrderingFilter)                                    # override default project permission
+    filter_backends = (LOFARFilterBackend, OrderingFilter)                                    # override default project permission
     queryset = models.Cycle.objects.all()
     serializer_class = serializers.CycleSerializer
     ordering = ['start']
@@ -348,6 +348,15 @@ class ProjectViewSet(LOFARViewSet):
 
         return queryset
 
+    @swagger_auto_schema(responses={200: 'The Report information',
+                                    403: 'forbidden'},
+                         operation_description="Get Report information for the project.")
+    @action(methods=['get'], detail=True, url_name="report", name="Get Report")
+    def report(self, request, pk=None):
+        project = get_object_or_404(models.Project, pk=pk)
+        result = create_project_report(request, project)
+        return Response(result, status=status.HTTP_200_OK)
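+    # Illustrative request for the extra action above:
+    #   GET /api/project/<pk>/report/  ->  JSON report produced by create_project_report()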
+
 
 class ProjectNestedViewSet(LOFARNestedViewSet):
     queryset = models.Project.objects.all()
@@ -401,9 +410,9 @@ class SchedulingSetViewSet(LOFARViewSet):
     serializer_class = serializers.SchedulingSetSerializer
 
 
-class FlagViewSet(LOFARViewSet):
-    queryset = models.Flag.objects.all()
-    serializer_class = serializers.FlagSerializer
+class SystemSettingFlagViewSet(LOFARViewSet):
+    queryset = models.SystemSettingFlag.objects.all()
+    serializer_class = serializers.SystemSettingFlagSerializer
 
 
 class SettingViewSet(LOFARViewSet):
@@ -431,13 +440,22 @@ class SchedulingUnitDraftPropertyFilter(property_filters.PropertyFilterSet):
 
     class Meta:
         model = models.SchedulingUnitDraft
-        fields = ['project']
+        fields = '__all__'
+        filter_overrides = {
+            models.JSONField: {
+                'filter_class': property_filters.CharFilter,
+            },
+            models.ArrayField: {
+                'filter_class': property_filters.CharFilter,
+                'extra': lambda f: {'lookup_expr': 'icontains'}
+            },
+        }
 
 
 class SchedulingUnitDraftViewSet(LOFARViewSet):
     queryset = models.SchedulingUnitDraft.objects.all()
     serializer_class = serializers.SchedulingUnitDraftSerializer
-    filter_class = SchedulingUnitDraftPropertyFilter
+    filter_class = SchedulingUnitDraftPropertyFilter    # note that this breaks other filter backends from LOFARViewSet
 
     # prefetch all reverse related references from other models on their related_name to avoid a ton of duplicate queries
     queryset = queryset.prefetch_related('copied_from') \
@@ -730,16 +748,26 @@ class SchedulingUnitBlueprintPropertyFilter(property_filters.PropertyFilterSet):
     start_time = property_filters.PropertyIsoDateTimeFromToRangeFilter(field_name='start_time')
     stop_time = property_filters.PropertyIsoDateTimeFromToRangeFilter(field_name='stop_time')
     project = property_filters.PropertyCharFilter(field_name='project')
+    status = property_filters.PropertyCharFilter(field_name='status')
 
     class Meta:
         model = models.SchedulingUnitBlueprint
-        fields = ['start_time', 'stop_time', 'project']
+        fields = '__all__'
+        filter_overrides = {
+            models.JSONField: {
+                'filter_class': property_filters.CharFilter,
+            },
+            models.ArrayField: {
+                'filter_class': property_filters.CharFilter,
+                'extra': lambda f: {'lookup_expr': 'icontains'}
+            },
+        }
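+        # Illustrative queries enabled by the property filters above (parameter names follow
+        # django-property-filter conventions): ?status=finished&start_time_after=2021-01-01T00:00:00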
 
 
 class SchedulingUnitBlueprintViewSet(LOFARViewSet):
     queryset = models.SchedulingUnitBlueprint.objects.all()
     serializer_class = serializers.SchedulingUnitBlueprintSerializer
-    filter_class = SchedulingUnitBlueprintPropertyFilter
+    filter_class = SchedulingUnitBlueprintPropertyFilter  # note that this breaks other filter backends from LOFARViewSet
 
     # prefetch all reverse related references from other models on their related_name to avoid a ton of duplicate queries
     queryset = queryset.prefetch_related('task_blueprints')
@@ -788,7 +816,7 @@ class SchedulingUnitBlueprintViewSet(LOFARViewSet):
                          operation_description="Get the subtask logging urls of this schedulingunit blueprint.")
     @action(methods=['get'], detail=True, url_name='get_all_subtasks_log_urls')
     def get_all_subtasks_log_urls(self, request, pk=None):
-        subtasks = models.Subtask.objects.filter(task_blueprint__scheduling_unit_blueprint_id=pk)
+        subtasks = models.Subtask.objects.filter(task_blueprints__scheduling_unit_blueprint_id=pk)
         result = []
         for subtask in subtasks:
             if subtask.log_url != "":
@@ -1081,3 +1109,8 @@ class TaskTypeViewSet(LOFARViewSet):
     queryset = models.TaskType.objects.all()
     serializer_class = serializers.TaskTypeSerializer
 
+
+class PriorityQueueTypeViewSet(LOFARViewSet):
+    queryset = models.PriorityQueueType.objects.all()
+    serializer_class = serializers.PriorityQueueTypeSerializer
+
diff --git a/SAS/TMSS/backend/src/tmss/urls.py b/SAS/TMSS/backend/src/tmss/urls.py
index afe222f05f2ef50547b85a34cd591755dbd77c40..1772bdc3b68f9bc789166e2efd9f0814ffa0b707 100644
--- a/SAS/TMSS/backend/src/tmss/urls.py
+++ b/SAS/TMSS/backend/src/tmss/urls.py
@@ -73,7 +73,7 @@ urlpatterns = [
     re_path('util/sun_rise_and_set/?', views.get_sun_rise_and_set, name='get_sun_rise_and_set'),
     re_path('util/utc/?', views.utc, name="system-utc"),
     re_path('util/lst/?', views.lst, name="conversion-lst"),
-    re_path('util/angular_separation_from_bodies/?', views.get_angular_separation_from_bodies, name='get_angular_separation_from_bodies'),
+    re_path('util/angular_separation/?', views.get_angular_separation, name='get_angular_separation'),
     re_path('util/target_rise_and_set/?', views.get_target_rise_and_set, name='get_target_rise_and_set'),
 ]
 
@@ -121,11 +121,12 @@ router.register(r'iotype', viewsets.IOTypeViewSet)
 router.register(r'datatype', viewsets.DatatypeViewSet)
 router.register(r'dataformat', viewsets.DataformatViewSet)
 router.register(r'copy_reason', viewsets.CopyReasonViewSet)
-router.register(r'flag', viewsets.FlagViewSet)
+router.register(r'system_setting_flag', viewsets.SystemSettingFlagViewSet)
 router.register(r'period_category', viewsets.PeriodCategoryViewSet)
 router.register(r'project_category', viewsets.ProjectCategoryViewSet)
 router.register(r'quantity', viewsets.QuantityViewSet)
 router.register(r'task_type', viewsets.TaskTypeViewSet)
+router.register(r'priority_queue_type', viewsets.PriorityQueueTypeViewSet)
 
 # templates
 router.register(r'common_schema_template', viewsets.CommonSchemaTemplateViewSet)
@@ -193,7 +194,7 @@ router.register(r'scheduling_unit_blueprint/(?P<scheduling_unit_blueprint_id>\d+
 router.register(r'subtask_state', viewsets.SubtaskStateViewSet)
 router.register(r'subtask_type', viewsets.SubtaskTypeViewSet)
 router.register(r'station_type', viewsets.StationTypeViewSet)
-router.register(r'algorithm', viewsets.AlgorithmViewSet)
+router.register(r'hash_algorithm', viewsets.HashAlgorithmViewSet)
 router.register(r'scheduling_relation_placement', viewsets.SchedulingRelationPlacement)
 
 # templates
@@ -226,6 +227,12 @@ router.register(r'sip_identifier', viewsets.SIPidentifierViewSet)
 router.register(r'project_role', viewsets.ProjectRoleViewSet)
 router.register(r'project_permission', viewsets.ProjectPermissionViewSet)
 
+
+# CONVERSIONS
+
+router.register(r'station_timeline', viewsets.StationTimelineViewSet)
+
+
 urlpatterns.extend(router.urls)
 
 frontend_urlpatterns = [
diff --git a/SAS/TMSS/backend/src/tmss/workflowapp/tests/t_workflow_qaworkflow.py b/SAS/TMSS/backend/src/tmss/workflowapp/tests/t_workflow_qaworkflow.py
index 4616f51c172043d48486bf833ef01fe1ddd2695c..99420f58829802e352481b51f9ab12113cbb18db 100755
--- a/SAS/TMSS/backend/src/tmss/workflowapp/tests/t_workflow_qaworkflow.py
+++ b/SAS/TMSS/backend/src/tmss/workflowapp/tests/t_workflow_qaworkflow.py
@@ -121,7 +121,7 @@ class SchedulingUnitFlowTest(unittest.TestCase):
                                             scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data()))
             
             scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
-            #ingest_subtask = models.Subtask.objects.get(task_blueprint__scheduling_unit_blueprint__id=scheduling_unit_blueprint.id,
+            #ingest_subtask = models.Subtask.objects.get(task_blueprints__scheduling_unit_blueprint__id=scheduling_unit_blueprint.id,
             #    specifications_template__type__value=TaskType.Choices.INGEST.value)
             scheduling_unit_draft.refresh_from_db()
             
@@ -434,7 +434,7 @@ class SchedulingUnitFlowTest(unittest.TestCase):
                                             scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data()))
             
             scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
-            #ingest_subtask = models.Subtask.objects.get(task_blueprint__scheduling_unit_blueprint__id=scheduling_unit_blueprint.id,
+            #ingest_subtask = models.Subtask.objects.get(task_blueprints__scheduling_unit_blueprint__id=scheduling_unit_blueprint.id,
             #    specifications_template__type__value=TaskType.Choices.INGEST.value)
             scheduling_unit_draft.refresh_from_db()
             
@@ -645,7 +645,7 @@ class SchedulingUnitFlowTest(unittest.TestCase):
                                             scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data()))
             
             scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
-            #ingest_subtask = models.Subtask.objects.get(task_blueprint__scheduling_unit_blueprint__id=scheduling_unit_blueprint.id,
+            #ingest_subtask = models.Subtask.objects.get(task_blueprints__scheduling_unit_blueprint__id=scheduling_unit_blueprint.id,
             #    specifications_template__type__value=TaskType.Choices.INGEST.value)
             scheduling_unit_draft.refresh_from_db()
             
@@ -883,7 +883,7 @@ class SchedulingUnitFlowTest(unittest.TestCase):
                                             scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data()))
             
             scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
-            #ingest_subtask = models.Subtask.objects.get(task_blueprint__scheduling_unit_blueprint__id=scheduling_unit_blueprint.id,
+            #ingest_subtask = models.Subtask.objects.get(task_blueprints__scheduling_unit_blueprint__id=scheduling_unit_blueprint.id,
             #    specifications_template__type__value=TaskType.Choices.INGEST.value)
             scheduling_unit_draft.refresh_from_db()
             
@@ -1180,7 +1180,7 @@ class SchedulingUnitFlowTest(unittest.TestCase):
                                             scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data()))
             
             scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
-            #ingest_subtask = models.Subtask.objects.get(task_blueprint__scheduling_unit_blueprint__id=scheduling_unit_blueprint.id,
+            #ingest_subtask = models.Subtask.objects.get(task_blueprints__scheduling_unit_blueprint__id=scheduling_unit_blueprint.id,
             #    specifications_template__type__value=TaskType.Choices.INGEST.value)
             scheduling_unit_draft.refresh_from_db()
             
diff --git a/SAS/TMSS/backend/test/t_adapter.py b/SAS/TMSS/backend/test/t_adapter.py
index 772a2d43ed706e328371dc2cdb048f38f65db9ed..4048a2e8120983cf846ede4dfdf17da6529bbcb0 100755
--- a/SAS/TMSS/backend/test/t_adapter.py
+++ b/SAS/TMSS/backend/test/t_adapter.py
@@ -63,6 +63,7 @@ class ObservationParsetAdapterTest(unittest.TestCase):
         subtask_template = models.SubtaskTemplate.objects.get(name='observation control')
         subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
         subtask:models.Subtask = models.Subtask.objects.create(**subtask_data)
+        subtask.task_blueprints.set([models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())])
         subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask))
         dataproduct:models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=subtask_output))
         return subtask
@@ -220,7 +221,7 @@ class SIPadapterTest(unittest.TestCase):
         # Create SubTask(output)
         subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
         subtask:models.Subtask = models.Subtask.objects.create(**subtask_data)
-
+        subtask.task_blueprints.set([models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())])
         subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask))
         # Create Dataproduct
         dataproduct: models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(feedback_doc=feedback_doc, producer=subtask_output))
@@ -507,6 +508,89 @@ _isCobalt=T
         self.assertEqual(self.feedback_pipe_complete.strip(), subtask_pipe.raw_feedback.strip())
 
 
+class ProjectReportTest(unittest.TestCase):
+    def setUp(self):
+        # Create requirements
+        self.project = models.Project.objects.create(**Project_test_data(name='test_for_report'))
+        self.project_quota = models.ProjectQuota.objects.create(
+            **ProjectQuota_test_data(project=self.project, resource_type=models.ResourceType.objects.create(
+                **ResourceType_test_data(quantity=models.Quantity.objects.get(value=models.Quantity.Choices.NUMBER.value)))))
+        self.scheduling_set = models.SchedulingSet.objects.create(**SchedulingSet_test_data(project=self.project))
+        self.scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(
+            **SchedulingUnitDraft_test_data(scheduling_set=self.scheduling_set))
+        self.task_draft = models.TaskDraft.objects.create(
+            **TaskDraft_test_data(scheduling_unit_draft=self.scheduling_unit_draft))
+
+        # Create test_data_creator as superuser
+        self.test_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH)
+        response = requests.get(self.test_data_creator.django_api_url + '/', auth=self.test_data_creator.auth)
+
+    def _get_SUB_with_subtask_and_set_status(self, status=None):
+        """
+        Helper method to create a SUB, TaskBlueprint and Subtask, and optionally set the latter's status.
+        """
+        sub = models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data(draft=self.scheduling_unit_draft))
+        tb = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data(task_draft=self.task_draft, scheduling_unit_blueprint=sub))
+        # Create Subtask of type 'ingest'
+        subtask_template = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data(subtask_type_value='ingest'))
+        subtask = models.Subtask.objects.create(**Subtask_test_data(subtask_template=subtask_template))
+        subtask.task_blueprints.set([tb])
+
+        if status:  # optionally bring the Subtask into the requested state
+            with tmss_test_env.create_tmss_client() as client:
+                client.set_subtask_status(subtask.pk, status)
+            # Poll the database until the state change has been processed
+            subtask = models.Subtask.objects.get(pk=subtask.pk)
+            while subtask.state.value != status:
+                subtask = models.Subtask.objects.get(pk=subtask.pk)
+
+        return sub, tb, subtask
+
+    def test_create_project_report(self):
+        """
+        Test create project extra action.
+        """
+        # Create three SUBs whose subtasks are set to the states 'finished', 'cancelled' and 'defined' (i.e. not cancelled), respectively
+        succeeded_sub, _, succeeded_subtask = self._get_SUB_with_subtask_and_set_status('finished')
+        cancelled_sub, _, cancelled_subtask = self._get_SUB_with_subtask_and_set_status('cancelled')
+        not_cancelled_sub, _, not_cancelled_subtask = self._get_SUB_with_subtask_and_set_status('defined')
+
+        # Create a SubtaskOutput and two Dataproducts attached to it
+        subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=succeeded_subtask))
+        dataproduct1 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=subtask_output))
+        dataproduct2 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=subtask_output))
+
+        # Calculate expected durations
+        total = succeeded_subtask.duration.total_seconds() + cancelled_subtask.duration.total_seconds() + \
+                not_cancelled_subtask.duration.total_seconds()
+        total_succeeded = succeeded_subtask.duration.total_seconds()
+        total_not_cancelled = succeeded_subtask.duration.total_seconds() + not_cancelled_subtask.duration.total_seconds()
+        total_failed = cancelled_subtask.duration.total_seconds()
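+        # note: in the report 'total_failed' covers the cancelled subtask, while a 'defined' subtask still counts as not cancelled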
+
+        # Fetch the report and assert we get the expected object
+        response = requests.get(BASE_URL + '/project/%s/report' % self.project.pk, auth=self.test_data_creator.auth)
+        result = response.json()
+
+        # Assert Project and ProjectQuota ids
+        self.assertEqual(result['project'], self.project.pk)
+        self.assertEqual(result['quota'][0]['id'], self.project_quota.pk)
+
+        # Assert the durations are calculated as expected
+        self.assertAlmostEqual(result['durations']['total'], total)
+        self.assertAlmostEqual(result['durations']['total_succeeded'], total_succeeded)
+        self.assertAlmostEqual(result['durations']['total_not_cancelled'], total_not_cancelled)
+        self.assertAlmostEqual(result['durations']['total_failed'], total_failed)
+
+        # There is only one finished SUB
+        self.assertEqual(result['durations']['scheduling_unit_blueprints_finished'][0]['id'], succeeded_sub.pk)
+        # There is only one cancelled SUB
+        self.assertEqual(result['durations']['scheduling_unit_blueprints_failed'][0]['id'], cancelled_sub.pk)
+
+        # There are just two dataproducts
+        self.assertEqual(result['LTA dataproducts']['size__sum'], dataproduct1.size + dataproduct2.size)
+        # just check that the 'SAPs' placeholder was added
+        self.assertIsNotNone(result['SAPs'])
+
 
 if __name__ == "__main__":
     os.environ['TZ'] = 'UTC'
diff --git a/SAS/TMSS/backend/test/t_complex_serializers.py b/SAS/TMSS/backend/test/t_complex_serializers.py
index c6e27c5424809cbc36e07a8a92ef4d6c09222bf5..c49d0ae9940b02fcf4fc70b8081fb2c071c66783 100755
--- a/SAS/TMSS/backend/test/t_complex_serializers.py
+++ b/SAS/TMSS/backend/test/t_complex_serializers.py
@@ -49,7 +49,7 @@ class DynamicRelationalHyperlinkedModelSerializerTestCase(unittest.TestCase):
         # create some connected objects
         cls.td_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/')
         cls.tb_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(draft_url=cls.td_url), '/task_blueprint/')
-        test_data_creator.post_data_and_get_url(test_data_creator.Subtask(task_blueprint_url=cls.tb_url), '/subtask/')
+        test_data_creator.post_data_and_get_url(test_data_creator.Subtask(task_blueprint_urls=[cls.tb_url]), '/subtask/')
 
     def test_GET_task_draft_serializes_to_depth_0_by_default(self):
 
diff --git a/SAS/TMSS/backend/test/t_conversions.py b/SAS/TMSS/backend/test/t_conversions.py
index 1773168c7b1ded14c41aee27f0fddd6683d9f9f7..6a07693cbced93562963ebd79790cf1716c58e0e 100755
--- a/SAS/TMSS/backend/test/t_conversions.py
+++ b/SAS/TMSS/backend/test/t_conversions.py
@@ -30,7 +30,6 @@ import json
 
 logger = logging.getLogger(__name__)
 logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
-from lofar.sas.tmss.tmss.tmssapp.conversions import local_sidereal_time_for_utc_and_station, local_sidereal_time_for_utc_and_longitude
 
 from lofar.common.test_utils import exit_with_skipped_code_if_skip_integration_tests
 exit_with_skipped_code_if_skip_integration_tests()
@@ -40,6 +39,10 @@ exit_with_skipped_code_if_skip_integration_tests()
 # (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module)
 from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import *
 
+# The next import should be done after the 'tmss_test_environment_unittest_setup' magic !!!
+from lofar.sas.tmss.tmss.tmssapp.conversions import local_sidereal_time_for_utc_and_station, local_sidereal_time_for_utc_and_longitude
+
+
 class SiderealTime(unittest.TestCase):
 
     def test_local_sidereal_time_for_utc_and_longitude_returns_correct_result(self):
@@ -205,17 +208,17 @@ class UtilREST(unittest.TestCase):
         response_date = dateutil.parser.parse(r_dict['CS002']['night'][1]['start']).date()
         self.assertEqual(expected_date, response_date)
 
-    # angular_separation_from_bodies
+    # angular_separation
 
-    def test_util_angular_separation_from_bodies_yields_error_when_no_pointing_is_given(self):
-        r = requests.get(BASE_URL + '/util/angular_separation_from_bodies', auth=AUTH)
+    def test_util_angular_separation_yields_error_when_no_pointing_is_given(self):
+        r = requests.get(BASE_URL + '/util/angular_separation', auth=AUTH)
 
         # assert error
         self.assertEqual(r.status_code, 500)
         self.assertIn("celestial coordinates", r.content.decode('utf-8'))
 
-    def test_util_angular_separation_from_bodies_returns_json_structure_with_defaults(self):
-        r = requests.get(BASE_URL + '/util/angular_separation_from_bodies?angle1=1&angle2=1', auth=AUTH)
+    def test_util_angular_separation_returns_json_structure_with_defaults(self):
+        r = requests.get(BASE_URL + '/util/angular_separation?angle1=1&angle2=1', auth=AUTH)
         self.assertEqual(r.status_code, 200)
         r_dict = json.loads(r.content.decode('utf-8'))
 
@@ -230,9 +233,9 @@ class UtilREST(unittest.TestCase):
         self.assertTrue(delta < 60.0)
         self.assertEqual(type(list(r_dict['jupiter'].values())[0]), float)
 
-    def test_util_angular_separation_from_bodies_considers_bodies(self):
+    def test_util_angular_separation_considers_bodies(self):
         bodies = ['sun', 'neptune', 'mercury']
-        r = requests.get(BASE_URL + '/util/angular_separation_from_bodies?angle1=1&angle2=1&bodies=%s' % ','.join(bodies), auth=AUTH)
+        r = requests.get(BASE_URL + '/util/angular_separation?angle1=1&angle2=1&bodies=%s' % ','.join(bodies), auth=AUTH)
         self.assertEqual(r.status_code, 200)
         r_dict = json.loads(r.content.decode('utf-8'))
 
@@ -245,9 +248,9 @@ class UtilREST(unittest.TestCase):
                 self.assertNotEqual(angle, angle_last)
             angle_last = angle
 
-    def test_util_angular_separation_from_bodies_considers_timestamps(self):
+    def test_util_angular_separation_considers_timestamps(self):
         timestamps = ['2020-01-01', '2020-02-22T16-00-00', '2020-3-11', '2020-01-01']
-        r = requests.get(BASE_URL + '/util/angular_separation_from_bodies?angle1=1&angle2=1&timestamps=%s' % ','.join(timestamps), auth=AUTH)
+        r = requests.get(BASE_URL + '/util/angular_separation?angle1=1&angle2=1&timestamps=%s' % ','.join(timestamps), auth=AUTH)
         self.assertEqual(r.status_code, 200)
         r_dict = json.loads(r.content.decode('utf-8'))
 
@@ -261,10 +264,10 @@ class UtilREST(unittest.TestCase):
                 self.assertNotEqual(angle, angle_last)
             angle_last = angle
 
-    def test_util_angular_separation_from_bodies_considers_coordinates(self):
+    def test_util_angular_separation_considers_coordinates(self):
         test_coords = [(1, 1,"J2000"), (1.1, 1, "J2000"), (1.1, 1.1, "J2000")]
         for coords in test_coords:
-            r = requests.get(BASE_URL + '/util/angular_separation_from_bodies?angle1=%s&angle2=%s&direction_type=%s' % coords, auth=AUTH)
+            r = requests.get(BASE_URL + '/util/angular_separation?angle1=%s&angle2=%s&direction_type=%s' % coords, auth=AUTH)
             self.assertEqual(r.status_code, 200)
             r_dict = json.loads(r.content.decode('utf-8'))
 
diff --git a/SAS/TMSS/backend/test/t_permissions_system_roles.py b/SAS/TMSS/backend/test/t_permissions_system_roles.py
index 5d05682bec00597c71fc3ae94f46eaf6cc35a0d9..00287ea1ac243e495d6373168c2ccb965d2f6477 100755
--- a/SAS/TMSS/backend/test/t_permissions_system_roles.py
+++ b/SAS/TMSS/backend/test/t_permissions_system_roles.py
@@ -94,12 +94,13 @@ class SystemPermissionTestCase(unittest.TestCase):
             obs_subtask_data = test_data_creator.Subtask(specifications_template_url=obs_subtask_template['url'],
                                                          specifications_doc=obs_spec,
                                                          cluster_url=cluster_url,
-                                                         task_blueprint_url=obs_task_blueprint['url'],
+                                                         task_blueprint_urls=[obs_task_blueprint['url']],
                                                          raw_feedback='Observation.Correlator.channelWidth=3051.7578125')
             obs_subtask = test_data_creator.post_data_and_get_response_as_json_object(obs_subtask_data, '/subtask/')
             cls.obs_subtask_id = obs_subtask['id']
             obs_subtask_output_url = test_data_creator.post_data_and_get_url(
-                test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url']), '/subtask_output/')
+                test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url'],
+                                                task_blueprint_url=obs_task_blueprint['url']), '/subtask_output/')
             test_data_creator.post_data_and_get_url(
                 test_data_creator.Dataproduct(filename="L%s_SB000.MS" % obs_subtask['id'],
                                               subtask_output_url=obs_subtask_output_url), '/dataproduct/')
diff --git a/SAS/TMSS/backend/test/t_scheduling.py b/SAS/TMSS/backend/test/t_scheduling.py
index 6a6ff816fce2866f0f34a9c07c805aac6a83bf6c..f750cbd585edc0505ed348ae6ad173d77f50ec1e 100755
--- a/SAS/TMSS/backend/test/t_scheduling.py
+++ b/SAS/TMSS/backend/test/t_scheduling.py
@@ -73,8 +73,10 @@ def create_subtask_object_for_testing(subtask_type_value, subtask_state_value):
     task_blueprint = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data(specifications_template=models.TaskTemplate.objects.get(name='target observation' if subtask_type_value=='observation' else 'preprocessing pipeline')))
     subtask_template_obj = models.SubtaskTemplate.objects.get(name="%s control" % subtask_type_value)
     subtask_state_obj = models.SubtaskState.objects.get(value=subtask_state_value)
-    subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj, task_blueprint=task_blueprint)
-    return models.Subtask.objects.create(**subtask_data)
+    subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj)
+    subtask = models.Subtask.objects.create(**subtask_data)
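+    # many-to-many fields cannot be passed to objects.create(), so the task blueprint is attached afterwards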
+    subtask.task_blueprints.set([task_blueprint])
+    return subtask
 
 
 def create_reserved_stations_for_testing(station_list):
@@ -137,10 +139,11 @@ class SchedulingTest(unittest.TestCase):
                                                      specifications_doc=spec,
                                                      cluster_url=cluster_url,
                                                      start_time=datetime.utcnow()+timedelta(minutes=5),
-                                                     task_blueprint_url=task_blueprint['url'])
+                                                     task_blueprint_urls=[task_blueprint['url']])
             subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/')
             subtask_id = subtask['id']
-            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/')
+            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url'],
+                                                                                    task_blueprint_url=task_blueprint['url']), '/subtask_output/')
 
             client.set_subtask_status(subtask_id, 'defined')
             subtask = client.schedule_subtask(subtask_id)
@@ -196,10 +199,11 @@ class SchedulingTest(unittest.TestCase):
                                                      specifications_doc=spec,
                                                      cluster_url=cluster_url,
                                                      start_time=datetime.utcnow() + timedelta(minutes=5),
-                                                     task_blueprint_url=task_blueprint['url'])
+                                                     task_blueprint_urls=[task_blueprint['url']])
             subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/')
             subtask_id = subtask['id']
-            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/')
+            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url'],
+                                                                                    task_blueprint_url=task_blueprint['url']), '/subtask_output/')
 
             client.set_subtask_status(subtask_id, 'defined')
 
@@ -233,10 +237,11 @@ class SchedulingTest(unittest.TestCase):
                                                      specifications_doc=spec,
                                                      cluster_url=cluster_url,
                                                      start_time=datetime.utcnow() + timedelta(minutes=5),
-                                                     task_blueprint_url=task_blueprint['url'])
+                                                     task_blueprint_urls=[task_blueprint['url']])
             subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/')
             subtask_id = subtask['id']
-            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/')
+            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url'],
+                                                                                    task_blueprint_url=task_blueprint['url']), '/subtask_output/')
 
             client.set_subtask_status(subtask_id, 'defined')
 
@@ -269,10 +274,11 @@ class SchedulingTest(unittest.TestCase):
                                                      specifications_doc=spec,
                                                      cluster_url=cluster_url,
                                                      start_time=datetime.utcnow()+timedelta(minutes=5),
-                                                     task_blueprint_url=task_blueprint['url'])
+                                                     task_blueprint_urls=[task_blueprint['url']])
             subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/')
             subtask_id = subtask['id']
-            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']),
+            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url'],
+                                                                                    task_blueprint_url=task_blueprint['url']),
                                                     '/subtask_output/')
 
             client.set_subtask_status(subtask_id, 'defined')
@@ -295,9 +301,10 @@ class SchedulingTest(unittest.TestCase):
             obs_subtask_data = test_data_creator.Subtask(specifications_template_url=obs_subtask_template['url'],
                                                          specifications_doc=obs_spec,
                                                          cluster_url=cluster_url,
-                                                         task_blueprint_url=obs_task_blueprint['url'])
+                                                         task_blueprint_urls=[obs_task_blueprint['url']])
             obs_subtask = test_data_creator.post_data_and_get_response_as_json_object(obs_subtask_data, '/subtask/')
-            obs_subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url']), '/subtask_output/')
+            obs_subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url'],
+                                                                                                             task_blueprint_url=obs_task_blueprint['url']), '/subtask_output/')
             test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(filename="L%s_SB000.MS"%obs_subtask['id'],
                                                                                   specifications_doc={"sap": "target0", "subband": 0 },
                                                                                   subtask_output_url=obs_subtask_output_url), '/dataproduct/')
@@ -311,13 +318,14 @@ class SchedulingTest(unittest.TestCase):
 
             pipe_subtask_data = test_data_creator.Subtask(specifications_template_url=pipe_subtask_template['url'],
                                                           specifications_doc=pipe_spec,
-                                                          task_blueprint_url=pipe_task_blueprint['url'],
+                                                          task_blueprint_urls=[pipe_task_blueprint['url']],
                                                           cluster_url=cluster_url)
             pipe_subtask = test_data_creator.post_data_and_get_response_as_json_object(pipe_subtask_data, '/subtask/')
 
             # ...and connect it to the observation
             test_data_creator.post_data_and_get_url(test_data_creator.SubtaskInput(subtask_url=pipe_subtask['url'], subtask_output_url=obs_subtask_output_url), '/subtask_input/')
-            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=pipe_subtask['url']), '/subtask_output/')
+            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=pipe_subtask['url'],
+                                                                                    task_blueprint_url=pipe_task_blueprint['url']), '/subtask_output/')
 
             for predecessor in client.get_subtask_predecessors(pipe_subtask['id']):
                 client.set_subtask_status(predecessor['id'], 'finished')
@@ -340,9 +348,10 @@ class SchedulingTest(unittest.TestCase):
             obs_subtask_data = test_data_creator.Subtask(specifications_template_url=obs_subtask_template['url'],
                                                          specifications_doc=obs_spec,
                                                          cluster_url=cluster_url,
-                                                         task_blueprint_url=test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/'))
+                                                         task_blueprint_urls=[test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/')])
             obs_subtask = test_data_creator.post_data_and_get_response_as_json_object(obs_subtask_data, '/subtask/')
-            obs_subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url']), '/subtask_output/')
+            obs_subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url'],
+                                                                                                             task_blueprint_url=obs_subtask['task_blueprints'][0]), '/subtask_output/')
             test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(filename="L%s_SB000.MS"%obs_subtask['id'],
                                                                                   specifications_doc={"sap": "target0", "subband": 0},
                                                     subtask_output_url=obs_subtask_output_url), '/dataproduct/')
@@ -353,19 +362,20 @@ class SchedulingTest(unittest.TestCase):
 
             ingest_subtask_data = test_data_creator.Subtask(specifications_template_url=ingest_subtask_template['url'],
                                                           specifications_doc=ingest_spec,
-                                                          task_blueprint_url=obs_subtask['task_blueprint'],
+                                                          task_blueprint_urls=obs_subtask['task_blueprints'],
                                                           cluster_url=cluster_url)
             ingest_subtask = test_data_creator.post_data_and_get_response_as_json_object(ingest_subtask_data, '/subtask/')
 
             # ...and connect it to the observation
             test_data_creator.post_data_and_get_url(test_data_creator.SubtaskInput(subtask_url=ingest_subtask['url'], subtask_output_url=obs_subtask_output_url), '/subtask_input/')
-            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=ingest_subtask['url']), '/subtask_output/')
+            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=ingest_subtask['url'],
+                                                                                    task_blueprint_url=obs_subtask['task_blueprints'][0]), '/subtask_output/')  # our subtask here has only one known related task
 
             for predecessor in client.get_subtask_predecessors(ingest_subtask['id']):
                 client.set_subtask_status(predecessor['id'], 'finished')
             client.set_subtask_status(ingest_subtask['id'], 'defined')
 
-            task_blueprint = client.get_url_as_json_object(ingest_subtask['task_blueprint'])
+            task_blueprint = client.get_url_as_json_object(ingest_subtask['task_blueprints'][0])  # our subtask here has only one known related task
             schedulingunit_blueprint = client.get_url_as_json_object(task_blueprint['scheduling_unit_blueprint'])
 
             # first, make sure we need but do not have ingest persmission...
@@ -505,6 +515,64 @@ class SubtaskInputOutputTest(unittest.TestCase):
         self.assertEqual(set(pipe_in1.dataproducts.all()), {dp1_1, dp1_3})
         self.assertEqual(set(pipe_in2.dataproducts.all()), {dp2_2})
 
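+    # assign_or_unassign_resources is mocked out, presumably so that scheduling in this test does not contact the resource assigner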
+    @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks.assign_or_unassign_resources")
+    def test_combined_target_calibrator_subtask_connects_dataproducts_to_correct_output(self, assign_resources_mock):
+        """
+        Create a subtask that combines a target and parallel calibrator observation.
+        Schedule the subtask and assert that dataproducts are assigned to both outputs.
+        """
+
+        # setup tasks
+        cal_task_template = models.TaskTemplate.objects.get(name="calibrator observation")
+        cal_task_spec = get_default_json_object_for_schema(cal_task_template.schema)
+
+        cal_task_draft = models.TaskDraft.objects.create(**TaskDraft_test_data(specifications_template=cal_task_template, specifications_doc=cal_task_spec))
+        cal_task_blueprint = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data(task_draft=cal_task_draft))
+
+        target_task_template = models.TaskTemplate.objects.get(name="target observation")
+        target_task_spec = get_default_json_object_for_schema(target_task_template.schema)
+        target_task_draft = models.TaskDraft.objects.create(**TaskDraft_test_data(specifications_template=target_task_template, specifications_doc=target_task_spec))
+        target_task_blueprint = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data(task_draft=target_task_draft,
+                                                                                              scheduling_unit_blueprint=cal_task_blueprint.scheduling_unit_blueprint))
+
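+        # the 'parallel' placement is what makes the calibrator observation share (and extend) the target's observation subtask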
+        models.TaskSchedulingRelationBlueprint.objects.create(first=cal_task_blueprint, second=target_task_blueprint,
+                                                              placement=models.SchedulingRelationPlacement.objects.get(value='parallel'))
+
+        # specify two beams with known number of subbands
+        target_task_blueprint.specifications_doc['SAPs'] = [{'name': 'target1_combined', 'target': '', 'subbands': [0, 1],
+                                                             'digital_pointing': {'angle1': 0.1, 'angle2': 0.1,
+                                                                                  'direction_type': 'J2000'}},
+                                                            {'name': 'target2_combined', 'target': '', 'subbands': [2, 3, 4],
+                                                             'digital_pointing': {'angle1': 0.1, 'angle2': 0.1,
+                                                                                  'direction_type': 'J2000'}}
+                                                            ]
+        target_task_blueprint.save()
+        cal_task_blueprint.specifications_doc['name'] = "calibrator_combined"
+        cal_task_blueprint.save()
+
+        # create subtask
+        create_observation_control_subtask_from_task_blueprint(target_task_blueprint)
+        subtask = create_observation_control_subtask_from_task_blueprint(cal_task_blueprint)
+        subtask.start_time = datetime.utcnow()
+        subtask.stop_time = datetime.utcnow()
+        subtask.save()
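+        # (start/stop times only need to be set so that the subtask can be scheduled; the exact values are presumably irrelevant here)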
+
+        # assert no dataproducts are connected before scheduling
+        target_output = subtask.outputs.filter(task_blueprint=target_task_blueprint).first()
+        cal_output = subtask.outputs.filter(task_blueprint=cal_task_blueprint).first()
+        self.assertEqual(target_output.dataproducts.count(), 0)
+        self.assertEqual(cal_output.dataproducts.count(), 0)
+
+        # schedule, and assert subtask state
+        self.assertEqual('defined', subtask.state.value)
+        schedule_observation_subtask(subtask)
+        self.assertEqual('scheduled', subtask.state.value)
+
+        # assert dataproducts are connected to both outputs after scheduling
+        # the target and calibrator outputs should each get one dataproduct per subband of the target task (2 + 3 = 5)
+        self.assertEqual(target_output.dataproducts.count(), 5)
+        self.assertEqual(cal_output.dataproducts.count(), 5)
+
 
 class SAPTest(unittest.TestCase):
     """
@@ -532,12 +600,13 @@ class SAPTest(unittest.TestCase):
             subtask_data = test_data_creator.Subtask(specifications_template_url=subtask_template['url'],
                                                      specifications_doc=spec,
                                                      cluster_url = cluster_url,
-                                                     task_blueprint_url=task_blueprint['url'],
+                                                     task_blueprint_urls=[task_blueprint['url']],
                                                      start_time=datetime.utcnow() + timedelta(minutes=5),
                                                      stop_time=datetime.utcnow() + timedelta(minutes=15))
             subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/')
             subtask_id = subtask['id']
-            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']),
+            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url'],
+                                                                                    task_blueprint_url=task_blueprint['url']),
                                                     '/subtask_output/')
 
             subtask_model = models.Subtask.objects.get(id=subtask_id)
diff --git a/SAS/TMSS/backend/test/t_scheduling_units.py b/SAS/TMSS/backend/test/t_scheduling_units.py
index 48bf809de5810a31de54e767a58e45f61815be4e..98234e7d6bee7b43c22f395e402196538683b288 100644
--- a/SAS/TMSS/backend/test/t_scheduling_units.py
+++ b/SAS/TMSS/backend/test/t_scheduling_units.py
@@ -72,12 +72,13 @@ class SchedulingUnitBlueprintStateTest(unittest.TestCase):
         # Create observation task
         task_data = TaskBlueprint_test_data(name="Task Observation "+str(uuid.uuid4()), scheduling_unit_blueprint=schedulingunit_blueprint)
         task_obs = models.TaskBlueprint.objects.create(**task_data)
-        subtask_data = Subtask_test_data(task_obs, state=models.SubtaskState.objects.get(value="defined"),
+        subtask_data = Subtask_test_data(state=models.SubtaskState.objects.get(value="defined"),
                                                subtask_template=models.SubtaskTemplate.objects.get(name='observation control'))
         if "observation" in skip_create_subtask:
             subtask_obs = None
         else:
             subtask_obs = models.Subtask.objects.create(**subtask_data)
+            subtask_obs.task_blueprints.set([task_obs])
 
         # Create pipeline task
         task_data = TaskBlueprint_test_data(name="Task Pipeline", scheduling_unit_blueprint=schedulingunit_blueprint)
@@ -85,13 +86,13 @@ class SchedulingUnitBlueprintStateTest(unittest.TestCase):
         # Need to change the default template type (observation) to pipeline
         task_pipe.specifications_template = models.TaskTemplate.objects.get(type=models.TaskType.Choices.PIPELINE.value)
         task_pipe.save()
-        subtask_data = Subtask_test_data(task_pipe,
-                                         state=models.SubtaskState.objects.get(value="defined"),
+        subtask_data = Subtask_test_data(state=models.SubtaskState.objects.get(value="defined"),
                                          subtask_template=models.SubtaskTemplate.objects.get(name='pipeline control'))
         if "pipeline" in skip_create_subtask:
             subtask_pipe = None
         else:
             subtask_pipe = models.Subtask.objects.create(**subtask_data)
+            subtask_pipe.task_blueprints.set([task_pipe])
 
         # Create ingest task
         # Because there is no taskTemplate object for ingest by default I have to create one
@@ -103,13 +104,13 @@ class SchedulingUnitBlueprintStateTest(unittest.TestCase):
         task_ingest.save()
         # There is no template defined for ingest yet ...but I can use pipeline control, only the template type matters
         # ....should become other thing in future but for this test does not matter
-        subtask_data = Subtask_test_data(task_ingest,
-                                         state=models.SubtaskState.objects.get(value="defined"),
+        subtask_data = Subtask_test_data(state=models.SubtaskState.objects.get(value="defined"),
                                          subtask_template=models.SubtaskTemplate.objects.get(name='pipeline control'))
         if "ingest" in skip_create_subtask:
             subtask_ingest = None
         else:
             subtask_ingest = models.Subtask.objects.create(**subtask_data)
+            subtask_ingest.task_blueprints.set([task_ingest])
 
         return {"observation": {"task": task_obs, "subtask": subtask_obs},
                 "pipeline": {"task": task_pipe, "subtask": subtask_pipe},
diff --git a/SAS/TMSS/backend/test/t_subtask_validation.py b/SAS/TMSS/backend/test/t_subtask_validation.py
index 11c2fc94bf38726ba03658649227c724f73b0a1c..2abd4418e535a5aeb6c8bbd1e91bcd7d49acb876 100755
--- a/SAS/TMSS/backend/test/t_subtask_validation.py
+++ b/SAS/TMSS/backend/test/t_subtask_validation.py
@@ -68,10 +68,11 @@ class SubtaskValidationTest(unittest.TestCase):
         subtask_template = self.create_subtask_template(minimal_json_schema())
         specifications_doc = '{ this is not a json object }'
         subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc,
-                                         task_blueprint=self.task_blueprint, cluster=self.cluster, state=self.state)
+                                         cluster=self.cluster, state=self.state)
 
         with self.assertRaises(SchemaValidationException) as context:
-            models.Subtask.objects.create(**subtask_data)
+            subtask = models.Subtask.objects.create(**subtask_data)
+            subtask.task_blueprints.set([self.task_blueprint])
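+            # (the .set() call is never reached, since create() already raises on the invalid spec)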
         self.assertTrue('invalid json' in str(context.exception).lower())
 
     def test_validate_correlator_schema_with_valid_specification(self):
@@ -81,7 +82,7 @@ class SubtaskValidationTest(unittest.TestCase):
 
         specifications_doc = get_default_json_object_for_schema(subtask_template.schema)
         subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc,
-                                         task_blueprint=self.task_blueprint, cluster=self.cluster, state=self.state)
+                                         cluster=self.cluster, state=self.state)
 
         subtask = models.Subtask.objects.create(**subtask_data)
         self.assertIsNotNone(subtask)
@@ -94,15 +95,15 @@ class SubtaskValidationTest(unittest.TestCase):
         # test with invalid json
         with self.assertRaises(SchemaValidationException) as context:
             subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc="bogus spec",
-                                             task_blueprint=self.task_blueprint, cluster=self.cluster, state=self.state)
-            models.Subtask.objects.create(**subtask_data)
+                                             cluster=self.cluster, state=self.state)
+            subtask = models.Subtask.objects.create(**subtask_data)
 
         # test with valid json, but not according to schema
         with self.assertRaises(SchemaValidationException) as context:
             specifications_doc = get_default_json_object_for_schema(subtask_template.schema)
             specifications_doc['COBALT']['blocksize'] = -1 # invalid value, should cause the SchemaValidationException
             subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc,
-                                             task_blueprint=self.task_blueprint, cluster=self.cluster, state=self.state)
+                                             cluster=self.cluster, state=self.state)
             models.Subtask.objects.create(**subtask_data)
         self.assertTrue('-1 is less than the minimum' in str(context.exception).lower())
 
diff --git a/SAS/TMSS/backend/test/t_subtasks.py b/SAS/TMSS/backend/test/t_subtasks.py
index 8086f231da703fba4bcdf574bed9940f0ee6d3d2..260f747acbcf75515cae69646b1a683c7ebdcae9 100755
--- a/SAS/TMSS/backend/test/t_subtasks.py
+++ b/SAS/TMSS/backend/test/t_subtasks.py
@@ -51,7 +51,9 @@ def create_subtask_object_for_testing(subtask_type_value, subtask_state_value):
     subtask_template_obj = create_subtask_template_for_testing(template_type)
     subtask_state_obj = models.SubtaskState.objects.get(value=subtask_state_value)
     subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj)
-    return models.Subtask.objects.create(**subtask_data)
+    subtask = models.Subtask.objects.create(**subtask_data)
+    subtask.task_blueprints.set([models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())])
+    return subtask
 
 
 def create_subtask_template_for_testing(template_type: object):
@@ -100,7 +102,7 @@ def create_relation_task_blueprint_object_for_testing(blueprint_task_producer, b
     return task_relation_obj
 
 
-def create_scheduling_relation_task_blueprint_for_testing(first_task_blueprint, second_task_blueprint):
+def create_scheduling_relation_task_blueprint_for_testing(first_task_blueprint, second_task_blueprint, placement='before'):
     """
     Helper function to create a task blueprint relation object between two task blueprint (calibrator and target observation)
     :param first_task_blueprint:
@@ -111,7 +113,7 @@ def create_scheduling_relation_task_blueprint_for_testing(first_task_blueprint,
                                  tags=[],
                                  first=first_task_blueprint,
                                  second=second_task_blueprint,
-                                 placement=models.SchedulingRelationPlacement.objects.get(value='before'),
+                                 placement=models.SchedulingRelationPlacement.objects.get(value=placement),
                                  time_offset=60)
     return task_scheduling_rel_obj
 
@@ -281,6 +283,80 @@ class SubTasksCreationFromTaskBluePrintCalibrator(unittest.TestCase):
         self.assertEqual(1.111, subtask.specifications_doc['stations']['analog_pointing']['angle1'])
         self.assertEqual(2.222, subtask.specifications_doc['stations']['analog_pointing']['angle2'])
 
+    def test_create_combined_subtask_from_task_blueprints(self):
+        """
+        Create subtasks from a target task blueprint and a separate calibrator task blueprint.
+        """
+        cal_task_blueprint = create_task_blueprint_object_for_testing(task_template_name="calibrator observation")
+        target_task_blueprint = create_task_blueprint_object_for_testing()
+        create_scheduling_relation_task_blueprint_for_testing(cal_task_blueprint, target_task_blueprint, placement='parallel')
+
+        subtask_1 = create_observation_control_subtask_from_task_blueprint(target_task_blueprint)
+        num_pointings_target = len(subtask_1.specifications_doc['stations']['digital_pointings'])
+
+        # assert target subtask still in defining state
+        self.assertEqual("defining", str(subtask_1.state))
+
+        subtask_2 = create_observation_control_subtask_from_task_blueprint(cal_task_blueprint)
+
+        # assert the same subtask is returned
+        self.assertEqual(subtask_1, subtask_2)
+
+        # assert the calibrator obs was added as an additional beam
+        num_pointings_calibrator = len(subtask_2.specifications_doc['stations']['digital_pointings'])
+        self.assertEqual(num_pointings_target + 1, num_pointings_calibrator)
+
+        # assert the subtask is now in defined state
+        self.assertEqual("defined", str(subtask_2.state))
+
+        # assert the subtask references both tasks
+        self.assertEqual(subtask_1.task_blueprints.count(), 2)
+        self.assertIn(target_task_blueprint, subtask_1.task_blueprints.all())
+        self.assertIn(cal_task_blueprint, subtask_1.task_blueprints.all())
+
+        # assert we have subtask outputs for both tasks
+        self.assertEqual(subtask_1.outputs.count(), 2)
+        self.assertEqual(subtask_1.outputs.filter(task_blueprint=target_task_blueprint).count(), 1)
+        self.assertEqual(subtask_1.outputs.filter(task_blueprint=cal_task_blueprint).count(), 1)
+
+    def test_create_combined_subtask_from_task_blueprints_fails_if_calibrator_handled_before_target(self):
+        """
+        Create subtasks from a target task blueprint and a separate calibrator task blueprint.
+        Handling the calibrator before the target task should raise an exception.
+        """
+        cal_task_blueprint = create_task_blueprint_object_for_testing(task_template_name="calibrator observation")
+        target_task_blueprint = create_task_blueprint_object_for_testing()
+        create_scheduling_relation_task_blueprint_for_testing(cal_task_blueprint, target_task_blueprint, placement='parallel')
+
+        with self.assertRaises(SubtaskCreationException) as cm:
+            create_observation_control_subtask_from_task_blueprint(cal_task_blueprint)
+            create_observation_control_subtask_from_task_blueprint(target_task_blueprint)
+
+        self.assertIn("cannot be added to the target subtask, because it does not exist", str(cm.exception))
+
+    def test_create_combined_subtask_from_task_blueprints_fails_if_calibrator_does_not_fit(self):
+        """
+        Create subtasks from a target task blueprint and a separate calibrator task blueprint.
+        An exception is raised when the combined number of subbands exceeds 488.
+        """
+        cal_task_blueprint = create_task_blueprint_object_for_testing(task_template_name="calibrator observation")
+        target_task_blueprint = create_task_blueprint_object_for_testing()
+        create_scheduling_relation_task_blueprint_for_testing(cal_task_blueprint, target_task_blueprint, placement='parallel')
+
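+        # give the target task 300 subbands in total; together with the calibrator copy (another 300) this exceeds the 488 available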
+        target_task_blueprint.specifications_doc['SAPs'] = [{'name': 'target1', 'target': '', 'subbands': list(range(0, 150)),
+                                                             'digital_pointing': {'angle1': 0.1, 'angle2': 0.1,
+                                                                                  'direction_type': 'J2000'}},
+                                                            {'name': 'target2', 'target': '', 'subbands': list(range(150, 300)),
+                                                             'digital_pointing': {'angle1': 0.2, 'angle2': 0.2,
+                                                                                  'direction_type': 'J2000'}}]
+        target_task_blueprint.save()
+
+        with self.assertRaises(SubtaskCreationException) as cm:
+            create_observation_control_subtask_from_task_blueprint(target_task_blueprint)
+            create_observation_control_subtask_from_task_blueprint(cal_task_blueprint)
+
+        self.assertIn("results in 600 total subbands, but only 488 are possible", str(cm.exception))
+
 
 class SubTaskCreationFromTaskBlueprintIngest(unittest.TestCase):
 
diff --git a/SAS/TMSS/backend/test/t_tasks.py b/SAS/TMSS/backend/test/t_tasks.py
index 88e4791390c6e46ff365372fe86cc79be91f24b3..12ded040d83e4fb685bf56f259fe83cec5d83eec 100755
--- a/SAS/TMSS/backend/test/t_tasks.py
+++ b/SAS/TMSS/backend/test/t_tasks.py
@@ -264,9 +264,10 @@ class TaskBlueprintStateTest(unittest.TestCase):
         task_blueprint_data = TaskBlueprint_test_data(name="Task Blueprint With One Subtask")
         task_blueprint = models.TaskBlueprint.objects.create(**task_blueprint_data)
         # Create pipeline subtask related to taskblueprint
-        subtask_data = Subtask_test_data(task_blueprint, state=models.SubtaskState.objects.get(value="defined"),
+        subtask_data = Subtask_test_data(state=models.SubtaskState.objects.get(value="defined"),
                                          subtask_template=models.SubtaskTemplate.objects.get(name='pipeline control'))
         subtask_pipe = models.Subtask.objects.create(**subtask_data)
+        subtask_pipe.task_blueprints.set([task_blueprint])
 
         # Do the actual test
         for test_item in test_table:
@@ -333,12 +334,14 @@ class TaskBlueprintStateTest(unittest.TestCase):
         task_blueprint_data = TaskBlueprint_test_data(name="Task Blueprint With Subtasks")
         task_blueprint = models.TaskBlueprint.objects.create(**task_blueprint_data)
         # Create observation and qa subtask related to taskblueprint
-        subtask_data = Subtask_test_data(task_blueprint, state=models.SubtaskState.objects.get(value="defined"),
+        subtask_data = Subtask_test_data(state=models.SubtaskState.objects.get(value="defined"),
                                          subtask_template=models.SubtaskTemplate.objects.get(name='observation control'))
         subtask_obs = models.Subtask.objects.create(**subtask_data)
-        subtask_data = Subtask_test_data(task_blueprint, state=models.SubtaskState.objects.get(value="defined"),
+        subtask_obs.task_blueprints.set([task_blueprint])
+        subtask_data = Subtask_test_data(state=models.SubtaskState.objects.get(value="defined"),
                                          subtask_template=models.SubtaskTemplate.objects.get(name='QA file conversion'))
         subtask_qa = models.Subtask.objects.create(**subtask_data)
+        subtask_qa.task_blueprints.set([task_blueprint])
 
         # Do the actual test
         for test_item in test_table:
@@ -374,14 +377,19 @@ class TaskBlueprintStateTest(unittest.TestCase):
         task_blueprint_data = TaskBlueprint_test_data(name="Task Blueprint With Subtasks")
         task_blueprint = models.TaskBlueprint.objects.create(**task_blueprint_data)
         # Create observation and qa subtasks related to taskblueprint
-        subtask_data = Subtask_test_data(task_blueprint, state=models.SubtaskState.objects.get(value="defined"),
+        subtask_data = Subtask_test_data(state=models.SubtaskState.objects.get(value="defined"),
                                          subtask_template=models.SubtaskTemplate.objects.get(name='observation control'))
         subtask_obs1 = models.Subtask.objects.create(**subtask_data)
+        subtask_obs1.task_blueprints.set([task_blueprint])
         subtask_obs2 = models.Subtask.objects.create(**subtask_data)
-        subtask_data = Subtask_test_data(task_blueprint, state=models.SubtaskState.objects.get(value="defined"),
+        subtask_obs2.task_blueprints.set([task_blueprint])
+
+        subtask_data = Subtask_test_data(state=models.SubtaskState.objects.get(value="defined"),
                                          subtask_template=models.SubtaskTemplate.objects.get(name='QA file conversion'))
         subtask_qa1 = models.Subtask.objects.create(**subtask_data)
+        subtask_qa1.task_blueprints.set([task_blueprint])
         subtask_qa2 = models.Subtask.objects.create(**subtask_data)
+        subtask_qa2.task_blueprints.set([task_blueprint])
 
         # Do the actual test
         for test_item in test_table:
diff --git a/SAS/TMSS/backend/test/t_tmssapp_scheduling_REST_API.py b/SAS/TMSS/backend/test/t_tmssapp_scheduling_REST_API.py
index 232c237ec2f18edd7c91b514281425a8bc232089..1f4dbb16b5f032ef5fd02dc89eb45876c96532c6 100755
--- a/SAS/TMSS/backend/test/t_tmssapp_scheduling_REST_API.py
+++ b/SAS/TMSS/backend/test/t_tmssapp_scheduling_REST_API.py
@@ -285,7 +285,7 @@ class SubtaskTestCase(unittest.TestCase):
         GET_and_assert_equal_expected_code(self, BASE_URL + '/subtask/1234321/', 404)
 
     def test_subtask_POST_and_GET(self):
-        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url], specifications_template_url=self.specifications_template_url)
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
@@ -298,13 +298,13 @@ class SubtaskTestCase(unittest.TestCase):
         self.assertGreaterEqual(int(subtask_id), minimium_subtaskid)
 
     def test_subtask_PUT_invalid_raises_error(self):
-        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url], specifications_template_url=self.specifications_template_url)
 
         PUT_and_assert_expected_response(self, BASE_URL + '/subtask/9876789876/', st_test_data, 404, {})
 
     def test_subtask_PUT(self):
-        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
-        st_test_data2 = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url], specifications_template_url=self.specifications_template_url)
+        st_test_data2 = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url], specifications_template_url=self.specifications_template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
@@ -316,7 +316,7 @@ class SubtaskTestCase(unittest.TestCase):
         GET_OK_and_assert_equal_expected_response(self, url, st_test_data2)
 
     def test_subtask_PATCH(self):
-        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url], specifications_template_url=self.specifications_template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
@@ -332,7 +332,7 @@ class SubtaskTestCase(unittest.TestCase):
         GET_OK_and_assert_equal_expected_response(self, url, expected_data)
 
     def test_subtask_DELETE(self):
-        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url], specifications_template_url=self.specifications_template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
@@ -343,7 +343,7 @@ class SubtaskTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)
 
     def test_subtask_PROTECT_behavior_on_state_choice_deleted(self):
-        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url], specifications_template_url=self.specifications_template_url)
 
         # create dependency that is safe to delete (enums are not populated / re-established between tests)
         state_data = {'value': 'kickme'}
@@ -369,7 +369,7 @@ class SubtaskTestCase(unittest.TestCase):
                                                         template_url=self.task_blueprint_data['specifications_template'],
                                                         scheduling_unit_blueprint_url=self.task_blueprint_data['scheduling_unit_blueprint'])
         task_blueprint_url = test_data_creator.post_data_and_get_url(tbp_test_data, '/task_blueprint/')
-        st_test_data = test_data_creator.Subtask(task_blueprint_url=task_blueprint_url, cluster_url=self.cluster_url, specifications_template_url=self.specifications_template_url)
+        st_test_data = test_data_creator.Subtask(task_blueprint_urls=[task_blueprint_url], cluster_url=self.cluster_url, specifications_template_url=self.specifications_template_url)
 
         # POST new item and verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)['url']
@@ -387,7 +387,7 @@ class SubtaskTestCase(unittest.TestCase):
         stt_test_data = test_data_creator.SubtaskTemplate()
         expected_data = test_data_creator.update_schema_from_template("subtasktemplate", stt_test_data)
         specifications_template_url = test_data_creator.post_data_and_get_url(stt_test_data, '/subtask_template/')
-        st_test_data = test_data_creator.Subtask(specifications_template_url=specifications_template_url, cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url)
+        st_test_data = test_data_creator.Subtask(specifications_template_url=specifications_template_url, cluster_url=self.cluster_url, task_blueprint_urls=[self.task_blueprint_url])
 
         # POST new item and verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)['url']
@@ -587,8 +587,8 @@ class SubtaskInputTestCase(unittest.TestCase):
 
         # make new subtask_url instance, but reuse related data for speed
         subtask_url = test_data_creator.post_data_and_get_url(test_data_creator.Subtask(cluster_url=self.subtask_data['cluster'],
-                                                                                        task_blueprint_url=self.subtask_data['task_blueprint'],
-                                                                                        specifications_template_url=self.subtask_data['specifications_template'],
+                                                                                        task_blueprint_urls=[self.subtask_data['task_blueprint']],
+                                                                                        specifications_template_url=self.subtask_data['specifications_template'],
                                                                                         specifications_doc=self.subtask_data['specifications_doc']), '/subtask/')
         test_patch = {"subtask": subtask_url,
                       "tags": ['FANCYTAG'],
@@ -614,7 +614,7 @@ class SubtaskInputTestCase(unittest.TestCase):
     def test_subtask_input_CASCADE_behavior_on_subtask_deleted(self):
         # make new subtask_url instance, but reuse related data for speed
         subtask_url = test_data_creator.post_data_and_get_url(test_data_creator.Subtask(cluster_url=self.subtask_data['cluster'],
-                                                                                        task_blueprint_url=self.subtask_data['task_blueprint'],
+                                                                                        task_blueprint_urls=[self.subtask_data['task_blueprint']],
                                                                                         specifications_template_url=self.subtask_data['specifications_template'],
                                                                                         specifications_doc=self.subtask_data['specifications_doc']), '/subtask/')
         sti_test_data = test_data_creator.SubtaskInput(subtask_url=subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url)
@@ -1171,7 +1171,7 @@ class DataproductHashTestCase(unittest.TestCase):
         url = r_dict['url']
         GET_OK_and_assert_equal_expected_response(self, url, dph_test_data)
 
-        test_patch = {"algorithm": BASE_URL + '/algorithm/aes256',
+        test_patch = {"hash_algorithm": BASE_URL + '/hash_algorithm/aes256',
                       "hash": 'bender-was-here'}
 
         # PATCH item and verify
@@ -1207,7 +1207,7 @@ class DataproductHashTestCase(unittest.TestCase):
         self.assertTrue("ProtectedError" in str(response.content))
         GET_and_assert_equal_expected_code(self, dph_test_data['dataproduct'], 200)
 
-    def test_dataproduct_hash_PROTECT_behavior_on_algorithm_deleted(self):
+    def test_dataproduct_hash_PROTECT_behavior_on_hash_algorithm_deleted(self):
         dph_test_data = test_data_creator.DataproductHash(dataproduct_url=self.dataproduct_url)
 
         # POST new item and verify
@@ -1217,10 +1217,10 @@ class DataproductHashTestCase(unittest.TestCase):
 
         # Try to DELETE dependency, verify that was not successful
         # Unfortunately we don't get a nice error in json, but a Django debug page on error 500...
-        response = requests.delete(dph_test_data['algorithm'], auth=AUTH)
+        response = requests.delete(dph_test_data['hash_algorithm'], auth=AUTH)
         self.assertEqual(500, response.status_code)
         self.assertTrue("ProtectedError" in str(response.content))
-        GET_and_assert_equal_expected_code(self, dph_test_data['algorithm'], 200)
+        GET_and_assert_equal_expected_code(self, dph_test_data['hash_algorithm'], 200)
 
 
 class DataproductArchiveInfoTestCase(unittest.TestCase):
@@ -1354,7 +1354,7 @@ class SubtaskQueryTestCase(unittest.TestCase):
             start_time = datetime.now() + timedelta(hours=2, days=day_idx)
             stop_time = datetime.now() + timedelta(hours=4, days=day_idx)
             test_data_creator.post_data_and_get_url(test_data_creator.Subtask(start_time=start_time, stop_time=stop_time,
-                                                                              cluster_url=cluster_url, task_blueprint_url=task_blueprint_url), '/subtask/')
+                                                                              cluster_url=cluster_url, task_blueprint_urls=[task_blueprint_url]), '/subtask/')
 
     subtasks_test_data_with_start_stop_time = {'clusterB': 50, 'clusterC': 30 }
 
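The REST test changes above all follow from the Subtask-TaskBlueprint relation becoming a many-to-many: the test data creator now takes a `task_blueprint_urls` list instead of a single `task_blueprint_url`. A minimal sketch of the corresponding request payload change, with a placeholder URL:

```python
# Sketch only: Subtask REST payload before and after the FK -> ManyToMany change.
# The URL below is a placeholder, not a real endpoint.
old_payload = {"task_blueprint": "http://localhost:8000/api/task_blueprint/1/"}     # single URL
new_payload = {"task_blueprints": ["http://localhost:8000/api/task_blueprint/1/"]}  # list of URLs
```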
diff --git a/SAS/TMSS/backend/test/t_tmssapp_scheduling_django_API.py b/SAS/TMSS/backend/test/t_tmssapp_scheduling_django_API.py
index 682f22659885f52e3a3632ab288861efa19b3b5e..afca166b1a8b2871269661cae58af45b1b79e44d 100755
--- a/SAS/TMSS/backend/test/t_tmssapp_scheduling_django_API.py
+++ b/SAS/TMSS/backend/test/t_tmssapp_scheduling_django_API.py
@@ -132,6 +132,7 @@ class SubtaskOutputTest(unittest.TestCase):
         # setup
         test_data = dict(SubtaskOutput_test_data())
         test_data['subtask'] = None
+        test_data['task_blueprint'] = None
 
         # assert
         with self.assertRaises(IntegrityError):
@@ -188,7 +189,9 @@ class SubtaskTest(unittest.TestCase):
 
         # setup
         before = datetime.utcnow()
-        entry = models.Subtask.objects.create(**Subtask_test_data(task_blueprint=self.task_blueprint))
+        entry = models.Subtask.objects.create(**Subtask_test_data())
+        entry.task_blueprints.set([self.task_blueprint])
+        entry.save()
 
         after = datetime.utcnow()
 
@@ -199,7 +202,8 @@ class SubtaskTest(unittest.TestCase):
     def test_Subtask_update_timestamp_gets_changed_correctly(self):
 
         # setup
-        entry = models.Subtask.objects.create(**Subtask_test_data(task_blueprint=self.task_blueprint))
+        entry = models.Subtask.objects.create(**Subtask_test_data())
+        entry.task_blueprints.set([self.task_blueprint])
         before = datetime.utcnow()
         entry.save()
         after = datetime.utcnow()
@@ -211,7 +215,7 @@ class SubtaskTest(unittest.TestCase):
     def test_Subtask_prevents_missing_template(self):
 
         # setup
-        test_data = dict(Subtask_test_data(task_blueprint=self.task_blueprint))
+        test_data = dict(Subtask_test_data())
         test_data['specifications_template'] = None
 
         # assert
@@ -219,8 +223,9 @@ class SubtaskTest(unittest.TestCase):
             models.Subtask.objects.create(**test_data)
 
     def test_Subtask_predecessors_and_successors_none(self):
-        subtask1:models.Subtask = models.Subtask.objects.create(**Subtask_test_data(task_blueprint=self.task_blueprint))
-        subtask2:models.Subtask = models.Subtask.objects.create(**Subtask_test_data(task_blueprint=self.task_blueprint))
+        subtask1:models.Subtask = models.Subtask.objects.create(**Subtask_test_data())
+        subtask2:models.Subtask = models.Subtask.objects.create(**Subtask_test_data())
 
         self.assertEqual(set(), set(subtask1.predecessors.all()))
         self.assertEqual(set(), set(subtask2.predecessors.all()))
@@ -228,10 +233,14 @@ class SubtaskTest(unittest.TestCase):
         self.assertEqual(set(), set(subtask2.successors.all()))
 
     def test_Subtask_predecessors_and_successors_simple(self):
-        subtask1:models.Subtask = models.Subtask.objects.create(**Subtask_test_data(task_blueprint=self.task_blueprint))
-        subtask2:models.Subtask = models.Subtask.objects.create(**Subtask_test_data(task_blueprint=self.task_blueprint))
+        subtask1:models.Subtask = models.Subtask.objects.create(**Subtask_test_data())
+        subtask1.task_blueprints.set([self.task_blueprint])
+        subtask1.save()
+        subtask2:models.Subtask = models.Subtask.objects.create(**Subtask_test_data())
+        subtask2.task_blueprints.set([self.task_blueprint])
+        subtask2.save()
 
-        output1 = models.SubtaskOutput.objects.create(subtask=subtask1)
+        output1 = models.SubtaskOutput.objects.create(subtask=subtask1, task_blueprint=self.task_blueprint)
         models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=subtask2, producer=output1))
 
         self.assertEqual(subtask1, subtask2.predecessors.all()[0])
@@ -239,22 +248,32 @@ class SubtaskTest(unittest.TestCase):
 
     def test_Subtask_predecessors_and_successors_complex(self):
         subtask1:models.Subtask = models.Subtask.objects.create(**Subtask_test_data())
-        subtask2:models.Subtask = models.Subtask.objects.create(**Subtask_test_data(task_blueprint=subtask1.task_blueprint))
-        subtask3:models.Subtask = models.Subtask.objects.create(**Subtask_test_data(task_blueprint=subtask1.task_blueprint))
-        subtask4:models.Subtask = models.Subtask.objects.create(**Subtask_test_data(task_blueprint=subtask1.task_blueprint))
-        subtask5:models.Subtask = models.Subtask.objects.create(**Subtask_test_data(task_blueprint=subtask1.task_blueprint))
-        subtask6:models.Subtask = models.Subtask.objects.create(**Subtask_test_data(task_blueprint=subtask1.task_blueprint))
+        subtask2:models.Subtask = models.Subtask.objects.create(**Subtask_test_data())
+        subtask2.task_blueprints.set(subtask1.task_blueprints.all())
+        subtask2.save()
+        subtask3:models.Subtask = models.Subtask.objects.create(**Subtask_test_data())
+        subtask3.task_blueprints.set(subtask1.task_blueprints.all())
+        subtask3.save()
+        subtask4:models.Subtask = models.Subtask.objects.create(**Subtask_test_data())
+        subtask4.task_blueprints.set(subtask1.task_blueprints.all())
+        subtask4.save()
+        subtask5:models.Subtask = models.Subtask.objects.create(**Subtask_test_data())
+        subtask5.task_blueprints.set(subtask1.task_blueprints.all())
+        subtask5.save()
+        subtask6:models.Subtask = models.Subtask.objects.create(**Subtask_test_data())
+        subtask6.task_blueprints.set(subtask1.task_blueprints.all())
+        subtask6.save()
 
         # ST1 ---> ST3 ---> ST4
         #      |        |
         # ST2 -          -> ST5 ---> ST6
 
-        output1 = models.SubtaskOutput.objects.create(subtask=subtask1)
-        output2 = models.SubtaskOutput.objects.create(subtask=subtask2)
-        output3 = models.SubtaskOutput.objects.create(subtask=subtask3)
-        output4 = models.SubtaskOutput.objects.create(subtask=subtask4)
-        output5 = models.SubtaskOutput.objects.create(subtask=subtask5)
-        output6 = models.SubtaskOutput.objects.create(subtask=subtask6)
+        output1 = models.SubtaskOutput.objects.create(subtask=subtask1, task_blueprint=self.task_blueprint)
+        output2 = models.SubtaskOutput.objects.create(subtask=subtask2, task_blueprint=self.task_blueprint)
+        output3 = models.SubtaskOutput.objects.create(subtask=subtask3, task_blueprint=self.task_blueprint)
+        output4 = models.SubtaskOutput.objects.create(subtask=subtask4, task_blueprint=self.task_blueprint)
+        output5 = models.SubtaskOutput.objects.create(subtask=subtask5, task_blueprint=self.task_blueprint)
+        output6 = models.SubtaskOutput.objects.create(subtask=subtask6, task_blueprint=self.task_blueprint)
 
         models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=subtask3, producer=output1))
         models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=subtask3, producer=output2))
@@ -276,7 +295,8 @@ class SubtaskTest(unittest.TestCase):
     def test_Subtask_transformed_dataproducts(self):
         # setup
         subtask1:models.Subtask = models.Subtask.objects.create(**Subtask_test_data())
-        output1:models.SubtaskOutput = models.SubtaskOutput.objects.create(subtask=subtask1)
+        output1:models.SubtaskOutput = models.SubtaskOutput.objects.create(subtask=subtask1,
+                                                                           task_blueprint=self.task_blueprint)
         output1_dp:models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=output1))
 
         subtask2:models.Subtask = models.Subtask.objects.create(**Subtask_test_data())
@@ -284,7 +304,8 @@ class SubtaskTest(unittest.TestCase):
         input2_dp = output1_dp
         input2.dataproducts.set([input2_dp])
         input2.save()
-        output2:models.SubtaskOutput = models.SubtaskOutput.objects.create(subtask=subtask2)
+        output2:models.SubtaskOutput = models.SubtaskOutput.objects.create(subtask=subtask2,
+                                                                           task_blueprint=self.task_blueprint)
         output2_dp:models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=output2))
 
         models.DataproductTransform.objects.create(input=input2_dp, output=output2_dp, identity=True)
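The Django-API tests above repeat one pattern for the new relation; distilled (names as used in the test module, import path as in the TMSS test suite):

```python
# The create-then-set pattern used throughout the tests above, assuming
# Subtask.task_blueprints is a Django ManyToManyField: M2M values cannot be
# passed to objects.create(), hence the two-step form.
from lofar.sas.tmss.tmss.tmssapp import models

subtask = models.Subtask.objects.create(**Subtask_test_data())  # no task_blueprint kwarg anymore
subtask.task_blueprints.set([task_blueprint])                   # replaces any existing links
subtask.save()  # not needed for the M2M itself; kept for parity with the tests
```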
diff --git a/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py b/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py
index f0c8c331dc951757c7e98c3a3c90b467591446f7..7248ff73a2c58498048cec1df10da470800907d2 100755
--- a/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py
+++ b/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py
@@ -1509,6 +1509,9 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
         assertUrlList(self, response_data['task_drafts'], [task_draft_1, task_draft_2])
 
     def test_GET_SchedulingUnitDraft_view_filters_for_project(self):
+        """
+        Test we can filter on this property, which is explicitly named on the model-specific property filter
+        """
         # setup
         project_1 = models.Project.objects.create(**Project_test_data(name='myproject1'))
         project_2 = models.Project.objects.create(**Project_test_data(name='myproject2'))
@@ -1715,6 +1718,16 @@ class TaskDraftTestCase(unittest.TestCase):
         assertUrlList(self, response_data['consumed_by'], [task_relation_draft_1])
         assertUrlList(self, response_data['produced_by'], [task_relation_draft_2])
 
+    def test_GET_TaskDraft_view_filters_for_copy_reason(self):
+        """
+        Test we can filter on this model field, because the parent LOFARViewSet uses filtering on __all__ fields
+        We only test that we get an error if we filter for an invalid option, as proof that filtering is enabled,
+        and assume that the filter backend does the correct thing.
+        """
+        # assert
+        GET_and_assert_equal_expected_code(self, BASE_URL + '/task_draft/?copy_reason=template', 200)
+        GET_and_assert_equal_expected_code(self, BASE_URL + '/task_draft/?copy_reason=gibberish', 400)
+
 
 class TaskRelationDraftTestCase(unittest.TestCase):
     @classmethod
@@ -2012,10 +2025,18 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/%s/' % id2, test_data_2)
 
     def test_GET_SchedulingUnitBlueprint_view_filters_for_time_range(self):
-
+        """
+        Test we can filter on this property, which is explicitly named on the model-specific property filter
+        """
         # setup
         subtask_1 = models.Subtask.objects.create(**Subtask_test_data(start_time=datetime(2050, 1, 1, 10, 0, 0), stop_time=datetime(2050, 1, 1, 14, 0, 0)))
         subtask_2 = models.Subtask.objects.create(**Subtask_test_data(start_time=datetime(2050, 1, 5, 10, 0, 0), stop_time=datetime(2050, 1, 5, 14, 0, 0)))
+        task_blueprint_1 = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())
+        task_blueprint_2 = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())
+        subtask_1.task_blueprints.set([task_blueprint_1])
+        subtask_2.task_blueprints.set([task_blueprint_2])
+        subtask_1.save()
+        subtask_2.save()
 
         # assert
         response_1 = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_blueprint/?start_time_after=2050-01-01T9:00:00&stop_time_before=2050-01-01T15:00:00', 200)
@@ -2025,6 +2046,9 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         self.assertEqual(response_2['count'], 2)
 
     def test_GET_SchedulingUnitBlueprint_view_filters_for_project(self):
+        """
+        Test we can filter on this property, which is explicitly named on the model-specific property filter
+        """
         # setup
         project_1 = models.Project.objects.create(**Project_test_data(name='myproject1_%s' % uuid.uuid4()))
         project_2 = models.Project.objects.create(**Project_test_data(name='myproject2_%s' % uuid.uuid4()))
@@ -2046,6 +2070,26 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         self.assertEqual(response_2['results'][0]['name'], su_blueprint_2.name)
         self.assertEqual(response_3['count'], 0)
 
+    def test_GET_SchedulingUnitBlueprint_view_filters_for_output_pinned(self):
+        """
+        Test we can filter on this regular field, because the model-specific property filter uses __all__
+        """
+        # setup
+        models.SchedulingUnitBlueprint.objects.all().delete()
+        su_blueprint_true = models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data(name='mysub1_%s' % uuid.uuid4(), output_pinned=True))
+        su_blueprint_false = models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data(name='mysub2_%s' % uuid.uuid4(), output_pinned=False))
+
+        # assert
+        response = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_blueprint/', 200)
+        response_true = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_blueprint/?output_pinned=true', 200)
+        response_false = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_blueprint/?output_pinned=false', 200)
+
+        self.assertEqual(response['count'], 2)
+        self.assertEqual(response_true['count'], 1)
+        self.assertEqual(response_true['results'][0]['name'], su_blueprint_true.name)
+        self.assertEqual(response_false['count'], 1)
+        self.assertEqual(response_false['results'][0]['name'], su_blueprint_false.name)
+
 
 class TaskBlueprintTestCase(unittest.TestCase):
     @classmethod
@@ -2240,10 +2284,10 @@ class TaskBlueprintTestCase(unittest.TestCase):
         st_test_data_2 = Subtask_test_data()
         task_blueprint = models.TaskBlueprint.objects.create(**test_data_1)
         subtask_1 = models.Subtask.objects.create(**st_test_data_1)
-        subtask_1.task_blueprint = task_blueprint
+        subtask_1.task_blueprints.set([task_blueprint])
         subtask_1.save()
         subtask_2 = models.Subtask.objects.create(**st_test_data_2)
-        subtask_2.task_blueprint = task_blueprint
+        subtask_2.task_blueprints.set([task_blueprint])
         subtask_2.save()
         # assert
         response_data = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_blueprint/%s/' % task_blueprint.id, 200)
@@ -2921,7 +2965,7 @@ class ExtendedViewTestCase(unittest.TestCase):
         cls.sub_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitBlueprint(scheduling_unit_draft_url=cls.sud_url), '/scheduling_unit_blueprint/')
         cls.td_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(scheduling_unit_draft_url=cls.sud_url), '/task_draft/')
         cls.tb_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(draft_url=cls.td_url, scheduling_unit_blueprint_url=cls.sub_url), '/task_blueprint/')
-        test_data_creator.post_data_and_get_url(test_data_creator.Subtask(task_blueprint_url=cls.tb_url), '/subtask/')
+        test_data_creator.post_data_and_get_url(test_data_creator.Subtask(task_blueprint_urls=[cls.tb_url]), '/subtask/')
 
     def test_GET_scheduling_unit_draft_serializes_referenced_objects(self):
         # get the extended view on the su draft
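Several of the new docstrings distinguish filtering on regular model fields (enabled wholesale via `__all__`) from filtering on derived properties that must be named explicitly. In django-filter terms this is roughly as follows (illustrative names, not the actual TMSS filter classes):

```python
# Illustrative sketch, assuming django-filter; not the real TMSS code.
import django_filters
from lofar.sas.tmss.tmss.tmssapp import models

class SchedulingUnitBlueprintFilter(django_filters.FilterSet):
    # a derived 'property' like project is not a model field, so it needs an
    # explicitly declared filter traversing the relations
    project = django_filters.CharFilter(field_name='draft__scheduling_set__project__name')

    class Meta:
        model = models.SchedulingUnitBlueprint
        fields = '__all__'  # plain model fields such as output_pinned are filterable for free
```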
diff --git a/SAS/TMSS/backend/test/t_tmssapp_specification_django_API.py b/SAS/TMSS/backend/test/t_tmssapp_specification_django_API.py
index 7ace3e3ad11b88a2c9f1e169c8b01b7dc8d5e57d..577932cd868df45bc7335df4a3c67f91ecbb56b3 100755
--- a/SAS/TMSS/backend/test/t_tmssapp_specification_django_API.py
+++ b/SAS/TMSS/backend/test/t_tmssapp_specification_django_API.py
@@ -922,6 +922,41 @@ class TaskRelationBlueprintTest(unittest.TestCase):
             models.TaskRelationBlueprint.objects.create(**test_data)
 
 
+
+class TestStationTimeLine(unittest.TestCase):
+    """
+    This simple test case arguably belongs in a separate module (t_tmssapp_calculations_django_API.py),
+    but to avoid the overhead of yet another module it piggybacks on this one.
+    """
+
+    def test_StationTimeline_raises_Error_on_duplicate_station_timeline(self):
+        """
+        Test that adding a duplicate station-timestamp combination raises an IntegrityError, so the duplicate is not inserted
+        """
+        import datetime
+
+        test_data = {"station_name": "CS001",
+                     "timestamp": datetime.date(2021, 4, 1),
+                     "sunrise_start": datetime.datetime(year=2021, month=4, day=1, hour=6, minute=1, second=0),
+                     "sunrise_end": datetime.datetime(year=2021, month=4, day=1, hour=7, minute=2, second=0),
+                     "sunset_start": datetime.datetime(year=2021, month=4, day=1, hour=20, minute=31, second=0),
+                     "sunset_end": datetime.datetime(year=2021, month=4, day=1, hour=21, minute=33, second=0) }
+
+        models.StationTimeline.objects.create(**test_data)
+        with self.assertRaises(IntegrityError) as context:
+            models.StationTimeline.objects.create(**test_data)
+            self.assertIn('unique_station_time_line', str(context.exception))
+
+        self.assertEqual(len(models.StationTimeline.objects.filter(timestamp=datetime.date(2021, 4, 1))), 1)
+        self.assertEqual(len(models.StationTimeline.objects.all()), 1)
+        # Add a non-duplicate
+        test_data["station_name"] = "CS002"
+        models.StationTimeline.objects.create(**test_data)
+        self.assertEqual(len(models.StationTimeline.objects.filter(timestamp=datetime.date(2021, 4, 1))), 2)
+        self.assertEqual(len(models.StationTimeline.objects.all()), 2)
+
+
 if __name__ == "__main__":
     os.environ['TZ'] = 'UTC'
     unittest.main()
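For context, the `unique_station_time_line` constraint that TestStationTimeline exercises corresponds to a model along these lines. This is a hedged reconstruction; field types are inferred from the test data above, and the real model lives in the TMSS calculations app:

```python
# Hedged reconstruction of the StationTimeline model implied by the test above.
from django.db import models

class StationTimeline(models.Model):
    station_name = models.CharField(max_length=16)   # e.g. "CS001"; max_length is a guess
    timestamp = models.DateField()
    sunrise_start = models.DateTimeField()
    sunrise_end = models.DateTimeField()
    sunset_start = models.DateTimeField()
    sunset_end = models.DateTimeField()

    class Meta:
        # the constraint name matches the IntegrityError message the test asserts on
        constraints = [models.UniqueConstraint(fields=['station_name', 'timestamp'],
                                               name='unique_station_time_line')]
```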
diff --git a/SAS/TMSS/backend/test/test_utils.py b/SAS/TMSS/backend/test/test_utils.py
index 980e461915c3cf310fddf8c2cc9893dc0a1f2fc5..4a26cccb33892497d59ca555e04f813d6622d701 100644
--- a/SAS/TMSS/backend/test/test_utils.py
+++ b/SAS/TMSS/backend/test/test_utils.py
@@ -286,6 +286,7 @@ class TMSSTestEnvironment:
                  start_pipeline_control: bool=False, start_websocket: bool=False,
                  start_feedback_service: bool=False,
                  start_workflow_service: bool=False, enable_viewflow: bool=False,
+                 start_precalculations_service: bool=False,
                  ldap_dbcreds_id: str=None, db_dbcreds_id: str=None, client_dbcreds_id: str=None):
         self._exchange = exchange
         self._broker = broker
@@ -332,6 +333,9 @@ class TMSSTestEnvironment:
         self.workflow_service = None
         os.environ['TMSS_ENABLE_VIEWFLOW'] = str(bool(self.enable_viewflow))
 
+        self._start_precalculations_service = start_precalculations_service
+        self.precalculations_service = None
+
         # Check for correct Django version, should be at least 3.0
         if django.VERSION[0] < 3:
             print("\nWARNING: YOU ARE USING DJANGO VERSION '%s', WHICH WILL NOT SUPPORT ALL FEATURES IN TMSS!\n" %
@@ -430,6 +434,8 @@ class TMSSTestEnvironment:
             except Exception as e:
                 logger.exception(e)
 
         # wait for all services to be fully started in their background threads
         for thread in service_threads:
             thread.join()
@@ -447,6 +453,14 @@ class TMSSTestEnvironment:
 
         logger.info("started TMSSTestEnvironment ldap/database/django + services + schemas + data in %.1fs", (datetime.datetime.utcnow()-starttime).total_seconds())
 
+        # The next service has no buslistener; it is just a simple time-based scheduler and currently
+        # relies on a populated station schema to retrieve all stations.
+        if self._start_precalculations_service:
+            from lofar.sas.tmss.services.precalculations_service import create_service_job_for_sunrise_and_sunset_calculations
+            # For test purposes we use a smaller range and a higher interval frequency
+            self.precalculations_service = \
+                create_service_job_for_sunrise_and_sunset_calculations(interval_time=60, nbr_days_calculate_ahead=3, nbr_days_before_today=1)
+            self.precalculations_service.start()
 
     def stop(self):
         if self.workflow_service is not None:
@@ -477,6 +491,10 @@ class TMSSTestEnvironment:
             self.ra_test_environment.stop()
             self.ra_test_environment = None
 
+        if self.precalculations_service is not None:
+            self.precalculations_service.stop()
+            self.precalculations_service = None
+
         self.django_server.stop()
         self.ldap_server.stop()
         self.database.destroy()
@@ -518,6 +536,7 @@ class TMSSTestEnvironment:
         from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
         return TMSSRESTTestDataCreator(self.django_server.url, (self.django_server.ldap_dbcreds.user, self.django_server.ldap_dbcreds.password))
 
+
 def main_test_database():
     """instantiate, run and destroy a test postgress django database"""
     os.environ['TZ'] = 'UTC'
@@ -550,6 +569,7 @@ def main_test_database():
         print("Press Ctrl-C to exit (and remove the test database automatically)")
         waitForInterrupt()
 
+
 def main_test_environment():
     """instantiate, run and destroy a full tmss test environment (postgress database, ldap server, django server)"""
     from optparse import OptionParser, OptionGroup
@@ -583,6 +603,7 @@ def main_test_environment():
     group.add_option('-V', '--viewflow_service', dest='viewflow_service', action='store_true', help='Enable the viewflow service. Implies --viewflow_app and --eventmessages')
     group.add_option('-w', '--websockets', dest='websockets', action='store_true', help='Enable json updates pushed via websockets')
     group.add_option('-f', '--feedbackservice', dest='feedbackservice', action='store_true', help='Enable feedbackservice to handle feedback from observations/pipelines which comes in via the (old qpid) otdb messagebus.')
+    group.add_option('-C', '--precalculations_service', dest='precalculations_service', action='store_true', help='Enable the PreCalculations service')
     group.add_option('--all', dest='all', action='store_true', help='Enable/Start all the services, upload schemas and testdata')
     group.add_option('--simulate', dest='simulate', action='store_true', help='Simulate a run of the first example scheduling_unit (implies --data and --eventmessages and --ra_test_environment)')
 
@@ -622,6 +643,7 @@ def main_test_environment():
                              start_feedback_service=options.feedbackservice or options.all,
                              enable_viewflow=options.viewflow_app or options.viewflow_service or options.all,
                              start_workflow_service=options.viewflow_service or options.all,
+                             start_precalculations_service=options.precalculations_service or options.all,
                              ldap_dbcreds_id=options.LDAP_ID, db_dbcreds_id=options.DB_ID, client_dbcreds_id=options.REST_CLIENT_ID) as tmss_test_env:
 
             # print some nice info for the user to use the test servers...
@@ -699,7 +721,7 @@ def create_scheduling_unit_blueprint_simulator(scheduling_unit_blueprint_id: int
             self.create_output_dataproducts = create_output_dataproducts
 
         def need_to_handle(self, subtask: models.Subtask) -> bool:
-            if subtask.task_blueprint.scheduling_unit_blueprint.id != self.scheduling_unit_blueprint_id:
+            if self.scheduling_unit_blueprint_id not in [tb.scheduling_unit_blueprint.id for tb in subtask.task_blueprints.all()]:
                 return False
 
             if subtask.specifications_template.type.value == models.SubtaskType.Choices.OBSERVATION.value and not self.handle_observations:
@@ -737,7 +759,7 @@ def create_scheduling_unit_blueprint_simulator(scheduling_unit_blueprint_id: int
                 pass
 
             # trick: trigger any already scheduled subtasks, cascading in events simulating the run
-            subtasks = models.Subtask.objects.filter(task_blueprint__scheduling_unit_blueprint_id=self.scheduling_unit_blueprint_id)
+            subtasks = models.Subtask.objects.filter(task_blueprints__scheduling_unit_blueprint_id=self.scheduling_unit_blueprint_id)
             for subtask in subtasks.filter(state__value=models.SubtaskState.Choices.SCHEDULED.value):
                 self.onSubTaskStatusChanged(subtask.id, "scheduled")
 
@@ -828,7 +850,7 @@ def create_scheduling_unit_blueprint_simulator(scheduling_unit_blueprint_id: int
                         output_dp.feedback_doc = feedback_doc
                         output_dp.save()
                 elif subtask.specifications_template.type.value == models.SubtaskType.Choices.INGEST.value:
-                    project_name = subtask.task_blueprint.draft.scheduling_unit_draft.scheduling_set.project.name
+                    project_name = subtask.task_blueprints.first().draft.scheduling_unit_draft.scheduling_set.project.name    # todo: support for multiple projects needs to be picked up in TMSS-689
 
                     for output_dp in subtask.output_dataproducts:
                         try:
@@ -848,8 +870,8 @@ def create_scheduling_unit_blueprint_simulator(scheduling_unit_blueprint_id: int
 
                         models.DataproductArchiveInfo.objects.create(dataproduct=output_dp, storage_ticket=uuid4())
 
-                        for algo in models.Algorithm.objects.all():
-                            models.DataproductHash.objects.create(dataproduct=output_dp, algorithm=algo, hash=uuid4())
+                        for algo in models.HashAlgorithm.objects.all():
+                            models.DataproductHash.objects.create(dataproduct=output_dp, hash_algorithm=algo, hash=uuid4())
             elif status == models.SubtaskState.Choices.DEFINED.value:
                 state_transition = models.SubtaskStateLog.objects.filter(subtask__id=subtask.id,
                                                                          old_state__value=models.SubtaskState.Choices.SCHEDULING.value,
@@ -860,12 +882,13 @@ def create_scheduling_unit_blueprint_simulator(scheduling_unit_blueprint_id: int
 
                     if subtask.specifications_template.type.value == 'ingest':
                         logger.info("subtask id=%d is an ingest task which requires permission in order to be scheduled", subtask.id)
-                        if self.auto_grant_ingest_permission and subtask.task_blueprint.scheduling_unit_blueprint.ingest_permission_required:
+                        if self.auto_grant_ingest_permission and any([tb.scheduling_unit_blueprint.ingest_permission_required for tb in subtask.task_blueprints.all()]):
                             # just granting the permission triggers the scheduling_service to check and schedulable ingest subtasks,
                             # resulting in a scheduled ingest subtask.
                             logger.info("granting ingest subtask id=%d ingest_permission", subtask.id)
-                            subtask.task_blueprint.scheduling_unit_blueprint.ingest_permission_granted_since = datetime.utcnow()
-                            subtask.task_blueprint.scheduling_unit_blueprint.save()
+                            for tb in subtask.task_blueprints.all():
+                                tb.scheduling_unit_blueprint.ingest_permission_granted_since = datetime.utcnow()
+                                tb.scheduling_unit_blueprint.save()
 
             if next_state:
                 sleep(self.delay)  # mimic a little 'processing' delay
@@ -958,5 +981,7 @@ def main_scheduling_unit_blueprint_simulator():
             pass
 
 
 if __name__ == '__main__':
     main_test_environment()
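Outside the test environment, the precalculations job added above can be driven the same way; the import and arguments below are taken from the diff, while the try/finally shape is illustrative:

```python
from lofar.sas.tmss.services.precalculations_service import \
    create_service_job_for_sunrise_and_sunset_calculations

# test-friendly settings, as used in TMSSTestEnvironment above
job = create_service_job_for_sunrise_and_sunset_calculations(interval_time=60,
                                                             nbr_days_calculate_ahead=3,
                                                             nbr_days_before_today=1)
job.start()
try:
    ...  # exercise code that reads the populated StationTimeline rows
finally:
    job.stop()
```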
diff --git a/SAS/TMSS/backend/test/tmss_test_data_django_models.py b/SAS/TMSS/backend/test/tmss_test_data_django_models.py
index 08c549f734feed11c0cda5fe64edd974297cb0af..fb0d9a88c0b541baf613d8626b6d0f514536ceb5 100644
--- a/SAS/TMSS/backend/test/tmss_test_data_django_models.py
+++ b/SAS/TMSS/backend/test/tmss_test_data_django_models.py
@@ -250,7 +250,7 @@ def TaskRelationDraft_test_data(producer: models.TaskDraft = None, consumer: mod
             "output_role": models.TaskConnectorType.objects.create(**TaskConnectorType_test_data()),
             "selection_template": models.TaskRelationSelectionTemplate.objects.create(**TaskRelationSelectionTemplate_test_data())}
 
-def SchedulingUnitBlueprint_test_data(name=None, requirements_template: models.SchedulingUnitTemplate=None, draft=None) -> dict:
+def SchedulingUnitBlueprint_test_data(name=None, requirements_template: models.SchedulingUnitTemplate=None, draft=None, output_pinned=None) -> dict:
     if name is None:
         name = 'my_scheduling_unit_blueprint_' + str(uuid.uuid4())
 
@@ -260,13 +260,17 @@ def SchedulingUnitBlueprint_test_data(name=None, requirements_template: models.S
     if draft is None:
         draft = models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data())
 
+    if output_pinned is None:
+        output_pinned = False
+
     return {"name": name,
             "description": "",
             "tags": [],
             "requirements_doc": get_default_json_object_for_schema(requirements_template.schema),
             "requirements_template": requirements_template,
             "do_cancel": False,
-            "draft": draft }
+            "draft": draft,
+            "output_pinned": output_pinned}
 
 def TaskBlueprint_test_data(name: str=None, task_draft: models.TaskDraft = None, scheduling_unit_blueprint: models.SchedulingUnitBlueprint = None, specifications_template: models.TaskTemplate=None, specifications_doc: dict=None, output_pinned=False) -> dict:
     if name is None:
@@ -361,11 +365,15 @@ def DataproductFeedbackTemplate_test_data() -> dict:
             "schema": minimal_json_schema(),
             "tags": ["TMSS", "TESTING"]}
 
-def SubtaskOutput_test_data(subtask: models.Subtask=None) -> dict:
+def SubtaskOutput_test_data(subtask: models.Subtask=None, task_blueprint: models.TaskBlueprint=None) -> dict:
     if subtask is None:
         subtask = models.Subtask.objects.create(**Subtask_test_data())
 
+    if task_blueprint is None:
+        task_blueprint = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())
+
     return {"subtask": subtask,
+            "task_blueprint": task_blueprint,
             "tags":[]}
 
 def SubtaskInput_test_data(subtask: models.Subtask=None, producer: models.SubtaskOutput=None, selection_doc=None, selection_template: models.TaskRelationSelectionTemplate=None) -> dict:
@@ -388,13 +396,10 @@ def SubtaskInput_test_data(subtask: models.Subtask=None, producer: models.Subtas
             "selection_template": selection_template,
             "tags":[]}
 
-def Subtask_test_data(task_blueprint: models.TaskBlueprint=None, subtask_template: models.SubtaskTemplate=None,
+def Subtask_test_data(subtask_template: models.SubtaskTemplate=None,
                       specifications_doc: dict=None, start_time=None, stop_time=None, cluster=None, state=None,
                       raw_feedback=None) -> dict:
 
-    if task_blueprint is None:
-        task_blueprint = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())
-
     if subtask_template is None:
         subtask_template = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data())
 
@@ -418,7 +423,7 @@ def Subtask_test_data(task_blueprint: models.TaskBlueprint=None, subtask_templat
              "stop_time": stop_time,
              "state": state,
              "specifications_doc": specifications_doc,
-             "task_blueprint": task_blueprint,
+             #"task_blueprint": task_blueprint,  # ManyToMany, use set()
              "specifications_template": subtask_template,
              "tags": ["TMSS", "TESTING"],
              "do_cancel": datetime.utcnow(),
@@ -519,7 +524,7 @@ def DataproductArchiveInfo_test_data() -> dict:
 
 def DataproductHash_test_data() -> dict:
     return {"dataproduct": models.Dataproduct.objects.create(**Dataproduct_test_data()),
-            "algorithm": models.Algorithm.objects.get(value='md5'),
+            "hash_algorithm": models.HashAlgorithm.objects.get(value='md5'),
             "hash": "myhash_1",
             "tags": ['tmss', 'testing']}
 
diff --git a/SAS/TMSS/backend/test/tmss_test_data_rest.py b/SAS/TMSS/backend/test/tmss_test_data_rest.py
index 1426e271f50f5cbcc5a9b1ea9cd5f7f86c7c4a75..e1633004ec5fd7eca8a3ff6883564328f0a733a8 100644
--- a/SAS/TMSS/backend/test/tmss_test_data_rest.py
+++ b/SAS/TMSS/backend/test/tmss_test_data_rest.py
@@ -633,12 +633,12 @@ class TMSSRESTTestDataCreator():
             return self._cluster_url
 
 
-    def Subtask(self, cluster_url=None, task_blueprint_url=None, specifications_template_url=None, specifications_doc=None, state:str="defining", start_time: datetime=None, stop_time: datetime=None, raw_feedback:str =None):
+    def Subtask(self, cluster_url=None, task_blueprint_urls=None, specifications_template_url=None, specifications_doc=None, state:str="defining", start_time: datetime=None, stop_time: datetime=None, raw_feedback:str =None):
         if cluster_url is None:
             cluster_url = self.cached_cluster_url
     
-        if task_blueprint_url is None:
-            task_blueprint_url = self.cached_task_blueprint_url
+        if task_blueprint_urls is None:
+            task_blueprint_urls = [self.cached_task_blueprint_url]
     
         if specifications_template_url is None:
             specifications_template_url = self.cached_subtask_template_url
@@ -662,7 +662,7 @@ class TMSSRESTTestDataCreator():
                 "stop_time": stop_time,
                 "state": self.django_api_url + '/subtask_state/%s' % (state,),
                 "specifications_doc": specifications_doc,
-                "task_blueprint": task_blueprint_url,
+                "task_blueprints": task_blueprint_urls,
                 "specifications_template": specifications_template_url,
                 "tags": ["TMSS", "TESTING"],
                 "do_cancel": datetime.utcnow().isoformat(),
@@ -677,11 +677,16 @@ class TMSSRESTTestDataCreator():
             self._subtask_url = self.post_data_and_get_url(self.Subtask(), '/subtask/')
             return self._subtask_url
 
-    def SubtaskOutput(self, subtask_url=None):
+    def SubtaskOutput(self, subtask_url=None, task_blueprint_url=None):
         if subtask_url is None:
             subtask_url = self.cached_subtask_url
 
+        if task_blueprint_url is None:
+            task_blueprint_url = self.cached_task_blueprint_url
+
         return {"subtask": subtask_url,
+                "task_blueprint": task_blueprint_url,
                 "tags": []}
 
     @property
@@ -762,15 +767,15 @@ class TMSSRESTTestDataCreator():
                 "identity": True,
                 "tags": ['tmss', 'testing']}
     
-    def DataproductHash(self, algorithm_url=None, hash="my_hash", dataproduct_url=None):
-        if algorithm_url is None:
-            algorithm_url = self.django_api_url + '/algorithm/md5'
+    def DataproductHash(self, hash_algorithm_url=None, hash="my_hash", dataproduct_url=None):
+        if hash_algorithm_url is None:
+            hash_algorithm_url = self.django_api_url + '/hash_algorithm/md5'
     
         if dataproduct_url is None:
             dataproduct_url = self.cached_dataproduct_url
     
         return {"dataproduct": dataproduct_url,
-                "algorithm": algorithm_url,
+                "hash_algorithm": hash_algorithm_url,
                 "hash": hash,
                 "tags": ['tmss', 'testing']}
     
diff --git a/SAS/TMSS/client/lib/tmss_http_rest_client.py b/SAS/TMSS/client/lib/tmss_http_rest_client.py
index a2c1ea5a5c541528b5fe73f4d716def4289494e1..2409220e473145e083b3e0b72b91adb7649908dc 100644
--- a/SAS/TMSS/client/lib/tmss_http_rest_client.py
+++ b/SAS/TMSS/client/lib/tmss_http_rest_client.py
@@ -386,7 +386,7 @@ class TMSSsession(object):
         if response.status_code == 201:
             logger.info("created new template with name=%s: %s", name, json.loads(response.text)['url'])
         else:
-            raise Exception("Could not POST template with name=%s: %s" (name,response.text))
+            raise Exception("Could not POST template with name=%s: %s" % (name,response.text))
 
     def process_feedback_and_set_to_finished_if_complete(self, subtask_id: int, feedback: str) -> {}:
         '''Process the feedback_doc (which can be for one or more or all dataproducts), store/append it in the subtask's raw_feedback, and process it into json feedback per dataproduct. Sets the subtask to finished if all dataproducts are processed, which may require multiple postings of partial feedback docs.
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js b/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js
index 7581c8e1765f6c365eda45cfcd79681a9cfbede4..fce56dec489c12171a738f814635207a3dc123fc 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js
@@ -5,7 +5,8 @@
 /* eslint-disable react-hooks/exhaustive-deps */
 import React, {useEffect, useRef} from 'react';
 import _ from 'lodash';
-import UnitConverter from '../../utils/unit.converter'
+import UnitConverter from '../../utils/unit.converter';
+import Validator from '../../utils/validator';
 import $RefParser from "@apidevtools/json-schema-ref-parser";
 import "@fortawesome/fontawesome-free/css/all.css";
 import flatpickr from 'flatpickr';
@@ -137,8 +138,8 @@ function Jeditor(props) {
         getCustomProperties(schema.definitions);
         schema.title = props.title;
         const subbandValidator = validateSubbandOutput;
-        const timeValidator = validateTime;
-        const angleValidator = validateAngle;
+        const timeValidator = Validator.validateTime;
+        const angleValidator = Validator.validateAngle;
         JSONEditor.defaults.custom_validators.push((schema, value, path) => {
             const errors = [];
             if (schema.validationType === "subband_list") {
@@ -154,7 +155,7 @@ function Jeditor(props) {
                     errors.push({
                         path: path,
                         property: 'validationType',
-                        message: 'Not a valid input. Mimimum: 00:00:00.0000, Maximum:23:59:59.9999'
+                        message: 'Not a valid input. Minimum: 00:00:00.0000 hours or 0, Maximum: 23:59:59.9999 hours or 6.2831'
                     });
                 }
             }   else if (schema.validationType === "angle") {
@@ -162,7 +163,7 @@ function Jeditor(props) {
                     errors.push({
                         path: path,
                         property: 'validationType',
-                        message: 'Not a valid input. Mimimum: 00:00:00.0000, Maximum:90:00:00.0000'
+                        message: 'Not a valid input. Minimum: -90:00:00.0000 degrees or -1.57079632679489661923, Maximum: 90:00:00.0000 degrees or 1.57079632679489661923'
                     });
                 }
             } else if (schema.validationType === "distanceOnSky") {
@@ -265,19 +266,15 @@ function Jeditor(props) {
         let newProperty = {
             type: "string",
             title: defProperty.title,
-            description: (defProperty.description + (isDegree?'(Degrees:Minutes:Seconds.MilliSeconds)':'(Hours:Minutes:Seconds.MilliSeconds)')),
-            default: "00:00:00.0000",
+            description: (defProperty.description + (isDegree?
+                            "(Supported Formats: '10d15m10.1234s', '10:15:10.1234degrees', '10.2528degrees', '0.1789')":
+                            "(Supported Formats: '10h15m10.1234s', '10:15:10.1234hours', '10.4187hours', '2.7276')")),
+            default: "0",
             validationType: isDegree?'angle':'time',
             options: {
                 "grid_columns": 4,
                 "inputAttributes": {
-                    "placeholder": isDegree?"DD:mm:ss.ssss":"HH:mm:ss.ssss"
-                },
-                "cleave": {
-                    numericOnly: true,
-                    blocks: [2, 2, 2, 4],
-                    delimiters: isDegree ? [':', ':','.'] : [':', ':', '.'],
-                    delimiterLazyShow: true
+                    "placeholder": isDegree?"Degrees or Radian":"Hours or Radian"
                 }
             }
         }
@@ -391,8 +388,8 @@ function Jeditor(props) {
             let outputValue = editorOutput[outputKey];
             if (outputValue instanceof Object) {
                 if (_.indexOf(pointingProps, outputKey) >= 0) {
-                    outputValue.angle1 = UnitConverter.getAngleOutput(outputValue.angle1, false);
-                    outputValue.angle2 = UnitConverter.getAngleOutput(outputValue.angle2, true);
+                    outputValue.angle1 = UnitConverter.parseAngle(outputValue.angle1);
+                    outputValue.angle2 = UnitConverter.parseAngle(outputValue.angle2);
                 } else {
                     updateOutput(outputValue);
                 }
@@ -443,53 +440,6 @@ function Jeditor(props) {
         return (hh<10?`0${hh}`:`${hh}`) + ':' + (mm<10?`0${mm}`:`${mm}`) + ':' + (ss<10?`0${ss}`:`${ss}`);
     }
 
-    /**
-     * Validate time entered as string in HH:mm:ss format
-     * @param {String} prpOutput 
-     */
-    function validateTime(prpOutput) {
-        const splitOutput = prpOutput.split(':');
-        const seconds = splitOutput[2]?splitOutput[2].split('.')[0].split('.')[0]:splitOutput[2];
-        let milliSeconds = prpOutput.split('.')[1] || '0000';
-        milliSeconds = milliSeconds.padEnd(4,0);
-        if (splitOutput.length < 3) {
-            return false;
-        }   else {
-            if (parseInt(splitOutput[0]) > 23 || parseInt(splitOutput[1])>59 || parseInt(splitOutput[2])>59 )
-             {
-                return false;
-            }
-            const timeValue = parseInt(splitOutput[0]*60*60) + parseInt(splitOutput[1]*60) + parseInt(seconds) + milliSeconds/10000;
-            if (timeValue >= 86400) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    /**
-     * Validate angle input to not exceed 90 degrees
-     * @param {String} prpOutput 
-     */
-    function validateAngle(prpOutput) {
-        const splitOutput = prpOutput.split(':');
-        const seconds = splitOutput[2]?splitOutput[2].split('.')[0].split('.')[0]:splitOutput[2];
-        let milliSeconds = prpOutput.split('.')[1] || '0000';
-        milliSeconds = milliSeconds.padEnd(4,0);
-        if (splitOutput.length < 3) {
-            return false;
-        }   else {
-            if (parseInt(splitOutput[0]) > 90 || parseInt(splitOutput[1])>59 || parseInt(seconds)>59) {
-                return false;
-            }
-            const timeValue = parseInt(splitOutput[0]*60*60) + parseInt(splitOutput[1]*60) + parseInt(seconds) + milliSeconds/10000;
-            if (timeValue > 324000) {
-                return false;
-            }
-        }
-        return true;
-    }
-
     /**
      * Validates if the subband list custom field
      * @param {String} prpOutput 
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/DegreeInputmask.js b/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/DegreeInputmask.js
index 16e5057bdf3629ec5b66dfc2f7dd33a3b5edb058..4fd0b705175e3ae8cd5757583545cdb1cb2b552e 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/DegreeInputmask.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/DegreeInputmask.js
@@ -1,5 +1,4 @@
 import React, { Component } from 'react';
-import { InputMask } from 'primereact/inputmask';
 import Validator from  '../../utils/validator';
 import Cleave from 'cleave.js/react';
 
@@ -35,10 +34,8 @@ export default class DegreeInputMask extends Component {
 
   render() {
     return (
-      <Cleave placeholder="DD:mm:ss.ssss" value={this.props.value}
-          options={{numericOnly: true, blocks: [2, 2, 2, 4],
-                    delimiters: [':', ':', '.'],
-                    delimiterLazyShow: false}}
+      <Cleave placeholder="Degree/Radian" value={this.props.value}
+          title="Enter in dms or degrees or radians"
           className="inputmask" 
           htmlRef={(ref) => this.input = ref }
           onChange={this.callbackUpdateAngle} />
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/TimeInputmask.js b/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/TimeInputmask.js
index 540f276baf86e6eb9a36a81823a1feef14583fa1..3a7fe62f3a1f0d23f99ca482e7fb45992d9eea32 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/TimeInputmask.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/TimeInputmask.js
@@ -1,5 +1,4 @@
 import React, { Component } from 'react';
-import { InputMask } from 'primereact/inputmask';
 import Validator from  '../../utils/validator';
 import Cleave from 'cleave.js/react';
 
@@ -33,10 +32,8 @@ export default class TimeInputMask extends Component {
 
   render() {
     return (
-      <Cleave placeholder="HH:mm:ss.ssss" value={this.props.value}
-          options={{numericOnly: true, blocks: [2, 2, 2, 4],
-                    delimiters: [':', ':', '.'],
-                    delimiterLazyShow: false}}
+      <Cleave placeholder="Hour/Radian" value={this.props.value}
+          title="Enter in hms or hours or radians"
           className="inputmask" 
           htmlRef={(ref) => this.input = ref }
           onChange={this.callbackUpdateAngle} />
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/excelview.schedulingset.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/excelview.schedulingset.js
index 6e8e565a824195df4de6e33736a6715eea03e577..01fee2c2d75179c2fef03f5d04714ddc243c8704 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/excelview.schedulingset.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/excelview.schedulingset.js
@@ -1337,7 +1337,7 @@ export class SchedulingSetCreate extends Component {
             row = this.state.commonRowData[0];
             row[field] = value;
             row['isValid'] = isValid;
-            row[field+'value'] = UnitConverter.getAngleOutput(value,isDegree);
+            row[field+'value'] = UnitConverter.parseAngle(value);
             tmpRowData = this.state.commonRowData;
             tmpRowData[0] = row;
             await this.setState({
@@ -1348,7 +1348,7 @@ export class SchedulingSetCreate extends Component {
             row = this.state.rowData[rowIndex];
             row[field] = value;
             row['isValid'] = isValid;
-            row[field+'value'] = UnitConverter.getAngleOutput(value,isDegree);
+            row[field+'value'] = UnitConverter.parseAngle(value);
             tmpRowData = this.state.rowData;
             tmpRowData[rowIndex] = row;
             await this.setState({
@@ -1752,13 +1752,13 @@ export class SchedulingSetCreate extends Component {
                                 validRow = false;
                                 return;
                             }
-                            paramOutput[key] = UnitConverter.getAngleOutput(suRow[result[key]],false);
+                            paramOutput[key] = UnitConverter.parseAngle(suRow[result[key]]);
                         } else if (key === 'angle2'){
                             if  (!Validator.validateAngle(suRow[result[key]])){
                                 validRow = false;
                                 return;
                             }
-                            paramOutput[key] = UnitConverter.getAngleOutput(suRow[result[key]],true);
+                            paramOutput[key] = UnitConverter.parseAngle(suRow[result[key]]);
                         }  else if (key === 'angle3'){
                             paramOutput[key] = Number(suRow[result[key]]);
 
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/utils/unit.converter.js b/SAS/TMSS/frontend/tmss_webapp/src/utils/unit.converter.js
index 471818ef7f1c3915101b6a85d2fe48d34151aa8b..c4f80c5163ab31be1dc4791e730f32be215eeda2 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/utils/unit.converter.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/utils/unit.converter.js
@@ -65,24 +65,22 @@ const UnitConverter = {
      */
     getAngleInput(prpInput, isDegree) {
         if (prpInput){
+            const isNegative = prpInput<0;
+            prpInput = prpInput * (isNegative?-1:1);
             const degrees = prpInput * 180 / Math.PI;
             if (isDegree) {
                 const dd = Math.floor(prpInput * 180 / Math.PI);
                 const mm = Math.floor((degrees-dd) * 60);
-                const ss = Math.floor((degrees-dd-(mm/60)) * 3600);
-                const ssss = round(((degrees - dd - (mm/60) - (ss/3600)) * 36000000), 4);
-                const milliSeconds = String(ssss).padStart(4,0);
-                return (dd<10?`0${dd}`:`${dd}`) + ':' + (mm<10?`0${mm}`:`${mm}`) + ':' + (ss<10?`0${ss}`:`${ss}`) + '.' + milliSeconds;
+                const ss = round((degrees-dd-(mm/60)) * 3600,4);
+                return (isNegative?'-':'') + (dd<10?`0${dd}`:`${dd}`) + 'd' + (mm<10?`0${mm}`:`${mm}`) + 'm' + (ss<10?`0${ss}`:`${ss}`) + 's';
             }   else {
                 const hh = Math.floor(degrees/15);
                 const mm = Math.floor((degrees - (hh*15))/15 * 60 );
-                const ss = Math.floor((degrees -(hh*15)-(mm*15/60))/15 * 3600);
-                const ssss = round(((degrees - (hh*15) - (mm/4) - (ss/240)) *2400000),4);
-                const milliSeconds = String(ssss).padStart(4,0);
-                return (hh<10?`0${hh}`:`${hh}`) + ':' + (mm<10?`0${mm}`:`${mm}`) + ':' + (ss<10?`0${ss}`:`${ss}`) + '.' + milliSeconds;
+                const ss = round((degrees -(hh*15)-(mm*15/60))/15 * 3600, 4);
+                return (hh<10?`0${hh}`:`${hh}`) + 'h' + (mm<10?`0${mm}`:`${mm}`) + 'm' + (ss<10?`0${ss}`:`${ss}`) + 's';
             }
         } else {
-            return "00:00:00";
+            return isDegree?"0d0m0s":'0h0m0s';
         }
     },
 
@@ -103,6 +101,107 @@ const UnitConverter = {
         }else{
             return "00:00:00.0000";
         }
+    },
+    /**
+     * Function to check the input type/format based on the matching predefined regular expression. It can be any of the supported formats
+     * like dms, hms, degrees, hours, radians. Example values are 10h10m10s, 10h10m10.1234s, 10:10:10 hour, 10:10:10.1234 hours,
+     * 10.1234 hours, 15d15m15s, 15d15m15.1515s, 15:15:15 degree, 15:15:15.1515 degrees, 15.1515 degrees. If only a number is entered, it will
+     * be considered as radians.
+     * @param {String} input - value entered in the angle field. 
+     * @returns String - the format of the input identified. If no format is identified, returns null. 
+     */
+    getAngleInputType(input) {
+        if (input.match(/^\-?((\d0?d(0?0m)(0?0(\.\d{1,4})?s))|(([0-8]?\d)d(([0-5]?\d)m)(([0-5]?\d)(\.\d{1,4})?s)))$/)) {
+            return 'dms';
+        }   else if (input.match(/^([0-1]?\d|2[0-3])h([0-5]?\d)m([0-5]?\d)(\.\d{1,4})?s$/)) {
+            return 'hms';
+        }   else if (input.match(/^-?((\d0(.0{1,4})?)|([0-8]?\d)(\.\d{1,4})?) ?d(egree)?s?$/)) {
+            return 'degrees';
+        }   else if (input.match(/^([0-1]?\d|2[0-3])(\.\d{1,4})? ?h(our)?s?$/)) {
+            return 'hours';
+        }   else if (input.match(/^\-?((\d0?:(00:)(00))|(([0-8]\d):(([0-5]\d):)(([0-5]\d)(\.\d{1,4})?))) ?d(egree)?s?$/)) {
+            return 'deg_format';
+        }   else if (input.match(/^([0-1]?\d|2[0-3]):([0-5]?\d):([0-5]?\d)(\.\d{1,4})? ?h(our)?s?$/)) {
+            return 'hour_format';
+        }   else if (input.match(/^\-?[0-6](\.\d{1,20})?$/)) {
+            return 'radians';
+        }   else {
+            return null;
+        }
+    },
+    /**
+     * Function to validate an angle input value based on the format entered and convert it to radians
+     * @param {String} angle - value to be parsed to radians.
+     * @returns number - radian value.
+     */
+    parseAngle(angle) {
+        let radians = 0;
+        const angleType = this.getAngleInputType(angle);
+        switch(angleType) {
+            case 'dms' :
+            case 'hms' : {
+                radians = this.convertAngleToRadian(angle);
+                break;
+            }
+            case 'degrees' : {
+                radians = this.convertToRadians(angle.replace('d','').replace('egree','').replace('s','').replace(' ',''));
+                break;
+            }
+            case 'hours' : {
+                radians = this.convertToRadians(angle.replace('h','').replace('our','').replace('s','').replace(' ','') * 15);
+                break;
+            }
+            case 'deg_format' : {
+                radians  = this.getAngleOutput(angle.replace('d','').replace('egree','').replace('s','').replace(' ',''), true);
+                break;
+            }
+            case 'hour_format' : {
+                radians = this.getAngleOutput(angle.replace('h','').replace('our','').replace('s','').replace(' ',''), false);
+                break;
+            }
+            case 'radians': {
+                radians = parseFloat(angle);
+                break;
+            }
+            default: {
+                break;
+            }
+        }
+        return radians;
+    },
+    /**
+     * Convert a value in degrees to radians
+     * @param {Number} angle - angle in degrees
+     * @returns Number - the angle in radians
+     */
+    convertToRadians(angle) {
+        return angle * Math.PI /180;
+    },
+    /**
+     * Converts a formatted string to a radian value
+     * @param {String} angle - angle formatted as 'DDdMMmSS.ssss' or 'HHhMMmSS.ssss'
+     * @returns Number - the angle in radians
+     */
+    convertAngleToRadian(angle) {
+        let radian = 0;
+        const isDegree = angle.indexOf('d') > 0;
+        const degreeHourSplit = isDegree?angle.split("d"):angle.split("h");
+        let degreeHour = degreeHourSplit[0];
+        const isNegativeAngle = parseInt(degreeHour)<0;
+        degreeHour = isNegativeAngle?degreeHour*-1:degreeHour;
+        const minuteSplit = degreeHourSplit[1].split('m');
+        const minute = minuteSplit[0];
+        const second = minuteSplit[1].replace('s','');
+        if (isDegree) {
+            radian = this.convertToRadians((degreeHour*1 + minute/60 + second/3600));
+            radian = isNegativeAngle?radian*-1:radian;
+        }   else {
+            radian = this.convertToRadians((degreeHour*15 + minute/4 + second/240));
+        }
+        return radian;
     }
 };
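The dms/hms arithmetic in `convertAngleToRadian` reduces to the standard formulas: for dms, degrees = d + m/60 + s/3600; for hms, degrees = h*15 + m/4 + s/240; both are then multiplied by pi/180. A small Python cross-check (hypothetical helper with simplified parsing):

```python
import math
import re

def angle_to_radians(value: str) -> float:
    """Hypothetical cross-check of convertAngleToRadian(); simplified regex."""
    m = re.fullmatch(r'(-?)(\d+)([dh])(\d+)m([\d.]+)s', value)
    sign = -1.0 if m.group(1) else 1.0
    major, minutes, seconds = float(m.group(2)), float(m.group(4)), float(m.group(5))
    if m.group(3) == 'd':
        return sign * math.radians(major + minutes / 60 + seconds / 3600)
    return math.radians(major * 15 + minutes / 4 + seconds / 240)

assert abs(angle_to_radians('10d15m10.1234s') - 0.17895) < 1e-4
assert abs(angle_to_radians('10h15m10.1234s') - 2.68418) < 1e-4
```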
 
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/utils/validator.js b/SAS/TMSS/frontend/tmss_webapp/src/utils/validator.js
index c3101bd69b608c704590586f43a646974ee29858..f7612d15d63f4c2a6d625b74aa4badc3afb585d9 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/utils/validator.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/utils/validator.js
@@ -1,39 +1,25 @@
+import UnitConverter from "./unit.converter";
+
 const Validator = {
     validateTime(value) {
-        const splitOutput = value.split(':');
-        const seconds = splitOutput[2]?splitOutput[2].split('.')[0]:splitOutput[2];
-        let milliSeconds = value.split('.')[1] || '0000';
-        milliSeconds = milliSeconds.padEnd(4,0);
-        if (splitOutput.length < 3) {
-            return false;
-        }   else {
-            if (parseInt(splitOutput[0]) > 23 || parseInt(splitOutput[1])>59 || parseInt(splitOutput[2])>59) {
-                return false;
-            }
-            const timeValue = parseInt(splitOutput[0]*60*60) + parseInt(splitOutput[1]*60) + parseInt(seconds) + milliSeconds/10000;
-            if (timeValue >= 86400) {
+        const angleType = UnitConverter.getAngleInputType(value);
+        if (angleType && ['hms', 'hours', 'hour_format', 'radians'].indexOf(angleType)>=0) {
+            if (angleType === 'radians' && (parseFloat(value)<0 || parseFloat(value) > 6.2831)) {
                 return false;
             }
+            return true;
         }
-        return true;
+        return false;
     },
     validateAngle(value) {
-        const splitOutput = value.split(':');
-        const seconds = splitOutput[2]?splitOutput[2].split('.')[0]:splitOutput[2];
-        let milliSeconds = value.split('.')[1] || '0000';
-        milliSeconds = milliSeconds.padEnd(4,0);
-        if (splitOutput.length < 3) {
-            return false;
-        }   else {
-            if (parseInt(splitOutput[0]) > 90 || parseInt(splitOutput[1])>59 || parseInt(splitOutput[2])>59) {
-                return false;
-            }
-            const timeValue = parseInt(splitOutput[0]*60*60) + parseInt(splitOutput[1]*60) + parseInt(seconds) + milliSeconds/10000;
-            if (timeValue > 324000) {
+        const angleType = UnitConverter.getAngleInputType(value);
+        if (angleType && ['dms', 'degrees', 'deg_format', 'radians'].indexOf(angleType)>=0) {
+            if (angleType === 'radians' && (parseFloat(value) < -1.57079632679489661923 || parseFloat(value) > 1.57079632679489661923)) {
                 return false;
             }
+            return true;
         }
-        return true;
+        return false;
     },
     /**
      * Validates whether any of the given property values is modified comparing the old and new object.