diff --git a/SAS/TMSS/backend/services/CMakeLists.txt b/SAS/TMSS/backend/services/CMakeLists.txt
index 2fb200270ac09e02e2c2daeb8668bc68044f11be..de9c7990be1187f5d391ab151cb815fcb47b1357 100644
--- a/SAS/TMSS/backend/services/CMakeLists.txt
+++ b/SAS/TMSS/backend/services/CMakeLists.txt
@@ -6,4 +6,6 @@ lofar_add_package(TMSSPostgresListenerService tmss_postgres_listener)
 lofar_add_package(TMSSWebSocketService websocket)
 lofar_add_package(TMSSWorkflowService workflow_service)
 lofar_add_package(TMSSLTAAdapter tmss_lta_adapter)
+lofar_add_package(TMSSPreCalculationsService precalculations_service)
+
 
diff --git a/SAS/TMSS/backend/services/precalculations_service/CMakeLists.txt b/SAS/TMSS/backend/services/precalculations_service/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..1c52667c78f120c0b6340e71f67b45febdee919c
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/CMakeLists.txt
@@ -0,0 +1,10 @@
+lofar_package(TMSSPreCalculationsService 0.1)
+
+lofar_find_package(PythonInterp 3.4 REQUIRED)
+
+IF(NOT SKIP_TMSS_BUILD)
+    add_subdirectory(lib)
+    add_subdirectory(test)
+ENDIF(NOT SKIP_TMSS_BUILD)
+
+add_subdirectory(bin)
\ No newline at end of file
diff --git a/SAS/TMSS/backend/services/precalculations_service/bin/CMakeLists.txt b/SAS/TMSS/backend/services/precalculations_service/bin/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..80db184789d8880d2bbb2c7f3792208d49512a69
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/bin/CMakeLists.txt
@@ -0,0 +1,4 @@
+lofar_add_bin_scripts(tmss_precalculations_service)
+
+# supervisord config files
+lofar_add_sysconf_files(tmss_precalculations_service.ini DESTINATION supervisord.d)
diff --git a/SAS/TMSS/backend/services/precalculations_service/bin/tmss_precalculations_service b/SAS/TMSS/backend/services/precalculations_service/bin/tmss_precalculations_service
new file mode 100755
index 0000000000000000000000000000000000000000..2bcfee690f143ad791012bf25e6f5b7aff5223db
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/bin/tmss_precalculations_service
@@ -0,0 +1,24 @@
+#!/usr/bin/python3
+
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+
+from lofar.sas.tmss.services.precalculations_service import main
+
+if __name__ == "__main__":
+    main()
diff --git a/SAS/TMSS/backend/services/precalculations_service/bin/tmss_precalculations_service.ini b/SAS/TMSS/backend/services/precalculations_service/bin/tmss_precalculations_service.ini
new file mode 100644
index 0000000000000000000000000000000000000000..924ce072404b15d8f96bf70b102844af673fbcdc
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/bin/tmss_precalculations_service.ini
@@ -0,0 +1,9 @@
+[program:tmss_precalculations_service]
+command=docker run --rm --net=host -u 7149:7149 -v /opt/lofar/var/log:/opt/lofar/var/log -v /tmp/tmp -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro -v /localhome/lofarsys:/localhome/lofarsys -e HOME=/localhome/lofarsys -e USER=lofarsys nexus.cep4.control.lofar:18080/tmss_django:latest /bin/bash -c 'source ~/.lofar/.lofar_env;source $LOFARROOT/lofarinit.sh;exec tmss_precalculations_service'
+user=lofarsys
+stopsignal=INT ; KeyboardInterrupt
+stopasgroup=true ; bash does not propagate signals
+stdout_logfile=%(program_name)s.log
+redirect_stderr=true
+stderr_logfile=NONE
+stdout_logfile_maxbytes=0
diff --git a/SAS/TMSS/backend/services/precalculations_service/lib/CMakeLists.txt b/SAS/TMSS/backend/services/precalculations_service/lib/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..31845d5064326785365cd0932d3090b5e4fd137f
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/lib/CMakeLists.txt
@@ -0,0 +1,10 @@
+lofar_find_package(PythonInterp 3.4 REQUIRED)
+include(PythonInstall)
+
+set(_py_files
+    precalculations_service.py
+    )
+
+python_install(${_py_files}
+    DESTINATION lofar/sas/tmss/services)
+
diff --git a/SAS/TMSS/backend/services/precalculations_service/lib/precalculations_service.py b/SAS/TMSS/backend/services/precalculations_service/lib/precalculations_service.py
new file mode 100644
index 0000000000000000000000000000000000000000..87442a866d5c2d7c496de393fa6a00e8c56c2a1f
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/lib/precalculations_service.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+import logging
+logger = logging.getLogger(__name__)
+
+import os
+import threading
+import datetime
+from datetime import timedelta
+import time
+from lofar.common.util import waitForInterrupt
+
+# Default values of parameters
+INTERVAL_TIME_SECONDS = 24 * 60 * 60  # 24 hours (every day one calculation ahead)
+NBR_DAYS_CALCULATE_AHEAD = 365    # 1 year
+NBR_DAYS_BEFORE_TODAY = 1
+
+
+def execute_populate_sunrise_and_sunset_for_all_stations(nbr_days_calculate_ahead, start_date):
+    """
+    Execute the population of calculations (sunrise/sunset) for the given number of days, starting at the given date
+    :param nbr_days_calculate_ahead: Number of days to calculate
+    :param start_date: The date to start calculating from
+    :return next_date: The next_date to process
+    """
+    logger.info("execute_populate_sunrise_and_sunset_for_all_stations %s for %d days" % (start_date, nbr_days_calculate_ahead))
+    # Import here otherwise you get
+    # "django.core.exceptions.ImproperlyConfigured: Requested setting INSTALLED_APPS, but settings are not configured. You must either define the environment variable DJANGO_SETTINGS_MODULE or call settings.configure() before accessing settings."
+    from lofar.sas.tmss.tmss.tmssapp.populate import populate_sunrise_and_sunset_for_all_stations
+
+    populate_sunrise_and_sunset_for_all_stations(nbr_days=nbr_days_calculate_ahead, start_date=start_date)
+    # Return the next_date to process
+    next_date = start_date + datetime.timedelta(days=nbr_days_calculate_ahead)
+    return next_date
+
+
+class TMSSPreCalculationsServiceJob(threading.Thread):
+    def __init__(self, interval, execute, *args, **kwargs):
+        threading.Thread.__init__(self)
+        self.daemon = False
+        self.stopped = threading.Event()
+        self.interval = interval
+        self.execute = execute
+        self.args = args
+        self.kwargs = kwargs
+
+    def __enter__(self):
+        pass
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
+
+    def stop(self):
+        self.stopped.set()
+        self.join()
+
+    def run(self):
+        start_time = time.time()
+        next_date = self.execute(*self.args, **self.kwargs)
+        # determine remaining time for exact heartbeat of the interval time
+        remaining_wait_time_in_sec = self.interval.total_seconds() - (time.time() - start_time)
+        while not self.stopped.wait(remaining_wait_time_in_sec):
+            self.kwargs["nbr_days_calculate_ahead"] = 1
+            self.kwargs["start_date"] = next_date
+            start_time = time.time()
+            next_date = self.execute(*self.args, **self.kwargs)
+            remaining_wait_time_in_sec = self.interval.total_seconds() - (time.time() - start_time)
+
+
+def create_service_job_for_sunrise_and_sunset_calculations(interval_time, nbr_days_calculate_ahead, nbr_days_before_today):
+    start_date = datetime.date.today() - datetime.timedelta(days=nbr_days_before_today)
+    return TMSSPreCalculationsServiceJob(interval=timedelta(seconds=interval_time),
+                                         execute=execute_populate_sunrise_and_sunset_for_all_stations,
+                                         nbr_days_calculate_ahead=nbr_days_calculate_ahead, start_date=start_date)
+
+
+def main():
+    # make sure we run in UTC timezone
+    os.environ['TZ'] = 'UTC'
+
+    from optparse import OptionParser, OptionGroup
+    from lofar.common import dbcredentials
+
+    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+
+    # Check the invocation arguments
+    parser = OptionParser('%prog [options]',
+                          description='run the tmss_precalculations_service which pre-calculates and stores sunrise/sunset data for all stations.')
+
+    parser.add_option('-i', '--interval_time', dest='interval_time', type='int', default=INTERVAL_TIME_SECONDS,
+                      help='The time between next calculation, default: %default')
+    parser.add_option('-d', '--nbr_days_calculate_ahead', dest='nbr_days_calculate_ahead', type='int', default=NBR_DAYS_CALCULATE_AHEAD,
+                      help='The number of days to calculate the sunset/sunrise ahead, default: %default')
+    parser.add_option('-b', '--nbr_days_before_today', dest='nbr_days_before_today', type='int', default=NBR_DAYS_BEFORE_TODAY,
+                      help='The number of days to calculate the sunset/sunrise before today (so yesterday=1), default: %default')
+
+    group = OptionGroup(parser, 'Django options')
+    parser.add_option_group(group)
+    group.add_option('-C', '--credentials', dest='dbcredentials', type='string', default=os.environ.get('TMSS_DBCREDENTIALS', 'TMSS'), help='django dbcredentials name, default: %default')
+
+    (options, args) = parser.parse_args()
+    from lofar.sas.tmss.tmss import setup_and_check_tmss_django_database_connection_and_exit_on_error
+    setup_and_check_tmss_django_database_connection_and_exit_on_error(options.dbcredentials)
+
+    job = create_service_job_for_sunrise_and_sunset_calculations(options.interval_time, options.nbr_days_calculate_ahead, options.nbr_days_before_today)
+    job.start()
+    waitForInterrupt()
+    job.stop()
+
+
+if __name__ == '__main__':
+    main()
+
diff --git a/SAS/TMSS/backend/services/precalculations_service/test/CMakeLists.txt b/SAS/TMSS/backend/services/precalculations_service/test/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..a3f0060bad5c5f9adfbbceb9c07b138a08675378
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/test/CMakeLists.txt
@@ -0,0 +1,10 @@
+# $Id: CMakeLists.txt 32679 2015-10-26 09:31:56Z schaap $
+
+if(BUILD_TESTING)
+    include(LofarCTest)
+
+    lofar_add_test(t_precalculations_service)
+
+    set_tests_properties(t_precalculations_service PROPERTIES TIMEOUT 300)
+
+endif()
diff --git a/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.py b/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.py
new file mode 100644
index 0000000000000000000000000000000000000000..f230691e85c92f1b64742cf731a6b1058bd7f188
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.py
@@ -0,0 +1,229 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+import unittest
+import time
+import datetime
+import logging
+logger = logging.getLogger('lofar.' + __name__)
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+
+
+from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment
+
+from lofar.sas.tmss.services.precalculations_service import create_service_job_for_sunrise_and_sunset_calculations
+from lofar.common.test_utils import integration_test
+
+
+@integration_test
+class TestPreCalculationService(unittest.TestCase):
+    """
+    Tests for the TMSSPreCalculationsServiceJob
+    It will check the number of items created of the StationTimeline model based on the input of the service to start
+    It will not check the content of the sunrise/sunset data of the  StationTimeline model itself
+    Note that 1 day calculation will take about 6 seconds (my local developer environment)
+    So the assumption was that the calculation takes about 6 sec, BUT the build environment took even longer: 11
+    to 14 seconds! Some 'timing' parameters were adjusted in this testcase, but they may not be robust enough.
+    On the other hand, if the build system gets even slower than this, there should really be doubts
+    about the build system.
+    """
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        """
+        Populate schema to be able to retrieve all stations
+        """
+        cls.tmss_test_env = TMSSTestEnvironment(populate_schemas=True)
+        cls.tmss_test_env.start()
+        cls.test_data_creator = cls.tmss_test_env.create_test_data_creator()
+
+    @classmethod
+    def tearDownClass(cls) -> None:
+        cls.tmss_test_env.stop()
+
+    def setUp(self) -> None:
+        """
+        Start every testcase with 'clean' StationTimeline model
+        """
+        from lofar.sas.tmss.tmss.tmssapp.models.calculations import StationTimeline
+        StationTimeline.objects.all().delete()
+
+    def test_all_stations_calculated_for_one_day(self):
+        """
+        Test if creating and starting, followed by stopping the (pre)calculation service results in 'one day'
+        of StationTimeline data for all stations
+        Note that 1 day calculation will take about 6 seconds
+        """
+        # Import here otherwise you get
+        # "django.core.exceptions.ImproperlyConfigured: Requested setting INSTALLED_APPS, but settings are not configured. You must either define the environment variable DJANGO_SETTINGS_MODULE or call settings.configure() before accessing settings."
+        from lofar.sas.tmss.tmss.tmssapp.conversions import get_all_stations
+        from lofar.sas.tmss.tmss.tmssapp.models.calculations import StationTimeline
+
+        nbr_stations = len(get_all_stations())
+        # Initially there should be no data
+        self.assertEqual(len(StationTimeline.objects.all()), 0)
+        # Now we are going to create and start the calculation service with a wait time of 60 sec,
+        # nbr days to calculate ahead is 1 and nbr days before today 1 ->  so only 'yesterday' should be created
+        job = create_service_job_for_sunrise_and_sunset_calculations(60, 1, 1)
+        job.start()
+        job.stop()
+        # Check what has been created
+        st_objects = StationTimeline.objects.all()
+        self.assertEqual(len(st_objects), nbr_stations)
+        # lets check with the timestamp of today, that should be zero
+        st_objects = StationTimeline.objects.filter(timestamp=datetime.date.today())
+        self.assertEqual(len(st_objects), 0)
+        # lets check with the timestamp in future, that should be zero
+        st_objects = StationTimeline.objects.filter(timestamp__gt=datetime.date.today())
+        self.assertEqual(len(st_objects), 0)
+        # lets check with the timestamp yesterday, that should be equal to the number of all stations
+        st_objects = StationTimeline.objects.filter(timestamp=datetime.date.today()-datetime.timedelta(days=1))
+        self.assertEqual(len(st_objects), nbr_stations)
+
+    def test_all_stations_calculated_for_multiple_days_with_one_trigger(self):
+        """
+        Test if creating and starting, followed by stopping the (pre)calculation service results in 'multiple day'
+        of StationTimeline data for all stations
+        Note that 4 days calculation will take about 30 seconds
+        """
+        # Import here otherwise you get
+        # "django.core.exceptions.ImproperlyConfigured: Requested setting INSTALLED_APPS, but settings are not configured. You must either define the environment variable DJANGO_SETTINGS_MODULE or call settings.configure() before accessing settings."
+        from lofar.sas.tmss.tmss.tmssapp.conversions import get_all_stations
+        from lofar.sas.tmss.tmss.tmssapp.models.calculations import StationTimeline
+
+        nbr_stations = len(get_all_stations())
+        # Initially there should be no data
+        self.assertEqual(len(StationTimeline.objects.all()), 0)
+        # Now we are going to create and start the calculation service with a interval of 120 sec,
+        # nbr days to calculate ahead is 4 and nbr days before today 2 ->  so 'day before yesterday, 'yesterday',
+        # 'today' and 'tomorrow' should be created
+        job = create_service_job_for_sunrise_and_sunset_calculations(120, 4, 2)
+        job.start()
+        job.stop()
+        # Check what has been created
+        st_objects = StationTimeline.objects.all()
+        self.assertEqual(len(st_objects), 4*nbr_stations)
+        # lets check with the timestamp of today, that should be equal to the number of all stations
+        st_objects = StationTimeline.objects.filter(timestamp=datetime.date.today())
+        self.assertEqual(len(st_objects), nbr_stations)
+        # lets check with the timestamp in future, that should be equal to the number of all stations
+        st_objects = StationTimeline.objects.filter(timestamp__gt=datetime.date.today())
+        self.assertEqual(len(st_objects), nbr_stations)
+        # lets check with the timestamp in the past, that should be equal to the 2 times number of all stations
+        st_objects = StationTimeline.objects.filter(timestamp__lt=datetime.date.today())
+        self.assertEqual(len(st_objects), 2*nbr_stations)
+
+    def test_all_stations_calculated_after_interval(self):
+        """
+        Test if creating and starting, waiting for period (25 seconds), followed by stopping the (pre)calculation service results
+        in 'multiple day' of StationTimeline data for all stations.
+        It will test the scheduler with interval of 20 seconds, so three days should be calculated
+        """
+        # Import here otherwise you get
+        # "django.core.exceptions.ImproperlyConfigured: Requested setting INSTALLED_APPS, but settings are not configured. You must either define the environment variable DJANGO_SETTINGS_MODULE or call settings.configure() before accessing settings."
+        from lofar.sas.tmss.tmss.tmssapp.conversions import get_all_stations
+        from lofar.sas.tmss.tmss.tmssapp.models.calculations import StationTimeline
+
+        nbr_stations = len(get_all_stations())
+        # Initially there should be no data
+        self.assertEqual(len(StationTimeline.objects.all()), 0)
+        # Now we are going to create and start the calculation service with a interval of 20 sec (smaller will not make sense),
+        # nbr days to calculate ahead is 1 and nbr days before today 0 ->  so it start with 'today' and after 20 seconds
+        # 'tomorrow' etc..,
+        job = create_service_job_for_sunrise_and_sunset_calculations(20, 1, 0)
+        job.start()
+        time.sleep(25)
+        job.stop()
+        # Check what has been created; with an interval of 20 seconds we should have two days
+        st_objects = StationTimeline.objects.all()
+        self.assertEqual(len(st_objects), 2*nbr_stations)
+        # lets check with the timestamp of today, that should be equal to the number of all stations
+        st_objects = StationTimeline.objects.filter(timestamp=datetime.date.today())
+        self.assertEqual(len(st_objects), nbr_stations)
+        # lets check with the timestamp in future, that should be equal to the number of all stations
+        st_objects = StationTimeline.objects.filter(timestamp__gt=datetime.date.today())
+        self.assertEqual(len(st_objects), nbr_stations)
+        # lets check with the timestamp in the past, that should be equal to zero
+        st_objects = StationTimeline.objects.filter(timestamp__lt=datetime.date.today())
+        self.assertEqual(len(st_objects), 0)
+
+    def test_all_stations_calculated_for_when_interval_time_is_too_small(self):
+        """
+        Check that if the interval time < calculation time it does not lead to exception
+        Test if creating and starting, waiting for period (20 seconds), followed by stopping the (pre)calculation service results
+        in 'multiple day' of StationTimeline data for all stations.
+        It will test the scheduler with interval of 2 seconds, which smaller than ~6 seconds
+        Stopping after 20 seconds should make 2 days calculated
+        """
+        # Import here otherwise you get
+        # "django.core.exceptions.ImproperlyConfigured: Requested setting INSTALLED_APPS, but settings are not configured. You must either define the environment variable DJANGO_SETTINGS_MODULE or call settings.configure() before accessing settings."
+        from lofar.sas.tmss.tmss.tmssapp.conversions import get_all_stations
+        from lofar.sas.tmss.tmss.tmssapp.models.calculations import StationTimeline
+
+        nbr_stations = len(get_all_stations())
+        # Initially there should be no data
+        self.assertEqual(len(StationTimeline.objects.all()), 0)
+        # Now we are going to create and start the calculation service with an interval of 2 sec
+        # nbr days to calculate ahead is 1 and nbr days before today 0 ->  so it start with 'today' and after ~6 seconds
+        # 'tomorrow' etc..
+        job = create_service_job_for_sunrise_and_sunset_calculations(2, 1, 0)
+        job.start()
+        time.sleep(20)
+        job.stop()
+        # Check what has been created; with an interval of 2 seconds we should have at least two days
+        st_objects = StationTimeline.objects.all()
+        self.assertGreaterEqual(len(st_objects), 2 * nbr_stations)
+        # lets check with the timestamp of today, that should be equal to the number of all stations
+        st_objects = StationTimeline.objects.filter(timestamp=datetime.date.today())
+        self.assertEqual(len(st_objects), nbr_stations)
+
+    @unittest.skip("TODO: fix blinking test due to incorrect synchronization issues.")
+    def test_all_stations_calculated_with_two_jobs_started(self):
+        """
+        Test if starting two jobs of (pre)calculation service results in no Exception, there are no
+        duplicate data stored (covered by the Constraints in the model)
+        It will test the scheduler with interval of 20 seconds, to make sure one interval after the start has been passed
+        """
+        # Import here otherwise you get
+        # "django.core.exceptions.ImproperlyConfigured: Requested setting INSTALLED_APPS, but settings are not configured. You must either define the environment variable DJANGO_SETTINGS_MODULE or call settings.configure() before accessing settings."
+        from lofar.sas.tmss.tmss.tmssapp.conversions import get_all_stations
+        from lofar.sas.tmss.tmss.tmssapp.models.calculations import StationTimeline
+
+        nbr_stations = len(get_all_stations())
+        # Initially there should be no data
+        self.assertEqual(len(StationTimeline.objects.all()), 0)
+        # Now we are going to create and start the calculation service with an interval of 20 sec
+        # nbr days to calculate ahead is 1 and nbr days before today 0 ->  so it start with 'today' and after ~20 seconds
+        # 'tomorrow' etc..
+        job = create_service_job_for_sunrise_and_sunset_calculations(20, 1, 0)
+        job2 = create_service_job_for_sunrise_and_sunset_calculations(20, 1, 0)
+
+        job.start()
+        job2.start()
+        time.sleep(22)
+        job.stop()
+        job2.stop()
+        # Check what has been created; should only be today and tomorrow
+        st_objects = StationTimeline.objects.all()
+        self.assertGreaterEqual(len(st_objects), 2 * nbr_stations)
+
+
+if __name__ == '__main__':
+    #run the unit tests
+    unittest.main()
\ No newline at end of file
diff --git a/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.run b/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.run
new file mode 100755
index 0000000000000000000000000000000000000000..187c3bf1e7ba9d481b31f00104a57b7904d56c15
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.run
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+# Run the unit test
+source python-coverage.sh
+python_coverage_test "*tmss*" t_precalculations_service.py
+
diff --git a/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.sh b/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.sh
new file mode 100755
index 0000000000000000000000000000000000000000..54b180d52549f5fcb1b84e706f4e6ae5b2e45010
--- /dev/null
+++ b/SAS/TMSS/backend/services/precalculations_service/test/t_precalculations_service.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+./runctest.sh t_precalculations_service
\ No newline at end of file
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/conversions.py b/SAS/TMSS/backend/src/tmss/tmssapp/conversions.py
index 3c0e184ce79ac8e697043dcf8ced5dceba3bf1eb..14b0a38e566666fda10ba8292bb9d4f91525afef 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/conversions.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/conversions.py
@@ -5,10 +5,16 @@ from astropy.coordinates.earth import EarthLocation
 from astropy.coordinates import Angle, get_body
 import astropy.time
 from functools import lru_cache
+from lofar.sas.tmss.tmss.tmssapp.models.calculations import StationTimeline
+from lofar.sas.tmss.tmss.tmssapp.models.specification import CommonSchemaTemplate
+from django.db.utils import IntegrityError
+
+from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
 
 import logging
 logger = logging.getLogger(__name__)
 
+
 def create_astroplan_observer_for_station(station: str) -> 'Observer':
     '''
     returns an astroplan observer for object for a given station, located in the LBA center of the given station
@@ -26,18 +32,28 @@ def create_astroplan_observer_for_station(station: str) -> 'Observer':
 # default angle to the horizon at which the sunset/sunrise starts and ends, as per LOFAR definition.
 SUN_SET_RISE_ANGLE_TO_HORIZON = Angle(10, unit=astropy.units.deg)
 # default n_grid_points; higher is more precise but very costly; astropy defaults to 150, errors now can be in the minutes, increase if this is not good enough
+# TODO: To be considered, now we store the sunset/sunrise data in advanced, we can increase the number of points!!
 SUN_SET_RISE_PRECISION = 30
 
-@lru_cache(maxsize=256, typed=False)  # does not like lists, so use tuples to allow caching
-def timestamps_and_stations_to_sun_rise_and_set(timestamps: tuple, stations: tuple, angle_to_horizon: Angle=SUN_SET_RISE_ANGLE_TO_HORIZON) -> dict:
+
+def timestamps_and_stations_to_sun_rise_and_set(timestamps: tuple, stations: tuple, angle_to_horizon: Angle=SUN_SET_RISE_ANGLE_TO_HORIZON,
+                                                create_when_not_found=False) -> dict:
     """
-    Compute sunrise, sunset, day and night of the given stations at the given timestamps.
+    Retrieve for given stations and given timestamps the sunrise/sunset/day/night data as dictionary
+    If station/timestamp is already calculated it will be retrieved from database otherwise it will be calculated
+    and added to the database for possible future retrieval (optional parameter must be true).
+    Storing the pre-calculated data into a database makes retrieval faster.
+
     The day/sunrise/sunset is always on the date of the timestamp.
-    The night is usually the one _starting_ on the date of the time stamp, unless the given timestamp falls before sunrise, in which case it is the night _ending_ on the timestamp date.
-    :param timestamps: tuple of datetimes, e.g. (datetime(2020, 1, 1), datetime(2020, 1, 2))
-    :param stations: tuple of station names, e.g. ("CS002",)
+    The night is usually the one _starting_ on the date of the time stamp, unless the given timestamp falls before
+    sunrise, in which case it is the night _ending_ on the timestamp date.
+
+    :param timestamps: tuple of datetimes, e.g. (datetime(2020, 1, 1), datetime(2020, 1, 2))
+    :param stations: tuple of station names, e.g. ("CS002",)
     :param angle_to_horizon: the angle between horizon and given coordinates for which rise and set times are returned
-    :return A dict that maps station names to a nested dict that contains lists of start and end times for sunrise, sunset, etc, on each requested date.
+    :param create_when_not_found: Add data to the database if not found there, i.e. calculated for the first time
+    :return A dict that maps station names to a nested dict that contains lists of start and end times for sunrise,
+            sunset, day and night, on each requested date.
         E.g.
         {"CS002":
             {   "sunrise": [{"start": datetime(2020, 1, 1, 6, 0, 0)), "end": datetime(2020, 1, 1, 6, 30, 0)},
@@ -53,27 +69,112 @@ def timestamps_and_stations_to_sun_rise_and_set(timestamps: tuple, stations: tup
     """
     return_dict = {}
     for station in stations:
+        observer = create_astroplan_observer_for_station(station)
         for timestamp in timestamps:
-            # todo: this can probably be made faster by moving the following logic to an own function with single station/timestamp as input and putting the lru_cache on there.
-            #  This also means that we have to strip the time from the datetime. Can this be safely done?
-            observer = create_astroplan_observer_for_station(station)
-            sunrise_start = observer.sun_rise_time(time=Time(datetime.combine(timestamp.date(), dtime(12,0,0))), horizon=-angle_to_horizon, which='previous', n_grid_points=SUN_SET_RISE_PRECISION)
-            sunrise_end = observer.sun_rise_time(time=Time(sunrise_start), horizon=angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION)
-            sunset_start = observer.sun_set_time(time=sunrise_end, horizon=angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION)
-            sunset_end = observer.sun_set_time(time=sunset_start, horizon=-angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION)
-            return_dict.setdefault(station, {}).setdefault("sunrise", []).append({"start": sunrise_start.to_datetime(), "end": sunrise_end.to_datetime()})
-            return_dict[station].setdefault("sunset", []).append({"start": sunset_start.to_datetime(), "end": sunset_end.to_datetime()})
-            return_dict[station].setdefault("day", []).append({"start": sunrise_end.to_datetime(), "end": sunset_start.to_datetime()})
-            if timestamp >= sunrise_start:
-                sunrise_next_start = observer.sun_rise_time(time=sunset_end, horizon=-angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION)
-                return_dict[station].setdefault("night", []).append({"start": sunset_end.to_datetime(), "end": sunrise_next_start.to_datetime()})
+            # We could also check whether ALL stations/timestamps are in the DB in one query; for now do it in a
+            # loop per station/timestamp, so that no combination is missed
+            station_timestamp_found = False
+            try:
+                obj = StationTimeline.objects.get(station_name=station, timestamp=datetime.date(timestamp))
+                station_timestamp_found = True
+            except ObjectDoesNotExist:
+                station_timestamp_found = False
+
+            if station_timestamp_found:
+                logger.debug("StationTimeline data found in DB for station=%s, timestamp=%s" % (station,timestamp))
+                sunrise_dict = {"start": obj.sunrise_start, "end": obj.sunrise_end}
+                sunset_dict = {"start": obj.sunset_start, "end": obj.sunset_end}
             else:
-                sunset_previous_end = observer.sun_set_time(time=sunrise_start, horizon=-angle_to_horizon, which='previous', n_grid_points=SUN_SET_RISE_PRECISION)
-                return_dict[station].setdefault("night", []).append({"start": sunset_previous_end.to_datetime(), "end": sunrise_start.to_datetime()})
+                # Not found in database so calculate it
+                try:
+                    sunrise_dict, sunset_dict = calculate_and_get_sunrise_and_sunset_of_observer_day(observer, timestamp, angle_to_horizon)
+                except Exception as exp:
+                    logger.warning("Can not calculate sunrise/sunset for station=%s, timestamp=%s" % (station,timestamp))
+                    # raise exp
+                    # Don't let it crash for now
+                    # Stations SE607 and LV614 have known calculation problems on 2021-07-01...
+                    # SE607 also on 2021-06-04 ??
+                    break
+                # Add to database
+                if create_when_not_found:
+                    try:
+                        station_timeline = StationTimeline.objects.create(
+                                                    station_name=station,
+                                                    timestamp=timestamp.date(),
+                                                    sunrise_start=sunrise_dict['start'],
+                                                    sunrise_end=sunrise_dict['end'],
+                                                    sunset_start=sunset_dict['start'],
+                                                    sunset_end=sunset_dict['end'])
+                        logger.debug("StationTimeline %s calculated and created for station=%s, timestamp=%s" %
+                                    (station_timeline, station, timestamp))
+                    except IntegrityError as e:
+                        if 'unique_station_time_line' in str(e):
+                            logger.info("StationTimeline with station=%s and timestamp=%s already exists, "
+                                        "so not added to database",  station, timestamp)
+                        else:
+                            raise
+
+            # Derive day/night from sunset/sunrise
+            day_dict = {"start": sunrise_dict["end"], "end": sunset_dict["start"]}
+
+            if timestamp >= sunrise_dict["start"]:
+                # Determine next sunrise start
+                try:
+                    obj_next = StationTimeline.objects.get(station_name=station,
+                                                           timestamp=datetime.date(timestamp + timedelta(days=1)))
+                    sunrise_next_start = obj_next.sunrise_start
+                except ObjectDoesNotExist:
+                    sunrise_next_start = observer.sun_rise_time(time=Time(sunrise_dict["end"]), horizon=-angle_to_horizon,
+                                                                which='next',
+                                                                n_grid_points=SUN_SET_RISE_PRECISION).to_datetime()
+                night_dict = {"start": sunset_dict["end"], "end": sunrise_next_start}
+            else:
+                # Determine previous sunset end
+                try:
+                    obj_prev = StationTimeline.objects.get(station_name=station,
+                                                           timestamp=datetime.date(timestamp - timedelta(days=1)))
+                    sunset_previous_end = obj_prev.sunset_end
+                except ObjectDoesNotExist:
+                    sunset_previous_end = observer.sun_set_time(time=Time(sunrise_dict["start"]), horizon=-angle_to_horizon,
+                                                                which='previous',
+                                                                n_grid_points=SUN_SET_RISE_PRECISION).to_datetime()
+                night_dict = {"start": sunset_previous_end, "end": sunrise_dict["start"]}
+
+            # Create overall result
+            return_dict.setdefault(station, {})
+            return_dict[station].setdefault("sunrise", []).append(sunrise_dict)
+            return_dict[station].setdefault("sunset", []).append(sunset_dict)
+            return_dict[station].setdefault("day", []).append(day_dict)
+            return_dict[station].setdefault("night", []).append(night_dict)
 
     return return_dict
 
 
+@lru_cache(maxsize=256, typed=False)
+def calculate_and_get_sunrise_and_sunset_of_observer_day(observer, timestamp: datetime, angle_to_horizon: Angle) -> tuple:
+    """
+    Compute the sunrise and sunset of the given observer object (station) at the given timestamp.
+    :param observer: astroplan observer object for the station
+    :param timestamp: datetime of a day, e.g. datetime(2020, 1, 1)
+    :param angle_to_horizon: the angle between the horizon and given coordinates for which rise and set times are returned
+    :return: tuple of two dictionaries (each with 'start' and 'end' keys): (sunrise, sunset)
+    """
+    sunrise_start = observer.sun_rise_time(time=Time(datetime.combine(timestamp.date(), dtime(12, 0, 0))),
+                                           horizon=-angle_to_horizon, which='previous',
+                                           n_grid_points=SUN_SET_RISE_PRECISION)
+    sunrise_end = observer.sun_rise_time(time=Time(sunrise_start), horizon=angle_to_horizon, which='next',
+                                         n_grid_points=SUN_SET_RISE_PRECISION)
+    sunset_start = observer.sun_set_time(time=sunrise_end, horizon=angle_to_horizon, which='next',
+                                         n_grid_points=SUN_SET_RISE_PRECISION)
+    sunset_end = observer.sun_set_time(time=sunset_start, horizon=-angle_to_horizon, which='next',
+                                       n_grid_points=SUN_SET_RISE_PRECISION)
+
+    sunrise_dict = {"start": sunrise_start.to_datetime(), "end": sunrise_end.to_datetime()}
+    sunset_dict = {"start": sunset_start.to_datetime(), "end": sunset_end.to_datetime()}
+
+    return sunrise_dict, sunset_dict
+
+
 # todo: Depending on usage patterns, we should consider refactoring this a little so that we cache on a function with a single timestamp as input. Requests with similar (but not identical) timestamps or bodies currently make no use of cached results for the subset computed in previous requests.
 @lru_cache(maxsize=256, typed=False)  # does not like lists, so use tuples to allow caching
 def coordinates_and_timestamps_to_separation_from_bodies(angle1: float, angle2: float, direction_type: str, timestamps: tuple, bodies: tuple) -> dict:
@@ -228,3 +329,20 @@ def antennafields_for_antennaset_and_station(antennaset:str, station:str) -> lis
 
     return fields
 
+
+def get_all_stations():
+    """
+    Return the names of all known stations.
+    Station names are retrieved from the stations schema template by collecting the
+    'Dutch' and 'International' station groups, which together cover all stations.
+    """
+    lst_stations = []
+    for station_group in ["Dutch", "International"]:
+        try:
+            station_schema_template = CommonSchemaTemplate.objects.get(name="stations", version=1)
+            groups = station_schema_template.schema['definitions']['station_group']['anyOf']
+            selected_group = next(g for g in groups if g['title'].lower() == station_group.lower())
+            lst_stations.extend(selected_group['properties']['stations']['enum'][0])
+        except Exception:
+            logger.warning("No stations schema found, sorry can not determine station list, return empty list")
+    return lst_stations
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py
index 2fffaacce2860830ce8cf931ccb535535ae69121..fc070c79167afd4e55e76c20d8ae39db1ba8f961 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py
@@ -1,4 +1,4 @@
-# Generated by Django 3.0.9 on 2021-03-23 17:08
+# Generated by Django 3.0.9 on 2021-03-29 13:02
 
 from django.conf import settings
 import django.contrib.postgres.fields
@@ -6,6 +6,7 @@ import django.contrib.postgres.fields.jsonb
 import django.contrib.postgres.indexes
 from django.db import migrations, models
 import django.db.models.deletion
+import lofar.sas.tmss.tmss.tmssapp.models.common
 import lofar.sas.tmss.tmss.tmssapp.models.specification
 
 
@@ -98,6 +99,7 @@ class Migration(migrations.Migration):
             options={
                 'abstract': False,
             },
+            bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, models.Model),
         ),
         migrations.CreateModel(
             name='CycleQuota',
@@ -378,6 +380,15 @@ class Migration(migrations.Migration):
                 'abstract': False,
             },
         ),
+        migrations.CreateModel(
+            name='PriorityQueueType',
+            fields=[
+                ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
         migrations.CreateModel(
             name='Project',
             fields=[
@@ -398,6 +409,7 @@ class Migration(migrations.Migration):
             options={
                 'abstract': False,
             },
+            bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, models.Model),
         ),
         migrations.CreateModel(
             name='ProjectCategory',
@@ -433,6 +445,7 @@ class Migration(migrations.Migration):
             fields=[
                 ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
             ],
+            bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, models.Model),
         ),
         migrations.CreateModel(
             name='ProjectRole',
@@ -608,10 +621,12 @@ class Migration(migrations.Migration):
                 ('output_data_allowed_to_be_ingested', models.BooleanField(default=False, help_text='boolean (default FALSE), which blocks Ingest Tasks from starting if OFF. When toggled ON, backend must scan for startable Ingest Tasks.')),
                 ('output_pinned', models.BooleanField(default=False, help_text='boolean (default FALSE), which blocks deleting unpinned dataproducts. When toggled ON, backend must pick SUB up for deletion. It also must when dataproducts are unpinned.')),
                 ('results_accepted', models.BooleanField(default=False, help_text='boolean (default NULL), which records whether the results were accepted, allowing the higher-level accounting to be adjusted.')),
+                ('priority_rank', models.FloatField(default=0.0, help_text='Priority of this scheduling unit w.r.t. other scheduling units within the same queue and project.')),
             ],
             options={
                 'abstract': False,
             },
+            bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, models.Model),
         ),
         migrations.CreateModel(
             name='SchedulingUnitDraft',
@@ -626,10 +641,12 @@ class Migration(migrations.Migration):
                 ('generator_instance_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Parameter value that generated this run draft (NULLable).', null=True)),
                 ('scheduling_constraints_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Scheduling Constraints for this run.', null=True)),
                 ('ingest_permission_required', models.BooleanField(default=False, help_text='Explicit permission is needed before the task.')),
+                ('priority_rank', models.FloatField(default=0.0, help_text='Priority of this scheduling unit w.r.t. other scheduling units within the same queue and project.')),
             ],
             options={
                 'abstract': False,
             },
+            bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, models.Model),
         ),
         migrations.CreateModel(
             name='SchedulingUnitObservingStrategyTemplate',
@@ -791,6 +808,7 @@ class Migration(migrations.Migration):
                 ('do_cancel', models.BooleanField(help_text='Cancel this task.')),
                 ('output_pinned', models.BooleanField(default=False, help_text='True if the output of this task is pinned to disk, that is, forbidden to be removed.')),
             ],
+            bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, models.Model),
         ),
         migrations.CreateModel(
             name='TaskConnectorType',
@@ -920,6 +938,17 @@ class Migration(migrations.Migration):
                 ('second', models.ForeignKey(help_text='Second Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_scheduling_relation', to='tmssapp.TaskBlueprint')),
             ],
         ),
+        migrations.CreateModel(
+            name='StationTimeline',
+            fields=[
+                ('station_name', models.CharField(max_length=16, null=False, editable=False, help_text='The LOFAR station name.')),
+                ('timestamp', models.DateField(editable=False, null=True, help_text='The date (YYYYMMDD).')),
+                ('sunrise_start', models.DateTimeField(null=True, help_text='Start time of the sunrise.')),
+                ('sunrise_end', models.DateTimeField(null=True, help_text='End time of the sunrise.')),
+                ('sunset_start', models.DateTimeField(null=True, help_text='Start time of the sunset.')),
+                ('sunset_end', models.DateTimeField(null=True, help_text='End time of the sunset.')),
+            ],
+        ),
         migrations.AddConstraint(
             model_name='taskrelationselectiontemplate',
             constraint=models.UniqueConstraint(fields=('name', 'version'), name='taskrelationselectiontemplate_unique_name_version'),
@@ -1134,6 +1163,10 @@ class Migration(migrations.Migration):
             name='task_blueprint',
             field=models.ForeignKey(help_text='Task Blueprint to which this Subtask belongs.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='subtasks', to='tmssapp.TaskBlueprint'),
         ),
+        migrations.AddConstraint(
+            model_name='stationtimeline',
+            constraint=models.UniqueConstraint(fields=('station_name', 'timestamp'),  name='unique_station_time_line'),
+        ),
         migrations.AddConstraint(
             model_name='schedulingunittemplate',
             constraint=models.UniqueConstraint(fields=('name', 'version'), name='schedulingunittemplate_unique_name_version'),
@@ -1158,6 +1191,11 @@ class Migration(migrations.Migration):
             name='observation_strategy_template',
             field=models.ForeignKey(help_text='Observation Strategy Template used to create the requirements_doc.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingUnitObservingStrategyTemplate'),
         ),
+        migrations.AddField(
+            model_name='schedulingunitdraft',
+            name='priority_queue',
+            field=models.ForeignKey(default='A', help_text='Priority queue of this scheduling unit. Queues provide a strict ordering between scheduling units.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.PriorityQueueType'),
+        ),
         migrations.AddField(
             model_name='schedulingunitdraft',
             name='requirements_template',
@@ -1178,6 +1216,11 @@ class Migration(migrations.Migration):
             name='draft',
             field=models.ForeignKey(help_text='Scheduling Unit Draft which this run instantiates.', on_delete=django.db.models.deletion.PROTECT, related_name='scheduling_unit_blueprints', to='tmssapp.SchedulingUnitDraft'),
         ),
+        migrations.AddField(
+            model_name='schedulingunitblueprint',
+            name='priority_queue',
+            field=models.ForeignKey(default='A', help_text='Priority queue of this scheduling unit. Queues provide a strict ordering between scheduling units.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.PriorityQueueType'),
+        ),
         migrations.AddField(
             model_name='schedulingunitblueprint',
             name='requirements_template',
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/CMakeLists.txt b/SAS/TMSS/backend/src/tmss/tmssapp/models/CMakeLists.txt
index 3496efd57358ab186b665fe2dc3bd40264d4deaa..f6e74f93da044cdb42d2144d32a96fad0ed10097 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/CMakeLists.txt
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/CMakeLists.txt
@@ -8,6 +8,7 @@ set(_py_files
     scheduling.py
     common.py
     permissions.py
+    calculations.py
     )
 
 python_install(${_py_files}
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/__init__.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/__init__.py
index 0b0546b8d4bb175b9d8b5f9d98727aab73191c6b..3eb788371d97e4e3b1e62cbb5636014ceffc88bd 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/__init__.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/__init__.py
@@ -1,4 +1,5 @@
 from .specification import *
 from .scheduling import *
 from .common import *
-from .permissions import *
\ No newline at end of file
+from .permissions import *
+from .calculations import *
\ No newline at end of file
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/calculations.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/calculations.py
new file mode 100644
index 0000000000000000000000000000000000000000..e0f361589f577b47d3bedd8b5072b294fe7c409f
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/calculations.py
@@ -0,0 +1,30 @@
+"""
+This file contains the database models for calculations
+"""
+
+import os
+import logging
+logger = logging.getLogger(__name__)
+
+from django.db.models import Model, CharField, DateTimeField, DateField, UniqueConstraint
+
+
+class StationTimeline(Model):
+    """
+    Represents computations of sunrise, sunset of the given stations at the given timestamps.
+    Day and night are derived from sunset/sunrise data.
+    The day/sunrise/sunset is always on the date of the timestamp.
+    The night is usually the one _starting_ on the date of the time stamp, unless the given timestamp falls
+    before sunrise, in which case it is the night _ending_ on the timestamp date.
+    """
+    station_name = CharField(max_length=16, null=False, editable=False, help_text='The LOFAR station name.')
+    timestamp = DateField(editable=False, null=True, help_text='The date (YYYYMMDD).')
+
+    sunrise_start = DateTimeField(null=True, help_text='Start time of the sunrise.')
+    sunrise_end = DateTimeField(null=True, help_text='End time of the sunrise.')
+    sunset_start = DateTimeField(null=True, help_text='Start time of the sunset.')
+    sunset_end = DateTimeField(null=True, help_text='End time of the sunset.')
+
+    class Meta:
+        # ensure there are no duplicate station-timestamp combinations
+        constraints = [UniqueConstraint(fields=['station_name', 'timestamp'], name='unique_station_time_line')]
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
index 140b298db576485d9f3d8f23cb49f20daf15cd37..21c56bb84ca9393c50c745a632dc70d34a5d4815 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
@@ -156,6 +156,13 @@ class TaskType(AbstractChoice):
         OTHER = 'other'
 
 
+class PriorityQueueType(AbstractChoice):
+    """Defines the possible priority queues for SchedulingUnits.
+    The items in the Choices class below are automagically populated into the database via a data migration."""
+    class Choices(Enum):
+        A = "A"
+        B = "B"
+
 # concrete models
 
 class Setting(BasicCommon):
@@ -387,6 +394,8 @@ class SchedulingUnitDraft(RefreshFromDbInvalidatesCachedPropertiesMixin, NamedCo
     scheduling_constraints_doc = JSONField(help_text='Scheduling Constraints for this run.', null=True)
     scheduling_constraints_template = ForeignKey('SchedulingConstraintsTemplate', on_delete=CASCADE, null=True, help_text='Schema used for scheduling_constraints_doc.')
     ingest_permission_required = BooleanField(default=False, help_text='Explicit permission is needed before the task.')
+    priority_rank = FloatField(null=False, default=0.0, help_text='Priority of this scheduling unit w.r.t. other scheduling units within the same queue and project.')
+    priority_queue = ForeignKey('PriorityQueueType', null=False, on_delete=PROTECT, default="A", help_text='Priority queue of this scheduling unit. Queues provide a strict ordering between scheduling units.')
 
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
         if self.requirements_doc is not None and self.requirements_template_id and self.requirements_template.schema is not None:
@@ -463,6 +472,8 @@ class SchedulingUnitBlueprint(RefreshFromDbInvalidatesCachedPropertiesMixin, Nam
     output_data_allowed_to_be_ingested = BooleanField(default=False, help_text='boolean (default FALSE), which blocks Ingest Tasks from starting if OFF. When toggled ON, backend must scan for startable Ingest Tasks.')
     output_pinned = BooleanField(default=False, help_text='boolean (default FALSE), which blocks deleting unpinned dataproducts. When toggled ON, backend must pick SUB up for deletion. It also must when dataproducts are unpinned.')
     results_accepted = BooleanField(default=False, help_text='boolean (default NULL), which records whether the results were accepted, allowing the higher-level accounting to be adjusted.')
+    priority_rank = FloatField(null=False, default=0.0, help_text='Priority of this scheduling unit w.r.t. other scheduling units within the same queue and project.')
+    priority_queue = ForeignKey('PriorityQueueType', null=False, on_delete=PROTECT, default="A", help_text='Priority queue of this scheduling unit. Queues provide a strict ordering between scheduling units.')
 
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
         annotate_validate_add_defaults_to_doc_using_template(self, 'requirements_doc', 'requirements_template')
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/populate.py b/SAS/TMSS/backend/src/tmss/tmssapp/populate.py
index 1768345692a3b519bac2c555834231455ebe611d..684280c9ad39c7828f4e0be3bf121ff3b97fde3e 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/populate.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/populate.py
@@ -19,12 +19,13 @@ logger = logging.getLogger(__name__)
 
 import inspect
 import re
-from datetime import datetime, timezone
+from datetime import timezone, datetime, date
 from lofar.sas.tmss.tmss.tmssapp import models
 from lofar.sas.tmss.tmss.tmssapp import viewsets
 from lofar.sas.tmss.tmss.tmssapp.models.specification import *
 from lofar.sas.tmss.tmss.tmssapp.models.scheduling import *
 from lofar.sas.tmss.tmss.tmssapp.models.permissions import *
+from lofar.sas.tmss.tmss.tmssapp.conversions import timestamps_and_stations_to_sun_rise_and_set, get_all_stations
 from lofar.common import isTestEnvironment, isDevelopmentEnvironment
 from concurrent.futures import ThreadPoolExecutor
 from django.contrib.auth.models import User, Group, Permission
@@ -42,7 +43,7 @@ def populate_choices(apps, schema_editor):
     '''
     choice_classes = [Role, IOType, Datatype, Dataformat, CopyReason,
                       SubtaskState, SubtaskType, StationType, Algorithm, SchedulingRelationPlacement,
-                      Flag, ProjectCategory, PeriodCategory, Quantity, TaskType, ProjectRole]
+                      Flag, ProjectCategory, PeriodCategory, Quantity, TaskType, ProjectRole, PriorityQueueType]
 
     # upload choices in parallel
     with ThreadPoolExecutor() as executor:
@@ -622,3 +623,19 @@ def populate_system_test_users():
     guest_user.groups.add(Group.objects.get(name='Guest'))
     lta_user, _ = User.objects.get_or_create(username='lta_user', password='lta_user')
     lta_user.groups.add(Group.objects.get(name='LTA User'))
+
+
+def populate_sunrise_and_sunset_for_all_stations(nbr_days=3, start_date=None):
+    """
+    Populate station timeline data of all stations for the given number of days, starting at the given date
+    (default: today). Note: if the data is not in the database yet, it takes about 6s to calculate it for all stations.
+    """
+    # Resolve the default here: a date.today() default argument would be evaluated only once, at import time
+    start_date = date.today() if start_date is None else start_date
+    starttime_for_logging = datetime.utcnow()
+    logger.info("Populate sunrise and sunset for ALL known stations from %s up to %d days" % (start_date, nbr_days))
+    base_time = datetime.combine(start_date, datetime.min.time())
+    lst_timestamps = [base_time + timedelta(days=i) for i in range(nbr_days)]
+
+    timestamps_and_stations_to_sun_rise_and_set(tuple(lst_timestamps), tuple(get_all_stations()), create_when_not_found=True)
+    logger.info("Populate sunrise and sunset done in %.1fs", (datetime.utcnow()-starttime_for_logging).total_seconds())
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json
index a7a5ee4c0c6d43b6ba8539cf2e7f9529536c7974..7f8df95358330be51622051ed4ae34dc8c5fa899 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json
@@ -35,7 +35,6 @@
           "CS302",
           "CS401",
           "CS501",
-          "RS104",
           "RS106",
           "RS205",
           "RS208",
@@ -47,7 +46,6 @@
           "RS406",
           "RS407",
           "RS409",
-          "RS410",
           "RS503",
           "RS508",
           "RS509",
@@ -116,8 +114,8 @@
           "properties":{
             "stations":{
               "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
-              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501"]],
-              "default": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501"],
+              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501"]],
+              "default": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501"],
               "uniqueItems": false
             },
             "max_nr_missing":{
@@ -154,8 +152,8 @@
           "properties":{
             "stations":{
               "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
-              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"]],
-              "default": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"],
+              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"]],
+              "default": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"],
               "uniqueItems": false
             },
             "max_nr_missing":{
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/CMakeLists.txt b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/CMakeLists.txt
index 83a8174527b6f67a614c62aa26739e4e38377af7..f5f6fe3833689eb59d13bca4ad0b66af0517d805 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/CMakeLists.txt
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/CMakeLists.txt
@@ -8,6 +8,7 @@ set(_py_files
     widgets.py
     common.py
     permissions.py
+    calculations.py
     )
 
 python_install(${_py_files}
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/__init__.py b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/__init__.py
index 0b0546b8d4bb175b9d8b5f9d98727aab73191c6b..3eb788371d97e4e3b1e62cbb5636014ceffc88bd 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/__init__.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/__init__.py
@@ -1,4 +1,5 @@
 from .specification import *
 from .scheduling import *
 from .common import *
-from .permissions import *
\ No newline at end of file
+from .permissions import *
+from .calculations import *
\ No newline at end of file
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/calculations.py b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/calculations.py
new file mode 100644
index 0000000000000000000000000000000000000000..8584228204e5737e659fec51df69363b25ae5673
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/calculations.py
@@ -0,0 +1,15 @@
+"""
+This file contains the serializers for conversion models
+"""
+
+import logging
+logger = logging.getLogger(__name__)
+
+from rest_framework import serializers
+from .. import models
+
+
+class StationTimelineSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = models.StationTimeline
+        fields = '__all__'
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py
index 8e21947208819f013ba1c7d23bda3586cd774f91..fc23e9e94249066fdd813ea2ece5ad199bd2f452 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py
@@ -369,6 +369,12 @@ class TaskTypeSerializer(DynamicRelationalHyperlinkedModelSerializer):
         fields = '__all__'
 
 
+class PriorityQueueTypeSerializer(DynamicRelationalHyperlinkedModelSerializer):
+    class Meta:
+        model = models.PriorityQueueType
+        fields = '__all__'
+
+
 class ReservationStrategyTemplateSerializer(DynamicRelationalHyperlinkedModelSerializer):
     template = JSONEditorField(schema_source="reservation_template.schema")
 
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/CMakeLists.txt b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/CMakeLists.txt
index 186d29924f2c1706f57804848474f1a74bfeebb8..ab71ce95fb8cbf05bcc2533b2cec8bdd42956243 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/CMakeLists.txt
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/CMakeLists.txt
@@ -8,7 +8,8 @@ set(_py_files
     scheduling.py
     permissions.py
     project_permissions.py
-    )
+    calculations.py
+    )
 
 python_install(${_py_files}
     DESTINATION lofar/sas/tmss/tmss/tmssapp/viewsets)
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/__init__.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/__init__.py
index 0f7980fabfd9022b1389bf2ac72a975f9d2fb1e8..6f585af0a1c4a3ffd3a879a663fcef1cf4840d32 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/__init__.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/__init__.py
@@ -1,4 +1,5 @@
 from .specification import *
 from .scheduling import *
 from .permissions import *
-from .project_permissions import *
\ No newline at end of file
+from .project_permissions import *
+from .calculations import *
\ No newline at end of file
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/calculations.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/calculations.py
new file mode 100644
index 0000000000000000000000000000000000000000..fd7eb3fbfeab476afe094fc8de92c3b0876b09b4
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/calculations.py
@@ -0,0 +1,13 @@
+from .. import models
+from .. import serializers
+from .lofar_viewset import LOFARViewSet
+
+
+#
+# Conversions ViewSets
+#
+
+class StationTimelineViewSet(LOFARViewSet):
+    queryset = models.StationTimeline.objects.all()
+    serializer_class = serializers.StationTimelineSerializer
+
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py
index 620742eaa77f9aedd8400e88f862121fcb2e2dbf..49ddf7a09713dd394c1265d8baf1dbcbcc29121a 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py
@@ -1081,3 +1081,8 @@ class TaskTypeViewSet(LOFARViewSet):
     queryset = models.TaskType.objects.all()
     serializer_class = serializers.TaskTypeSerializer
 
+
+class PriorityQueueTypeViewSet(LOFARViewSet):
+    queryset = models.PriorityQueueType.objects.all()
+    serializer_class = serializers.PriorityQueueTypeSerializer
+
diff --git a/SAS/TMSS/backend/src/tmss/urls.py b/SAS/TMSS/backend/src/tmss/urls.py
index afe222f05f2ef50547b85a34cd591755dbd77c40..e45c9db4013e4025570156045e609b22d30df240 100644
--- a/SAS/TMSS/backend/src/tmss/urls.py
+++ b/SAS/TMSS/backend/src/tmss/urls.py
@@ -126,6 +126,7 @@ router.register(r'period_category', viewsets.PeriodCategoryViewSet)
 router.register(r'project_category', viewsets.ProjectCategoryViewSet)
 router.register(r'quantity', viewsets.QuantityViewSet)
 router.register(r'task_type', viewsets.TaskTypeViewSet)
+router.register(r'priority_queue_type', viewsets.PriorityQueueTypeViewSet)
 
 # templates
 router.register(r'common_schema_template', viewsets.CommonSchemaTemplateViewSet)
@@ -226,6 +227,12 @@ router.register(r'sip_identifier', viewsets.SIPidentifierViewSet)
 router.register(r'project_role', viewsets.ProjectRoleViewSet)
 router.register(r'project_permission', viewsets.ProjectPermissionViewSet)
 
+
+# CONVERSIONS
+
+router.register(r'station_timeline', viewsets.StationTimelineViewSet)
+
+
 urlpatterns.extend(router.urls)
 
 frontend_urlpatterns = [
diff --git a/SAS/TMSS/backend/test/t_conversions.py b/SAS/TMSS/backend/test/t_conversions.py
index 1773168c7b1ded14c41aee27f0fddd6683d9f9f7..76a525df037543a961d94362171019a7d2d7297d 100755
--- a/SAS/TMSS/backend/test/t_conversions.py
+++ b/SAS/TMSS/backend/test/t_conversions.py
@@ -30,7 +30,6 @@ import json
 
 logger = logging.getLogger(__name__)
 logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
-from lofar.sas.tmss.tmss.tmssapp.conversions import local_sidereal_time_for_utc_and_station, local_sidereal_time_for_utc_and_longitude
 
 from lofar.common.test_utils import exit_with_skipped_code_if_skip_integration_tests
 exit_with_skipped_code_if_skip_integration_tests()
@@ -40,6 +39,10 @@ exit_with_skipped_code_if_skip_integration_tests()
 # (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module)
 from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import *
 
+# The next import should be done after the 'tmss_test_environment_unittest_setup' magic !!!
+from lofar.sas.tmss.tmss.tmssapp.conversions import local_sidereal_time_for_utc_and_station, local_sidereal_time_for_utc_and_longitude
+
+
 class SiderealTime(unittest.TestCase):
 
     def test_local_sidereal_time_for_utc_and_longitude_returns_correct_result(self):
diff --git a/SAS/TMSS/backend/test/t_tmssapp_specification_django_API.py b/SAS/TMSS/backend/test/t_tmssapp_specification_django_API.py
index 7ace3e3ad11b88a2c9f1e169c8b01b7dc8d5e57d..577932cd868df45bc7335df4a3c67f91ecbb56b3 100755
--- a/SAS/TMSS/backend/test/t_tmssapp_specification_django_API.py
+++ b/SAS/TMSS/backend/test/t_tmssapp_specification_django_API.py
@@ -922,6 +922,41 @@ class TaskRelationBlueprintTest(unittest.TestCase):
             models.TaskRelationBlueprint.objects.create(**test_data)
 
 
+
+
+class TestStationTimeLine(unittest.TestCase):
+    """
+    Actually this simple testcase should be in a separate module (t_tmssapp_calculations_django_API.py)
+    but to spare some overhead it simply 'piggybacks' on this module
+    """
+
+    def test_StationTimeline_raises_Error_on_duplicate_station_timeline(self):
+        """
+        Test if adding a duplicate station-timestamp combination leads to an Error and so data is not inserted
+        """
+        import datetime
+
+        test_data = {"station_name": "CS001",
+                     "timestamp": datetime.date(2021, 4, 1),
+                     "sunrise_start": datetime.datetime(year=2021, month=4, day=1, hour=6, minute=1, second=0),
+                     "sunrise_end": datetime.datetime(year=2021, month=4, day=1, hour=7, minute=2, second=0),
+                     "sunset_start": datetime.datetime(year=2021, month=4, day=1, hour=20, minute=31, second=0),
+                     "sunset_end": datetime.datetime(year=2021, month=4, day=1, hour=21, minute=33, second=0) }
+
+        models.StationTimeline.objects.create(**test_data)
+        with self.assertRaises(IntegrityError) as context:
+            models.StationTimeline.objects.create(**test_data)
+        self.assertIn('unique_station_time_line', str(context.exception))
+
+        self.assertEqual(len(models.StationTimeline.objects.filter(timestamp=datetime.date(2021, 4, 1))), 1)
+        self.assertEqual(len(models.StationTimeline.objects.all()), 1)
+        # Add a non-duplicate
+        test_data["station_name"] = "CS002"
+        models.StationTimeline.objects.create(**test_data)
+        self.assertEqual(len(models.StationTimeline.objects.filter(timestamp=datetime.date(2021, 4, 1))), 2)
+        self.assertEqual(len(models.StationTimeline.objects.all()), 2)
+
+
 if __name__ == "__main__":
     os.environ['TZ'] = 'UTC'
     unittest.main()
diff --git a/SAS/TMSS/backend/test/test_utils.py b/SAS/TMSS/backend/test/test_utils.py
index ad64349b5c195fb1d7b5381cc2a4404a535711ae..88c46e4780d91bb23d61fcc6f679c9773903b2d8 100644
--- a/SAS/TMSS/backend/test/test_utils.py
+++ b/SAS/TMSS/backend/test/test_utils.py
@@ -286,6 +286,7 @@ class TMSSTestEnvironment:
                  start_pipeline_control: bool=False, start_websocket: bool=False,
                  start_feedback_service: bool=False,
                  start_workflow_service: bool=False, enable_viewflow: bool=False,
+                 start_precalculations_service: bool=False,
                  ldap_dbcreds_id: str=None, db_dbcreds_id: str=None, client_dbcreds_id: str=None):
         self._exchange = exchange
         self._broker = broker
@@ -332,6 +333,9 @@ class TMSSTestEnvironment:
         self.workflow_service = None
         os.environ['TMSS_ENABLE_VIEWFLOW'] = str(bool(self.enable_viewflow))
 
+        self._start_precalculations_service = start_precalculations_service
+        self.precalculations_service = None
+
         # Check for correct Django version, should be at least 3.0
         if django.VERSION[0] < 3:
             print("\nWARNING: YOU ARE USING DJANGO VERSION '%s', WHICH WILL NOT SUPPORT ALL FEATURES IN TMSS!\n" %
@@ -430,6 +434,8 @@ class TMSSTestEnvironment:
             except Exception as e:
                 logger.exception(e)
 
+
+
         # wait for all services to be fully started in their background threads
         for thread in service_threads:
             thread.join()
@@ -447,6 +453,14 @@ class TMSSTestEnvironment:
 
         logger.info("started TMSSTestEnvironment ldap/database/django + services + schemas + data in %.1fs", (datetime.datetime.utcnow()-starttime).total_seconds())
 
+        # The next service does not have a buslistener; it is just a simple time scheduler and currently relies on a
+        # populated stations schema to retrieve all stations
+        if self._start_precalculations_service:
+            from lofar.sas.tmss.services.precalculations_service import create_service_job_for_sunrise_and_sunset_calculations
+            # For test purposes we can use a smaller range and a higher interval frequency
+            self.precalculations_service = \
+                create_service_job_for_sunrise_and_sunset_calculations(wait_time_seconds=60, nbr_days_calculate_ahead=3, nbr_days_before_today=1)
+            self.precalculations_service.start()
 
     def stop(self):
         if self.workflow_service is not None:
@@ -477,6 +491,10 @@ class TMSSTestEnvironment:
             self.ra_test_environment.stop()
             self.ra_test_environment = None
 
+        if self.precalculations_service is not None:
+            self.precalculations_service.stop()
+            self.precalculations_service = None
+
         self.django_server.stop()
         self.ldap_server.stop()
         self.database.destroy()
@@ -518,6 +536,7 @@ class TMSSTestEnvironment:
         from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
         return TMSSRESTTestDataCreator(self.django_server.url, (self.django_server.ldap_dbcreds.user, self.django_server.ldap_dbcreds.password))
 
+
 def main_test_database():
     """instantiate, run and destroy a test postgress django database"""
     os.environ['TZ'] = 'UTC'
@@ -550,6 +569,7 @@ def main_test_database():
         print("Press Ctrl-C to exit (and remove the test database automatically)")
         waitForInterrupt()
 
+
 def main_test_environment():
     """instantiate, run and destroy a full tmss test environment (postgress database, ldap server, django server)"""
     from optparse import OptionParser, OptionGroup
@@ -583,6 +603,7 @@ def main_test_environment():
     group.add_option('-V', '--viewflow_service', dest='viewflow_service', action='store_true', help='Enable the viewflow service. Implies --viewflow_app and --eventmessages')
     group.add_option('-w', '--websockets', dest='websockets', action='store_true', help='Enable json updates pushed via websockets')
     group.add_option('-f', '--feedbackservice', dest='feedbackservice', action='store_true', help='Enable feedbackservice to handle feedback from observations/pipelines which comes in via the (old qpid) otdb messagebus.')
+    group.add_option('-C', '--precalculations_service', dest='precalculations_service', action='store_true', help='Enable the PreCalculations service')
     group.add_option('--all', dest='all', action='store_true', help='Enable/Start all the services, upload schemas and testdata')
     group.add_option('--simulate', dest='simulate', action='store_true', help='Simulate a run of the first example scheduling_unit (implies --data and --eventmessages and --ra_test_environment)')
 
@@ -622,6 +643,7 @@ def main_test_environment():
                              start_feedback_service=options.feedbackservice or options.all,
                              enable_viewflow=options.viewflow_app or options.viewflow_service or options.all,
                              start_workflow_service=options.viewflow_service or options.all,
+                             start_precalculations_service=options.precalculations_service or options.all,
                              ldap_dbcreds_id=options.LDAP_ID, db_dbcreds_id=options.DB_ID, client_dbcreds_id=options.REST_CLIENT_ID) as tmss_test_env:
 
             # print some nice info for the user to use the test servers...
@@ -958,5 +980,7 @@ def main_scheduling_unit_blueprint_simulator():
             pass
 
 
+
+
 if __name__ == '__main__':
     main_test_environment()
diff --git a/SAS/TMSS/client/lib/tmss_http_rest_client.py b/SAS/TMSS/client/lib/tmss_http_rest_client.py
index bcd7309b82b976177ce4f527435e2bea60b9cb09..8ca49cf4cbd16802330bcf504e21156298aff771 100644
--- a/SAS/TMSS/client/lib/tmss_http_rest_client.py
+++ b/SAS/TMSS/client/lib/tmss_http_rest_client.py
@@ -371,7 +371,7 @@ class TMSSsession(object):
         if response.status_code == 201:
             logger.info("created new template with name=%s: %s", name, json.loads(response.text)['url'])
         else:
-            raise Exception("Could not POST template with name=%s: %s" (name,response.text))
+            raise Exception("Could not POST template with name=%s: %s" % (name,response.text))
 
     def process_feedback_and_set_to_finished_if_complete(self, subtask_id: int, feedback: str) -> {}:
         '''Process the feedback_doc (which can be for one or more or all dataproducts), store/append it in the subtask's raw_feedback, and process it into json feedback per dataproduct. Sets the subtask to finished if all dataproducts are processed, which may require multiple postings of partial feedback docs.