diff --git a/SAS/TMSS/backend/services/CMakeLists.txt b/SAS/TMSS/backend/services/CMakeLists.txt
index ee220bcd39d6774fb61053b7b7a58d956fefd6b8..88c9d1aa4cff39a8eb60a8e85ece8c3636208361 100644
--- a/SAS/TMSS/backend/services/CMakeLists.txt
+++ b/SAS/TMSS/backend/services/CMakeLists.txt
@@ -8,5 +8,5 @@ lofar_add_package(TMSSWorkflowService workflow_service)
 lofar_add_package(TMSSLTAAdapter tmss_lta_adapter)
 lofar_add_package(TMSSSlackWebhookService slackwebhook)
 lofar_add_package(TMSSPreCalculationsService precalculations_service)
-
+lofar_add_package(TMSSSipGenerationService sip_generation)
 
diff --git a/SAS/TMSS/backend/services/sip_generation/CMakeLists.txt b/SAS/TMSS/backend/services/sip_generation/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..b6d77ab43ad0c83b806f805980a61fa14ecf928a
--- /dev/null
+++ b/SAS/TMSS/backend/services/sip_generation/CMakeLists.txt
@@ -0,0 +1,10 @@
+lofar_package(TMSSSipGenerationService 0.1 DEPENDS TMSSClient PyCommon pyparameterset PyMessaging)
+
+lofar_find_package(PythonInterp 3.4 REQUIRED)
+
+IF(NOT SKIP_TMSS_BUILD)
+    add_subdirectory(lib)
+    add_subdirectory(test)
+ENDIF(NOT SKIP_TMSS_BUILD)
+
+add_subdirectory(bin)
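+# note: bin/ is added outside the SKIP_TMSS_BUILD guard, so the startup script and supervisord config below are installed even when the TMSS build itself is skipped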
diff --git a/SAS/TMSS/backend/services/sip_generation/bin/CMakeLists.txt b/SAS/TMSS/backend/services/sip_generation/bin/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..5e36034574d8d80101fe6796cbb3e8d245c07a0f
--- /dev/null
+++ b/SAS/TMSS/backend/services/sip_generation/bin/CMakeLists.txt
@@ -0,0 +1,4 @@
+lofar_add_bin_scripts(tmss_sip_generation_service)
+
+# supervisord config files
+lofar_add_sysconf_files(tmss_sip_generation_service.ini DESTINATION supervisord.d)
diff --git a/SAS/TMSS/backend/services/sip_generation/bin/tmss_sip_generation_service b/SAS/TMSS/backend/services/sip_generation/bin/tmss_sip_generation_service
new file mode 100755
index 0000000000000000000000000000000000000000..bf0a492cba21e3c11c930c873864b7c3c422d8e9
--- /dev/null
+++ b/SAS/TMSS/backend/services/sip_generation/bin/tmss_sip_generation_service
@@ -0,0 +1,24 @@
+#!/usr/bin/python3
+
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+
+from lofar.sas.tmss.services.sip_generation import main
+
+if __name__ == "__main__":
+    main()
diff --git a/SAS/TMSS/backend/services/sip_generation/bin/tmss_sip_generation_service.ini b/SAS/TMSS/backend/services/sip_generation/bin/tmss_sip_generation_service.ini
new file mode 100644
index 0000000000000000000000000000000000000000..33fb5faab83fb34997905700cd7f899351f99267
--- /dev/null
+++ b/SAS/TMSS/backend/services/sip_generation/bin/tmss_sip_generation_service.ini
@@ -0,0 +1,9 @@
+[program:tmss_sip_generation_service]
+command=docker run --rm -u 7149:7149 -v /opt/lofar/var/log:/opt/lofar/var/log -v /tmp/tmp -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro -v /localhome/lofarsys:/localhome/lofarsys --env-file /localhome/lofarsys/.lofar/.lofar_env -e HOME=/localhome/lofarsys -e USER=lofarsys nexus.cep4.control.lofar:18080/tmss_django:latest /bin/bash -c 'source ~/.lofar/.lofar_env;source $LOFARROOT/lofarinit.sh;exec tmss_sip_generation_service'
+user=lofarsys
+stopsignal=INT ; KeyboardInterrupt
+stopasgroup=true ; bash does not propagate signals
+stdout_logfile=%(program_name)s.log
+redirect_stderr=true
+stderr_logfile=NONE
+stdout_logfile_maxbytes=0
diff --git a/SAS/TMSS/backend/services/sip_generation/lib/CMakeLists.txt b/SAS/TMSS/backend/services/sip_generation/lib/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..f74123066248fd6fd8d0ffb7886a4b9a3ea8b205
--- /dev/null
+++ b/SAS/TMSS/backend/services/sip_generation/lib/CMakeLists.txt
@@ -0,0 +1,10 @@
+lofar_find_package(PythonInterp 3.4 REQUIRED)
+include(PythonInstall)
+
+set(_py_files
+    sip_generation.py
+    )
+
+python_install(${_py_files}
+    DESTINATION lofar/sas/tmss/services)
+
diff --git a/SAS/TMSS/backend/services/sip_generation/lib/sip_generation.py b/SAS/TMSS/backend/services/sip_generation/lib/sip_generation.py
new file mode 100644
index 0000000000000000000000000000000000000000..ddb1bb093c83864f68c1f53f516a671a339238ea
--- /dev/null
+++ b/SAS/TMSS/backend/services/sip_generation/lib/sip_generation.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python3
+
+# sip_generation.py
+#
+# Copyright (C) 2015
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it
+# and/or modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be
+# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+#
+# $Id: sip_generation.py 1580 2015-09-30 14:18:57Z loose $
+
+"""
+The SIP generation service generates a SIP (Submission Information Package) for each output dataproduct of a
+TMSS subtask once that subtask has finished.
+It listens on the LOFAR notification message bus for TMSS subtask state changes; when a subtask finishes, it
+loops over its output dataproducts and requests a SIP for each one (a REST action), one at a time, so the
+generated documents end up in a lookup table.
+This avoids peaks of load on the system during ingest, where many SIPs are requested in bulk.
+"""
+
+import os
+from optparse import OptionParser
+import logging
+logger = logging.getLogger(__name__)
+
+from lofar.sas.tmss.client.tmssbuslistener import *
+from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession
+
+class TMSSSipGenerationEventMessageHandler(TMSSEventMessageHandler):
+    '''
+    Handles TMSS subtask status-change events: when a subtask reaches the 'finished' state, it requests the SIP
+    for each of the subtask's output dataproducts via the TMSS REST api, so the SIPs get generated and stored.
+    '''
+    def __init__(self, tmss_client_credentials_id: str=None):
+        super().__init__()
+        self.tmss_client = TMSSsession.create_from_dbcreds_for_ldap(tmss_client_credentials_id)
+
+    def start_handling(self):
+        self.tmss_client.open()
+        super().start_handling()
+
+    def stop_handling(self):
+        super().stop_handling()
+        self.tmss_client.close()
+
+    def onSubTaskStatusChanged(self, id: int, status: str):
+        super().onSubTaskStatusChanged(id, status)
+
+        if status == "finished":
+            dataproducts = self.tmss_client.get_subtask_output_dataproducts(id)
+            dataproduct_ids = sorted([d['id'] for d in dataproducts])
+
+            logger.info("subtask %s finished. trying to generate SIPs for its dataproducts: %s",
+                        id, ', '.join(str(dp_id) for dp_id in dataproduct_ids) or 'None')
+
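+            # requesting each SIP via the REST api triggers generation and storage on the server side
+            # (see get_or_create_sip_xml_for_dataproduct in adapters/sip.py further down in this change),
+            # so that the bulk SIP requests during ingest can later be served from the stored documents.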
+            for dataproduct_id in dataproduct_ids:
+                try:
+                    self.tmss_client.get_dataproduct_SIP(dataproduct_id)
+                except Exception as e:
+                    logger.error(f'Error when generating SIP for dataproduct id={dataproduct_id}: {e}')
+
+
+def create_sip_generation_service(exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER, tmss_client_credentials_id: str=None):
+    return TMSSBusListener(handler_type=TMSSSipGenerationEventMessageHandler,
+                           handler_kwargs={'tmss_client_credentials_id': tmss_client_credentials_id},
+                           exchange=exchange,
+                           broker=broker)
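+
+# a minimal usage sketch (this mirrors what main() below does with its parsed command line options):
+#
+#   with create_sip_generation_service(exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER,
+#                                      tmss_client_credentials_id='TMSSClient'):
+#       waitForInterrupt()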
+
+def main():
+    # make sure we run in UTC timezone
+    os.environ['TZ'] = 'UTC'
+
+    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+
+    # Check the invocation arguments
+    parser = OptionParser('%prog [options]', description='run the tmss_sip_generation_service which automatically triggers generation of SIPs for dataproducts of finished subtasks (so that load is reduced during ingest)')
+    parser.add_option('-q', '--broker', dest='broker', type='string', default=DEFAULT_BROKER, help='Address of the messaging broker, default: %default')
+    parser.add_option('--exchange', dest='exchange', type='string', default=DEFAULT_BUSNAME, help='Name of the exchange on the messaging broker, default: %default')
+    parser.add_option('-t', '--tmss_client_credentials_id', dest='tmss_client_credentials_id', type='string',
+                      default=os.environ.get("TMSS_CLIENT_DBCREDENTIALS", "TMSSClient"),
+                      help='the credentials id for the file in ~/.lofar/dbcredentials which holds the TMSS http REST api url and credentials, default: %default')
+    (options, args) = parser.parse_args()
+
+    with create_sip_generation_service(options.exchange, options.broker, options.tmss_client_credentials_id):
+        waitForInterrupt()
+
+if __name__ == '__main__':
+    main()
diff --git a/SAS/TMSS/backend/services/sip_generation/test/CMakeLists.txt b/SAS/TMSS/backend/services/sip_generation/test/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..476e284b6dc219bce4ff96102cd03ff67089642e
--- /dev/null
+++ b/SAS/TMSS/backend/services/sip_generation/test/CMakeLists.txt
@@ -0,0 +1,7 @@
+# $Id: CMakeLists.txt 32679 2022-01-06 15:00:00Z jkuensem $
+
+if(BUILD_TESTING)
+    include(LofarCTest)
+
+    lofar_add_test(t_sip_generation_service)
+endif()
diff --git a/SAS/TMSS/backend/services/sip_generation/test/t_sip_generation_service.py b/SAS/TMSS/backend/services/sip_generation/test/t_sip_generation_service.py
new file mode 100755
index 0000000000000000000000000000000000000000..3aab47018a17268a2854513e1c23f1b28be020fe
--- /dev/null
+++ b/SAS/TMSS/backend/services/sip_generation/test/t_sip_generation_service.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+import unittest
+import uuid
+
+import logging
+logger = logging.getLogger('lofar.'+__name__)
+
+from lofar.common.test_utils import skip_integration_tests
+if skip_integration_tests():
+    exit(3)
+
+from lofar.messaging.messagebus import TemporaryExchange, BusListenerJanitor
+from lofar.sas.tmss.services.sip_generation import create_sip_generation_service
+
+import time
+
+class TestSipGenerationService(unittest.TestCase):
+    '''
+    Tests for the SipGenerationService
+    '''
+
+    # feedback doc template (the two %s placeholders are filled in with the subband number and beamlet index)
+    feedback_doc = """{"percentage_written": 0, 
+                       "frequency": {"subbands": [%s], 
+                                     "beamlet_indices": [%s], 
+                                     "central_frequencies": [102734375.0], 
+                                     "channel_width": 3051.757812, 
+                                     "channels_per_subband": 64}, 
+                       "time": {"start_time": "2021-01-29T12:39:00Z", 
+                                "duration": 0.0, 
+                                "sample_width": 1.006633}, 
+                       "antennas": {"set": "HBA_DUAL", 
+                                    "fields": [{"station": "CS001", 
+                                                "field": "HBA",
+                                                "type": "HBA"}, 
+                                               {"station": "CS001", 
+                                                "field": "HBA1",
+                                                "type": "HBA"}]}, 
+                       "target": {"pointing": {"direction_type": "J2000", 
+                                               "angle1": 0.1, 
+                                               "angle2": 0.2, 
+                                               "target": "my_source"}, 
+                                  "coherent": true}, 
+                       "samples": {"polarisations": ["XX", "XY", "YX", "YY"], 
+                                   "type": "float", 
+                                   "bits": 32, 
+                                   "writer": "lofarstman", 
+                                   "writer_version": "3", 
+                                   "complex": true}, 
+                       "$schema": "http://127.0.0.1:8000/api/schemas/dataproductfeedbacktemplate/feedback/1#", 
+                       "files": []
+                       }"""
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.TEST_UUID = uuid.uuid1()
+
+        cls.tmp_exchange = TemporaryExchange("%s_%s" % (cls.__name__, cls.TEST_UUID))
+        cls.tmp_exchange.open()
+
+        # override DEFAULT_BUSNAME
+        import lofar
+        lofar.messaging.config.DEFAULT_BUSNAME = cls.tmp_exchange.address
+
+        # import here, and not at top of module, because DEFAULT_BUSNAME needs to be set before importing
+        from lofar.sas.tmss.test.test_environment import TMSSTestEnvironment
+
+        cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address, populate_schemas=True, start_sip_generation_service=True, start_postgres_listener=True)
+        cls.tmss_test_env.start()
+
+        from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
+        cls.test_data_creator = TMSSRESTTestDataCreator(cls.tmss_test_env.django_server.url,
+                                                        (cls.tmss_test_env.ldap_server.dbcreds.user,
+                                                         cls.tmss_test_env.ldap_server.dbcreds.password))
+
+    @classmethod
+    def tearDownClass(cls) -> None:
+        cls.tmss_test_env.stop()
+        cls.tmp_exchange.close()
+
+    def test_sip_generation_service_generates_sip_when_subtask_finished(self):
+        # create and start the service (the object under test)
+        service = create_sip_generation_service(exchange=self.tmp_exchange.address, tmss_client_credentials_id=self.tmss_test_env.client_credentials.dbcreds_id)
+        with BusListenerJanitor(service):
+            with self.tmss_test_env.create_tmss_client() as tmss_client:
+                # create a subtask with some output dataproducts
+                dataproduct_feedback_templates = tmss_client.get_path_as_json_object('dataproduct_feedback_template')
+                empty_dataproduct_feedback_template = next(x for x in dataproduct_feedback_templates if x['name']=='empty')
+
+                dataproduct_specifications_templates = tmss_client.get_path_as_json_object('dataproduct_specifications_template')
+                visibilities_specifications_template = next(x for x in dataproduct_specifications_templates if x['name']=='visibilities')
+
+                subtask_templates = tmss_client.get_path_as_json_object('subtask_template')
+                obs_subtask_template = next(x for x in subtask_templates if x['name']=='observation control')
+
+                subtask = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.Subtask(specifications_template_url=obs_subtask_template['url']), '/subtask/')
+                subtask_id = subtask['id']
+                subtask_output = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/')
+                NUM_DATAPRODUCTS = 4
+                for i in range(NUM_DATAPRODUCTS):
+                    sap_template_url = tmss_client.get_path_as_json_object('sap_template/1')['url']
+                    sap_url = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.SAP(specifications_template_url=sap_template_url), '/sap/')['url']
+                    self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.Dataproduct(subtask_output_url=subtask_output['url'],
+                                                                                                                        filename="L%d_SAP000_SB%03d_uv.MS" % (subtask_id, i),
+                                                                                                                        specifications_template_url=visibilities_specifications_template['url'],
+                                                                                                                        dataproduct_feedback_template_url=empty_dataproduct_feedback_template['url'],
+                                                                                                                        dataproduct_feedback_doc=self.feedback_doc % (i,i),
+                                                                                                                        sap_url=sap_url),
+                                                                                                                        '/dataproduct/')
+
+                # check that there are initially no cached SIPs for the dataproducts
+                dataproducts = tmss_client.get_subtask_output_dataproducts(subtask_id=subtask_id)
+                self.assertEqual(NUM_DATAPRODUCTS, len(dataproducts))
+                SIPs = tmss_client.get_path_as_json_object('sip')
+                num_SIPs = len(SIPs)
+                for dataproduct in dataproducts:
+                    self.assertNotIn(dataproduct['filename'], str(SIPs))
+
+                # set subtask state to finished to trigger service
+                from lofar.sas.tmss.test.test_utils import set_subtask_state_following_allowed_transitions, Subtask
+                set_subtask_state_following_allowed_transitions(Subtask.objects.get(id=subtask_id), 'finished')
+
+                # wait for service to trigger generation of SIPs
+                for i in range(10):
+                    SIPs = tmss_client.get_path_as_json_object('sip')
+                    if len(SIPs) == num_SIPs + NUM_DATAPRODUCTS:
+                        break
+                    time.sleep(5)
+
+                # check there now is a SIP for each dataproduct in the table
+                self.assertEqual(len(SIPs), num_SIPs + NUM_DATAPRODUCTS)
+                for dataproduct in dataproducts:
+                    self.assertIn(dataproduct['filename'], str(SIPs))
+
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+
+if __name__ == '__main__':
+    #run the unit tests
+    unittest.main()
diff --git a/SAS/TMSS/backend/services/sip_generation/test/t_sip_generation_service.run b/SAS/TMSS/backend/services/sip_generation/test/t_sip_generation_service.run
new file mode 100755
index 0000000000000000000000000000000000000000..06d799539c80e8f0467354bad7b00c504e45b234
--- /dev/null
+++ b/SAS/TMSS/backend/services/sip_generation/test/t_sip_generation_service.run
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+# Run the unit test
+source python-coverage.sh
+python_coverage_test "*tmss*" t_sip_generation_service.py
+
diff --git a/SAS/TMSS/backend/services/sip_generation/test/t_sip_generation_service.sh b/SAS/TMSS/backend/services/sip_generation/test/t_sip_generation_service.sh
new file mode 100755
index 0000000000000000000000000000000000000000..97f5e765549d1b5670af320428dc93ace9b4db22
--- /dev/null
+++ b/SAS/TMSS/backend/services/sip_generation/test/t_sip_generation_service.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+./runctest.sh t_sip_generation_service
\ No newline at end of file
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py
index f78897af448bf150b1c4e5dcb4fe92766c4191f5..71ce09b999440a16961954a9cd129f021e6ad934 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py
@@ -1,5 +1,5 @@
 from lofar.sas.tmss.tmss.exceptions import *
-from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Dataproduct, SubtaskType, Subtask, SubtaskOutput, SubtaskState, SIPidentifier, HashAlgorithm
+from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Dataproduct, SubtaskType, Subtask, SubtaskOutput, SubtaskState, SIPidentifier, HashAlgorithm, SIP
 from lofar.sas.tmss.tmss.tmssapp.models.specification import Datatype, Dataformat
 from lofar.lta.sip import siplib, ltasip, validator, constants
 from lofar.common.json_utils import add_defaults_to_json_object_for_schema
@@ -768,3 +768,9 @@ def generate_sip_for_dataproduct(dataproduct):
     validator.check_consistency(sip)
     return sip
 
+
+def get_or_create_sip_xml_for_dataproduct(dataproduct):
+    '''look up the sip document for the provided dataproduct (generate it first if it does not yet exist)'''
+    if not SIP.objects.filter(dataproduct=dataproduct).exists():
+        SIP.objects.create(dataproduct=dataproduct, sip=generate_sip_for_dataproduct(dataproduct).get_prettyxml())
+    return SIP.objects.get(dataproduct=dataproduct).sip
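+
+# a minimal sketch of the caching behaviour above (assuming 'dp' is an existing Dataproduct instance):
+#
+#   xml_1 = get_or_create_sip_xml_for_dataproduct(dp)  # first call: generates the SIP and stores it in the SIP table
+#   xml_2 = get_or_create_sip_xml_for_dataproduct(dp)  # second call: returns the stored document, no re-generation
+#   assert xml_1 == xml_2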
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0018_sip.py b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0018_sip.py
new file mode 100644
index 0000000000000000000000000000000000000000..f3ef4ce5408c75595e2de32e36aa301ddf91c283
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0018_sip.py
@@ -0,0 +1,22 @@
+# Generated by Django 3.0.9 on 2022-01-07 16:19
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('tmssapp', '0017_task_queud_status'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='SIP',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('sip', models.CharField(help_text='The SIP in XML form as text', max_length=1048576, null=True)),
+                ('dataproduct', models.OneToOneField(help_text='The dataproduct that this SIP describes.', on_delete=django.db.models.deletion.PROTECT, related_name='sip', to='tmssapp.Dataproduct')),
+            ],
+        ),
+    ]
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py
index 3f7b491ced8f462f2b2bd58a3e955b213f1d3048..f5ea3baaca0b4e0498b8b0e7d2c16259d9066821 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py
@@ -135,7 +135,12 @@ class SIPidentifier(Model):
         if model._state.adding:
             model.global_parset_identifier = SIPidentifier.objects.create(source="TMSS")
 
-
+class SIP(Model):
+    '''A SIP (Submission Information Package) is an XML document that contains provenance info of a dataproduct
+    and is required to ingest data into the LTA. While these documents can be generated on-the-fly, we keep them
+    in a table so that we can (pre-)generate them independently of the ingest itself.'''
+    dataproduct = OneToOneField('Dataproduct', related_name='sip', on_delete=PROTECT, help_text='The dataproduct that this SIP describes.')
+    sip = CharField(null=True, max_length=1048576, help_text='The SIP in XML form as text')
 
 #
 # Instance Objects
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py
index 68166c43100b41a1476c7db59d33f07364f86113..209b5ce5c62762ffc81cee6867db056c9173a701 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py
@@ -198,3 +198,8 @@ class SIPidentifierSerializer(serializers.HyperlinkedModelSerializer):
         model = models.SIPidentifier
         fields = ['unique_identifier', 'source', 'url']
 
+
+class SIPSerializer(serializers.HyperlinkedModelSerializer):
+    class Meta:
+        model = models.SIP
+        fields = '__all__'
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py
index 3a86ae29cea2dcf9e15979bd0ed8b16431ac208f..30147d7714f80a3bf021eac06c9ed884c5b3665a 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py
@@ -449,15 +449,15 @@ class DataproductViewSet(LOFARViewSet):
                          operation_description="Get the Submission Information Package (SIP) for this dataproduct")
     @action(methods=['get'], detail=True, url_name="sip")
     def sip(self, request, pk=None):
-        from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduct
+        from lofar.sas.tmss.tmss.tmssapp.adapters.sip import get_or_create_sip_xml_for_dataproduct
         from lofar.sas.tmss.tmss.tmssapp import views
         from django.urls import reverse
 
         # get the dataproduct...
         dataproduct = get_object_or_404(models.Dataproduct, pk=pk)
 
-        # generate the sip
-        sip = generate_sip_for_dataproduct(dataproduct).get_prettyxml()
+        # get a sip document for the dataproduct
+        sip = get_or_create_sip_xml_for_dataproduct(dataproduct)
 
         # construct the schema location for the sip
         lta_sip_xsd_path = reverse(views.get_lta_sip_xsd)
@@ -574,3 +574,8 @@ class SAPTemplateViewSet(AbstractTemplateViewSet):
 class SIPidentifierViewSet(LOFARViewSet):
     queryset = models.SIPidentifier.objects.all()
     serializer_class = serializers.SIPidentifierSerializer
+
+
+class SIPViewSet(LOFARViewSet):
+    queryset = models.SIP.objects.all()
+    serializer_class = serializers.SIPSerializer
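+
+# note: SIPViewSet is registered in urls.py under r'sip', so the stored SIP documents are exposed at /api/sip/;
+# t_sip_generation_service.py polls that endpoint to check that a SIP was generated for each dataproduct.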
diff --git a/SAS/TMSS/backend/src/tmss/urls.py b/SAS/TMSS/backend/src/tmss/urls.py
index 14133a38b59842b7fc9c341012931d2eda666a2f..fc7284b39a77403580e68be2f7b9e6796dc21e87 100644
--- a/SAS/TMSS/backend/src/tmss/urls.py
+++ b/SAS/TMSS/backend/src/tmss/urls.py
@@ -226,6 +226,7 @@ router.register(r'subtask_state_log', viewsets.SubtaskStateLogViewSet)
 router.register(r'user', viewsets.UserViewSet)
 router.register(r'sap', viewsets.SAPViewSet)
 router.register(r'sip_identifier', viewsets.SIPidentifierViewSet)
+router.register(r'sip', viewsets.SIPViewSet)
 
 
 # PERMISSIONS
diff --git a/SAS/TMSS/backend/test/t_adapter.py b/SAS/TMSS/backend/test/t_adapter.py
index 772c44c38020f7bca7480fc2a9975b7522744e5c..71cc9962a1deb79fc9f603e4ac481c5cc5539b75 100755
--- a/SAS/TMSS/backend/test/t_adapter.py
+++ b/SAS/TMSS/backend/test/t_adapter.py
@@ -66,7 +66,7 @@ from lofar.sas.tmss.tmss.workflowapp.models.schedulingunitflow import Scheduling
 from lofar.sas.tmss.tmss.exceptions import SubtaskInvalidStateException
 from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset, convert_to_parset_dict, _order_beamformer_dataproducts
 from lofar.common.json_utils import get_default_json_object_for_schema, add_defaults_to_json_object_for_schema, resolved_remote_refs
-from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduct, create_sip_representation_for_dataproduct
+from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduct, get_or_create_sip_xml_for_dataproduct, create_sip_representation_for_dataproduct
 from lofar.lta.sip import constants
 from lofar.sas.tmss.test.test_utils import set_subtask_state_following_allowed_transitions
 from lofar.sas.tmss.tmss.tmssapp.tasks import update_task_graph_from_specifications_doc, create_scheduling_unit_blueprint_and_tasks_and_subtasks_from_scheduling_unit_draft
@@ -768,8 +768,9 @@ class SIPadapterTest(unittest.TestCase):
         # create their SIPs (separate loop since we needed to clear the cache in between):
         for i in range(10):
             dataproduct = main_dataproducts[i]
-            sip = generate_sip_for_dataproduct(dataproduct)
-            prettyxml = sip.get_prettyxml()
+            self.assertEqual(models.SIP.objects.filter(dataproduct=dataproduct).count(), 0)
+            prettyxml = get_or_create_sip_xml_for_dataproduct(dataproduct)
+            self.assertEqual(models.SIP.objects.filter(dataproduct=dataproduct).count(), 1)
             self.assertIn(str('<fileName>my_related_dataproduct_42'), prettyxml)
             self.assertIn(str(f'<fileName>my_main_dataproduct_{i}'), prettyxml)
             self.assertNotIn(str(f'<fileName>my_main_dataproduct_{i+1}'), prettyxml)
diff --git a/SAS/TMSS/backend/test/test_environment.py b/SAS/TMSS/backend/test/test_environment.py
index a85b76dd3c9ce8097cf19861f3e016c89d5a7c5c..e0c714019f484654018290611b122556ccd30a60 100644
--- a/SAS/TMSS/backend/test/test_environment.py
+++ b/SAS/TMSS/backend/test/test_environment.py
@@ -235,6 +235,7 @@ class TMSSTestEnvironment:
                  start_feedback_service: bool=False,
                  start_workflow_service: bool=False, enable_viewflow: bool=False,
                  start_precalculations_service: bool=False,
+                 start_sip_generation_service: bool=False,
                  ldap_dbcreds_id: str=None, db_dbcreds_id: str=None, client_dbcreds_id: str=None):
         self._exchange = exchange
         self._broker = broker
@@ -285,6 +286,9 @@ class TMSSTestEnvironment:
         self._start_precalculations_service = start_precalculations_service
         self.precalculations_service = None
 
+        self._start_sip_generation_service = start_sip_generation_service
+        self.sip_generation_service = None
+
         # Check for correct Django version, should be at least 3.0
         if django.VERSION[0] < 3:
             print("\nWARNING: YOU ARE USING DJANGO VERSION '%s', WHICH WILL NOT SUPPORT ALL FEATURES IN TMSS!\n" %
@@ -384,7 +388,11 @@ class TMSSTestEnvironment:
             except Exception as e:
                 logger.exception(e)
 
-
+        if self._start_sip_generation_service:
+            from lofar.sas.tmss.services.sip_generation import create_sip_generation_service
+            self.sip_generation_service = create_sip_generation_service(exchange=self._exchange, broker=self._broker, tmss_client_credentials_id=self.client_credentials.dbcreds_id)
+            service_threads.append(threading.Thread(target=self.sip_generation_service.start_listening))
+            service_threads[-1].start()
 
         # wait for all services to be fully started in their background threads
         for thread in service_threads:
@@ -445,6 +453,10 @@ class TMSSTestEnvironment:
             self.precalculations_service.stop()
             self.precalculations_service = None
 
+        if self.sip_generation_service is not None:
+            BusListenerJanitor.stop_listening_and_delete_queue(self.sip_generation_service)
+            self.sip_generation_service = None
+
         self.django_server.stop()
         self.ldap_server.stop()
         self.database.destroy()
@@ -555,6 +567,7 @@ def main_test_environment():
     group.add_option('-w', '--websockets', dest='websockets', action='store_true', help='Enable json updates pushed via websockets')
     group.add_option('-f', '--feedbackservice', dest='feedbackservice', action='store_true', help='Enable feedbackservice to handle feedback from observations/pipelines which comes in via the (old qpid) otdb messagebus.')
     group.add_option('-C', '--precalculations_service', dest='precalculations_service', action='store_true', help='Enable the PreCalculations service')
+    group.add_option('-G', '--sip_generation_service', dest='sip_generation_service', action='store_true', help='Enable the SIP generation service')
     group.add_option('--all', dest='all', action='store_true', help='Enable/Start all the services, upload schemas and testdata')
     group.add_option('--simulate', dest='simulate', action='store_true', help='Simulate a run of the first example scheduling_unit (implies --data and --eventmessages and --ra_test_environment)')
 
@@ -595,6 +608,7 @@ def main_test_environment():
                              enable_viewflow=options.viewflow_app or options.viewflow_service or options.all,
                              start_workflow_service=options.viewflow_service or options.all,
                              start_precalculations_service=options.precalculations_service or options.all,
+                             start_sip_generation_service=options.sip_generation_service or options.all,
                              ldap_dbcreds_id=options.LDAP_ID, db_dbcreds_id=options.DB_ID, client_dbcreds_id=options.REST_CLIENT_ID) as tmss_test_env:
 
             # print some nice info for the user to use the test servers...
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js
index a13e0ad5dc71fe1c36582ec497d295de69039da9..185d69bfec18aee658ee009272fb0a8a54eb887c 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js
@@ -41,7 +41,28 @@ class ViewSchedulingUnit extends Component {
     SU_ACTIVE_STATUSES = ['started', 'observing', 'observed', 'processing', 'processed', 'ingesting'];
     SU_END_STATUSES = ['finished', 'error', 'cancelled'];
     TASK_END_STATUSES = ['finished', 'error', 'cancelled'];
-
+    SU_BLUEPRINT_EXPAND= 'draft.scheduling_constraints_template,draft,draft.scheduling_set,task_blueprints.specifications_template,task_blueprints,task_blueprints.subtasks,draft.observation_strategy_template'
+    SU_BLUEPRINT_FIELDS= ['id','url','created_at','status','tags','output_pinned','duration','name','on_sky_start_time','on_sky_stop_time','scheduling_constraints_doc','description',
+                            'updated_at','draft.url','draft.id','draft.name','draft.scheduling_set.url','draft.scheduling_set.name','draft.scheduling_set.project_id','task_blueprints.status',
+                            'draft.priority_rank','draft.priority_queue_value','task_blueprints.subtasks.id','task_blueprints.subtasks.primary','task_blueprints.subtasks.specifications_template_id',
+                            'task_blueprints.task_type','task_blueprints.id','task_blueprints.subtasks_ids','task_blueprints.name','task_blueprints.description',
+                            'task_blueprints.short_description','task_blueprints.on_sky_start_time','task_blueprints.on_sky_stop_time','task_blueprints.process_start_time',
+                            'task_blueprints.process_stop_time','task_blueprints.duration','task_blueprints.relative_start_time','task_blueprints.relative_stop_time','task_blueprints.tags', 'task_blueprints.url',
+                            'task_blueprints.do_cancel','task_blueprints.obsolete_since','task_blueprints.created_at','task_blueprints.updated_at','task_blueprints.specifications_template.id', 'task_blueprints.draft_id',
+                            'task_blueprints.specifications_template.type_value','draft.scheduling_constraints_template.schema','draft.scheduling_constraints_template.url','task_blueprints.produced_by_ids','task_blueprints.specifications_doc',
+                            'draft.observation_strategy_template_id','draft.observation_strategy_template.id','draft.observation_strategy_template.template']
+    SU_DRAFT_EXPAND= 'scheduling_constraints_template,scheduling_set,task_drafts.specifications_template,task_drafts,observation_strategy_template,scheduling_unit_blueprints'
+    SU_DRAFT_FIELDS=['id','url','created_at','status','tags','output_pinned','duration','name','on_sky_start_time','on_sky_stop_time','priority_rank','priority_queue_value','description',
+                        'scheduling_constraints_doc','scheduling_constraints_template.schema','scheduling_constraints_template.url','observation_strategy_template_id','task_drafts.url','scheduling_constraints_template_id',
+                        'updated_at','scheduling_set.url','scheduling_set.name','scheduling_set.project_id','task_drafts.status','task_drafts.task_type','task_drafts.id','task_drafts.subtasks_ids',
+                        'task_drafts.name','task_drafts.description','task_drafts.short_description','task_drafts.on_sky_start_time','task_drafts.on_sky_stop_time','task_drafts.process_start_time',
+                        'task_drafts.process_stop_time','task_drafts.duration','task_drafts.relative_start_time','task_drafts.relative_stop_time','task_drafts.tags','task_drafts.do_cancel',
+                        'task_drafts.obsolete_since','task_drafts.created_at','task_drafts.updated_at','task_drafts.specifications_template.id','task_drafts.specifications_template.type_value',
+                        'task_drafts.task_blueprints_ids','task_drafts.specifications_doc','task_drafts.produced_by_ids','scheduling_unit_blueprints_ids', 
+                        'observation_strategy_template.id','observation_strategy_template.template',
+                        'scheduling_unit_blueprints.id','scheduling_unit_blueprints.name']
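+    // These EXPAND/FIELDS lists limit the scheduling unit REST response to what this view actually renders;
+    // they are passed to ScheduleService.getExpandedSchedulingUnit() in getSchedulingUnitDetails() below.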
+                    
+                        
     constructor(props) {
         super(props);
         this.setToggleBySorting();
@@ -339,23 +360,22 @@ class ViewSchedulingUnit extends Component {
     };
 
     getSchedulingUnitDetails(schedule_type, schedule_id) {
-        ScheduleService.getSchedulingUnitExtended(schedule_type, schedule_id)
+        let expand = schedule_type.toLowerCase() === 'draft' ? this.SU_DRAFT_EXPAND: this.SU_BLUEPRINT_EXPAND;
+        let fields = schedule_type.toLowerCase() === 'draft' ? this.SU_DRAFT_FIELDS: this.SU_BLUEPRINT_FIELDS;
+        ScheduleService.getExpandedSchedulingUnit(schedule_type, schedule_id,expand,fields)
             .then(async (schedulingUnit) => {
                 if (schedulingUnit) {
                     schedulingUnit = this.formatConstraintDocForUI(schedulingUnit);
-                    ScheduleService.getSchedulingConstraintTemplate(schedulingUnit.scheduling_constraints_template_id)
-                        .then(async (template) => {
-                            //template = await ConstraintUtility.updateConstraintSchema(template);
-                            this.setState({ scheduleunitId: schedule_id,
-                                scheduleunit: schedulingUnit,
-                                scheduleunitType: schedule_type,
-                                constraintTemplate: template })
-                        });
-                    if (schedulingUnit.draft_id) {
-                        await ScheduleService.getSchedulingUnitDraftById(schedulingUnit.draft_id).then((response) => {
-                            schedulingUnit['observation_strategy_template_id'] = response.observation_strategy_template_id;
-                        });
+                    if (schedulingUnit.draft) {
+                        schedulingUnit['observation_strategy_template_id'] = schedulingUnit.draft.observation_strategy_template_id;
+                        schedulingUnit['scheduling_set'] = schedulingUnit.draft.scheduling_set;
+                        schedulingUnit['scheduling_constraints_template'] = schedulingUnit.draft.scheduling_constraints_template
                     }
+                    this.setState({ scheduleunitId: schedule_id,
+                                    scheduleunit: schedulingUnit,
+                                    scheduleunitType: schedule_type,
+                                    constraintTemplate: schedulingUnit.scheduling_constraints_template})
+                    
                     let tasks = schedulingUnit.task_drafts ? (await this.getFormattedTaskDrafts(schedulingUnit)) :await this.getFormattedTaskBlueprints(schedulingUnit);
                     let ingestGroup;
                     if(this.props.match.params.type === 'draft') {
@@ -414,7 +434,7 @@ class ViewSchedulingUnit extends Component {
                         dialogVisible: false,
                         ingestGroup
                     });
-                    this.loadTaskParameters(schedulingUnit.observation_strategy_template_id);
+                    this.loadTaskParameters(schedulingUnit.draft?schedulingUnit.draft.observation_strategy_template: schedulingUnit.observation_strategy_template);
                     this.selectedRows = [];
                     // Add Action menu
                     this.getActionMenu(schedule_type, isIngestPresent);
@@ -444,65 +464,62 @@ class ViewSchedulingUnit extends Component {
      * To get task parameters from observation strategy to create custom json schema and load with existing value of the task as input 
      * parameters to pass to the JSON editor.
      */
-    loadTaskParameters(observationStrategyId) {
-        ScheduleService.getObservationStrategy(observationStrategyId)
-        .then(async(observStrategy) => {
-            if (observStrategy) {
-                const tasks = observStrategy.template.tasks;
-                const parameters = observStrategy.template.parameters;
-                let paramsOutput = {};
-                let schema = { type: 'object', additionalProperties: false, 
-                                properties: {}, definitions:{}
-                                };
-                let bandPassFilter = null;
-                const $strategyRefs = await $RefParser.resolve(observStrategy.template);
-                // TODo: This schema reference resolving code has to be moved to common file and needs to rework
-                for (const param of parameters) {
-                    // TODO: make parameter handling more generic, instead of task specific.
-                    if (!param.refs[0].startsWith("#/tasks/")) { continue; }
-                    let taskPaths = param.refs[0].split("/");
-                    const taskName = taskPaths[2];
-                    //taskPaths = taskPaths.slice(4, taskPaths.length);
-                    /**
-                     * For Short_Description, the task path length will be 4, so added below condition to get short_description details
-                     *  #/tasks/Combined Observation/short_description
-                     */
-                    taskPaths = taskPaths.slice((taskPaths.length===4?3:4), taskPaths.length);
-                    const task = tasks[taskName];
-                    const suTask = this.state.schedulingUnitTasks.find(taskD => taskD.name === taskName);
-                    if (suTask) { task.specifications_doc = suTask.specifications_doc;
-                        task.short_description = suTask.short_description;
-                        //task.specifications_doc = suTask.specifications_doc;
-                        const taskKeys = Object.keys(task);
-                        for (const taskKey of taskKeys) {
-                            if (taskKey !== 'specifications_template') {
-                                task[taskKey] = suTask[taskKey];
-                            }
+    async loadTaskParameters(observStrategy) {
+        if (observStrategy) {
+            const tasks = observStrategy.template.tasks;
+            const parameters = observStrategy.template.parameters;
+            let paramsOutput = {};
+            let schema = { type: 'object', additionalProperties: false, 
+                            properties: {}, definitions:{}
+                            };
+            let bandPassFilter = null;
+            const $strategyRefs = await $RefParser.resolve(observStrategy.template);
+            // TODo: This schema reference resolving code has to be moved to common file and needs to rework
+            for (const param of parameters) {
+                // TODO: make parameter handling more generic, instead of task specific.
+                if (!param.refs[0].startsWith("#/tasks/")) { continue; }
+                let taskPaths = param.refs[0].split("/");
+                const taskName = taskPaths[2];
+                //taskPaths = taskPaths.slice(4, taskPaths.length);
+                /**
+                 * For Short_Description, the task path length will be 4, so added below condition to get short_description details
+                 *  #/tasks/Combined Observation/short_description
+                 */
+                taskPaths = taskPaths.slice((taskPaths.length===4?3:4), taskPaths.length);
+                const task = tasks[taskName];
+                const suTask = this.state.schedulingUnitTasks.find(taskD => taskD.name === taskName);
+                if (suTask) { task.specifications_doc = suTask.specifications_doc;
+                    task.short_description = suTask.short_description;
+                    //task.specifications_doc = suTask.specifications_doc;
+                    const taskKeys = Object.keys(task);
+                    for (const taskKey of taskKeys) {
+                        if (taskKey !== 'specifications_template') {
+                            task[taskKey] = suTask[taskKey];
                         }
                     }
-                    if (task) {
-                        const taskTemplate = suTask.template;
-                        // Get the default Bandpass filter and pass to the editor for frequency calculation from subband list
-                        if (taskTemplate.type_value === 'observation' && task.specifications_doc.filter) {
-                            bandPassFilter = task.specifications_doc.filter;
-                        }   else if (taskTemplate.type_value === 'observation' && taskTemplate.schema.properties.filter) {
-                            bandPassFilter = taskTemplate.schema.properties.filter.default;
-                        }
-                        let taskTemplateSchema = await UtilService.resolveSchema(_.cloneDeep(taskTemplate.schema));
-                        schema.definitions = {...schema.definitions, ...taskTemplateSchema.definitions};
-                        taskPaths.reverse();
-                        const paramProp = await ParserUtility.getParamProperty($strategyRefs, taskPaths, taskTemplateSchema, this.taskFilters);
-                        schema.properties[param.name] = _.cloneDeep(paramProp);
-                        if (schema.properties[param.name]) {
-                            schema.properties[param.name].title = param.name;
-                            schema.properties[param.name].default = $strategyRefs.get(param.refs[0]);
-                            paramsOutput[param.name] = schema.properties[param.name].default; 
-                        }
+                }
+                if (task) {
+                    const taskTemplate = suTask.template;
+                    // Get the default Bandpass filter and pass to the editor for frequency calculation from subband list
+                    if (taskTemplate.type_value === 'observation' && task.specifications_doc.filter) {
+                        bandPassFilter = task.specifications_doc.filter;
+                    }   else if (taskTemplate.type_value === 'observation' && taskTemplate.schema.properties.filter) {
+                        bandPassFilter = taskTemplate.schema.properties.filter.default;
+                    }
+                    let taskTemplateSchema = await UtilService.resolveSchema(_.cloneDeep(taskTemplate.schema));
+                    schema.definitions = {...schema.definitions, ...taskTemplateSchema.definitions};
+                    taskPaths.reverse();
+                    const paramProp = await ParserUtility.getParamProperty($strategyRefs, taskPaths, taskTemplateSchema, this.taskFilters);
+                    schema.properties[param.name] = _.cloneDeep(paramProp);
+                    if (schema.properties[param.name]) {
+                        schema.properties[param.name].title = param.name;
+                        schema.properties[param.name].default = $strategyRefs.get(param.refs[0]);
+                        paramsOutput[param.name] = schema.properties[param.name].default; 
                     }
                 }
-                this.setState({paramsSchema: schema, paramsOutput: paramsOutput, bandPassFilter: bandPassFilter });
             }
-        });
+            this.setState({paramsSchema: schema, paramsOutput: paramsOutput, bandPassFilter: bandPassFilter });
+        }
     }
 
     async getFilterColumns(type) {
@@ -541,7 +558,7 @@ class ViewSchedulingUnit extends Component {
         });
         this.actions.push({ icon: 'fa-window-close', title: 'Click to Close Scheduling Unit View', type: 'button',  actOn: 'click', props:{ callback: this.cancelView }});
         if (this.props.match.params.type ==='draft') {           
-           let blueprintExist = this.state.scheduleunit && this.state.scheduleunit.scheduling_unit_blueprints && this.state.scheduleunit.scheduling_unit_blueprints.length>0;
+           let blueprintExist = this.state.scheduleunit && this.state.scheduleunit.scheduling_unit_blueprints_ids && this.state.scheduleunit.scheduling_unit_blueprints_ids.length>0;
             if(isIngestPresent) {
                 this.actions.unshift({
                     icon: 'fa-file-import', 
@@ -751,7 +768,7 @@ class ViewSchedulingUnit extends Component {
             dialog.onSubmit = this.createBlueprintTree;
             dialog.content = null;
             dialog.width = null;
-            if (this.state.scheduleunit.scheduling_unit_blueprints.length > 0) {
+            if (this.state.scheduleunit.scheduling_unit_blueprints_ids.length > 0) {
                 dialog.detail = "Blueprint(s) already exist for this Scheduling Unit. Do you want to create another one?";
             } else {
                 dialog.detail = "Do you want to create a Scheduling Unit Blueprint?";
@@ -1614,19 +1631,21 @@ class ViewSchedulingUnit extends Component {
       * Enable/Disable autodeletion in the scheduling unit
       */
     async setAutoDeletion() {
+        let suCopy = _.cloneDeep(this.state.scheduleunit);
         let resSU = this.state.scheduleunit;
         resSU['output_pinned'] = !this.state.scheduleunit.output_pinned;
+        resSU['draft'] = this.state.scheduleunit.draft.url;
+        resSU['scheduling_constraints_template'] = this.state.scheduleunit.scheduling_constraints_template.url;
         delete resSU['task_blueprints'];
         delete resSU['task_drafts'];
-        resSU = await ScheduleService.updateSchedulingUnit(this.props.match.params.type, resSU);
-        if (resSU) {
+        let updatedResSU = await ScheduleService.updateSchedulingUnit(this.props.match.params.type, resSU);
+        if (updatedResSU) {
             appGrowl.show({ severity: 'success', summary: 'Success', detail: 'Prevent Automatic Deletion updated successfully' });
-            let tmpSu = this.state.scheduleunit;
-            tmpSu['output_pinned'] = resSU.output_pinned;
+            suCopy['output_pinned'] = updatedResSU.output_pinned;
             var index = _.indexOf(this.actions, _.find(this.actions, {'icon' :'fa-thumbtack'}));
             this.actions.splice(index, 1, { icon: 'fa-thumbtack', title: this.state.scheduleunit.output_pinned? 'Allow Automatic Deletion' : 'Prevent Automatic Deletion', 
             type: 'button', actOn: 'click', props: { callback: this.confirmAutoDeletion } }); 
-            this.setState({scheduleunit: tmpSu, actions: this.actions, dialogVisible: false});
+            this.setState({scheduleunit: suCopy, actions: this.actions, dialogVisible: false});
         }   else {
             appGrowl.show({ severity: 'error', summary: 'Failed', detail: 'Unable to update Automatic Deletion' });
             this.setState({dialogVisible: false});
@@ -1835,35 +1854,35 @@ class ViewSchedulingUnit extends Component {
                                 <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.observation_strategy_template_id}</span>
                             </div>
                             <div className="p-grid">
-                                {this.state.scheduleunit.scheduling_set_object.project_id &&
+                                {this.state.scheduleunit.scheduling_set && this.state.scheduleunit.scheduling_set.project_id &&
                                     <>
                                         <label className="col-lg-2 col-md-2 col-sm-12">Project</label>
                                         <span className="col-lg-4 col-md-4 col-sm-12">
-                                            <Link to={`/project/view/${this.state.scheduleunit.scheduling_set_object.project_id}`}>{this.state.scheduleunit.scheduling_set_object.project_id}</Link>
+                                            <Link to={`/project/view/${this.state.scheduleunit.scheduling_set.project_id}`}>{this.state.scheduleunit.scheduling_set.project_id}</Link>
                                         </span>
                                     </>
                                 }
                                 <label className="col-lg-2 col-md-2 col-sm-12">Scheduling set</label>
-                                <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.scheduling_set_object.name}</span>
+                                <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.scheduling_set && this.state.scheduleunit.scheduling_set.name}</span>
                             </div>
                             <div className="p-grid">
                                 <label className="col-lg-2 col-md-2 col-sm-12" >Priority Rank</label>
-                                <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.priority_rank}</span>
+                                <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.draft?this.state.scheduleunit.draft.priority_rank: this.state.scheduleunit.priority_rank}</span>
                                 <label className="col-lg-2 col-md-2 col-sm-12">Priority Queue</label>
-                                <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.priority_queue_value}</span>
+                                <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.draft? this.state.scheduleunit.draft.priority_queue_value : this.state.scheduleunit.priority_queue_value}</span>
                             </div>
                             <div className="p-grid">
                                 <label className="col-lg-2 col-md-2 col-sm-12">{this.props.match.params.type === 'blueprint' ? 'Draft' : 'Blueprints'}</label>
                                 <span className="col-lg-4 col-md-4 col-sm-12">
                                     <ul className="task-list">
-                                        {(this.state.scheduleunit.blueprintList || []).map(blueprint => (
+                                        {(this.state.scheduleunit.scheduling_unit_blueprints || []).map(blueprint => (
                                             <li>
                                                 <Link to={{ pathname: `/schedulingunit/view/blueprint/${blueprint.id}` }}>{blueprint.name}</Link>
                                             </li>))}
-                                        {this.state.scheduleunit.draft_object &&
+                                        {this.state.scheduleunit.draft &&
                                             <li>
-                                                <Link to={{ pathname: `/schedulingunit/view/draft/${this.state.scheduleunit.draft_object.id}` }}>
-                                                    {this.state.scheduleunit.draft_object.name}
+                                                <Link to={{ pathname: `/schedulingunit/view/draft/${this.state.scheduleunit.draft.id}` }}>
+                                                    {this.state.scheduleunit.draft.name}
                                                 </Link>
                                             </li>}
                                     </ul>
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js
index b5384aa4d05b406bb5a5e7237688f617711c3cdf..b6684a162480a97b754ef2aa9cc10c6fe18abcd8 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js
@@ -117,6 +117,21 @@ const ScheduleService = {
             console.error('[schedule.services.getSchedulingUnitsExtendedWithFilter]',error);
         }
         return response;
+    },
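+    // Fetches a single scheduling unit (draft or blueprint) with 'expand' and 'fields' query parameters;
+    // e.g. (assuming type='blueprint', id=1, expand='draft', fields=['id','name']) the request becomes:
+    //   /api/scheduling_unit_blueprint/1/?expand=draft&fields=id,name&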
+    getExpandedSchedulingUnit: async function(type, id, expand, fields) {
+        let schedulingUnit = null;
+        try {
+            let api = `/api/scheduling_unit_${type}/${id}/?`;
+            api += (!expand || expand === '')? '' : 'expand='+expand+'&';
+            api += (!fields || fields === '')? '' : 'fields='+fields+'&';
+            const response = await axios.get(api);
+            schedulingUnit = response.data;
+        }   catch(error) {
+            console.error('[schedule.services.getExpandedSchedulingUnit]', error);
+        }
+        return schedulingUnit;
     },
     getSchedulingUnitExtended: async function (type, id, ignoreRef){
         if (type === "constraints") {