diff --git a/QA/QA_Common/lib/hdf5_io.py b/QA/QA_Common/lib/hdf5_io.py
index b6a9e1a192ae13d3570a743e491e44cd68179d6c..ec1814dbfb1c5c02a3fc4b7f002de2c66b716119 100644
--- a/QA/QA_Common/lib/hdf5_io.py
+++ b/QA/QA_Common/lib/hdf5_io.py
@@ -290,7 +290,7 @@ def read_sap_numbers(path):
 
         return sorted([int(sap_nr) for sap_nr in file['measurement/saps'].keys()])
 
-def read_version(h5_path):
+def read_version(h5_path: str) -> str:
     with SharedH5File(h5_path, "r") as file:
         version = file['version'][0]
         if isinstance(version, bytes):
@@ -661,7 +661,8 @@ def combine_hypercubes(input_paths, output_dir, output_filename=None, do_compres
         if len(versions) != 1:
             raise ValueError('Cannot combine h5 files of multiple versions: %s' % (', '.join(versions),))
 
-        version_str = list(versions)[0]
+        versions = [v.decode('utf-8') if isinstance(v, bytes) else v for v in versions]
+        version_str = versions[0]
 
         if version_str != '1.4':
             raise ValueError('Cannot read version %s' % (version_str,))
diff --git a/QA/QA_Common/test/t_hdf5_io.py b/QA/QA_Common/test/t_hdf5_io.py
index a4ad76cd421105327b2669639bd4ba5dd364ec70..9dccf7bd8f466291b80f59744ec497650bc3e869 100755
--- a/QA/QA_Common/test/t_hdf5_io.py
+++ b/QA/QA_Common/test/t_hdf5_io.py
@@ -85,7 +85,7 @@ class TestHdf5_IO(unittest.TestCase):
             file_annotations = read_file_annotations(path)
 
             self.assertEqual(1, len(file_annotations))
-            self.assertEqual('This file was recorded in front of a live audience ;-)', file_annotations[0]['annotation'])
+            self.assertEqual(b'This file was recorded in front of a live audience ;-)', file_annotations[0]['annotation'])
             self.assertEqual('test_user', file_annotations[0]['user'])
 
         finally:
@@ -362,9 +362,7 @@ class TestHdf5_IO(unittest.TestCase):
             write_hypercube(path, saps_in, sas_id=123456)
 
             # check if version is 1.4
-            with h5py.File(path, "r") as file:
-                version_str = file['version'][0]
-                self.assertEqual('1.4', version_str)
+            self.assertEqual('1.4', read_version(path))
 
             # change version back to 1.2
             # and modify visibility data to have the 1.2 incorrect phases
@@ -408,17 +406,13 @@ class TestHdf5_IO(unittest.TestCase):
                     sap_group.create_dataset('visibilities', data=scaled_visibilities)
 
             # check if version is 1.2
-            with h5py.File(path, "r") as file:
-                version_str = file['version'][0]
-                self.assertEqual('1.2', version_str)
+            self.assertEqual('1.2', read_version(path))
 
             # reading the 1.2 file should result in automatic conversion via 1.3 to 1.4 and correction of phases
             result_raw = read_hypercube(path, visibilities_in_dB=False, python_datetimes=True)
 
             # check if version is now 1.3
-            with h5py.File(path, "r") as file:
-                version_str = file['version'][0]
-                self.assertEqual('1.4', version_str)
+            self.assertEqual('1.4', read_version(path))
 
             # read in dB as well because we usually plot the visibilities in dB
             result_dB = read_hypercube(path, visibilities_in_dB=True, python_datetimes=True)
diff --git a/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py b/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py
index 49e555448b91d3587c7f39db9a1c7b8021dc5f90..28c0e733214039e7b99ddbd195e49eab30c81cdf 100644
--- a/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py
+++ b/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py
@@ -1,5 +1,5 @@
 from lofar.sas.tmss.tmss.exceptions import *
-from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Dataproduct, SubtaskType, Subtask, SubtaskOutput
+from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Dataproduct, SubtaskType, Subtask, SubtaskOutput, SIPidentifier
 from lofar.sas.tmss.tmss.tmssapp.models.specification import Datatype, Dataformat
 from lofar.lta.sip import siplib, ltasip, validator, constants
 
@@ -77,28 +77,29 @@ def get_siplib_stations_list(subtask):
     return siplib_station_list
 
 
-# todo: how do we handle IDs? ...use ID service for new dataproducts? ...determine IDs of already ingested dataproducts?
-# todo: replace the following hack after a conclusion is reached on how to handle IDs.
-#   Either use the default constructor of siplib.Identifier to have the ID service generate unique IDs
-#   Or add a class method to create IDs from within TMSS, by implementing a logic that can generate LTA-wide unique integer identifiers.
-def create_fake_identifier_for_testing(unique_id: int=None) -> siplib.Identifier:
+def get_siplib_identifier(sipid_obj: SIPidentifier, context_str="") -> siplib.Identifier:
     """
-    Create an Identifier object for testing.
-    While under deleopment, work around the ID service and create fake identifiers that we are not gonna use anyway:
-    ! Important: Note that the created identifiers will be unknown to the LTA and must not be used for an actual ingest!
+    Retrieve an Identifier object. Get the unique_identifier and source of the given SIP identifier object and
+    convert that to a siplib object
+    :param sipid_obj: SIP Identifier object
+    :param context_str: Context info to include in the error message in case of failure
     :return: A lofar.lta.sip.siplib.Identifier object
     """
     identifier = siplib.Identifier.__new__(siplib.Identifier)
+    unique_id = sipid_obj.unique_identifier
     if unique_id is None:
-        unique_id = uuid.uuid4().int
+        raise TMSSException("Can not create SIP Identifier, the value of 'unique_id' is None for %s" % context_str)
+    source = sipid_obj.source
+    if source is None:
+        raise TMSSException("Can not create SIP Identifier, the value 'source' is None for %s" % context_str)
+
     identifier._set_pyxb_identifier(
         ltasip.IdentifierType(
-            source='tmss_test',
+            source=source,
             identifier=unique_id,
             name=None,
             label=None),
         suppress_warning=True)
-    logger.warning("Created fake Identifier %s - do not use for an actual ingest!" % unique_id)
     return identifier
 
 
@@ -130,7 +131,7 @@ def create_sip_representation_for_subtask(subtask: Subtask):
     :return: A siplib.Observation object or one of the various siplib pipeline object flavors
     """
     # determine common properties
-    subtask_sip_identifier = create_fake_identifier_for_testing(unique_id=subtask.id)   # todo: use correct id
+    subtask_sip_identifier = get_siplib_identifier(subtask.global_identifier, "Subtask id=%s" % subtask.id)
     name = str(subtask.id)
     process_map = siplib.ProcessMap(strategyname=subtask.specifications_template.name,
                                     strategydescription=subtask.specifications_template.description,
@@ -170,7 +171,7 @@ def create_sip_representation_for_subtask(subtask: Subtask):
     elif subtask.specifications_template.type.value == SubtaskType.Choices.PIPELINE.value:
         sourcedata_identifiers = []
         for input in subtask.inputs.all():
-            sourcedata_identifiers += [create_fake_identifier_for_testing(unique_id=dp.id) for dp in input.dataproducts.all()]     # todo: use correct id, lookup based on TMSS reference or so, tbd
+            sourcedata_identifiers += [get_siplib_identifier(dp.global_identifier, "Dataproduct id=%s" % dp.id) for dp in input.dataproducts.all()]     # todo: use correct id, lookup based on TMSS reference or so, tbd
         if not sourcedata_identifiers:
             raise TMSSException("There seems to be no subtask input associated to your pipeline subtask id %s. Please define what data the pipeline processed." % subtask.id)
 
@@ -282,18 +283,18 @@ def create_sip_representation_for_dataproduct(dataproduct: Dataproduct):
         logger.warning("Could not determine the type of dataproduct id %s (%s). Falling back to %s" % (dataproduct.id, err, dataproduct_fileformat))
 
     dataproduct_map = siplib.DataProductMap(type=dataproduct_type,
-                                            identifier=create_fake_identifier_for_testing(unique_id=dataproduct.id), # todo: use correct id
+                                            identifier=get_siplib_identifier(dataproduct.global_identifier, "Dataproduct %s" % dataproduct.id),
                                             size=dataproduct.size if dataproduct.size else 0,
                                             filename=dataproduct.filename,
                                             fileformat=dataproduct_fileformat,
                                             storage_writer=storage_writer_map[dataproduct.feedback_doc["samples"]["writer"] if 'samples' in dataproduct.feedback_doc else 'unknown'], # todo: verify we can use the feedback_doc here and remove the old method | storage_writer_map[dataproduct.producer.subtask.task_blueprint.specifications_doc.get("storagemanager", 'unknown')],
                                             storage_writer_version=dataproduct.feedback_doc["samples"]["writer_version"] if 'samples' in dataproduct.feedback_doc else 'unknown',
-                                            process_identifier=create_fake_identifier_for_testing(unique_id=dataproduct.producer.subtask.id))
+                                            process_identifier=get_siplib_identifier(dataproduct.producer.subtask.global_identifier, "Producer Subtask %s" % dataproduct.producer.subtask.id))
 
     if dataproduct.dataformat.value == Dataformat.Choices.MEASUREMENTSET.value:  # <- This is the only one we currently need for UC1
         sip_dataproduct = siplib.CorrelatedDataProduct(
             dataproduct_map,
-            subarraypointing_identifier=create_fake_identifier_for_testing(), # todo, from dataproduct.specifications_doc, Jan David checks how to translate int -> Identifier object
+            subarraypointing_identifier=get_siplib_identifier(dataproduct.sap.global_identifier, "SAP %s" % dataproduct.sap.id),
             subband=dataproduct.feedback_doc['frequency']['subbands'][0],
             starttime=dataproduct.feedback_doc['time']['start_time'],
             duration=isodate.duration_isoformat(datetime.timedelta(seconds=dataproduct.feedback_doc['time']['duration'])),
diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py
index f19b35a76f07f50442832cea462599935aed7450..7faae82d9b3a694ab79ebddd5b3d0c5676d8ffac 100644
--- a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py
+++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py
@@ -1,4 +1,4 @@
-# Generated by Django 3.0.9 on 2020-10-27 16:12
+# Generated by Django 3.0.7 on 2020-10-29 16:37
 
 from django.conf import settings
 import django.contrib.postgres.fields
@@ -597,6 +597,19 @@ class Migration(migrations.Migration):
                 'abstract': False,
             },
         ),
+        migrations.CreateModel(
+            name='SIPidentifier',
+            fields=[
+                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)),
+                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
+                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
+                ('source', models.CharField(help_text='Source name', max_length=128)),
+                ('unique_identifier', models.BigAutoField(help_text='Unique global identifier.', primary_key=True, serialize=False)),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
         migrations.CreateModel(
             name='StationType',
             fields=[
@@ -1057,6 +1070,11 @@ class Migration(migrations.Migration):
             name='created_or_updated_by_user',
             field=models.ForeignKey(editable=False, help_text='The user who created / updated the subtask.', null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL),
         ),
+        migrations.AddField(
+            model_name='subtask',
+            name='global_identifier',
+            field=models.ForeignKey(editable=False, help_text='The global unique identifier for LTA SIP.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SIPidentifier'),
+        ),
         migrations.AddField(
             model_name='subtask',
             name='schedule_method',
@@ -1077,6 +1095,10 @@ class Migration(migrations.Migration):
             name='task_blueprint',
             field=models.ForeignKey(help_text='Task Blueprint to which this Subtask belongs.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='subtasks', to='tmssapp.TaskBlueprint'),
         ),
+        migrations.AddIndex(
+            model_name='sipidentifier',
+            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sip_tags_bbce92_gin'),
+        ),
         migrations.AddConstraint(
             model_name='schedulingunittemplate',
             constraint=models.UniqueConstraint(fields=('name', 'version'), name='schedulingunittemplate_unique_name_version'),
@@ -1149,6 +1171,11 @@ class Migration(migrations.Migration):
             model_name='saptemplate',
             constraint=models.UniqueConstraint(fields=('name', 'version'), name='saptemplate_unique_name_version'),
         ),
+        migrations.AddField(
+            model_name='sap',
+            name='global_identifier',
+            field=models.ForeignKey(editable=False, help_text='The global unique identifier for LTA SIP.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SIPidentifier'),
+        ),
         migrations.AddField(
             model_name='sap',
             name='specifications_template',
@@ -1281,6 +1308,11 @@ class Migration(migrations.Migration):
             name='feedback_template',
             field=models.ForeignKey(help_text='Schema used for feedback_doc.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.DataproductFeedbackTemplate'),
         ),
+        migrations.AddField(
+            model_name='dataproduct',
+            name='global_identifier',
+            field=models.ForeignKey(editable=False, help_text='The global unique identifier for LTA SIP.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SIPidentifier'),
+        ),
         migrations.AddField(
             model_name='dataproduct',
             name='producer',
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
index b10dc6daf59db2ef65951d29b8e99953a8654db7..a6257840944db83aedf0df4fc8667588ecd8a19f 100644
--- a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
@@ -9,7 +9,7 @@ logger = logging.getLogger(__name__)
 from datetime import datetime, timedelta
 
 from django.db.models import ForeignKey, CharField, DateTimeField, BooleanField, IntegerField, BigIntegerField, \
-    ManyToManyField, CASCADE, SET_NULL, PROTECT, QuerySet
+    ManyToManyField, CASCADE, SET_NULL, PROTECT, QuerySet, BigAutoField
 from django.contrib.postgres.fields import ArrayField, JSONField
 from django.contrib.auth.models import User
 from .specification import AbstractChoice, BasicCommon, Template, NamedCommon, annotate_validate_add_defaults_to_doc_using_template
@@ -24,6 +24,7 @@ from lofar.sas.tmss.client.tmssbuslistener import DEFAULT_TMSS_SUBTASK_NOTIFICAT
 from lofar.common.util import single_line_with_single_spaces
 from django.conf import settings
 from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC
+import uuid
 #
 # I/O
 #
@@ -32,6 +33,16 @@ from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC
 # Choices
 #
 
+
+def generate_unique_identifier_for_SIP_when_needed(model):
+    """
    Create a unique identifier for the given model instance if it does not yet have one (None).
    We just use an auto-increment ID, which is 64 bit
+    """
+    if model.id is not None and model.global_identifier is None:
+        model.global_identifier = SIPidentifier.objects.create(source="TMSS")
+
+
 class SubtaskState(AbstractChoice):
     """Defines the model and predefined list of possible SubtaskStatusChoice's for Subtask.
     The items in the Choices class below are automagically populated into the database via a data migration."""
@@ -151,6 +162,7 @@ class Subtask(BasicCommon):
     # resource_claim = ForeignKey("ResourceClaim", null=False, on_delete=PROTECT) # todo <-- how is this external reference supposed to work?
     created_or_updated_by_user = ForeignKey(User, null=True, editable=False, on_delete=PROTECT, help_text='The user who created / updated the subtask.')
     raw_feedback = CharField(null=True, max_length=1048576, help_text='The raw feedback for this Subtask')
+    global_identifier = ForeignKey('SIPidentifier', null=True, editable=False, on_delete=PROTECT, help_text='The global unique identifier for LTA SIP.')
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
@@ -195,6 +207,7 @@ class Subtask(BasicCommon):
         creating = self._state.adding  # True on create, False on update
 
         annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template')
+        generate_unique_identifier_for_SIP_when_needed(self)
 
         # check for uniqueness of SAP names:
         # todo: this is a very specific check, that depends on the template. On the task level, we have a javascript
@@ -300,12 +313,15 @@ class SubtaskOutput(BasicCommon):
 class SAP(BasicCommon):
     specifications_doc = JSONField(help_text='SAP properties.')
     specifications_template = ForeignKey('SAPTemplate', null=False, on_delete=CASCADE, help_text='Schema used for specifications_doc.')
+    global_identifier = ForeignKey('SIPidentifier', null=True, editable=False, on_delete=PROTECT, help_text='The global unique identifier for LTA SIP.')
 
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
         annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template')
+        generate_unique_identifier_for_SIP_when_needed(self)
 
         super().save(force_insert, force_update, using, update_fields)
 
+
 class Dataproduct(BasicCommon):
     """
     A data product represents an atomic dataset, produced and consumed by subtasks. The consumed dataproducts are those
@@ -328,13 +344,16 @@ class Dataproduct(BasicCommon):
     feedback_doc = JSONField(help_text='Dataproduct properties, as reported by the producing process.')
     feedback_template = ForeignKey('DataproductFeedbackTemplate', on_delete=PROTECT, help_text='Schema used for feedback_doc.')
     sap = ForeignKey('SAP', on_delete=PROTECT, null=True, related_name="dataproducts", help_text='SAP this dataproduct was generated out of (NULLable).')
+    global_identifier = ForeignKey('SIPidentifier', editable=False, null=True, on_delete=PROTECT, help_text='The global unique identifier for LTA SIP.')
 
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
         annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template')
         annotate_validate_add_defaults_to_doc_using_template(self, 'feedback_doc', 'feedback_template')
+        generate_unique_identifier_for_SIP_when_needed(self)
 
         super().save(force_insert, force_update, using, update_fields)
 
+
 class AntennaSet(NamedCommon):
     station_type = ForeignKey('StationType', null=False, on_delete=PROTECT)
     rcus = ArrayField(IntegerField(), size=128, blank=False)
@@ -380,3 +399,7 @@ class DataproductHash(BasicCommon):
     algorithm = ForeignKey('Algorithm', null=False, on_delete=PROTECT, help_text='Algorithm used (MD5, AES256).')
     hash = CharField(max_length=128, help_text='Hash value.')
 
+
+class SIPidentifier(BasicCommon):
+    source = CharField(max_length=128, help_text='Source name')
+    unique_identifier = BigAutoField(primary_key=True, help_text='Unique global identifier.')
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/sap_template-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/sap_template-1.json
index b4f6eb64f1e422da284e0f7b7c6c3c37a9d7bd58..e80661a829d938ce0f585355b76c6053cdc0733a 100644
--- a/SAS/TMSS/src/tmss/tmssapp/schemas/sap_template-1.json
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/sap_template-1.json
@@ -5,17 +5,6 @@
   "type": "object",
   "default": {},
   "properties": {
-    "identifiers": {
-      "type": "object",
-      "properties": {
-        "SIP": {
-          "type": "string",
-          "default": ""
-        }
-      },
-      "additionalProperties": false,
-      "default": {}
-    },
     "measurement_type": {
       "type": "string",
       "enum": ["calibrator", "target"],
@@ -59,7 +48,6 @@
     }
   },
   "required": [
-    "identifiers",
     "name",
     "pointing",
     "time",
diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
index 091a2352b42ec6cea116152462769e62cdd624c7..a2a10449daef88deacd7eadf8ae421e0e8891bf9 100644
--- a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
@@ -160,3 +160,17 @@ class SAPTemplateSerializer(AbstractTemplateSerializer):
     class Meta:
         model = models.SAPTemplate
         fields = '__all__'
+
+
+class SAPSerializer(RelationalHyperlinkedModelSerializer):
+    specifications_doc = JSONEditorField(schema_source='specifications_template.schema')
+
+    class Meta:
+        model = models.SAP
+        fields = '__all__'
+
+
+class SIPidentifierSerializer(RelationalHyperlinkedModelSerializer):
+    class Meta:
+        model = models.SIPidentifier
+        fields = '__all__'
diff --git a/SAS/TMSS/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/src/tmss/tmssapp/subtasks.py
index 6502b5187e32720fcc80f9e4e7d2e4a267ebdd0a..febb5de9b585ed8202bb2853b61aa5374559689a 100644
--- a/SAS/TMSS/src/tmss/tmssapp/subtasks.py
+++ b/SAS/TMSS/src/tmss/tmssapp/subtasks.py
@@ -574,6 +574,7 @@ def schedule_qafile_subtask(qafile_subtask: Subtask):
                                                                 feedback_template=DataproductFeedbackTemplate.objects.get(name="empty"),
                                                                 sap=None  # todo: do we need to point to a SAP here? Of which dataproduct then?
                                                                 )
+        qafile_subtask_dataproduct.save()
 
     # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it)
     qafile_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value)
@@ -626,6 +627,7 @@ def schedule_qaplots_subtask(qaplots_subtask: Subtask):
                                                              feedback_template=DataproductFeedbackTemplate.objects.get(name="empty"),
                                                              sap=None  # todo: do we need to point to a SAP here? Of which dataproduct then?
                                                              )
+    qaplots_subtask_dataproduct.save()
 
     # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it)
     qaplots_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value)
@@ -743,7 +745,6 @@ def schedule_observation_subtask(observation_subtask: Subtask):
             antennafields += [{"station": station, "field": field, "type": antennaset.split('_')[0]} for field in fields]
 
         sap = SAP.objects.create(specifications_doc={ "name": "%s_%s" % (observation_subtask.id, pointing['name']),
-                                                      "identifiers": {},  # todo: TMSS-324
                                                       "pointing": pointing['pointing'],
                                                       "time": {"start_time": observation_subtask.start_time.isoformat(),
                                                                "duration": (observation_subtask.stop_time - observation_subtask.start_time).total_seconds()},
@@ -753,9 +754,9 @@ def schedule_observation_subtask(observation_subtask: Subtask):
                                                       }
                                                     },
                                  specifications_template=SAPTemplate.objects.get(name="SAP"))
-
+        sap.save()
         for sb_nr in pointing['subbands']:
-            Dataproduct.objects.create(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr),
+            dp = Dataproduct.objects.create(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr),
                                        directory=directory,
                                        dataformat=Dataformat.objects.get(value="MeasurementSet"),
                                        datatype=Datatype.objects.get(value="visibilities"),  # todo: is this correct?
@@ -767,7 +768,7 @@ def schedule_observation_subtask(observation_subtask: Subtask):
                                        size=0 if sb_nr%10==0 else 1024*1024*1024*sb_nr,
                                        expected_size=1024*1024*1024*sb_nr,
                                        sap=sap)
-
+            dp.save()
     # step 4: resource assigner (if possible)
     _assign_resources(observation_subtask)
 
@@ -849,6 +850,7 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask):
                                                    feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
                                                    feedback_template=dataproduct_feedback_template,
                                                    sap=input_dp.sap)
+            output_dp.save()
             DataproductTransform.objects.create(input=input_dp, output=output_dp, identity=False)
             output_dps.append(output_dp)
 
diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
index 601321cf92d003f606c7d9d32afb5c32020b970e..54ed8f42527b6a902201b46b2c6e644b26ce38cc 100644
--- a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
@@ -328,6 +328,12 @@ class SAPViewSet(LOFARViewSet):
         serializer = serializers.DataproductSerializer(sap.dataproducts, many=True, context={'request': request})
         return RestResponse(serializer.data)
 
+
 class SAPTemplateViewSet(AbstractTemplateViewSet):
     queryset = models.SAPTemplate.objects.all()
     serializer_class = serializers.SAPTemplateSerializer
+
+
+class SIPidentifierViewSet(LOFARViewSet):
+    queryset = models.SIPidentifier.objects.all()
+    serializer_class = serializers.SIPidentifierSerializer
\ No newline at end of file
diff --git a/SAS/TMSS/src/tmss/urls.py b/SAS/TMSS/src/tmss/urls.py
index b66e916301cfe468a00a52fd43430504010ac32a..376c1c83ba457c81f98901a262b286e8db94c52b 100644
--- a/SAS/TMSS/src/tmss/urls.py
+++ b/SAS/TMSS/src/tmss/urls.py
@@ -199,6 +199,7 @@ router.register(r'dataproduct_hash', viewsets.DataproductHashViewSet)
 router.register(r'subtask_state_log', viewsets.SubtaskStateLogViewSet)
 router.register(r'user', viewsets.UserViewSet)
 router.register(r'sap', viewsets.SAPViewSet)
+router.register(r'sip_identifier', viewsets.SIPidentifierViewSet)
 
 # ---
 
diff --git a/SAS/TMSS/test/t_adapter.py b/SAS/TMSS/test/t_adapter.py
index 379f3a37a4b53165882be3579af3eef08a8b40fe..4b11c380b6f06edc6a44e5985d8ce6c61197d671 100755
--- a/SAS/TMSS/test/t_adapter.py
+++ b/SAS/TMSS/test/t_adapter.py
@@ -46,6 +46,7 @@ from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduc
 from lofar.sas.tmss.tmss.tmssapp.adapters.feedback import generate_dataproduct_feedback_from_subtask_feedback_and_set_finished
 from lofar.lta.sip import constants
 
+
 class ParsetAdapterTest(unittest.TestCase):
     def test_01(self):
         subtask_template = models.SubtaskTemplate.objects.get(name='observation control')
@@ -60,8 +61,17 @@ class ParsetAdapterTest(unittest.TestCase):
         parset = convert_to_parset(subtask)
 
 
-class SIPdapterTest(unittest.TestCase):
+class SIPadapterTest(unittest.TestCase):
     def test_simple_sip_generate_from_dataproduct(self):
+        """
+        Test if SIP is generated successfully when subtask, dataproduct and SAP objects are created
+        Check some values in the SIP (xml) output
+        Check that the SIP identifiers are in SIP (xml) output
+        Check that the number of SIP identifiers has increased by 3
+        Check that all SIP identifiers are unique
+        """
+        nbr_expected_sip_identifiers_before_setup = len(models.SIPidentifier.objects.all())
+
         subtask_template = models.SubtaskTemplate.objects.get(name='observation control')
         specifications_doc = get_default_json_object_for_schema(subtask_template.schema)
         specifications_doc['stations']['filter'] = "HBA_210_250"
@@ -70,12 +80,22 @@ class SIPdapterTest(unittest.TestCase):
         feedback_doc = {'percentage_written': 100, 'frequency': {'subbands': [156], 'central_frequencies': [33593750.0], 'channel_width': 6103.515625, 'channels_per_subband': 32}, 'time': {'start_time': '2013-02-16T17:00:00', 'duration': 5.02732992172, 'sample_width': 2.00278016}, 'antennas': {'set': 'HBA_DUAL', 'fields': [{'type': 'HBA', 'field': 'HBA0', 'station': 'CS001'}, {'type': 'HBA', 'field': 'HBA1', 'station': 'CS001'}]}, 'target': {'pointing': {'angle1': 0, 'angle2': 0, 'angle3': 0, 'direction_type': 'J2000'}}, 'samples': {'polarisations': ['XX', 'XY', 'YX', 'YY'], 'type': 'float', 'bits': 32, 'writer': 'standard', 'writer_version': '2.2.0', 'complex': True}, '$schema': 'http://127.0.0.1:8001/api/schemas/dataproductfeedbacktemplate/feedback/1#'}
         for dp in specifications_doc['stations']['digital_pointings']:
             dp['subbands'] = list(range(8))
+        # Create SubTask(output)
         subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
         subtask:models.Subtask = models.Subtask.objects.create(**subtask_data)
+        subtask.save()
         subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask))
-        dataproduct:models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(feedback_doc=feedback_doc, producer=subtask_output))
+        # Create Dataproduct
+        dataproduct: models.Dataproduct = models.Dataproduct.objects.create(
+            **Dataproduct_test_data(feedback_doc=feedback_doc, producer=subtask_output))
+        dataproduct.save()
+        # Create SAP
+        sap_template = models.SAPTemplate.objects.get(name="SAP")
+        specifications_doc = get_default_json_object_for_schema(sap_template.schema)
+        sap = models.SAP.objects.create(specifications_doc=specifications_doc, specifications_template=sap_template)
+        dataproduct.sap = sap
+        sap.save()
 
-        # make sure we can create a valid SIP
         sip = generate_sip_for_dataproduct(dataproduct)
 
         # double-check that SIP contains values from feedback and specifications docs
@@ -83,6 +103,15 @@ class SIPdapterTest(unittest.TestCase):
         self.assertIn(str(feedback_doc['time']['start_time']), sip.get_prettyxml())
         self.assertIn(constants.FILTERSELECTIONTYPE_210_250_MHZ, sip.get_prettyxml()) # specifications_doc: "HBA_210_250"
 
+        self.assertIn(str(subtask.global_identifier.unique_identifier), sip.get_prettyxml())
+        self.assertIn(str(dataproduct.global_identifier.unique_identifier), sip.get_prettyxml())
+        self.assertIn(str(sap.global_identifier.unique_identifier), sip.get_prettyxml())
+
+        all_sip_ids = list(models.SIPidentifier.objects.all())
+        self.assertEqual(nbr_expected_sip_identifiers_before_setup+3, len(all_sip_ids))
+        for sip_id in all_sip_ids:
+            self.assertEqual(models.SIPidentifier.objects.filter(unique_identifier=sip_id.unique_identifier).count(), 1)
+
 
 class FeedbackAdapterTest(unittest.TestCase):