diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py
index 80dfcee16ec563ea2417916c6eb06069f2ebe7e7..92298e833d01864410d81ebf91203e72f6f834b7 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py
@@ -64,7 +64,7 @@ def _dataproduct_parset_subkeys(subtask: models.Subtask, dataproducts: list) ->
     parset["enabled"] = len(dataproducts) > 0
     parset["filenames"] = [dp.filename if dp else "null:" for dp in dataproducts]
     parset["skip"] = [0 if dp else 1 for dp in dataproducts]
-    parset["locations"] = ["%s:%s" % (subtask.cluster.name, dp.directory if dp else "") for dp in dataproducts]
+    parset["locations"] = ["%s:%s" % (subtask.cluster.name, dp.directory if dp else "") for dp in dataproducts]  # todo: use storage cluster instead of processing cluster? dp.producer.filesystem.cluster.name?
 
     return parset
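
    A quick plain-Python illustration of the list shapes _dataproduct_parset_subkeys builds for a mix of
    one present and one missing dataproduct. The dataproduct class and values below are invented
    stand-ins, and "CEP4" stands in for subtask.cluster.name:

        class _FakeDataproduct:
            filename = "L123_SB000_uv.MS"
            directory = "/data/test-projects/high_0/L123"

        dataproducts = [_FakeDataproduct(), None]
        print([dp.filename if dp else "null:" for dp in dataproducts])  # ['L123_SB000_uv.MS', 'null:']
        print([0 if dp else 1 for dp in dataproducts])                  # [0, 1]
        print(["%s:%s" % ("CEP4", dp.directory if dp else "") for dp in dataproducts])
        # ['CEP4:/data/test-projects/high_0/L123', 'CEP4:']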
 
@@ -112,8 +112,8 @@ def _convert_correlator_settings_to_parset_dict(subtask: models.Subtask, spec: d
                 correlator_dataproducts.append(dataproduct[0] if dataproduct else None)
 
     parset.update(_add_prefix(_dataproduct_parset_subkeys(subtask, correlator_dataproducts), "Observation.DataProducts.Output_Correlated."))
-    parset["Observation.DataProducts.Output_Correlated.storageClusterName"] = subtask.cluster.name # TODO: This must be the storage cluster, not the processing cluster
-    parset["Observation.DataProducts.Output_Correlated.storageClusterPartition"] = "/data/projects" if isProductionEnvironment() else "/data/test-projects"
+    parset["Observation.DataProducts.Output_Correlated.storageClusterName"] = subtask.outputs.first().filesystem.cluster.name  # todo: filter for specific output?
+    parset["Observation.DataProducts.Output_Correlated.storageClusterPartition"] = subtask.outputs.first().filesystem.directory  # todo: filter for specific output?
 
     # mimic MoM placeholder thingy (the resource estimator parses this)
     parset["Observation.DataProducts.Output_Correlated.identifications"] = ["TMSS_subtask_%s.SAP%03d" % (subtask.id, sap_nr) for sap_nr in range(len(digi_beams))]  # we need one per SAP, and that needs to include the string "SAPxxx" with xxx being the SAP number, just to satisfy the ResourceEstimator.
@@ -288,12 +288,12 @@ def _convert_beamformer_settings_to_parset_dict(subtask: models.Subtask, spec: d
     coherent_dataproducts, incoherent_dataproducts = _order_beamformer_dataproducts(dataproducts, spec)
 
     parset.update(_add_prefix(_dataproduct_parset_subkeys(subtask, coherent_dataproducts), "Observation.DataProducts.Output_CoherentStokes."))
-    parset["Observation.DataProducts.Output_CoherentStokes.storageClusterName"] = subtask.cluster.name # TODO: This must be the storage cluster, not the processing cluster
-    parset["Observation.DataProducts.Output_CoherentStokes.storageClusterPartition"] = "/data/projects" if isProductionEnvironment() else "/data/test-projects"
+    parset["Observation.DataProducts.Output_CoherentStokes.storageClusterName"] = subtask.outputs.first().filesystem.cluster.name  # todo: filter for specific output?
+    parset["Observation.DataProducts.Output_CoherentStokes.storageClusterPartition"] = subtask.outputs.first().filesystem.directory  # todo: filter for specific output?
 
     parset.update(_add_prefix(_dataproduct_parset_subkeys(subtask, incoherent_dataproducts), "Observation.DataProducts.Output_IncoherentStokes."))
-    parset["Observation.DataProducts.Output_IncoherentStokes.storageClusterName"] = subtask.cluster.name # TODO: This must be the storage cluster, not the processing cluster
-    parset["Observation.DataProducts.Output_IncoherentStokes.storageClusterPartition"] = "/data/projects" if isProductionEnvironment() else "/data/test-projects"
+    parset["Observation.DataProducts.Output_IncoherentStokes.storageClusterName"] = subtask.outputs.first().filesystem.cluster.name  # todo: filter for specific output?
+    parset["Observation.DataProducts.Output_IncoherentStokes.storageClusterPartition"] = subtask.outputs.first().filesystem.directory  # todo: filter for specific output?
 
     # mimic MoM placeholder thingy (the resource estimator parses this)
     parset["Observation.DataProducts.Output_CoherentStokes.identifications"] = ["TMSS_subtask_%s.SAP%03d" % (subtask.id, sap_nr) for sap_nr in range(len(digi_beams))]  # this needs to be unique per SAP only, not dataproduct
@@ -609,7 +609,7 @@ def _convert_to_parset_dict_for_preprocessing_pipeline_schema(subtask: models.Su
 
     parset.update(_add_prefix(_dataproduct_parset_subkeys(subtask, out_dataproducts), "Observation.DataProducts.Output_Correlated."))
     parset["Observation.DataProducts.Output_Correlated.identifications"] = ["TMSS_subtask_%s.SAP%03d" % (subtask.id, 0)]
-    parset["Observation.DataProducts.Output_Correlated.storageClusterName"] = subtask.cluster.name
+    parset["Observation.DataProducts.Output_Correlated.storageClusterName"] = subtask.outputs.first().filesystem.cluster.name  # todo: should we filter for an output with role=Role.Choices.CORRELATOR?
 
     # Other
     parset["Observation.ObservationControl.PythonControl.PreProcessing.SkyModel"] = "Ateam_LBA_CC"
@@ -700,7 +700,7 @@ def _convert_to_parset_dict_for_pulsarpipeline_schema(subtask: models.Subtask) -
 
     parset.update(_add_prefix(_dataproduct_parset_subkeys(subtask, out_dataproducts), "Observation.DataProducts.Output_Pulsar."))
     parset["Observation.DataProducts.Output_Pulsar.identifications"] = ["TMSS_subtask_%s.SAP%03d" % (subtask.id, 0)]   # todo: find correct SAP id (although this is probably fine since the pulsar pipeline does not use this?)
-    parset["Observation.DataProducts.Output_Pulsar.storageClusterName"] = subtask.cluster.name
+    parset["Observation.DataProducts.Output_Pulsar.storageClusterName"] = subtask.outputs.first().filesystem.cluster.name  # todo: should we filter for an output with role=Role.Choices.BEAMFORMER?
 
     # pragmatic solution to deal with the various parset using subsystems...
     # some want the keys as "Observation.<subkey>" and some as "ObsSW.Observation.<subkey>"
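
    The hunks above route everything through _add_prefix to namespace the generic dataproduct subkeys
    per output type. Its body is not part of this patch; a minimal sketch consistent with the calls
    shown would be:

        def _add_prefix(parset: dict, prefix: str) -> dict:
            """prepend the given prefix to every key of the parset dict"""
            return {prefix + key: value for key, value in parset.items()}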
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0023_subtaskoutput_filesystem.py b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0023_subtaskoutput_filesystem.py
new file mode 100644
index 0000000000000000000000000000000000000000..156fc9cb0e1aef3e64ef7c74cac69e6e7da884fb
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0023_subtaskoutput_filesystem.py
@@ -0,0 +1,32 @@
+# Generated by Django 3.0.9 on 2022-01-25 10:18
+
+from django.db import migrations, models
+import django.db.models.deletion
+from lofar.sas.tmss.tmss.tmssapp.populate import populate_misc2
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('tmssapp', '0022_sip'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='subtaskoutput',
+            name='filesystem',
+            field=models.ForeignKey(default=None, help_text='The filesystem that all dataproducts produced by this output are written to.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Filesystem'),
+            preserve_default=False,
+        ),
+        migrations.AlterField(
+            model_name='schedulingunitdraft',
+            name='priority_rank',
+            field=models.FloatField(default=1.0, help_text='Priority of this scheduling unit w.r.t. other scheduling units within the same queue and project.'),
+        ),
+        migrations.AlterField(
+            model_name='subtask',
+            name='global_parset_identifier',
+            field=models.OneToOneField(default=1, editable=False, help_text="The global unique identifier of this Subtask's parset for LTA SIP.", on_delete=django.db.models.deletion.PROTECT, related_name='related_subtask', to='tmssapp.SIPidentifier'),
+            preserve_default=False,
+        ),
+        migrations.RunPython(populate_misc2)
+    ]
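
    The RunPython(populate_misc2) step as written is irreversible, so migrating backwards past 0023
    would fail on it. If backwards migration matters, Django's stock no-op reverse can be supplied
    (standard Django API; a suggestion, not part of this patch):

        migrations.RunPython(populate_misc2, reverse_code=migrations.RunPython.noop)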
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py
index 9df71f609402d7f6f38a497ed8dee904431cf671..153fe14d04feb6c96ad64445f652368a4fe1b9d0 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py
@@ -499,6 +499,7 @@ class SubtaskInput(BasicCommon, TemplateSchemaMixin):
 class SubtaskOutput(BasicCommon):
     subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, related_name='outputs', help_text='Subtask to which this output specification refers.')
     output_role = ForeignKey('TaskConnectorType', null=True, related_name='subtask_outputs', on_delete=CASCADE, help_text='Output connector type (what kind of data is taken from the producer).')
+    filesystem = ForeignKey('Filesystem', null=False, on_delete=PROTECT, help_text='The filesystem that all dataproducts produced by this output are written to.')
 
 
 class SAP(BasicCommon, TemplateSchemaMixin):
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
index 363cbf3f627bbc4a3f510787bfe54dc150430b21..c28a983c5e7fd0d1d25bbce9da9ed18645805aae 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
@@ -301,6 +301,24 @@ class BaseStrategyTemplate(NamedVersionedCommon):
     class Meta(NamedVersionedCommon.Meta):
         abstract = True
 
+    @property
+    def _need_to_auto_increment_version_number(self) -> bool:
+        '''A new instance always needs a version number; an existing instance only gets its version number auto-incremented when the template document itself changed and the template is in use.'''
+        if self.pk is None:
+            # this is a new instance, so we need a new version number
+            return True
+
+        # check current contents of template document in the database...
+        prev_template_value = self.__class__.objects.values('template').get(pk=self.pk)['template']
+
+        # ... if it's changed
+        if prev_template_value != self.template:
+            # the template value itself changed, so do auto increment version number if this template instance is used
+            return self.is_used
+
+        # the template(value) did not change, so do NOT auto_increment the version number
+        return False
+
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
         self.auto_set_version_number()
         super().save(force_insert, force_update, using, update_fields)
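
    The new property is presumably consumed by auto_set_version_number, which save() calls but which is
    not shown in this patch. A hedged sketch of how such a method could use it, assuming version numbers
    count up per template name (consistent with the new unit test further down, where a fresh template
    starts at version 1 and increments to 2):

        from django.db.models import Max

        def auto_set_version_number(self):
            if self._need_to_auto_increment_version_number:
                # hypothetical: one higher than the highest existing version for this name
                max_version = self.__class__.objects.filter(name=self.name).aggregate(Max('version'))['version__max']
                self.version = (max_version or 0) + 1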
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/populate.py b/SAS/TMSS/backend/src/tmss/tmssapp/populate.py
index a44659eecd9aa959f14fbeccb385c2c7c38973cb..728450422c1a7472899f37115c57c6a99de0b3a8 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/populate.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/populate.py
@@ -531,6 +531,14 @@ def populate_misc(apps, schema_editor):
                                            directory="srm://lta-head.lofar.psnc.pl:8443/lofar/ops/projects/")
 
 
+def populate_misc2(apps, schema_editor):
+    cluster, _ = Cluster.objects.get_or_create(name="CEP4", location="CIT", archive_site=False)
+    Filesystem.objects.create(name="Test Storage (CIT)", cluster=cluster, capacity=3.6e15,
+                              directory="/data/test-projects")
+    Filesystem.objects.create(name="Production Storage (CIT)", cluster=cluster, capacity=3.6e15,
+                              directory="/data/projects")
+
+
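    Two hedged robustness notes on populate_misc2, since it runs inside migration 0023:
    Filesystem.objects.create will raise if the rows already exist when the migration is replayed, and
    Django's documentation recommends historical models (apps.get_model) over direct model imports in
    migration-run code. An idempotent variant along those lines (a sketch, with the defaults taken from
    the values above):

        def populate_misc2(apps, schema_editor):
            Cluster = apps.get_model('tmssapp', 'Cluster')
            Filesystem = apps.get_model('tmssapp', 'Filesystem')
            cluster, _ = Cluster.objects.get_or_create(name="CEP4", defaults={'location': "CIT", 'archive_site': False})
            for name, directory in (("Test Storage (CIT)", "/data/test-projects"),
                                    ("Production Storage (CIT)", "/data/projects")):
                Filesystem.objects.get_or_create(name=name, cluster=cluster,
                                                 defaults={'capacity': 3.6e15, 'directory': directory})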
 def populate_connectors():
     # the TaskConnectorType's define how the Task[Draft/Blueprint] *can* be connected.
 
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py
index 8092b2429e93ad9e7585a5f78f9b26a9eb310add..f4d6b616cb2b1325297e27c3d1e138ce1987d01d 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py
@@ -653,7 +653,7 @@ def create_observation_control_subtask_from_task_blueprint(task_blueprint: TaskB
     # an observation has no input, it just produces output data
     # create a subtask_output per task_connector_type of the task, but only with role Correlator or Beamformer because that is what observations produce.
     for task_connector_type in task_blueprint.specifications_template.connector_types.filter(iotype__value=IOType.Choices.OUTPUT.value).filter(Q(role__value=Role.Choices.CORRELATOR.value)|Q(role__value=Role.Choices.BEAMFORMER.value)).all():
-        subtask_output = SubtaskOutput.objects.create(subtask=subtask, output_role=task_connector_type)
+        subtask_output = SubtaskOutput.objects.create(subtask=subtask, output_role=task_connector_type, filesystem=_output_filesystem(subtask))
 
     # step 3: set state to DEFINED
     subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value)
@@ -720,7 +720,7 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask)
                                                            selection_template=selection_template)
 
     # create an internal SubtaskOutput (output_role=None), because we do not expose qa data yet at task level
-    qafile_subtask_output = SubtaskOutput.objects.create(subtask=qafile_subtask, output_role=None)
+    qafile_subtask_output = SubtaskOutput.objects.create(subtask=qafile_subtask, output_role=None, filesystem=_output_filesystem(qafile_subtask))
 
     # step 3: set state to DEFINED
     qafile_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value)
@@ -798,7 +798,7 @@ def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subta
                                                         selection_template=selection_template)
 
     # create an internal SubtaskOutput (output_role=None), because we do not expose qa data yet at task level
-    qaplots_subtask_output = SubtaskOutput.objects.create(subtask=qaplots_subtask, output_role=None)
+    qaplots_subtask_output = SubtaskOutput.objects.create(subtask=qaplots_subtask, output_role=None, filesystem=_output_filesystem(qaplots_subtask))
 
     # step 3: set state to DEFINED
     qaplots_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value)
@@ -867,7 +867,7 @@ def create_pipeline_subtask_from_task_blueprint(task_blueprint: TaskBlueprint, s
 
     # create a subtask_output per task_connector_type of the task
     for task_connector_type in task_blueprint.specifications_template.connector_types.filter(iotype__value=IOType.Choices.OUTPUT.value).all():
-        subtask_output = SubtaskOutput.objects.create(subtask=subtask, output_role=task_connector_type)
+        subtask_output = SubtaskOutput.objects.create(subtask=subtask, output_role=task_connector_type, filesystem=_output_filesystem(subtask))
 
     # step 3: set state to DEFINED
     subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value)
@@ -1380,12 +1380,18 @@ def _output_root_directory(subtask: Subtask) -> str:
     """ Return the directory under which output needs to be stored. """
 
     # Support for several projects will be added in TMSS-689
-    directory = "/data/%s/%s/L%s" % ("projects" if isProductionEnvironment() else "test-projects",
-                                     subtask.project.name,
-                                     subtask.id)
+    directory = "%s/%s/L%s" % (_output_filesystem(subtask).directory, subtask.project.name, subtask.id)
 
     return directory
 
+
+def _output_filesystem(subtask: Subtask) -> Filesystem:
+    """ Return the filesystem that output dataproducts should be written to."""
+
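+    # note: the subtask argument is not used yet; the target filesystem is currently chosen purely by environment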
+    return Filesystem.objects.get(name='%s Storage (CIT)' % ('Production' if isProductionEnvironment() else 'Test'))
+
+
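    Looking the filesystem up by its display name ties this function to the exact strings seeded in
    populate_misc2. A hedged alternative sketch that matches on cluster and directory instead (both also
    hard-coded by this patch in populate_misc2), keeping the same environment switch:

        def _output_filesystem(subtask: Subtask) -> Filesystem:
            """ Return the filesystem that output dataproducts should be written to."""
            directory = "/data/projects" if isProductionEnvironment() else "/data/test-projects"
            return Filesystem.objects.get(cluster__name="CEP4", directory=directory)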
 def schedule_observation_subtask(observation_subtask: Subtask):
     ''' Schedule the given observation_subtask
     For first observations in a 'train' of subtasks this method is typically called by hand, or by the short-term-scheduler.
@@ -1983,7 +1988,7 @@ def schedule_ingest_subtask(ingest_subtask: Subtask):
 
     # Create one internal SubtaskOutput (output_role=None), all dataproducts end up in here.
     # We do not expose ingested data as a task output connector. The LTA is an endpoint.
-    ingest_subtask_output = SubtaskOutput.objects.create(subtask=ingest_subtask, output_role=None)
+    ingest_subtask_output = SubtaskOutput.objects.create(subtask=ingest_subtask, output_role=None, filesystem=_output_filesystem(ingest_subtask))
 
     # gather all dataproducts from all inputs
     for ingest_subtask_input in ingest_subtask.inputs.all():
@@ -2238,7 +2243,7 @@ def _generate_subtask_specs_from_pulsar_pipeline_task_specs(pipeline_task_specs,
     subtask_specs["dspsr"]["digifil_extra_opts"]   = " ".join([
               "-D {dm}".format(**digifil_specs) if digifil_specs["dm"] != -1 else "",
               "-t {integration_time}".format(**digifil_specs),
-              "-f {frequency_channels}{dedispersion}".format(**digifil_specs, dedispersion=":D" if digifil_specs["coherent_dedispersion"] else ""),
+              "-F {frequency_channels}{dedispersion}".format(**digifil_specs, dedispersion=":D" if digifil_specs["coherent_dedispersion"] else ""),
               ])
 
     subtask_specs["dspsr"]["nopdmp"]               = not dspsr_specs["optimise_period_dm"]
diff --git a/SAS/TMSS/backend/test/t_tmssapp_scheduling_django_API.py b/SAS/TMSS/backend/test/t_tmssapp_scheduling_django_API.py
index 8bbddf0766621a51c7d1efb949b8d39ff358ff9c..d12554dc74c7e6175c5efeb4c0d18fe7eef22145 100755
--- a/SAS/TMSS/backend/test/t_tmssapp_scheduling_django_API.py
+++ b/SAS/TMSS/backend/test/t_tmssapp_scheduling_django_API.py
@@ -194,6 +194,7 @@ class SubtaskTest(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
         cls.task_blueprint = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())
+        cls.filesystem = models.Filesystem.objects.create(**Filesystem_test_data())
 
     def test_Subtask_gets_created_with_correct_creation_timestamp(self):
 
@@ -282,7 +283,7 @@ class SubtaskTest(unittest.TestCase):
         subtask1:models.Subtask = models.Subtask.objects.create(**Subtask_test_data(task_blueprint=self.task_blueprint, primary=True))
         subtask2:models.Subtask = models.Subtask.objects.create(**Subtask_test_data(task_blueprint=self.task_blueprint, primary=False))
 
-        output1 = models.SubtaskOutput.objects.create(subtask=subtask1)
+        output1 = models.SubtaskOutput.objects.create(subtask=subtask1, filesystem=self.filesystem)
         models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=subtask2, producer=output1))
 
         self.assertEqual(subtask1, subtask2.predecessors.all()[0])
@@ -300,12 +301,12 @@ class SubtaskTest(unittest.TestCase):
         #      |        |
         # ST2 -          -> ST5 ---> ST6
 
-        output1 = models.SubtaskOutput.objects.create(subtask=subtask1)
-        output2 = models.SubtaskOutput.objects.create(subtask=subtask2)
-        output3 = models.SubtaskOutput.objects.create(subtask=subtask3)
-        output4 = models.SubtaskOutput.objects.create(subtask=subtask4)
-        output5 = models.SubtaskOutput.objects.create(subtask=subtask5)
-        output6 = models.SubtaskOutput.objects.create(subtask=subtask6)
+        output1 = models.SubtaskOutput.objects.create(subtask=subtask1, filesystem=self.filesystem)
+        output2 = models.SubtaskOutput.objects.create(subtask=subtask2, filesystem=self.filesystem)
+        output3 = models.SubtaskOutput.objects.create(subtask=subtask3, filesystem=self.filesystem)
+        output4 = models.SubtaskOutput.objects.create(subtask=subtask4, filesystem=self.filesystem)
+        output5 = models.SubtaskOutput.objects.create(subtask=subtask5, filesystem=self.filesystem)
+        output6 = models.SubtaskOutput.objects.create(subtask=subtask6, filesystem=self.filesystem)
 
         models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=subtask3, producer=output1))
         models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=subtask3, producer=output2))
@@ -327,7 +328,7 @@ class SubtaskTest(unittest.TestCase):
     def test_Subtask_transformed_dataproducts(self):
         # setup
         subtask1:models.Subtask = models.Subtask.objects.create(**Subtask_test_data())
-        output1:models.SubtaskOutput = models.SubtaskOutput.objects.create(subtask=subtask1)
+        output1:models.SubtaskOutput = models.SubtaskOutput.objects.create(subtask=subtask1, filesystem=self.filesystem)
         output1_dp:models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=output1))
 
         subtask2:models.Subtask = models.Subtask.objects.create(**Subtask_test_data())
@@ -335,7 +336,7 @@ class SubtaskTest(unittest.TestCase):
         input2_dp = output1_dp
         input2.dataproducts.set([input2_dp])
         input2.save()
-        output2:models.SubtaskOutput = models.SubtaskOutput.objects.create(subtask=subtask2)
+        output2:models.SubtaskOutput = models.SubtaskOutput.objects.create(subtask=subtask2, filesystem=self.filesystem)
         output2_dp:models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=output2))
 
         models.DataproductTransform.objects.create(input=input2_dp, output=output2_dp, identity=True)
diff --git a/SAS/TMSS/backend/test/t_tmssapp_specification_django_API.py b/SAS/TMSS/backend/test/t_tmssapp_specification_django_API.py
index 3dce344b8eeed7caa5671c30b4370018cc6c9606..7e37bf7bde152b8f4dd5301831a94b1c24beee15 100755
--- a/SAS/TMSS/backend/test/t_tmssapp_specification_django_API.py
+++ b/SAS/TMSS/backend/test/t_tmssapp_specification_django_API.py
@@ -837,6 +837,64 @@ class SchedulingUnitBlueprintTest(unittest.TestCase):
         self.assertIn('does not allow triggering', str(context.exception))
 
 
+class SchedulingUnitObservingStrategyTemplateTest(unittest.TestCase):
+    def test_SchedulingUnitObservingStrategyTemplate_version_auto_increment(self):
+        name = str(uuid.uuid4())
+        description = str(uuid.uuid4())
+        scheduling_unit_template = models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data())
+        template_doc = scheduling_unit_template.get_default_json_document_for_schema()
+
+        template = models.SchedulingUnitObservingStrategyTemplate.objects.create(name=name,
+                                                                                 description=description,
+                                                                                 scheduling_unit_template=scheduling_unit_template,
+                                                                                 template=template_doc)
+
+        # version should be 1
+        self.assertEqual(1, template.version)
+
+        # changing name should not auto-increment version
+        template.name = str(uuid.uuid4())
+        template.save()
+        template.refresh_from_db()
+        self.assertEqual(1, template.version)
+
+        # changing description should not auto-increment version
+        template.description = str(uuid.uuid4())
+        template.save()
+        template.refresh_from_db()
+        self.assertEqual(1, template.version)
+
+        # changing the template document should not auto-increment the version, because the template is not in use yet
+        self.assertFalse(template.is_used)
+        template.template['extra'] = 'property'
+        template.save()
+        template.refresh_from_db()
+        self.assertEqual(1, template.version)
+
+        # use the template
+        models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data(observation_strategy_template=template))
+        self.assertTrue(template.is_used)
+
+        # changing name should still not auto-increment version, even now that it's used
+        template.name = str(uuid.uuid4())
+        template.save()
+        template.refresh_from_db()
+        self.assertEqual(1, template.version)
+
+        # changing description should still not auto-increment version, even now that it's used
+        template.description = str(uuid.uuid4())
+        template.save()
+        template.refresh_from_db()
+        self.assertEqual(1, template.version)
+
+        # but changing template should auto-increment version now, because the template is used
+        self.assertTrue(template.is_used)
+        template.template['extra2'] = 'property2'
+        template.save()
+        template.refresh_from_db()
+        self.assertEqual(2, template.version)
+
+
 class TaskBlueprintTest(unittest.TestCase):
     @classmethod
     def setUpClass(cls) -> None:
diff --git a/SAS/TMSS/backend/test/tmss_test_data_django_models.py b/SAS/TMSS/backend/test/tmss_test_data_django_models.py
index d0bf350595efa1e860afb9c995eec85cb7b09bc8..a5ce8233401eceee38183665dda69f9f7033e6bd 100644
--- a/SAS/TMSS/backend/test/tmss_test_data_django_models.py
+++ b/SAS/TMSS/backend/test/tmss_test_data_django_models.py
@@ -362,7 +362,8 @@ def SubtaskOutput_test_data(subtask: models.Subtask=None) -> dict:
         subtask = models.Subtask.objects.create(**Subtask_test_data())
 
     return {"subtask": subtask,
-            "tags":[]}
+            "tags":[],
+            "filesystem": models.Filesystem.objects.create(**Filesystem_test_data())}
 
 def SubtaskInput_test_data(subtask: models.Subtask=None, producer: models.SubtaskOutput=None, selection_doc=None, selection_template: models.TaskRelationSelectionTemplate=None) -> dict:
     if subtask is None:
diff --git a/SAS/TMSS/backend/test/tmss_test_data_rest.py b/SAS/TMSS/backend/test/tmss_test_data_rest.py
index 8dbf787d06ae1f7c454a9a3c6bf2e7c55a8500a1..7592d0fdd05e64a21cda275db9fcc3902c36c5b3 100644
--- a/SAS/TMSS/backend/test/tmss_test_data_rest.py
+++ b/SAS/TMSS/backend/test/tmss_test_data_rest.py
@@ -659,13 +659,22 @@ class TMSSRESTTestDataCreator():
             self._subtask_url = self.post_data_and_get_url(self.Subtask(task_blueprint_url = self.post_data_and_get_url(self.TaskBlueprint(), '/task_blueprint/')), '/subtask/')
             return self._subtask_url
 
+    @property
+    def cached_filesystem_url(self):
+        try:
+            return self._filesystem_url
+        except AttributeError:
+            self._filesystem_url = self.post_data_and_get_url(self.Filesystem(), '/filesystem/')
+            return self._filesystem_url
+
     def SubtaskOutput(self, subtask_url=None):
 
         if subtask_url is None:
             subtask_url = self.cached_subtask_url
 
         return {"subtask": subtask_url,
-                "tags": []}
+                "tags": [],
+                "filesystem": self.cached_filesystem_url}
 
     @property
     def cached_subtask_output_url(self):
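
    cached_filesystem_url follows this file's existing try/except-AttributeError memoisation idiom
    (compare cached_subtask_url above). On Python 3.8+ the same effect is available via
    functools.cached_property; a sketch, not part of the patch:

        from functools import cached_property

        class TMSSRESTTestDataCreator:
            @cached_property
            def cached_filesystem_url(self):
                # computed once on first access, then cached on the instance
                return self.post_data_and_get_url(self.Filesystem(), '/filesystem/')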
diff --git a/SubSystems/SCU/SCU.ini b/SubSystems/SCU/SCU.ini
index cdf52544d4fd065b5a1944d23448ee9b35c8a24b..b43925eb895284eda6be4fb6dafbfb7c19bc86d3 100644
--- a/SubSystems/SCU/SCU.ini
+++ b/SubSystems/SCU/SCU.ini
@@ -21,4 +21,4 @@ programs=messagelogger
 programs=autocleanupservice,cleanupservice,storagequeryservice
 
 [group:TMSS]
-programs=tmss,tmss_feedback_handling_service,tmss_postgres_listener_service,tmss_scheduling_service,tmss_websocket_service,tmss_workflow_service,tmss_lta_adapter,tmss_ra_adapter,tmss_slack_webhook_service
+programs=tmss,tmss_feedback_handling_service,tmss_postgres_listener_service,tmss_scheduling_service,tmss_websocket_service,tmss_workflow_service,tmss_lta_adapter,tmss_ra_adapter,tmss_slack_webhook_service,tmss-nginx,tmss_precalculations_service,tmss_sip_generation_service