diff --git a/CDB/LOFAR_ConfigDb.json b/CDB/LOFAR_ConfigDb.json
index 26378e962292197d2e3dbed10a22dc20e51d1174..21a93809a8f8df10ccf4ecfaeb3ef8d926ef9b83 100644
--- a/CDB/LOFAR_ConfigDb.json
+++ b/CDB/LOFAR_ConfigDb.json
@@ -1,4 +1,14 @@
 {
+    "objects": {
+        "station": {
+            "name": [
+                "DevStation"
+            ],
+            "number": [
+                "999"
+            ]
+        }
+    },
     "servers": {
         "Docker": {
             "STAT": {
diff --git a/CDB/stations/DTS_ConfigDb.json b/CDB/stations/DTS_ConfigDb.json
index 398ef7d63577ce62f61c2374b9335a905ebce566..b0cd5d91ed6795c579680df10fe53a82b1e93ccb 100644
--- a/CDB/stations/DTS_ConfigDb.json
+++ b/CDB/stations/DTS_ConfigDb.json
@@ -1,4 +1,14 @@
 {
+    "objects": {
+        "station": {
+            "name": [
+                "DTS"
+            ],
+            "number": [
+                "902"
+            ]
+        }
+    },
     "servers": {
         "boot": {
             "STAT": {
diff --git a/CDB/stations/DTS_Outside_ConfigDb.json b/CDB/stations/DTS_Outside_ConfigDb.json
index 510b369f4c3eb38e7491bb91251bd94d4b87d0d3..e1b6e19079df728ebba70204fea9768249501224 100644
--- a/CDB/stations/DTS_Outside_ConfigDb.json
+++ b/CDB/stations/DTS_Outside_ConfigDb.json
@@ -1,4 +1,14 @@
 {
+    "objects": {
+        "station": {
+            "name": [
+                "DTSOutside"
+            ],
+            "number": [
+                "903"
+            ]
+        }
+    },
     "servers": {
         "boot": {
             "STAT": {
@@ -341,22 +351,22 @@
                                 "5.0"
                             ],
                             "FPGA_sdp_info_station_id_RW_default": [
-                                "902",
-                                "902",
-                                "902",
-                                "902",
-                                "902",
-                                "902",
-                                "902",
-                                "902",
-                                "902",
-                                "902",
-                                "902",
-                                "902",
-                                "902",
-                                "902",
-                                "902",
-                                "902"
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903"
                             ],
                             "TR_fpga_mask_RW_default": [
                                 "True",
diff --git a/CDB/stations/LTS_ConfigDb.json b/CDB/stations/LTS_ConfigDb.json
index 63b16a78b809ec51644094a41f6700f7f16ced22..f6cacc0d93adddef12a3868a3e1c93c70a083f6b 100644
--- a/CDB/stations/LTS_ConfigDb.json
+++ b/CDB/stations/LTS_ConfigDb.json
@@ -1,4 +1,14 @@
 {
+    "objects": {
+        "station": {
+            "name": [
+                "LTS"
+            ],
+            "number": [
+                "901"
+            ]
+        }
+    },
     "servers": {
         "boot": {
             "STAT": {
diff --git a/bin/dump_ConfigDb.sh b/bin/dump_ConfigDb.sh
index c1f6dc214e32458af1f1d555332ecb40c2b71601..2532b8e275a3c4a609dc9b618fb143f8815f94a6 100755
--- a/bin/dump_ConfigDb.sh
+++ b/bin/dump_ConfigDb.sh
@@ -1,4 +1,7 @@
 #!/bin/bash
 
 # writes the JSON dump to stdout, Do not change -i into -it incompatible with gitlab ci!
-docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig python -m dsconfig.dump
+docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig bash -c '
+  python -m dsconfig.dump > /tmp/dsconfig-configdb-dump.json
+  /manage_object_properties.py -r > /tmp/dsconfig-objectdb-dump.json
+  /merge_json.py /tmp/dsconfig-objectdb-dump.json /tmp/dsconfig-configdb-dump.json'
diff --git a/docker-compose/dsconfig/Dockerfile b/docker-compose/dsconfig/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..9783411ff933a880dca5003b8d5bceab703ff54a
--- /dev/null
+++ b/docker-compose/dsconfig/Dockerfile
@@ -0,0 +1,5 @@
+ARG SOURCE_IMAGE
+FROM ${SOURCE_IMAGE}
+
+COPY manage_object_properties.py /
+COPY merge_json.py /
diff --git a/docker-compose/dsconfig/manage_object_properties.py b/docker-compose/dsconfig/manage_object_properties.py
new file mode 100755
index 0000000000000000000000000000000000000000..7c4a75bb7d97293fa9df3b94af81486393350ee8
--- /dev/null
+++ b/docker-compose/dsconfig/manage_object_properties.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python3
+"""
+
+   Import/export the object properties of the Tango Controls Database.
+
+"""
+
+from tango import Database
+
+def read_objects(db = None) -> dict:
+    """ Read and return all object properties. """
+
+    db = db or Database()
+
+    result = {}
+
+    objects = db.get_object_list("*").value_string
+
+    for obj in objects:
+        result[obj] = {}
+        properties = db.get_object_property_list(obj, "*").value_string
+
+        for prop in properties:
+            value = db.get_property(obj, prop)[prop]
+
+            result[obj][prop] = list(value)
+
+    return result
+
+def write_objects(objects: dict, db = None) -> None:
+    """ Write the given object properties. """
+
+    db = db or Database()
+
+    for obj, properties in objects.items():
+        db.put_property(obj, properties)
+
+if __name__ == "__main__":
+    import sys
+    import argparse
+    import json
+
+    parser = argparse.ArgumentParser("Import/export object properties of the Tango Database using the JSON file format")
+    parser.add_argument('-w', '--write', default=False, required=False, action='store_true', help='import objects from stdin')
+    parser.add_argument('-r', '--read', default=False, required=False, action='store_true', help='export all objects to stdout in JSON')
+    args = parser.parse_args()
+
+    if not args.read and not args.write:
+        parser.print_help()
+        sys.exit(1)
+
+    # import
+    if args.write:
+        objects = json.load(sys.stdin)
+        write_objects(objects["objects"])
+
+    # export
+    if args.read:
+        objects = read_objects()
+        print(json.dumps({"objects": objects}, indent=4))
diff --git a/docker-compose/dsconfig/merge_json.py b/docker-compose/dsconfig/merge_json.py
new file mode 100755
index 0000000000000000000000000000000000000000..c0b04d8466273862950f1a7060541d961d937d7d
--- /dev/null
+++ b/docker-compose/dsconfig/merge_json.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+
+""" Merge all JSON files given on the command line at top level. """
+
+import json
+
+if __name__ == "__main__":
+    import argparse
+
+    parser = argparse.ArgumentParser("Merge input JSON files at top level. Keys from later files override those from earlier files.")
+    parser.add_argument('files', metavar='FILE', type=str, nargs='+', help='JSON input files')
+    args = parser.parse_args()
+
+    result = {}
+
+    # read all provided files
+    for filename in args.files:
+        with open(filename) as f:
+            file_dict = json.load(f)
+
+            # add them to the result
+            result.update(file_dict)
+
+
+    # print result in JSON
+    print(json.dumps(result, indent=4))
diff --git a/docker-compose/tango-prometheus-exporter/code/tango-prometheus-client.py b/docker-compose/tango-prometheus-exporter/code/tango-prometheus-client.py
index 3152ca29c8c0d78846508e8301e259e694579e75..489d5282acff1618d2c436a741f1a3c5d9f6db3b 100644
--- a/docker-compose/tango-prometheus-exporter/code/tango-prometheus-client.py
+++ b/docker-compose/tango-prometheus-exporter/code/tango-prometheus-client.py
@@ -64,7 +64,8 @@ class ArchiverPolicy(object):
         return sorted(list(attributes))
 
 class CustomCollector(object):
-    def __init__(self, config, proxy_timeout=250):
+    def __init__(self, station, config, proxy_timeout=250):
+        self.station = station
         self.policy = ArchiverPolicy(config)
         self.proxy_timeout = proxy_timeout
 
@@ -97,7 +98,7 @@ class CustomCollector(object):
             return None
 
         # (labels, value)
-        return ([dev.dev_name(), attr_info.name, str_value, data_type, f"{x:02}", f"{y:02}", f"{idx:03}"], float_value)
+        return ([self.station, dev.dev_name(), attr_info.name, str_value, data_type, f"{x:02}", f"{y:02}", f"{idx:03}"], float_value)
 
     def metrics_scalar(self, dev, attr_info, attr_value):
         """ Return all metrics for a given SCALAR attribute. """
@@ -182,8 +183,8 @@ class CustomCollector(object):
         logger.info("Start scraping")
         scrape_begin = time.time()
 
-        attribute_metrics = GaugeMetricFamily("device_attribute", 'Device attribute value', labels=['device', 'name', 'str_value', 'type', 'x', 'y', 'idx'])
-        scraping_metrics = GaugeMetricFamily("device_scraping", 'Device scraping duration', labels=['device'])
+        attribute_metrics = GaugeMetricFamily("device_attribute", 'Device attribute value', labels=['station', 'device', 'name', 'str_value', 'type', 'x', 'y', 'idx'])
+        scraping_metrics = GaugeMetricFamily("device_scraping", 'Device scraping duration', labels=['station', 'device'])
 
         for device_name in self.policy.devices():
             logger.debug(f"Processing device {device_name}")
@@ -203,7 +204,7 @@ class CustomCollector(object):
 
             logger.info(f"Done processing device {device_name}. Took {dev_scrape_end - dev_scrape_begin} seconds.")
 
-            scraping_metrics.add_metric([device_name], dev_scrape_end - dev_scrape_begin)
+            scraping_metrics.add_metric([self.station, device_name], dev_scrape_end - dev_scrape_begin)
 
         scrape_end = time.time()
         logger.info(f"Done scraping. Took {scrape_end - scrape_begin} seconds.")
@@ -214,6 +215,8 @@ class CustomCollector(object):
         yield scraping_metrics
 
 if __name__ == '__main__':
+    import sys
+
     parser = argparse.ArgumentParser()
     parser.add_argument('-c', '--config', type=str, required=True, help='configuration file')
     parser.add_argument('-t', '--timeout', type=int, required=False, default=250, help='device proxy timeout (ms)')
@@ -221,7 +224,15 @@ if __name__ == '__main__':
     args = parser.parse_args()
 
     config = ArchiverPolicy.load_config(args.config)
-    collector = CustomCollector(config, proxy_timeout=args.timeout)
+
+    db = Database()
+    try:
+        station = db.get_property("station","name")["name"][0]
+    except Exception as e:
+        logger.exception("Could not determine station name")
+        sys.exit(1)
+
+    collector = CustomCollector(config, station=station, proxy_timeout=args.timeout)
 
     logger.info("Starting server")
     start_http_server(args.port)
diff --git a/docker-compose/tango.yml b/docker-compose/tango.yml
index 5a6839f44a356113ae1fc525a0ff6e3290e777cd..c9cdac909bf4a863367f3541b1e77d5be659fd2a 100644
--- a/docker-compose/tango.yml
+++ b/docker-compose/tango.yml
@@ -70,7 +70,10 @@ services:
     restart: unless-stopped
 
   dsconfig:
-    image: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-dsconfig:${TANGO_DSCONFIG_VERSION}
+    build:
+        context: dsconfig
+        args:
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-dsconfig:${TANGO_DSCONFIG_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}dsconfig
     networks:
       - control
diff --git a/sbin/update_ConfigDb.sh b/sbin/update_ConfigDb.sh
index 1255f1ea141a75940f2cd858dfc2b40818bd6ec2..f1401d9c6e40601036449553d2919c434c7f8bf1 100755
--- a/sbin/update_ConfigDb.sh
+++ b/sbin/update_ConfigDb.sh
@@ -11,6 +11,9 @@ fi
 # in the container won't be the same as on the host.
 docker cp "${file}" "${CONTAINER_NAME_PREFIX}"dsconfig:/tmp/dsconfig-update-settings.json || exit 1
 
+# import object properties, Do not change -i into -it this will break integration tests in gitlab ci!
+docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig /manage_object_properties.py --write < "${file}"
+
 # update settings, Do not change -i into -it this will break integration tests in gitlab ci!
 docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig json2tango --write --update /tmp/dsconfig-update-settings.json