diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ab1edeb28b2c72b34aadbc7eb132492b8e3b6952..e35305c9c183a36c2e2b8515141dd8d2c25657be 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -65,7 +65,7 @@ stages:
 docker_store_images_master_tag:
   extends: .base_docker_store_images
   rules:
-    - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH) && $CI_COMMIT_TAG
+    - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH) || $CI_COMMIT_TAG
 
 # Download all remote images and store them on our image registry if .env changes
 # on a merge request
@@ -85,7 +85,7 @@ docker_store_images_changes:
 docker_build_image_all:
   extends: .base_docker_images
   rules:
-    - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH) && $CI_COMMIT_TAG
+    - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH) || $CI_COMMIT_TAG
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh lofar-device-base latest
@@ -94,33 +94,34 @@ docker_build_image_all:
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh grafana latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh loki latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh logstash latest
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh jupyter latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh jupyter-lab latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh apsct-sim latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh ccd-sim latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh apspu-sim latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh recv-sim latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh sdptr-sim latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh unb2-sim latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-antennafield latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-apsct latest
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-ccd latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-apspu latest
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-tilebeam latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-boot latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-beamlet latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-ccd latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-configuration latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-digitalbeam latest
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-antennafield latest
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-boot latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-docker latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-observation latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-observation-control latest
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-psoc latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-pcon latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-psoc latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-recv latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sdp latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-temperature-manager latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-tilebeam latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-unb2 latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-bst latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sst latest
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-unb2 latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-xst latest
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-temperature-manager latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh archiver-timescale latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbpp latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-cm latest
@@ -199,11 +200,11 @@ docker_build_image_jupyter:
     refs:
       - merge_requests
     changes:
-      - docker-compose/jupyter.yml
-      - docker-compose/jupyter/*
+      - docker-compose/jupyter-lab.yml
+      - docker-compose/jupyterlab/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
-    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh jupyter $tag
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh jupyter-lab $tag
 docker_build_image_apsct_sim:
   extends: .base_docker_images_except
   only:
@@ -292,6 +293,17 @@ docker_build_image_device_ccd:
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-ccd $tag
+docker_build_image_device_configuration:
+  extends: .base_docker_images_except
+  only:
+    refs:
+      - merge_requests
+    changes:
+      - docker-compose/device-configuration.yml
+      - docker-compose/lofar-device-base/*
+  script:
+#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-configuration $tag
 docker_build_image_device_apspu:
   extends: .base_docker_images_except
   only:
@@ -651,7 +663,7 @@ wheel_packaging:
   image: ubuntu:bionic
   when: manual
   rules:
-  - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH) && $CI_COMMIT_TAG
+  - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH) || $CI_COMMIT_TAG
   before_script:
     - apt-get update
     - apt-get install ansible -y
diff --git a/CDB/LOFAR_ConfigDb.json b/CDB/LOFAR_ConfigDb.json
index 1df2e30fdc1854edf45328cd3c61e64c0d1f4479..d4f69ae67f2d4c19ae01b81b2645524e929c9443 100644
--- a/CDB/LOFAR_ConfigDb.json
+++ b/CDB/LOFAR_ConfigDb.json
@@ -17,6 +17,13 @@
                 }
             }
         },
+        "Configuration": {
+            "STAT": {
+                "Configuration": {
+                    "STAT/Configuration/1": {}
+                }
+            }
+        },
         "Observation": {
             "STAT": {
                 "Observation": {
diff --git a/CDB/stations/CS001_ConfigDb.json b/CDB/stations/CS001_ConfigDb.json
index 5ea7c3d0bc87365f3de6cdf4c3e06aedcc4d65f1..ecb8f2016d63d49cc9d2ba229dffeee02a98b768 100644
--- a/CDB/stations/CS001_ConfigDb.json
+++ b/CDB/stations/CS001_ConfigDb.json
@@ -254,32 +254,32 @@
                             ],
                             "Control_to_RECV_mapping": [
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0", "-1", "0", "-1", "1", "25", "1", "27", "1", "29", "0", "-1",
-                                "0", "-1", "0", "-1", "1", "31", "1", "33", "1", "35", "0", "-1",
-                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
+                                "0", "-1", "0", "-1", "0", "-1", "1", "25", "0", "-1", "0", "-1",
+                                "0", "-1", "0", "-1", "1", "27", "1", "29", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
+                                "0", "-1", "0", "-1", "0", "-1", "1", "31", "0", "-1", "0", "-1",
+                                "0", "-1", "0", "-1", "1", "33", "1", "35", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1"
                             ],
                             "Power_to_RECV_mapping": [
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0", "-1", "0", "-1", "1", "24", "1", "26", "1", "28", "0", "-1",
-                                "0", "-1", "0", "-1", "1", "30", "1", "32", "1", "34", "0", "-1",
-                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
+                                "0", "-1", "0", "-1", "0", "-1", "1", "24", "0", "-1", "0", "-1",
+                                "0", "-1", "0", "-1", "1", "26", "1", "28", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
+                                "0", "-1", "0", "-1", "0", "-1", "1", "30", "0", "-1", "0", "-1",
+                                "0", "-1", "0", "-1", "1", "32", "1", "34", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1"
                             ],
                             "Antenna_to_SDP_Mapping": [
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0", "-1", "0", "-1", "2",  "0", "2",  "1", "2",  "2", "0", "-1",
-                                "0", "-1", "0", "-1", "2",  "3", "2",  "4", "2",  "5", "0", "-1",
-                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
+                                "0", "-1", "0", "-1", "0", "-1", "2", "12", "0", "-1", "0", "-1",
+                                "0", "-1", "0", "-1", "2", "13", "2", "14", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
+                                "0", "-1", "0", "-1", "0", "-1", "2", "15", "0", "-1", "0", "-1",
+                                "0", "-1", "0", "-1", "2", "16", "2", "17", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1"
                             ],
                             "Antenna_Field_Reference_ETRS": [
@@ -399,56 +399,56 @@
                             "Control_to_RECV_mapping": [
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1",  "1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0", "-1", "0", "-1", "0", "-1", "1",  "3", "1",  "5", "0", "-1",
-                                "1",  "7", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "1",  "9", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "11",
-                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "13", "1", "15",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "17", "0", "-1"
+                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
+                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
+                                "1",  "1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1",  "3",
+                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1",  "5",
+                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1",  "7", "0", "-1",
+                                "1",  "9", "0", "-1", "1", "11", "1", "13", "1", "15", "1", "17"
                             ],
                             "Power_to_RECV_mapping": [
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1",  "0", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0", "-1", "0", "-1", "0", "-1", "1",  "2", "1",  "4", "0", "-1",
-                                "1",  "6", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "1",  "8", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "10",
-                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "12", "1", "14",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1", "16", "0", "-1"
+                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
+                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
+                                "1",  "0", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1",  "2",
+                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1",  "4",
+                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1",  "6", "0", "-1",
+                                "1",  "8", "0", "-1", "1", "10", "1", "12", "1", "14", "1", "16"
                             ],
                             "Antenna_to_SDP_Mapping": [
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0",  "0", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0", "-1", "0", "-1", "0", "-1", "0",  "1", "0",  "2", "0", "-1",
-                                "0",  "3", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0",  "4", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0",  "5",
-                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1",  "0", "1",  "1",
                                 "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
-                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "1",  "2", "0", "-1"
+                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
+                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1",
+                                "0",  "0", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0",  "1",
+                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0",  "2",
+                                "0", "-1", "0", "-1", "0", "-1", "0", "-1", "0",  "3", "0", "-1",
+                                "0",  "4", "0", "-1", "0",  "5", "0",  "6", "0",  "7", "0",  "8"
                             ],
                             "Antenna_Field_Reference_ETRS": [
                                 "3826923.942", "460915.117", "5064643.229"
diff --git a/README.md b/README.md
index 6903db66caa2e9f35de02979b59fe681c258d6c7..7c56c0181ed18d7d9e9cc034f96347c97783d269 100644
--- a/README.md
+++ b/README.md
@@ -14,7 +14,7 @@ Station Control software related to Tango devices.
 * [User documentation (ReadTheDocs (Sphinx / ReStructuredText))](tangostationcontrol/docs/README.md)
 * [Docker compose documentation](docker-compose/README.md)
   * [Timescaledb](docker-compose/timescaledb/README.md)
-  * [Jupyter startup files](docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/README.md)
+  * [Jupyter startup files](docker-compose/jupyterlab/ipython-profiles/stationcontrol-jupyter/startup/README.md)
   * [Tango Prometheus exporter](https://git.astron.nl/lofar2.0/ska-tango-grafana-exporter)
 * [Developer Documentation](#development)
   * [Deployments](deploy/README.md)
@@ -71,7 +71,7 @@ automatically be restarted on reboot or failure. Stop them explicitly to bring
 them down (`make stop <container>`).
 
 Most notably, you will have web interfaces available at:
- - http://localhost:8888 (Jupyter Notebook)
+ - http://localhost:8888 (Jupyter Lab)
  - http://localhost:3000 (Grafana).
 
 # Development
@@ -106,6 +106,8 @@ Next change the version in the following places:
 
 # Release Notes
 
+* 0.5.0 Add `Configuration` device
+* 0.4.1 Fix for missing SDP attributes for spectral inversion
 * 0.4.0 Have most containers report health status and add `make await` command
 * 0.3.1 Fix for applying boot device dsconfig
 * 0.3.0 Initial version of deployment scripts and functionality
diff --git a/bootstrap/sbin/rebuild_system_from_scratch.sh b/bootstrap/sbin/rebuild_system_from_scratch.sh
index cb19cdcdec9c1a775bb1de614092fc9013de263f..bfbb52361032b49dd8333b3ec9704804630db7bb 100755
--- a/bootstrap/sbin/rebuild_system_from_scratch.sh
+++ b/bootstrap/sbin/rebuild_system_from_scratch.sh
@@ -100,7 +100,7 @@ function configure_tango_db()
 
 function start_support_images()
 {
-    (cd ${HOME_DIR}/docker-compose && make start jupyter)
+    (cd ${HOME_DIR}/docker-compose && make start jupyter-lab)
 }
 
 function start_lofar_images()
diff --git a/docker-compose/device-configuration.yml b/docker-compose/device-configuration.yml
new file mode 100644
index 0000000000000000000000000000000000000000..e1c5ef673284b6be3155ed73adde0fee2b846304
--- /dev/null
+++ b/docker-compose/device-configuration.yml
@@ -0,0 +1,59 @@
+#
+# Docker compose file that launches the Configuration device server
+# for the STAT/Configuration/1 device.
+#
+# View logs through 'docker logs -f -t device-configuration'.
+#
+# Defines:
+#   - device-configuration: device server for the station Configuration device
+#
+# Requires:
+#   - lofar-device-base.yml
+#
+version: '2.1'
+
+services:
+  device-configuration:
+    image: device-configuration
+    # build explicitly, as docker-compose does not understand a local image
+    # being shared among services.
+    build:
+        context: .
+        dockerfile: lofar-device-base/Dockerfile
+        args:
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+    container_name: ${CONTAINER_NAME_PREFIX}device-configuration
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
+    networks:
+      - control
+    ports:
+      - "5722:5722" # unique port for this DS
+      - "5822:5822" # ZeroMQ event port
+      - "5922:5922" # ZeroMQ heartbeat port
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
+    volumes:
+      - ..:/opt/lofar/tango:rw
+    environment:
+      - TANGO_HOST=${TANGO_HOST}
+      - TANGO_ZMQ_EVENT_PORT=5822
+      - TANGO_ZMQ_HEARTBEAT_PORT=5922
+    healthcheck:
+      test: l2ss-health STAT/Configuration/1
+      interval: 1m
+      timeout: 30s
+      retries: 3
+      start_period: 30s
+    working_dir: /opt/lofar/tango
+    entrypoint:
+      - bin/start-ds.sh
+      # configure CORBA to _listen_ on 0:port, but tell others we're _reachable_ through ${HOSTNAME}:port, since CORBA
+      # can't know about our Docker port forwarding
+      - l2ss-configuration-device Configuration STAT -v -ORBendPoint giop:tcp:device-configuration:5722 -ORBendPointPublish giop:tcp:${HOSTNAME}:5722
+    restart: on-failure
+    stop_signal: SIGINT # request a graceful shutdown of Tango
+    stop_grace_period: 2s
diff --git a/docker-compose/jupyter-lab.yml b/docker-compose/jupyter-lab.yml
index 2c5984dc6eac7327f0af29a9251e4834a7f9c2ff..6c4b6e75047a84bdc5268d37f2ca3cef15a76977 100644
--- a/docker-compose/jupyter-lab.yml
+++ b/docker-compose/jupyter-lab.yml
@@ -1,7 +1,7 @@
 #
 # Docker compose file that launches Jupyter Lab for interactive iTango sessions over HTTP.
 #
-# Connect by surfing to http://localhost:8889/
+# Connect by surfing to http://localhost:8888/
 # View logs through 'docker logs -f -t jupyter-lab'
 #
 # Defines:
@@ -33,10 +33,10 @@ services:
     environment:
       - TANGO_HOST=${TANGO_HOST}
     ports:
-      - "8889:8889"
+      - "8888:80"
     user: ${CONTAINER_EXECUTION_UID}
     working_dir: /jupyter-notebooks
     entrypoint:
       - /opt/lofar/tango/bin/start-ds.sh
-      - jupyter lab --port=8889 --no-browser --ip=0.0.0.0 --allow-root --NotebookApp.token= --NotebookApp.password=
+      - jupyter lab --port=80 --no-browser --ip=0.0.0.0 --allow-root --NotebookApp.token= --NotebookApp.password=
     restart: unless-stopped
diff --git a/docker-compose/jupyter.yml b/docker-compose/jupyter.yml
deleted file mode 100644
index 49824c9f0ce40632553e699cf407e5a195b38386..0000000000000000000000000000000000000000
--- a/docker-compose/jupyter.yml
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Docker compose file that launches Jupyter Notebook for interactive iTango sessions over HTTP.
-#
-# Connect by surfing to http://localhost:8888/
-# View logs through 'docker logs -f -t jupyter'
-#
-# Defines:
-#   - jupyter: Jupyter Notebook with iTango support
-#
-
-version: '2.1'
-
-services:
-  jupyter:
-    build:
-        context: jupyter
-        args:
-            CONTAINER_EXECUTION_UID: ${CONTAINER_EXECUTION_UID}
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
-    container_name: ${CONTAINER_NAME_PREFIX}jupyter
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "100m"
-        max-file: "10"
-    networks:
-      - control
-    volumes:
-        - ..:/opt/lofar/tango:rw
-        - ../jupyter-notebooks:/jupyter-notebooks:rw
-        - ${HOME}:/hosthome
-        - ${SCRATCH}:/scratch:rw
-    environment:
-      - TANGO_HOST=${TANGO_HOST}
-    ports:
-      - "8888:8888"
-    user: ${CONTAINER_EXECUTION_UID}
-    working_dir: /jupyter-notebooks
-    entrypoint:
-      - /opt/lofar/tango/bin/start-ds.sh
-      - /usr/bin/tini -- /usr/local/bin/jupyter-notebook --port=8888 --no-browser --ip=0.0.0.0 --allow-root --NotebookApp.token= --NotebookApp.password=
-    restart: unless-stopped
diff --git a/docker-compose/jupyter/Dockerfile b/docker-compose/jupyter/Dockerfile
deleted file mode 100644
index abb6f8872b202f952f587165a17fb08856cc653c..0000000000000000000000000000000000000000
--- a/docker-compose/jupyter/Dockerfile
+++ /dev/null
@@ -1,61 +0,0 @@
-ARG SOURCE_IMAGE
-FROM ${SOURCE_IMAGE}
-
-# UID if the user that this container will run under. This is needed to give directories
-# that are needed for temporary storage the proper owner and access rights.
-ARG CONTAINER_EXECUTION_UID=1000
-
-# Create new user with uid but only if uid not used
-RUN sudo adduser --disabled-password --system --uid ${CONTAINER_EXECUTION_UID} --no-create-home --home ${HOME} user || exit 0
-RUN sudo chown ${CONTAINER_EXECUTION_UID} -R ${HOME}
-
-# Add compiler to install python packages which come with C++ code
-RUN sudo apt-get update -y
-RUN sudo apt-get install -y g++ gcc python3-dev
-
-# start-ds file synchronization requirements
-RUN sudo apt-get install -y rsync
-
-# Install git to install pip requirements from git
-RUN sudo apt-get install -y git
-
-# Install dependencies of our scripts (bin/start-ds.sh)
-RUN sudo apt-get install -y rsync
-
-COPY requirements.txt ./
-RUN sudo pip3 install -r requirements.txt
-
-# Install some version of the casacore measures tables, to allow basic delay computation analysis in the notebooks
-RUN sudo apt-get install -y casacore-data
-
-# see https://github.com/jupyter/nbconvert/issues/1434
-RUN sudo bash -c "echo DEFAULT_ARGS += [\\\"--no-sandbox\\\"] >> /usr/local/lib/python3.7/dist-packages/pyppeteer/launcher.py"
-RUN sudo apt-get update -y
-RUN sudo apt-get install -y git gconf-service libasound2 libatk1.0-0 libatk-bridge2.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget libcairo-gobject2 libxinerama1 libgtk2.0-0 libpangoft2-1.0-0 libthai0 libpixman-1-0 libxcb-render0 libharfbuzz0b libdatrie1 libgraphite2-3 libgbm1
-
-# Allow Download as -> PDF via LaTeX
-RUN sudo apt-get install -y texlive-xetex texlive-fonts-recommended texlive-latex-recommended cm-super
-
-# Configure jupyter_bokeh
-RUN sudo mkdir -p /usr/share/jupyter /usr/etc
-RUN sudo chmod a+rwx /usr/share/jupyter /usr/etc
-RUN sudo jupyter nbextension install --sys-prefix --symlink --py jupyter_bokeh
-RUN sudo jupyter nbextension enable jupyter_bokeh --py --sys-prefix
-
-# Install profiles for ipython & jupyter
-COPY ipython-profiles /opt/ipython-profiles/
-RUN sudo chown ${CONTAINER_EXECUTION_UID} -R /opt/ipython-profiles
-COPY jupyter-kernels /usr/local/share/jupyter/kernels/
-
-# Install patched jupyter executable
-COPY jupyter-notebook /usr/local/bin/jupyter-notebook
-
-# Add Tini. Tini operates as a process subreaper for jupyter. This prevents kernel crashes.
-ENV TINI_VERSION v0.6.0
-ENV JUPYTER_RUNTIME_DIR=/tmp
-ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /usr/bin/tini
-RUN sudo chmod +x /usr/bin/tini
-
-USER ${CONTAINER_EXECUTION_UID}
-# pyppeteer-install installs in the homedir, so run it as the user that will execute the notebook
-RUN pyppeteer-install
diff --git a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/ipython_config.py b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/ipython_config.py
deleted file mode 100644
index 91b04aaa3a20232b60e5ced00a99648891955ce5..0000000000000000000000000000000000000000
--- a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/ipython_config.py
+++ /dev/null
@@ -1,578 +0,0 @@
-# Configuration file for ipython.
-
-#------------------------------------------------------------------------------
-# InteractiveShellApp(Configurable) configuration
-#------------------------------------------------------------------------------
-
-## A Mixin for applications that start InteractiveShell instances.
-#  
-#  Provides configurables for loading extensions and executing files as part of
-#  configuring a Shell environment.
-#  
-#  The following methods should be called by the :meth:`initialize` method of the
-#  subclass:
-#  
-#    - :meth:`init_path`
-#    - :meth:`init_shell` (to be implemented by the subclass)
-#    - :meth:`init_gui_pylab`
-#    - :meth:`init_extensions`
-#    - :meth:`init_code`
-
-## Execute the given command string.
-#c.InteractiveShellApp.code_to_run = ''
-
-## Run the file referenced by the PYTHONSTARTUP environment variable at IPython
-#  startup.
-#c.InteractiveShellApp.exec_PYTHONSTARTUP = True
-
-## List of files to run at IPython startup.
-#c.InteractiveShellApp.exec_files = []
-
-## lines of code to run at IPython startup.
-#c.InteractiveShellApp.exec_lines = []
-
-## A list of dotted module names of IPython extensions to load.
-#c.InteractiveShellApp.extensions = []
-
-## dotted module name of an IPython extension to load.
-#c.InteractiveShellApp.extra_extension = ''
-
-## A file to be run
-#c.InteractiveShellApp.file_to_run = ''
-
-## Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk2', 'gtk3',
-#  'osx', 'pyglet', 'qt', 'qt4', 'qt5', 'tk', 'wx', 'gtk2', 'qt4').
-#c.InteractiveShellApp.gui = None
-
-## Should variables loaded at startup (by startup files, exec_lines, etc.) be
-#  hidden from tools like %who?
-#c.InteractiveShellApp.hide_initial_ns = True
-
-## Configure matplotlib for interactive use with the default matplotlib backend.
-#c.InteractiveShellApp.matplotlib = None
-
-## Run the module as a script.
-#c.InteractiveShellApp.module_to_run = ''
-
-## Pre-load matplotlib and numpy for interactive use, selecting a particular
-#  matplotlib backend and loop integration.
-#c.InteractiveShellApp.pylab = None
-
-## If true, IPython will populate the user namespace with numpy, pylab, etc. and
-#  an ``import *`` is done from numpy and pylab, when using pylab mode.
-#  
-#  When False, pylab mode should not import any names into the user namespace.
-#c.InteractiveShellApp.pylab_import_all = True
-
-## Reraise exceptions encountered loading IPython extensions?
-#c.InteractiveShellApp.reraise_ipython_extension_failures = False
-
-#------------------------------------------------------------------------------
-# Application(SingletonConfigurable) configuration
-#------------------------------------------------------------------------------
-
-## This is an application.
-
-## The date format used by logging formatters for %(asctime)s
-#c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S'
-
-## The Logging format template
-#c.Application.log_format = '[%(name)s]%(highlevel)s %(message)s'
-
-## Set the log level by value or name.
-#c.Application.log_level = 30
-
-#------------------------------------------------------------------------------
-# BaseIPythonApplication(Application) configuration
-#------------------------------------------------------------------------------
-
-## IPython: an enhanced interactive Python shell.
-
-## Whether to create profile dir if it doesn't exist
-#c.BaseIPythonApplication.auto_create = False
-
-## Whether to install the default config files into the profile dir. If a new
-#  profile is being created, and IPython contains config files for that profile,
-#  then they will be staged into the new directory.  Otherwise, default config
-#  files will be automatically generated.
-#c.BaseIPythonApplication.copy_config_files = False
-
-## Path to an extra config file to load.
-#  
-#  If specified, load this config file in addition to any other IPython config.
-#c.BaseIPythonApplication.extra_config_file = ''
-
-## The name of the IPython directory. This directory is used for logging
-#  configuration (through profiles), history storage, etc. The default is usually
-#  $HOME/.ipython. This option can also be specified through the environment
-#  variable IPYTHONDIR.
-#c.BaseIPythonApplication.ipython_dir = ''
-
-## Whether to overwrite existing config files when copying
-#c.BaseIPythonApplication.overwrite = False
-
-## The IPython profile to use.
-#c.BaseIPythonApplication.profile = 'default'
-
-## Create a massive crash report when IPython encounters what may be an internal
-#  error.  The default is to append a short message to the usual traceback
-#c.BaseIPythonApplication.verbose_crash = False
-
-#------------------------------------------------------------------------------
-# TerminalIPythonApp(BaseIPythonApplication,InteractiveShellApp) configuration
-#------------------------------------------------------------------------------
-
-## Whether to display a banner upon starting IPython.
-#c.TerminalIPythonApp.display_banner = True
-
-## If a command or file is given via the command-line, e.g. 'ipython foo.py',
-#  start an interactive shell after executing the file or command.
-#c.TerminalIPythonApp.force_interact = False
-
-## Class to use to instantiate the TerminalInteractiveShell object. Useful for
-#  custom Frontends
-#c.TerminalIPythonApp.interactive_shell_class = 'IPython.terminal.interactiveshell.TerminalInteractiveShell'
-
-## Start IPython quickly by skipping the loading of config files.
-#c.TerminalIPythonApp.quick = False
-
-#------------------------------------------------------------------------------
-# InteractiveShell(SingletonConfigurable) configuration
-#------------------------------------------------------------------------------
-
-## An enhanced, interactive shell for Python.
-
-## 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run
-#  interactively (displaying output from expressions).
-#c.InteractiveShell.ast_node_interactivity = 'last_expr'
-
-## A list of ast.NodeTransformer subclass instances, which will be applied to
-#  user input before code is run.
-#c.InteractiveShell.ast_transformers = []
-
-## Make IPython automatically call any callable object even if you didn't type
-#  explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically.
-#  The value can be '0' to disable the feature, '1' for 'smart' autocall, where
-#  it is not applied if there are no more arguments on the line, and '2' for
-#  'full' autocall, where all callable objects are automatically called (even if
-#  no arguments are present).
-#c.InteractiveShell.autocall = 0
-
-## Autoindent IPython code entered interactively.
-#c.InteractiveShell.autoindent = True
-
-## Enable magic commands to be called without the leading %.
-#c.InteractiveShell.automagic = True
-
-## The part of the banner to be printed before the profile
-#c.InteractiveShell.banner1 = 'Python 3.7.3 (default, Jul 25 2020, 13:03:44) \nType "copyright", "credits" or "license" for more information.\n\nIPython 5.8.0 -- An enhanced Interactive Python.\n?         -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp      -> Python\'s own help system.\nobject?   -> Details about \'object\', use \'object??\' for extra details.\n'
-
-## The part of the banner to be printed after the profile
-#c.InteractiveShell.banner2 = ''
-
-## Set the size of the output cache.  The default is 1000, you can change it
-#  permanently in your config file.  Setting it to 0 completely disables the
-#  caching system, and the minimum value accepted is 20 (if you provide a value
-#  less than 20, it is reset to 0 and a warning is issued).  This limit is
-#  defined because otherwise you'll spend more time re-flushing a too small cache
-#  than working
-#c.InteractiveShell.cache_size = 1000
-
-## Use colors for displaying information about objects. Because this information
-#  is passed through a pager (like 'less'), and some pagers get confused with
-#  color codes, this capability can be turned off.
-#c.InteractiveShell.color_info = True
-
-## Set the color scheme (NoColor, Neutral, Linux, or LightBG).
-#c.InteractiveShell.colors = 'Neutral'
-
-## 
-#c.InteractiveShell.debug = False
-
-## **Deprecated**
-#  
-#  Will be removed in IPython 6.0
-#  
-#  Enable deep (recursive) reloading by default. IPython can use the deep_reload
-#  module which reloads changes in modules recursively (it replaces the reload()
-#  function, so you don't need to change anything to use it). `deep_reload`
-#  forces a full reload of modules whose code may have changed, which the default
-#  reload() function does not.  When deep_reload is off, IPython will use the
-#  normal reload(), but deep_reload will still be available as dreload().
-#c.InteractiveShell.deep_reload = False
-
-## Don't call post-execute functions that have failed in the past.
-#c.InteractiveShell.disable_failing_post_execute = False
-
-## If True, anything that would be passed to the pager will be displayed as
-#  regular output instead.
-#c.InteractiveShell.display_page = False
-
-## (Provisional API) enables html representation in mime bundles sent to pagers.
-#c.InteractiveShell.enable_html_pager = False
-
-## Total length of command history
-#c.InteractiveShell.history_length = 10000
-
-## The number of saved history entries to be loaded into the history buffer at
-#  startup.
-#c.InteractiveShell.history_load_length = 1000
-
-## 
-#c.InteractiveShell.ipython_dir = ''
-
-## Start logging to the given file in append mode. Use `logfile` to specify a log
-#  file to **overwrite** logs to.
-#c.InteractiveShell.logappend = ''
-
-## The name of the logfile to use.
-#c.InteractiveShell.logfile = ''
-
-## Start logging to the default log file in overwrite mode. Use `logappend` to
-#  specify a log file to **append** logs to.
-#c.InteractiveShell.logstart = False
-
-## 
-#c.InteractiveShell.object_info_string_level = 0
-
-## Automatically call the pdb debugger after every exception.
-#c.InteractiveShell.pdb = False
-
-## Deprecated since IPython 4.0 and ignored since 5.0, set
-#  TerminalInteractiveShell.prompts object directly.
-#c.InteractiveShell.prompt_in1 = 'In [\\#]: '
-
-## Deprecated since IPython 4.0 and ignored since 5.0, set
-#  TerminalInteractiveShell.prompts object directly.
-#c.InteractiveShell.prompt_in2 = '   .\\D.: '
-
-## Deprecated since IPython 4.0 and ignored since 5.0, set
-#  TerminalInteractiveShell.prompts object directly.
-#c.InteractiveShell.prompt_out = 'Out[\\#]: '
-
-## Deprecated since IPython 4.0 and ignored since 5.0, set
-#  TerminalInteractiveShell.prompts object directly.
-#c.InteractiveShell.prompts_pad_left = True
-
-## 
-#c.InteractiveShell.quiet = False
-
-## 
-#c.InteractiveShell.separate_in = '\n'
-
-## 
-#c.InteractiveShell.separate_out = ''
-
-## 
-#c.InteractiveShell.separate_out2 = ''
-
-## Show rewritten input, e.g. for autocall.
-#c.InteractiveShell.show_rewritten_input = True
-
-## Enables rich html representation of docstrings. (This requires the docrepr
-#  module).
-#c.InteractiveShell.sphinxify_docstring = False
-
-## 
-#c.InteractiveShell.wildcards_case_sensitive = True
-
-## 
-#c.InteractiveShell.xmode = 'Context'
-
-#------------------------------------------------------------------------------
-# TerminalInteractiveShell(InteractiveShell) configuration
-#------------------------------------------------------------------------------
-
-## Set to confirm when you try to exit IPython with an EOF (Control-D in Unix,
-#  Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a
-#  direct exit without any confirmation.
-#c.TerminalInteractiveShell.confirm_exit = True
-
-## Options for displaying tab completions, 'column', 'multicolumn', and
-#  'readlinelike'. These options are for `prompt_toolkit`, see `prompt_toolkit`
-#  documentation for more information.
-#c.TerminalInteractiveShell.display_completions = 'multicolumn'
-
-## Shortcut style to use at the prompt. 'vi' or 'emacs'.
-#c.TerminalInteractiveShell.editing_mode = 'emacs'
-
-## Set the editor used by IPython (default to $EDITOR/vi/notepad).
-#c.TerminalInteractiveShell.editor = 'vi'
-
-## Enable vi (v) or Emacs (C-X C-E) shortcuts to open an external editor. This is
-#  in addition to the F2 binding, which is always enabled.
-#c.TerminalInteractiveShell.extra_open_editor_shortcuts = False
-
-## Highlight matching brackets.
-#c.TerminalInteractiveShell.highlight_matching_brackets = True
-
-## The name or class of a Pygments style to use for syntax highlighting. To see
-#  available styles, run `pygmentize -L styles`.
-#c.TerminalInteractiveShell.highlighting_style = traitlets.Undefined
-
-## Override highlighting format for specific tokens
-#c.TerminalInteractiveShell.highlighting_style_overrides = {}
-
-## Enable mouse support in the prompt
-#c.TerminalInteractiveShell.mouse_support = False
-
-## Class used to generate Prompt token for prompt_toolkit
-#c.TerminalInteractiveShell.prompts_class = 'IPython.terminal.prompts.Prompts'
-
-## Use `raw_input` for the REPL, without completion and prompt colors.
-#  
-#  Useful when controlling IPython as a subprocess, and piping STDIN/OUT/ERR.
-#  Known usage are: IPython own testing machinery, and emacs inferior-shell
-#  integration through elpy.
-#  
-#  This mode default to `True` if the `IPY_TEST_SIMPLE_PROMPT` environment
-#  variable is set, or the current terminal is not a tty.
-#c.TerminalInteractiveShell.simple_prompt = False
-
-## Number of line at the bottom of the screen to reserve for the completion menu
-#c.TerminalInteractiveShell.space_for_menu = 6
-
-## Automatically set the terminal title
-#c.TerminalInteractiveShell.term_title = True
-
-## Use 24bit colors instead of 256 colors in prompt highlighting. If your
-#  terminal supports true color, the following command should print 'TRUECOLOR'
-#  in orange: printf "\x1b[38;2;255;100;0mTRUECOLOR\x1b[0m\n"
-#c.TerminalInteractiveShell.true_color = False
-
-#------------------------------------------------------------------------------
-# HistoryAccessor(HistoryAccessorBase) configuration
-#------------------------------------------------------------------------------
-
-## Access the history database without adding to it.
-#  
-#  This is intended for use by standalone history tools. IPython shells use
-#  HistoryManager, below, which is a subclass of this.
-
-## Options for configuring the SQLite connection
-#  
-#  These options are passed as keyword args to sqlite3.connect when establishing
-#  database conenctions.
-#c.HistoryAccessor.connection_options = {}
-
-## enable the SQLite history
-#  
-#  set enabled=False to disable the SQLite history, in which case there will be
-#  no stored history, no SQLite connection, and no background saving thread.
-#  This may be necessary in some threaded environments where IPython is embedded.
-#c.HistoryAccessor.enabled = True
-
-## Path to file to use for SQLite history database.
-#  
-#  By default, IPython will put the history database in the IPython profile
-#  directory.  If you would rather share one history among profiles, you can set
-#  this value in each, so that they are consistent.
-#  
-#  Due to an issue with fcntl, SQLite is known to misbehave on some NFS mounts.
-#  If you see IPython hanging, try setting this to something on a local disk,
-#  e.g::
-#  
-#      ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite
-#  
-#  you can also use the specific value `:memory:` (including the colon at both
-#  end but not the back ticks), to avoid creating an history file.
-#c.HistoryAccessor.hist_file = ''
-
-#------------------------------------------------------------------------------
-# HistoryManager(HistoryAccessor) configuration
-#------------------------------------------------------------------------------
-
-## A class to organize all history-related functionality in one place.
-
-## Write to database every x commands (higher values save disk access & power).
-#  Values of 1 or less effectively disable caching.
-#c.HistoryManager.db_cache_size = 0
-
-## Should the history database include output? (default: no)
-#c.HistoryManager.db_log_output = False
-
-#------------------------------------------------------------------------------
-# ProfileDir(LoggingConfigurable) configuration
-#------------------------------------------------------------------------------
-
-## An object to manage the profile directory and its resources.
-#  
-#  The profile directory is used by all IPython applications, to manage
-#  configuration, logging and security.
-#  
-#  This object knows how to find, create and manage these directories. This
-#  should be used by any code that wants to handle profiles.
-
-## Set the profile location directly. This overrides the logic used by the
-#  `profile` option.
-#c.ProfileDir.location = ''
-
-#------------------------------------------------------------------------------
-# BaseFormatter(Configurable) configuration
-#------------------------------------------------------------------------------
-
-## A base formatter class that is configurable.
-#  
-#  This formatter should usually be used as the base class of all formatters. It
-#  is a traited :class:`Configurable` class and includes an extensible API for
-#  users to determine how their objects are formatted. The following logic is
-#  used to find a function to format an given object.
-#  
-#  1. The object is introspected to see if it has a method with the name
-#     :attr:`print_method`. If is does, that object is passed to that method
-#     for formatting.
-#  2. If no print method is found, three internal dictionaries are consulted
-#     to find print method: :attr:`singleton_printers`, :attr:`type_printers`
-#     and :attr:`deferred_printers`.
-#  
-#  Users should use these dictionaries to register functions that will be used to
-#  compute the format data for their objects (if those objects don't have the
-#  special print methods). The easiest way of using these dictionaries is through
-#  the :meth:`for_type` and :meth:`for_type_by_name` methods.
-#  
-#  If no function/callable is found to compute the format data, ``None`` is
-#  returned and this format type is not used.
-
-## 
-#c.BaseFormatter.deferred_printers = {}
-
-## 
-#c.BaseFormatter.enabled = True
-
-## 
-#c.BaseFormatter.singleton_printers = {}
-
-## 
-#c.BaseFormatter.type_printers = {}
-
-#------------------------------------------------------------------------------
-# PlainTextFormatter(BaseFormatter) configuration
-#------------------------------------------------------------------------------
-
-## The default pretty-printer.
-#  
-#  This uses :mod:`IPython.lib.pretty` to compute the format data of the object.
-#  If the object cannot be pretty printed, :func:`repr` is used. See the
-#  documentation of :mod:`IPython.lib.pretty` for details on how to write pretty
-#  printers.  Here is a simple example::
-#  
-#      def dtype_pprinter(obj, p, cycle):
-#          if cycle:
-#              return p.text('dtype(...)')
-#          if hasattr(obj, 'fields'):
-#              if obj.fields is None:
-#                  p.text(repr(obj))
-#              else:
-#                  p.begin_group(7, 'dtype([')
-#                  for i, field in enumerate(obj.descr):
-#                      if i > 0:
-#                          p.text(',')
-#                          p.breakable()
-#                      p.pretty(field)
-#                  p.end_group(7, '])')
-
-## 
-#c.PlainTextFormatter.float_precision = ''
-
-## Truncate large collections (lists, dicts, tuples, sets) to this size.
-#  
-#  Set to 0 to disable truncation.
-#c.PlainTextFormatter.max_seq_length = 1000
-
-## 
-#c.PlainTextFormatter.max_width = 79
-
-## 
-#c.PlainTextFormatter.newline = '\n'
-
-## 
-#c.PlainTextFormatter.pprint = True
-
-## 
-#c.PlainTextFormatter.verbose = False
-
-#------------------------------------------------------------------------------
-# Completer(Configurable) configuration
-#------------------------------------------------------------------------------
-
-## Enable unicode completions, e.g. \alpha<tab> . Includes completion of latex
-#  commands, unicode names, and expanding unicode characters back to latex
-#  commands.
-#c.Completer.backslash_combining_completions = True
-
-## Activate greedy completion PENDING DEPRECTION. this is now mostly taken care
-#  of with Jedi.
-#  
-#  This will enable completion on elements of lists, results of function calls,
-#  etc., but can be unsafe because the code is actually evaluated on TAB.
-#c.Completer.greedy = False
-
-#------------------------------------------------------------------------------
-# IPCompleter(Completer) configuration
-#------------------------------------------------------------------------------
-
-## Extension of the completer class with IPython-specific features
-
-## DEPRECATED as of version 5.0.
-#  
-#  Instruct the completer to use __all__ for the completion
-#  
-#  Specifically, when completing on ``object.<tab>``.
-#  
-#  When True: only those names in obj.__all__ will be included.
-#  
-#  When False [default]: the __all__ attribute is ignored
-#c.IPCompleter.limit_to__all__ = False
-
-## Whether to merge completion results into a single list
-#  
-#  If False, only the completion results from the first non-empty completer will
-#  be returned.
-#c.IPCompleter.merge_completions = True
-
-## Instruct the completer to omit private method names
-#  
-#  Specifically, when completing on ``object.<tab>``.
-#  
-#  When 2 [default]: all names that start with '_' will be excluded.
-#  
-#  When 1: all 'magic' names (``__foo__``) will be excluded.
-#  
-#  When 0: nothing will be excluded.
-#c.IPCompleter.omit__names = 2
-
-#------------------------------------------------------------------------------
-# ScriptMagics(Magics) configuration
-#------------------------------------------------------------------------------
-
-## Magics for talking to scripts
-#  
-#  This defines a base `%%script` cell magic for running a cell with a program in
-#  a subprocess, and registers a few top-level magics that call %%script with
-#  common interpreters.
-
-## Extra script cell magics to define
-#  
-#  This generates simple wrappers of `%%script foo` as `%%foo`.
-#  
-#  If you want to add script magics that aren't on your path, specify them in
-#  script_paths
-#c.ScriptMagics.script_magics = []
-
-## Dict mapping short 'ruby' names to full paths, such as '/opt/secret/bin/ruby'
-#  
-#  Only necessary for items in script_magics where the default path will not find
-#  the right interpreter.
-#c.ScriptMagics.script_paths = {}
-
-#------------------------------------------------------------------------------
-# StoreMagics(Magics) configuration
-#------------------------------------------------------------------------------
-
-## Lightweight persistence for python variables.
-#  
-#  Provides the %store magic.
-
-## If True, any %store-d variables will be automatically restored when IPython
-#  starts.
-#c.StoreMagics.autorestore = False
diff --git a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/00-tango.py b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/00-tango.py
deleted file mode 100644
index 38fcb84c3417c6b19d89527be6f8122bd0249765..0000000000000000000000000000000000000000
--- a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/00-tango.py
+++ /dev/null
@@ -1 +0,0 @@
-from tango import *
diff --git a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
deleted file mode 100644
index 527f1ee25ebfd8acb420d5433b44541c8a705656..0000000000000000000000000000000000000000
--- a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Create shortcuts for our devices
-apsct = DeviceProxy("STAT/APSCT/1")
-ccd = DeviceProxy("STAT/CCD/1")
-apspu = DeviceProxy("STAT/APSPU/1")
-recv = DeviceProxy("STAT/RECV/1")
-sdp = DeviceProxy("STAT/SDP/1")
-bst = DeviceProxy("STAT/BST/1")
-sst = DeviceProxy("STAT/SST/1")
-xst = DeviceProxy("STAT/XST/1")
-unb2 = DeviceProxy("STAT/UNB2/1")
-boot = DeviceProxy("STAT/Boot/1")
-tilebeam = DeviceProxy("STAT/TileBeam/1")
-pcon = DeviceProxy("STAT/PCON/1")
-psoc = DeviceProxy("STAT/PSOC/1")
-beamlet = DeviceProxy("STAT/Beamlet/1")
-digitalbeam = DeviceProxy("STAT/DigitalBeam/1")
-antennafield = DeviceProxy("STAT/AntennaField/1")
-docker = DeviceProxy("STAT/Docker/1")
-temperaturemanager = DeviceProxy("STAT/TemperatureManager/1")
-
-# Put them in a list in case one wants to iterate
-devices = [apsct, ccd, apspu, recv, sdp, bst, sst, xst, unb2, boot, tilebeam, beamlet, digitalbeam, antennafield, temperaturemanager, docker, pcon, psoc]
diff --git a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/02-stationcontrol.py b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/02-stationcontrol.py
deleted file mode 100644
index d21ed1cf013d73b700cbc72e3d89ef9541efcacc..0000000000000000000000000000000000000000
--- a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/02-stationcontrol.py
+++ /dev/null
@@ -1 +0,0 @@
-import tangostationcontrol
diff --git a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/README.md b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/README.md
deleted file mode 100644
index 61d470004218ae459ce7bfdc974f7c86e0790486..0000000000000000000000000000000000000000
--- a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/README.md
+++ /dev/null
@@ -1,11 +0,0 @@
-This is the IPython startup directory
-
-.py and .ipy files in this directory will be run *prior* to any code or files specified
-via the exec_lines or exec_files configurables whenever you load this profile.
-
-Files will be run in lexicographical order, so you can control the execution order of files
-with a prefix, e.g.::
-
-    00-first.py
-    50-middle.py
-    99-last.ipy
diff --git a/docker-compose/jupyter/jupyter-kernels/stationcontrol/kernel.json b/docker-compose/jupyter/jupyter-kernels/stationcontrol/kernel.json
deleted file mode 100644
index ff6d4a1a01d0f7bd6eda3a40886eae74b451a5a4..0000000000000000000000000000000000000000
--- a/docker-compose/jupyter/jupyter-kernels/stationcontrol/kernel.json
+++ /dev/null
@@ -1,13 +0,0 @@
- {
-     "argv": [
-	  "python",
-	  "-m",
-	  "ipykernel",
-	  "-f",
-	  "{connection_file}",
-	  "--profile-dir",
-	  "/opt/ipython-profiles/stationcontrol-jupyter/"
-     ],
-     "language": "python",
-     "display_name": "StationControl"
-}
diff --git a/docker-compose/jupyter/jupyter-notebook b/docker-compose/jupyter/jupyter-notebook
deleted file mode 100755
index 59613a137cc1bb5c86b4cd7c82f3a2cb1f9abde3..0000000000000000000000000000000000000000
--- a/docker-compose/jupyter/jupyter-notebook
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/python3
-# -*- coding: utf-8 -*-
-# An adjustment of the `jupyter-notebook' executable patched to:
-#  - log to the ELK stack
-#
-# We go straight for the notebook executable here, as the "jupyter" command
-# execvp's into the requested notebook subcommand, erasing all configuration
-# we set here.
-import re
-import sys
-
-from notebook.notebookapp import main 
-
-from logstash_async.handler import AsynchronousLogstashHandler, LogstashFormatter
-import logging
-
-if __name__ == '__main__':
-    # log to the tcp_input of logstash in our ELK stack
-    handler = AsynchronousLogstashHandler("elk", 5959, database_path='/tmp/pending_log_messages.db')
-
-    # add to logger of Jupyter traitlets Application. As that logger is configured not to propagate
-    # messages upward, we need to configure it directly.
-    logger = logging.getLogger("NotebookApp")
-    logger.addHandler(handler)
-    logger.setLevel(logging.DEBUG)
-
-    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
-    sys.exit(main())
diff --git a/docker-compose/jupyter/requirements.txt b/docker-compose/jupyter/requirements.txt
deleted file mode 100644
index 27a6d1ed38ea1edc00323ae7b7e20a69242746e6..0000000000000000000000000000000000000000
--- a/docker-compose/jupyter/requirements.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-ipython >=7.27.0,!=7.28.0 # BSD
-jupyter
-ipykernel
-jupyter_bokeh
-matplotlib
-jupyterplot
-nbconvert
-notebook-as-pdf
-python-logstash-async
-PyMySQL[rsa]
-psycopg2-binary >= 2.9.2 #LGPL
-sqlalchemy
-pyvisa
-pyvisa-py
-opcua
-
-numpy
-scipy
-
-pabeam@git+https://git.astron.nl/mevius/grate # Apache2
-lofar-station-client@git+https://git.astron.nl/lofar2.0/lofar-station-client # Apache2
diff --git a/docker-compose/jupyterlab/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py b/docker-compose/jupyterlab/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
index 527f1ee25ebfd8acb420d5433b44541c8a705656..1123fde3afca3a742f2fb239d12bb361d91e5e12 100644
--- a/docker-compose/jupyterlab/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
+++ b/docker-compose/jupyterlab/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
@@ -17,6 +17,7 @@ digitalbeam = DeviceProxy("STAT/DigitalBeam/1")
 antennafield = DeviceProxy("STAT/AntennaField/1")
 docker = DeviceProxy("STAT/Docker/1")
 temperaturemanager = DeviceProxy("STAT/TemperatureManager/1")
+configuration = DeviceProxy("STAT/Configuration/1")
 
 # Put them in a list in case one wants to iterate
-devices = [apsct, ccd, apspu, recv, sdp, bst, sst, xst, unb2, boot, tilebeam, beamlet, digitalbeam, antennafield, temperaturemanager, docker, pcon, psoc]
+devices = [apsct, ccd, apspu, recv, sdp, bst, sst, xst, unb2, boot, tilebeam, beamlet, digitalbeam, antennafield, temperaturemanager, docker, pcon, psoc, configuration]
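
Since the proxies are collected in a list, a quick health sweep of the whole station can be done from a notebook cell along these lines (illustrative only, assuming the aliases defined by this startup file):

    # report the state of every device proxy defined above
    for device in devices:
        print(f"{device.dev_name()}: {device.state()}")
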
diff --git a/docker-compose/tango-prometheus-exporter/lofar2-policy.json b/docker-compose/tango-prometheus-exporter/lofar2-policy.json
index 606f06499472aa11324b7566739efd6de37fdcab..bfb587489da949275dd15f31f372cd0aac4938c2 100644
--- a/docker-compose/tango-prometheus-exporter/lofar2-policy.json
+++ b/docker-compose/tango-prometheus-exporter/lofar2-policy.json
@@ -17,6 +17,8 @@
         },
         "stat/ccd/1": {
         },
+        "stat/configuration/1": {
+        },
         "stat/apspu/1": {
         },
         "stat/beamlet/1": {
diff --git a/sbin/run_integration_test.sh b/sbin/run_integration_test.sh
index 1f7dea1139bcfabf7c9347b95b7ee5a97f33b9dd..046f0ab4a4926df0731aa6f55ca0e60221ba387e 100755
--- a/sbin/run_integration_test.sh
+++ b/sbin/run_integration_test.sh
@@ -76,7 +76,9 @@ sleep 1 # dsconfig container must be up and running...
 # shellcheck disable=SC2016
 echo '/usr/local/bin/wait-for-it.sh ${TANGO_HOST} --strict --timeout=300 -- true' | make run dsconfig bash -
 
-DEVICES=(device-boot device-apsct device-ccd device-apspu device-sdp device-recv device-bst device-sst device-unb2 device-xst device-beamlet device-digitalbeam device-tilebeam device-psoc device-pcon device-antennafield device-temperature-manager device-observation device-observation-control)
+# The devices list is expanded with explicit word splitting when supplied to
+# commands, so shellcheck SC2086 must be disabled for each such use.
+DEVICES=(device-boot device-apsct device-ccd device-apspu device-sdp device-recv device-bst device-sst device-unb2 device-xst device-beamlet device-digitalbeam device-tilebeam device-psoc device-pcon device-antennafield device-temperature-manager device-observation device-observation-control device-configuration)
 
 SIMULATORS=(sdptr-sim recv-sim unb2-sim apsct-sim apspu-sim ccd-sim)
 
diff --git a/sbin/tag_and_push_docker_image.sh b/sbin/tag_and_push_docker_image.sh
index ba98860ad989922b5e4c0433440ebbdd59cfeeb2..16b0e40fb97c10135c8b957323233ab61ac192c8 100755
--- a/sbin/tag_and_push_docker_image.sh
+++ b/sbin/tag_and_push_docker_image.sh
@@ -71,6 +71,7 @@ LOCAL_IMAGES=(
   "device-antennafield device-antennafield y"
   "device-apsct device-apsct y" "device-apspu device-apspu y"
   "device-ccd device-ccd y"
+  "device-configuration device-configuration y"
   "device-boot device-boot y" "device-docker device-docker y"
   "device-observation device-observation y"
   "device-observation-control device-observation-control y"
@@ -86,7 +87,7 @@ LOCAL_IMAGES=(
   "hdbppts-es hdbppts-es y"
 
   "grafana grafana n" "prometheus prometheus n"
-  "jupyter docker-compose_jupyter n"
+  "jupyter-lab docker-compose_jupyter-lab n"
   "integration-test docker-compose_integration-test n"
   "tango-prometheus-exporter docker-compose_tango-prometheus-exporter n"
 )
diff --git a/tangostationcontrol/VERSION b/tangostationcontrol/VERSION
index 1d0ba9ea182b0f7354f3daf12120744ec5e0c2f8..8f0916f768f0487bcf8d33827ce2c8dcecb645c1 100644
--- a/tangostationcontrol/VERSION
+++ b/tangostationcontrol/VERSION
@@ -1 +1 @@
-0.4.0
+0.5.0
diff --git a/tangostationcontrol/docs/source/devices/configuration.rst b/tangostationcontrol/docs/source/devices/configuration.rst
new file mode 100644
index 0000000000000000000000000000000000000000..39b918fc8cb4a180e6fa6f207e26a234b2dab41d
--- /dev/null
+++ b/tangostationcontrol/docs/source/devices/configuration.rst
@@ -0,0 +1,6 @@
+.. _configuration:
+
+Configuration
+--------------------
+
+The Configuration device, available as ``configuration = DeviceProxy("STAT/Configuration/1")``, controls loading, updating, exposing and dumping the whole station configuration.
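
A minimal sketch of reading this configuration dump from Python, assuming a running station (the parsing shown is illustrative):

    import json

    from tango import DeviceProxy

    configuration = DeviceProxy("STAT/Configuration/1")

    # station_configuration_RW exposes the TangoDB dump as a JSON string
    station_configuration = json.loads(configuration.station_configuration_RW)
    print(sorted(station_configuration["servers"].keys()))
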
diff --git a/tangostationcontrol/docs/source/index.rst b/tangostationcontrol/docs/source/index.rst
index 7f4ec29f443df38627eeb1907afa208680313699..1445143be6c5fcf511896af505d11cfe54b96a2b 100644
--- a/tangostationcontrol/docs/source/index.rst
+++ b/tangostationcontrol/docs/source/index.rst
@@ -30,6 +30,7 @@ Even without having access to any LOFAR2.0 hardware, you can install the full st
    devices/docker
    devices/psoc
    devices/ccd
+   devices/configuration
    devices/temperature-manager
    devices/configure
    configure_station
diff --git a/tangostationcontrol/docs/source/interfaces/control.rst b/tangostationcontrol/docs/source/interfaces/control.rst
index 4dac94eb9112e959b664eaa671e2317dc3569ca3..e2f436f378acd97280845886f32cf0bf4f48eb22 100644
--- a/tangostationcontrol/docs/source/interfaces/control.rst
+++ b/tangostationcontrol/docs/source/interfaces/control.rst
@@ -12,7 +12,7 @@ The station offers Juypyter notebooks On http://localhost:8888, which allow one
 
 The notebooks provide some predefined variables, so you don't have to look them up:
 
-.. literalinclude:: ../../../../docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
+.. literalinclude:: ../../../../docker-compose/jupyterlab/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
 
 Note: the Jupyter notebooks use enhancements from the ``itango`` suite, which provide tab completions, but also the ``Device`` alias for ``DeviceProxy`` as was used in the Python examples in the next section.
 
diff --git a/tangostationcontrol/setup.cfg b/tangostationcontrol/setup.cfg
index 38eed462baf65342db01e0045905c115e5097ab5..66cd3b4bc2e0461b7c2a41d3cd0f785c886ddb9a 100644
--- a/tangostationcontrol/setup.cfg
+++ b/tangostationcontrol/setup.cfg
@@ -56,6 +56,7 @@ console_scripts =
     l2ss-unb2 = tangostationcontrol.devices.unb2:main
     l2ss-xst = tangostationcontrol.devices.sdp.xst:main
     l2ss-temperature-manager = tangostationcontrol.devices.temperature_manager:main
+    l2ss-configuration-device = tangostationcontrol.devices.configuration_device:main
 
 # The following entry points should eventually be removed / replaced
     l2ss-hardware-device-template = tangostationcontrol.examples.HW_device_template:main
diff --git a/tangostationcontrol/tangostationcontrol/common/configuration.py b/tangostationcontrol/tangostationcontrol/common/configuration.py
new file mode 100644
index 0000000000000000000000000000000000000000..71111c0e4bb2b13ad87111b51dc8bd1234455115
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/common/configuration.py
@@ -0,0 +1,122 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+from tango import DeviceProxy, Database
+
+from itertools import islice
+
+class StationConfiguration:
+
+    DEVICE_PROPERTIES_QUERY = "SELECT device, property_device.name, property_device.value FROM property_device \
+                            INNER JOIN device ON property_device.device = device.name \
+                            WHERE class != 'DServer' \
+                            AND property_device.name != '__SubDevices' \
+                            ORDER BY device, property_device.name, property_device.count ASC"
+
+    ATTRS_PROPERTIES_QUERY = "SELECT device, attribute, property_attribute_device.name, \
+                            property_attribute_device.value \
+                            FROM property_attribute_device \
+                            INNER JOIN device ON property_attribute_device.device = device.name \
+                            WHERE class != 'DServer' \
+                            ORDER BY device, property_attribute_device.name, property_attribute_device.count ASC"
+
+    SERVER_QUERY = "SELECT server, class, name FROM device \
+                WHERE class != 'DServer' \
+                ORDER BY server ASC"
+
+    def __init__(self, db: Database, tangodb_timeout:int = 10000):
+        self.dbproxy = DeviceProxy(db.dev_name())            # TangoDB
+        self.dbproxy.set_timeout_millis(tangodb_timeout)     # Set a generous timeout (default is 3000 ms), as dumping the database can be slow
+
+    def get_tangodb_data(self) -> dict:
+        """ Dump a subset of the TANGO database into a dictionary.
+
+        The dictionary describes all the Devices used in the present environment,
+        including their Property values, their Attribute Properties, and the
+        namespace of the DeviceServers which encapsulate each Device.
+        """
+        # Create empty dictionaries to be populated
+        devices_dict = {}
+        server_dict = {}
+        
+        # Populate devices dictionary from query data
+        device_property_result = self._query_tangodb(self.dbproxy, self.DEVICE_PROPERTIES_QUERY, 3)
+        devices_dict = self.add_to_devices_dict(devices_dict, device_property_result)
+        
+        # Populate devices dictionary with attribute properties from query data
+        attrs_property_result = self._query_tangodb(self.dbproxy, self.ATTRS_PROPERTIES_QUERY, 4)
+        devices_dict = self.add_to_attrs_dict(devices_dict, attrs_property_result)
+        
+        # Populate server dictionary from query data and merge it with devices dict
+        server_result = self._query_tangodb(self.dbproxy, self.SERVER_QUERY, 3)
+        server_dict = self.add_to_server_dict(server_dict, devices_dict, server_result)
+        return {"servers" : server_dict}
+
+    def _query_tangodb(self, dbproxy: DeviceProxy, sql_query: str, num_cols: int) -> list:
+        """ Query TangoDb with a built-in function and return data as tuples """
+        _, raw_result = dbproxy.command_inout("DbMySqlSelect", sql_query)
+        return self.query_to_tuples(raw_result, num_cols)
+
+    def add_to_devices_dict(self, devices_dict:dict, result:list) -> dict:
+        """ Populate a devices dictionary with the following structure:
+        'device_name': { 'properties' : { 'property_name': ['property_value'] } }
+        """
+        for device, property, value in result:
+            # lowercase data
+            device = device.lower() 
+            property = property.lower()
+            # model dictionary
+            device_data = devices_dict.setdefault(device, {})
+            property_data = device_data.setdefault("properties", {})
+            value_data = property_data.setdefault(property, [])
+            value_data.append(value)
+        return devices_dict
+
+    def add_to_attrs_dict(self, devices_dict:dict, result:list) -> dict:
+        """ Populate a device dictionary with the following structure : 
+        'device_name': { 'attribute_properties' : { 'attribute_name': {'property_name' : ['property_value'] } } }
+        """
+        for device, attribute, property, value in result:
+            # lowercase data
+            device = device.lower()
+            attribute = attribute.lower()
+            property = property.lower()
+            # model dictionary
+            device_data = devices_dict.setdefault(device, {})
+            property_data = device_data.setdefault("attribute_properties", {})
+            attr_data = property_data.setdefault(attribute, {})
+            value_data = attr_data.setdefault(property, [])
+            value_data.append(value)
+        return devices_dict
+
+    def add_to_server_dict(self, server_dict:dict, devices_dict:dict, result:list) -> dict:
+        """ Populate the server dictionary and merge it with the devices dictionary.
+        At the end of the process, the dictionary will have the following structure:
+        'server_name': { 'server_instance': { 'server_class': {
+            'device_name': { 'properties': { 'property_name': ['property_value'] },
+                             'attribute_properties': { 'attribute_name': { 'property_name': ['property_value'] } } } } } }
+        """
+        for server, sclass, device in result:
+            # lowercase data
+            device = device.lower()
+            server = server.lower()
+            sclass = sclass.lower()
+            # model dictionary
+            sname, instance = server.split('/')
+            device_data = devices_dict.get(device, {})
+            server_data = server_dict.setdefault(sname, {})
+            instance_data = server_data.setdefault(instance, {})
+            class_data = instance_data.setdefault(sclass, {})
+            # merge the two dictionaries: attach the device data to its server/instance/class entry
+            class_data[device] = device_data
+        return server_dict
+
+    def query_to_tuples(self, result: list, num_cols: int) -> list:
+        """ Given a query result and its number of columns, transform the raw result into a list of tuples """
+        return list(zip(*[islice(result, i, None, num_cols) for i in range(num_cols)]))
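
A sketch of how StationConfiguration is meant to be driven, and of the per-row grouping performed by query_to_tuples (mirrors the integration test added below; a reachable TangoDB is assumed):

    from tango import Database

    from tangostationcontrol.common.configuration import StationConfiguration

    sc = StationConfiguration(Database())

    # query_to_tuples groups the flat DbMySqlSelect output into per-row tuples
    flat = ["dev1", "prop1", "val1", "dev1", "prop2", "val2"]
    assert sc.query_to_tuples(flat, 3) == [("dev1", "prop1", "val1"),
                                           ("dev1", "prop2", "val2")]

    # full dump: {"servers": {server: {instance: {class: {device: {...}}}}}}
    dump = sc.get_tangodb_data()
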
diff --git a/tangostationcontrol/tangostationcontrol/devices/README.md b/tangostationcontrol/tangostationcontrol/devices/README.md
index 64390631beb1222e68c536757766e50fba64bcbc..d1ced11fcd5a0352f85da4aab3ecd18601eb7916 100644
--- a/tangostationcontrol/tangostationcontrol/devices/README.md
+++ b/tangostationcontrol/tangostationcontrol/devices/README.md
@@ -7,9 +7,9 @@ This directory contains the sources for our custom Tango devices.
 If a new device is added, it will (likely) need to be referenced in several places. Adjust or add the following files (referenced from the repository root), following the pattern shown by the devices already there:
 
 - Adjust `CDB/LOFAR_ConfigDb.json` to create the device in the Tango device database,
-- Adjust `docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py` and `docker-compose/jupyterlab/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py` to make an alias for it available in Jupyter and Jupyter-Lab,
+- Adjust `docker-compose/jupyterlab/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py` to make an alias for it available in Jupyter-Lab,
 - Adjust `tangostationcontrol/tangostationcontrol/devices/boot.py` to add the device to the station initialisation sequence,
-- Add to `docker-compose/` to create a YaML file to start the device in a docker container. NOTE: it needs a unique 57xx port assigned (current _unused_ port value: 5722), a unique 58xx port for ZMQ events, and a unique 59xx port for ZMQ heartbeat
+- Add to `docker-compose/` to create a YAML file to start the device in a Docker container. NOTE: it needs a unique 57xx port assigned (current _unused_ port value: 5723), a unique 58xx port for ZMQ events, and a unique 59xx port for ZMQ heartbeat,
 - Adjust `tangostationcontrol/setup.cfg` to add an entry point for the device in the package installation,
 - Add to `tangostationcontrol/tangostationcontrol/integration_test/default/devices/` to add an integration test,
 - Adjust `sbin/run_integration_test.sh` to have the device started when running the integration tests,
diff --git a/tangostationcontrol/tangostationcontrol/devices/antennafield.py b/tangostationcontrol/tangostationcontrol/devices/antennafield.py
index 0363b5af38f6706a4a3334ed12b2a50eebbea1b0..2c89283f88c123042c1b8bc531b1623b374d62c8 100644
--- a/tangostationcontrol/tangostationcontrol/devices/antennafield.py
+++ b/tangostationcontrol/tangostationcontrol/devices/antennafield.py
@@ -152,25 +152,25 @@ class AntennaField(lofar_device):
     )
 
     Calibration_SDP_Subband_Weights_50MHz = device_property(
-        doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW columns of each antenna, at 50 MHz. Each antenna is represented by a (real, imag) pair for every subband.",
+        doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW columns of each polarisation of each antenna, at 50 MHz. Each polarisation is represented by a (real, imag) pair for every subband.",
         dtype='DevVarFloatArray',
         mandatory=False
     )
 
     Calibration_SDP_Subband_Weights_150MHz = device_property(
-        doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW columns of each antenna, at 150 MHz. Each antenna is represented by a (real, imag) pair for every subband.",
+        doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW columns of each polarisation of each antenna, at 150 MHz. Each polarisation is represented by a (real, imag) pair for every subband.",
         dtype='DevVarFloatArray',
         mandatory=False
     )
 
     Calibration_SDP_Subband_Weights_200MHz = device_property(
-        doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW columns of each antenna, at 200 MHz. Each antenna is represented by a (real, imag) pair for every subband.",
+        doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW columns of each polarisation of each antenna, at 200 MHz. Each polarisation is represented by a (real, imag) pair for every subband.",
         dtype='DevVarFloatArray',
         mandatory=False
     )
 
     Calibration_SDP_Subband_Weights_250MHz = device_property(
-        doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW columns of each antenna, at 250 MHz. Each antenna is represented by a (real, imag) pair for every subband.",
+        doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW columns of each polarisation of each antenna, at 250 MHz. Each polarisation is represented by a (real, imag) pair for every subband.",
         dtype='DevVarFloatArray',
         mandatory=False
     )
@@ -303,13 +303,13 @@ class AntennaField(lofar_device):
         dtype=(numpy.uint32,), max_dim_x=MAX_ANTENNA, unit="dB")
     Calibration_SDP_Fine_Calibration_Default_R = attribute(
         doc=f"Computed calibration values for the fine calibration of each antenna. Each antenna is represented by a (delay, phase_offset, amplitude_scaling) triplet.",
-        dtype=((numpy.float64,),), max_dim_y=MAX_ANTENNA, max_dim_x=3)
+        dtype=((numpy.float64,),), max_dim_y=MAX_ANTENNA * N_pol, max_dim_x=3)
     Calibration_SDP_Subband_Weights_Default_R = attribute(
-        doc=f"Calibration values for the rows in sdp.FPGA_subband_weights_RW relevant for our antennas, as computed. Each subband of each antenna is represented by a real_imag number (real, imag).",
-        dtype=((numpy.float64,),), max_dim_y=MAX_ANTENNA, max_dim_x=N_subbands * VALUES_PER_COMPLEX)
+        doc=f"Calibration values for the rows in sdp.FPGA_subband_weights_RW relevant for our antennas, as computed. Each subband of each polarisation of each antenna is represented by a real_imag number (real, imag).",
+        dtype=((numpy.float64,),), max_dim_y=MAX_ANTENNA * N_pol, max_dim_x=N_subbands * VALUES_PER_COMPLEX)
     Calibration_SDP_Subband_Weights_R = attribute(
-        doc=f"Calibration values for the rows in sdp.FPGA_subband_weights_RW relevant for our antennas. Each subband of each antenna is represented by a real_imag number (real, imag). Returns the measured values from Calibration_SDP_Subband_Weights_XXXMHz if available, and values computed from Calibration_SDP_Fine_Calibration_Default_R otherwise.",
-        dtype=((numpy.float64,),), max_dim_y=MAX_ANTENNA, max_dim_x=N_subbands * VALUES_PER_COMPLEX)
+        doc=f"Calibration values for the rows in sdp.FPGA_subband_weights_RW relevant for our antennas. Each subband of each polarisation of each antenna is represented by a real_imag number (real, imag). Returns the measured values from Calibration_SDP_Subband_Weights_XXXMHz if available, and values computed from Calibration_SDP_Fine_Calibration_Default_R otherwise.",
+        dtype=((numpy.float64,),), max_dim_y=MAX_ANTENNA * N_pol, max_dim_x=N_subbands * VALUES_PER_COMPLEX)
 
     # ----- Quality and usage information
 
@@ -426,10 +426,14 @@ class AntennaField(lofar_device):
         return input_delay_samples
 
     def read_Calibration_SDP_Fine_Calibration_Default_R(self):
+        def repeat_per_pol(arr):
+            # repeat each per-antenna value once per polarisation, doubling the first dimension
+            return numpy.dstack((arr,arr)).reshape(arr.shape[0] * N_pol, *arr.shape[1:])
+
         # ----- Delay
 
-        # correct for signal delays in the cables
-        signal_delay_seconds = self.read_attribute("Antenna_Cables_Delay_R")
+        # correct for signal delays in the cables (equal for both polarisations)
+        signal_delay_seconds = repeat_per_pol(self.read_attribute("Antenna_Cables_Delay_R"))
 
         # compute the required compensation
         clock = self.sdp_proxy.clock_RW
@@ -438,17 +442,17 @@ class AntennaField(lofar_device):
         # ----- Phase offsets
 
         # we don't have any
-        phase_offsets = numpy.zeros((self.read_attribute("nr_antennas_R"),),dtype=numpy.float64)
+        phase_offsets = repeat_per_pol(numpy.zeros((self.read_attribute("nr_antennas_R"),),dtype=numpy.float64))
 
         # ----- Amplitude
 
         # correct for signal loss in the cables
-        signal_delay_loss = self.read_attribute("Antenna_Cables_Loss_R") - self.Field_Attenuation
+        signal_delay_loss = repeat_per_pol(self.read_attribute("Antenna_Cables_Loss_R") - self.Field_Attenuation)
 
         # return fine scaling to apply
         _, input_attenuation_remaining_factor = loss_compensation(signal_delay_loss)
 
-        # Return as (delay, phase_offset, amplitude) triplet per antenna
+        # Return as (delay, phase_offset, amplitude) triplet per polarisation
         return numpy.stack((input_delay_subsample_seconds, phase_offsets, input_attenuation_remaining_factor), axis=1)
 
     def read_Calibration_SDP_Subband_Weights_Default_R(self):
@@ -459,8 +463,7 @@ class AntennaField(lofar_device):
         nr_antennas = self.read_attribute("nr_antennas_R")
         antenna_to_sdp_mapping = self.read_attribute("Antenna_to_SDP_Mapping_R")
 
-
-        subband_weights = numpy.zeros((nr_antennas, N_subbands, VALUES_PER_COMPLEX), dtype=numpy.float64)
+        subband_weights = numpy.zeros((nr_antennas, N_pol, N_subbands, VALUES_PER_COMPLEX), dtype=numpy.float64)
 
         # compute real_imag weight for each subband
         for antenna_nr in range(nr_antennas):
@@ -468,20 +471,21 @@ class AntennaField(lofar_device):
             if input_nr == -1:
                 continue
 
-            delay, phase_offset, amplitude = delay_phase_amplitude[antenna_nr, :]
+            for pol_nr in range(N_pol):
+                delay, phase_offset, amplitude = delay_phase_amplitude[antenna_nr * N_pol + pol_nr, :]
 
-            for subband_nr in range(N_subbands):
-                frequency = subband_frequency(subband_nr, clock, nyquist_zone[fpga_nr, input_nr])
+                for subband_nr in range(N_subbands):
+                    frequency = subband_frequency(subband_nr, clock, nyquist_zone[fpga_nr, input_nr])
 
-                # turn signal backwards to compensate for the provided delay and offset
-                phase_shift = -(2 * numpy.pi * frequency * delay + phase_offset)
+                    # turn signal backwards to compensate for the provided delay and offset
+                    phase_shift = -(2 * numpy.pi * frequency * delay + phase_offset)
 
-                real = numpy.cos(phase_shift) * amplitude
-                imag = numpy.sin(phase_shift) * amplitude
+                    real = numpy.cos(phase_shift) * amplitude
+                    imag = numpy.sin(phase_shift) * amplitude
 
-                subband_weights[antenna_nr, subband_nr, :] = (real, imag)
+                    subband_weights[antenna_nr, pol_nr, subband_nr, :] = (real, imag)
 
-        return subband_weights.reshape(nr_antennas, N_subbands * VALUES_PER_COMPLEX)
+        return subband_weights.reshape(nr_antennas * N_pol, N_subbands * VALUES_PER_COMPLEX)
 
     def _rcu_band_to_calibration_table(self) -> dict:
         """
@@ -507,7 +511,7 @@ class AntennaField(lofar_device):
 
         # reshape them into their actual form
         for band, caltable in rcu_band_to_caltable.items():
-            rcu_band_to_caltable[band] = numpy.array(caltable).reshape(nr_antennas, N_subbands, 2)
+            rcu_band_to_caltable[band] = numpy.array(caltable).reshape(nr_antennas, N_pol, N_subbands, 2)
 
         return rcu_band_to_caltable
 
@@ -526,7 +530,7 @@ class AntennaField(lofar_device):
         # construct the subband weights based on the rcu_band of each antenna,
         # combining the relevant tables.
         nr_antennas = self.read_attribute("nr_antennas_R")
-        subband_weights = numpy.zeros((nr_antennas, N_subbands, VALUES_PER_COMPLEX), dtype=numpy.float64)
+        subband_weights = numpy.zeros((nr_antennas, N_pol, N_subbands, VALUES_PER_COMPLEX), dtype=numpy.float64)
         for antenna_nr, rcu_band in enumerate(rcu_bands):
             # Skip antennas not connected to RECV. These do not have a valid RCU band selected.
             if recvs[antenna_nr] == 0:
@@ -536,9 +540,9 @@ class AntennaField(lofar_device):
             if antenna_to_sdp_mapping[antenna_nr, 1] == -1:
                 continue
 
-            subband_weights[antenna_nr, :, :] = rcu_band_to_caltable[rcu_band][antenna_nr, :, :]
+            subband_weights[antenna_nr, :, :, :] = rcu_band_to_caltable[rcu_band][antenna_nr, :, :, :]
 
-        return subband_weights.reshape(nr_antennas, N_subbands * VALUES_PER_COMPLEX)
+        return subband_weights.reshape(nr_antennas * N_pol, N_subbands * VALUES_PER_COMPLEX)
 
     def read_Calibration_RCU_Attenuation_dB_R(self):
         # Correct for signal loss in the cables
@@ -815,7 +819,8 @@ class AntennaField(lofar_device):
                 continue
 
             # set weights
-            fpga_subband_weights[fpga_nr, input_nr, :] = real_imag_to_weights(caltable[antenna_nr, :], SDP.SUBBAND_UNIT_WEIGHT)
+            fpga_subband_weights[fpga_nr, input_nr * N_pol + 0, :] = real_imag_to_weights(caltable[antenna_nr * N_pol + 0, :], SDP.SUBBAND_UNIT_WEIGHT)
+            fpga_subband_weights[fpga_nr, input_nr * N_pol + 1, :] = real_imag_to_weights(caltable[antenna_nr * N_pol + 1, :], SDP.SUBBAND_UNIT_WEIGHT)
         self.sdp_proxy.FPGA_subband_weights_RW = fpga_subband_weights.reshape(N_pn, S_pn * N_subbands)
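
The per-polarisation bookkeeping above hinges on each per-antenna value being duplicated so that index antenna_nr * N_pol + pol_nr selects the right entry; a small numpy sketch of that layout (values made up):

    import numpy

    N_pol = 2  # two polarisations per antenna

    # repeat_per_pol() duplicates each 1-D per-antenna value for both polarisations
    delays = numpy.array([1.0, 2.0, 3.0])
    per_pol = numpy.dstack((delays, delays)).reshape(delays.shape[0] * N_pol)
    # per_pol == [1., 1., 2., 2., 3., 3.], so per_pol[antenna_nr * N_pol + pol_nr]
    # yields the same cable delay for both polarisations of an antenna
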
 
 
diff --git a/tangostationcontrol/tangostationcontrol/devices/boot.py b/tangostationcontrol/tangostationcontrol/devices/boot.py
index 6e40c0446ecb2aacde464bae7650c511c6409c0b..7a411e728bac13cbec9adcbb0845b385b6a2520f 100644
--- a/tangostationcontrol/tangostationcontrol/devices/boot.py
+++ b/tangostationcontrol/tangostationcontrol/devices/boot.py
@@ -240,6 +240,7 @@ class Boot(lofar_device):
         dtype='DevVarStringArray',
         mandatory=False,
         default_value=["STAT/Docker/1", # Docker controls the device containers, so it goes before anything else
+                       "STAT/Configuration/1",  # Configuration device loads and updates the station configuration
                        "STAT/PSOC/1",  # PSOC boot early to detect power delivery failure as fast as possible
                        "STAT/PCON/1",  # PCON boot early because it is responsible for power delivery.
                        "STAT/APSPU/1",  # APS Power Units control other hardware we want to initialise
diff --git a/tangostationcontrol/tangostationcontrol/devices/configuration_device.py b/tangostationcontrol/tangostationcontrol/devices/configuration_device.py
new file mode 100644
index 0000000000000000000000000000000000000000..ad6e63c32c6f2325565c2e026ab5577b50ac5897
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/devices/configuration_device.py
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+""" Configuration Device Server for LOFAR2.0
+
+Handles and exposes the station configuration
+
+"""
+# PyTango imports
+from tango import AttrWriteType, Database
+from tango.server import attribute
+
+# Additional import
+from tangostationcontrol.common.configuration import StationConfiguration
+from tangostationcontrol.common.entrypoint import entry
+from tangostationcontrol.devices.lofar_device import lofar_device
+from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
+
+import json
+import logging
+logger = logging.getLogger()
+
+__all__ = ["Configuration", "main"]
+
+@device_logging_to_python()
+class Configuration(lofar_device):
+    # -----------------
+    # Device Properties
+    # -----------------
+    
+    # ----------
+    # Attributes
+    # ----------
+    station_configuration_RW = attribute(dtype=str, access=AttrWriteType.READ_WRITE, doc='The Tango properties of all the devices in this station, as a JSON string.')
+
+    def read_station_configuration_RW(self):
+        return self._dump_configdb()
+    
+    def write_station_configuration_RW(self, station_configuration):
+        """ Takes a JSON string which represents the station configuration 
+        and loads the whole configuration from scratch. 
+        
+        N.B. it does not update, it loads a full new configuration. 
+        """
+        # TODO(Stefano): L2SS-1031 implement loading the configuration
+        raise NotImplementedError("loading a full station configuration is not implemented yet (L2SS-1031)")
+    
+    def _dump_configdb(self):
+        """ Returns the TangoDB station configuration as a JSON string """
+        dbdata = self.station_configuration.get_tangodb_data()
+        return json.dumps(dbdata, ensure_ascii=False, indent=4, sort_keys=True)
+    
+    # --------
+    # overloaded functions
+    # --------
+    @log_exceptions()
+    def configure_for_initialise(self):
+        super().configure_for_initialise()
+        self.station_configuration = StationConfiguration(db = Database())
+         
+
+# ----------
+# Run server
+# ----------
+def main(**kwargs):
+    """Main function of the Configuration module."""
+    return entry(Configuration, **kwargs)
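
For reference, the JSON exposed by station_configuration_RW nests server name, instance, class and device, as exercised by the integration test added below; an illustrative, heavily trimmed fragment written as a Python literal:

    example_dump = {
        "servers": {
            "configuration": {                     # server name
                "stat": {                          # server instance
                    "configuration": {             # server class
                        "stat/configuration/1": {  # device
                            "properties": {},
                            "attribute_properties": {},
                        },
                    },
                },
            },
        },
    }
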
diff --git a/tangostationcontrol/tangostationcontrol/devices/docker_device.py b/tangostationcontrol/tangostationcontrol/devices/docker_device.py
index cf747e69e37bb3cda9893f0d5e34480507d98b55..5336354d8ad9c5ed0c4776b6c5c254ec4d3a41c8 100644
--- a/tangostationcontrol/tangostationcontrol/devices/docker_device.py
+++ b/tangostationcontrol/tangostationcontrol/devices/docker_device.py
@@ -48,6 +48,8 @@ class Docker(lofar_device):
     device_boot_R = attribute_wrapper(comms_annotation={"container": "device-boot"}, datatype=bool)
     device_boot_RW = attribute_wrapper(comms_annotation={"container": "device-boot"}, datatype=bool, access=AttrWriteType.READ_WRITE)
     device_docker_R = attribute_wrapper(comms_annotation={"container": "device-docker"}, datatype=bool)
+    device_configuration_R = attribute_wrapper(comms_annotation={"container": "device-configuration"}, datatype=bool)
+    device_configuration_RW = attribute_wrapper(comms_annotation={"container": "device-configuration"}, datatype=bool, access=AttrWriteType.READ_WRITE)
     # device_docker_RW is not available, as we cannot start our own container`
     device_temperature_manager_R = attribute_wrapper(comms_annotation={"container": "device-temperature-manager"}, datatype=bool)
     device_temperature_manager_RW = attribute_wrapper(comms_annotation={"container": "device-temperature-manager"}, datatype=bool, access=AttrWriteType.READ_WRITE)
@@ -111,8 +113,8 @@ class Docker(lofar_device):
     hdbppts_es_RW = attribute_wrapper(comms_annotation={"container": "hdbppts-es"}, datatype=bool, access=AttrWriteType.READ_WRITE)
     itango_R = attribute_wrapper(comms_annotation={"container": "itango"}, datatype=bool)
     itango_RW = attribute_wrapper(comms_annotation={"container": "itango"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    jupyter_R = attribute_wrapper(comms_annotation={"container": "jupyter"}, datatype=bool)
-    jupyter_RW = attribute_wrapper(comms_annotation={"container": "jupyter"}, datatype=bool, access=AttrWriteType.READ_WRITE)
+    jupyter_lab_R = attribute_wrapper(comms_annotation={"container": "jupyter-lab"}, datatype=bool)
+    jupyter_lab_RW = attribute_wrapper(comms_annotation={"container": "jupyter-lab"}, datatype=bool, access=AttrWriteType.READ_WRITE)
     tangodb_R = attribute_wrapper(comms_annotation={"container": "tangodb"}, datatype=bool)
     tangodb_RW = attribute_wrapper(comms_annotation={"container": "tangodb"}, datatype=bool, access=AttrWriteType.READ_WRITE)
     prometheus_R = attribute_wrapper(comms_annotation={"container": "prometheus"}, datatype=bool)
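
Each container attribute pairs an _R readback with an _RW control; a hedged sketch of toggling the new container from a notebook, using the docker alias defined in 01-devices.py:

    # request the device-configuration container to run, then check that it came up
    docker.device_configuration_RW = True
    print(docker.device_configuration_R)
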
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py b/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py
index 677b82e0f3fd9e241ab12c32433ad7d45a44dcde..4226d1d121d5be449900d0976def367513cdb5af 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py
@@ -137,6 +137,8 @@ class SDP(opcua_device):
     FPGA_sdp_info_observation_id_RW = attribute_wrapper(comms_annotation=["FPGA_sdp_info_observation_id_RW"], datatype=numpy.uint32, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
     FPGA_sdp_info_station_id_R = attribute_wrapper(comms_annotation=["FPGA_sdp_info_station_id_R"], datatype=numpy.uint32, dims=(N_pn,))
     FPGA_sdp_info_station_id_RW = attribute_wrapper(comms_annotation=["FPGA_sdp_info_station_id_RW"], datatype=numpy.uint32, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
+    FPGA_subband_spectral_inversion_R = attribute_wrapper(comms_annotation=["FPGA_subband_spectral_inversion_R"], datatype=bool, dims=(N_pn,))
+    FPGA_subband_spectral_inversion_RW = attribute_wrapper(comms_annotation=["FPGA_subband_spectral_inversion_RW"], datatype=bool, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
     FPGA_subband_weights_R = attribute_wrapper(comms_annotation=["FPGA_subband_weights_R"], datatype=numpy.uint32, dims=(N_pn, S_pn, N_subbands))
     FPGA_subband_weights_RW = attribute_wrapper(comms_annotation=["FPGA_subband_weights_RW"], datatype=numpy.uint32, dims=(N_pn, S_pn, N_subbands), access=AttrWriteType.READ_WRITE)
     FPGA_time_since_last_pps_R = attribute_wrapper(comms_annotation=["FPGA_time_since_last_pps_R"], datatype=numpy.float_, dims=(N_pn,))
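
The new spectral inversion flags follow the same per-FPGA pattern as the surrounding FPGA_* attributes (one boolean per processing node); a sketch of setting them, assuming the sdp alias from the notebook profile:

    from tangostationcontrol.common.constants import N_pn

    # enable subband spectral inversion on all FPGAs
    sdp.FPGA_subband_spectral_inversion_RW = [True] * N_pn
    print(sdp.FPGA_subband_spectral_inversion_R)
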
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/common/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/default/common/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/common/test_configuration.py b/tangostationcontrol/tangostationcontrol/integration_test/default/common/test_configuration.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc19fd5fdea9d110aafb2144c81214581dcf462f
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/common/test_configuration.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+from tango import Database
+
+from tangostationcontrol.common.configuration import StationConfiguration
+
+from tangostationcontrol.integration_test.base import BaseIntegrationTestCase
+
+class TestStationConfiguration(BaseIntegrationTestCase):
+
+    sc = StationConfiguration(Database())
+
+    def test_query_to_tuples(self):
+        """ Test whether Tango DB data are correctly converted into tuples """
+        raw_result = ['device1', 'property_name1', 'value1', 'device1', 'property_name2', 'value2']
+        num_col = 3
+        record1 = ('device1', 'property_name1', 'value1')
+        record2 = ('device1', 'property_name2', 'value2')
+        expected_result = [record1, record2]
+        self.assertEqual(self.sc.query_to_tuples(raw_result, num_col), expected_result)
+    
+    def test_add_to_devices_dict(self):
+        """ Test whether data retrieved from DB are correctly inserted into devices dictionary """
+        data = [('device1', 'property_name1', 'value1'), ('device1', 'property_name2', 'value2')]
+        expected_result = {'device1': {'properties': {  'property_name1': ['value1'],
+                                                        'property_name2': ['value2']}}}
+        self.assertEqual(self.sc.add_to_devices_dict({}, data), expected_result)
+
+    def test_add_to_attrs_dict(self):
+        """ Test whether data retrieved from DB are correctly inserted into attributes dictionary """
+        # Two attributes 
+        data_2attrs = [('device1', 'attribute1', 'attr_property_name1', 'value1'), 
+                ('device1', 'attribute2', 'attr_property_name1', 'value2')]
+        expected_result = {'device1': {'attribute_properties': {'attribute1': {'attr_property_name1': ['value1']},
+                                                                'attribute2': {'attr_property_name1': ['value2']}}}}
+        self.assertEqual(self.sc.add_to_attrs_dict({}, data_2attrs), expected_result)
+        # One attribute, two property values
+        data_1attr = [('device1', 'attribute1', 'attr_property_name1', 'value1'), 
+                ('device1', 'attribute1', 'attr_property_name1', 'value2')]
+        expected_result = {'device1': {'attribute_properties': {'attribute1': 
+                                        {'attr_property_name1': ['value1','value2']}}}}
+        self.assertEqual(self.sc.add_to_attrs_dict({}, data_1attr), expected_result)
+    
+    def test_add_to_server_dict(self):
+        """ Test whether data retrieved from DB are correctly inserted into server dictionary """
+        data = [('server_name/server_instance', 'server_class', 'device1')]
+        devices_dict = {'device1': {'properties': {  'property_name1': ['value1'],
+                                                     'property_name2': ['value2']}}}
+        expected_result = {'server_name': {'server_instance': {'server_class': 
+                            {'device1': {'properties': {'property_name1': ['value1'],
+                                                        'property_name2': ['value2']}}}}}}
+        self.assertEqual(self.sc.add_to_server_dict({}, devices_dict, data), expected_result)
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_antennafield.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_antennafield.py
index 14002bca5b81ccfec845bf3291b986c6c8fd34da..1fdb815c93a8744ed791f0d9cfaa3cd8b3861a61 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_antennafield.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_antennafield.py
@@ -15,7 +15,7 @@ from tangostationcontrol.devices.antennafield import AntennaQuality, AntennaUse
 from tangostationcontrol.devices.sdp.common import weight_to_complex
 from tangostationcontrol.devices.sdp.sdp import SDP
 from .base import AbstractTestBases
-from tangostationcontrol.common.constants import N_elements, MAX_ANTENNA, N_pol, N_rcu, N_rcu_inp, DEFAULT_N_HBA_TILES, CLK_200_MHZ, N_pn, S_pn, N_subbands
+from tangostationcontrol.common.constants import N_elements, MAX_ANTENNA, N_pol, N_rcu, N_rcu_inp, DEFAULT_N_HBA_TILES, CLK_200_MHZ, N_pn, A_pn, N_subbands
 
 class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
 
@@ -352,12 +352,17 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
         # and on the exact delay and loss differences between the cables.
         # rather than repeating the computations from the code,
         # we implement this as a regression test.
-        subband_weights = self.sdp_proxy.FPGA_subband_weights_RW.reshape(N_pn, S_pn, N_subbands)
+        subband_weights = self.sdp_proxy.FPGA_subband_weights_RW.reshape(N_pn, A_pn, N_pol, N_subbands)
 
         def to_complex(weight):
             return weight_to_complex(weight, SDP.SUBBAND_UNIT_WEIGHT)
 
-        self.assertAlmostEqual(0.929 + 0j,     to_complex(subband_weights[0, 0,   0]), places=3)
-        self.assertAlmostEqual(0.309 + 0.876j, to_complex(subband_weights[0, 0, 511]), places=3)
-        self.assertAlmostEqual(0.989 + 0j,     to_complex(subband_weights[0, 1,   0]), places=3)
-        self.assertAlmostEqual(0.883 - 0.444j, to_complex(subband_weights[0, 1, 511]), places=3)
+        # weight should be equal for both polarisations, different per antenna
+        self.assertAlmostEqual(0.929 + 0j,     to_complex(subband_weights[0, 0, 0,   0]), places=3)
+        self.assertAlmostEqual(0.309 + 0.876j, to_complex(subband_weights[0, 0, 0, 511]), places=3)
+        self.assertAlmostEqual(0.929 + 0j,     to_complex(subband_weights[0, 0, 1,   0]), places=3)
+        self.assertAlmostEqual(0.309 + 0.876j, to_complex(subband_weights[0, 0, 1, 511]), places=3)
+        self.assertAlmostEqual(0.989 + 0j,     to_complex(subband_weights[0, 1, 0,   0]), places=3)
+        self.assertAlmostEqual(0.883 - 0.444j, to_complex(subband_weights[0, 1, 0, 511]), places=3)
+        self.assertAlmostEqual(0.989 + 0j,     to_complex(subband_weights[0, 1, 1,   0]), places=3)
+        self.assertAlmostEqual(0.883 - 0.444j, to_complex(subband_weights[0, 1, 1, 511]), places=3)
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_configuration.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_configuration.py
new file mode 100644
index 0000000000000000000000000000000000000000..280439df893623458a77aa4324217c4e369fc8b9
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_configuration.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+from tango import DevState
+
+from .base import AbstractTestBases
+
+import json
+
+class TestDeviceConfiguration(AbstractTestBases.TestDeviceBase):
+
+    def setUp(self):
+        super().setUp("STAT/Configuration/1")
+    
+    def test_read_station_configuration(self):
+        """ Test whether the station configuration is correctly retrieved as a JSON string """
+        self.proxy.warm_boot()
+        self.assertEqual(DevState.ON, self.proxy.state())
+        station_configuration = self.proxy.station_configuration_RW
+        dbdata = json.loads(station_configuration)
+        self.assertIsInstance(dbdata, dict)
+        self.assertGreater(len(dbdata['servers']), 0)
+        # Verify if Configuration Device exists
+        self.assertTrue('configuration' in dbdata['servers']) # server-name
+        self.assertTrue('stat' in dbdata['servers']['configuration']) # server-instance
+        self.assertTrue('configuration' in dbdata['servers']['configuration']['stat']) # server-class
+        self.assertTrue('stat/configuration/1' in dbdata['servers']['configuration']['stat']['configuration']) # device
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/statistics/test_writer_sst.py b/tangostationcontrol/tangostationcontrol/integration_test/default/statistics/test_writer_sst.py
index cac23a518c77d20ed0a339b64e63a9af2c6dd9ef..c2915eab30e1ae0f793f1cc77683a84f586daaa1 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/statistics/test_writer_sst.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/statistics/test_writer_sst.py
@@ -82,7 +82,7 @@ class TestStatisticsWriterSST(BaseIntegrationTestCase):
                     '2021-09-20T12:17:40.000+00:00'
                 )
                 self.assertIsNotNone(stat)
-                self.assertEqual("0.4.0", stat.station_version_id)
+                self.assertEqual("0.5.0", stat.station_version_id)
                 self.assertEqual("0.1", stat.writer_version_id)
 
     def test_insert_tango_SST_statistics(self):