diff --git a/CDB/stations/dummy_positions_ConfigDb.json b/CDB/stations/dummy_positions_ConfigDb.json
new file mode 100644
index 0000000000000000000000000000000000000000..5f998a8102a8ceaad66b7a7a46ed293aa2223b67
--- /dev/null
+++ b/CDB/stations/dummy_positions_ConfigDb.json
@@ -0,0 +1,130 @@
+{
+    "servers": {
+        "RECV": {
+            "STAT": {
+                "RECV": {
+                    "STAT/RECV/1": {
+                        "properties": {
+                            "HBAT_reference_itrf": [
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786",
+                               "3826577.066", "461022.948", "5064892.786"
+                            ],
+                            "HBAT_antenna_itrf_offsets": [
+                               "-1.847", "-1.180", " 1.493",
+                               "-1.581", " 0.003", " 1.186",
+                               "-1.315", " 1.185", " 0.880",
+                               "-1.049", " 2.367", " 0.573",
+                               "-0.882", "-1.575", " 0.804",
+                               "-0.616", "-0.393", " 0.498",
+                               "-0.350", " 0.789", " 0.191",
+                               "-0.083", " 1.971", "-0.116",
+                               " 0.083", "-1.971", " 0.116",
+                               " 0.350", "-0.789", "-0.191",
+                               " 0.616", " 0.393", "-0.498",
+                               " 0.882", " 1.575", "-0.804",
+                               " 1.049", "-2.367", "-0.573",
+                               " 1.315", "-1.185", "-0.880",
+                               " 1.581", "-0.003", "-1.186",
+                               " 1.847", " 1.180", "-1.493"
+                            ]
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
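
A minimal numpy sketch of how the two flat arrays above combine, assuming they are loaded as lists of floats: HBAT_reference_itrf carries 96 tiles x 3 ITRF coordinates (288 values, here all the CS002 dummy position) and HBAT_antenna_itrf_offsets carries 16 elements x 3 offsets shared by every tile; the devices below reshape them to (96,3) and (16,3) and add them to get absolute element positions.

    import numpy

    # 96 identical tile reference positions (the CS002 dummy coordinates from the file above)
    reference_itrf = numpy.full((96, 3), [3826577.066, 461022.948, 5064892.786])

    # first two of the 16 per-element offsets listed in the file
    antenna_itrf_offsets = numpy.array([
        [-1.847, -1.180,  1.493],
        [-1.581,  0.003,  1.186],
    ])

    # absolute element positions: tile reference plus per-element offset
    antenna_positions = reference_itrf[:, numpy.newaxis, :] + antenna_itrf_offsets
    print(antenna_positions.shape)   # (96, 2, 3) here; (96, 16, 3) with the full offset table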
diff --git a/docker-compose/apsct-sim.yml b/docker-compose/apsct-sim.yml
index b9742fdb97ec3f30026d441c668a13732013201e..326e3b130d2f632bf7ce185e554c39de9aa2dc65 100644
--- a/docker-compose/apsct-sim.yml
+++ b/docker-compose/apsct-sim.yml
@@ -14,6 +14,11 @@ services:
          - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
          - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}apsct-sim
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     entrypoint: python3 pypcc2.py --simulator --port 4843 --config APSCTTR
diff --git a/docker-compose/apspu-sim.yml b/docker-compose/apspu-sim.yml
index f5677048fbe1fe28082b219177bc67a2986c31fe..1de9375d639c920e545525e32525771808e81778 100644
--- a/docker-compose/apspu-sim.yml
+++ b/docker-compose/apspu-sim.yml
@@ -14,6 +14,11 @@ services:
          - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
          - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}apspu-sim
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     entrypoint: python3 pypcc2.py --simulator --port 4842 --config APSPUTR
diff --git a/docker-compose/device-apsct.yml b/docker-compose/device-apsct.yml
index 0e258fecdb3a96c3a73714ae2c28cf2e847457a1..a23e8a0a1a9dab234e21b1d4153428fcddda5ee3 100644
--- a/docker-compose/device-apsct.yml
+++ b/docker-compose/device-apsct.yml
@@ -22,6 +22,11 @@ services:
         args:
             SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-apsct
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     ports:
diff --git a/docker-compose/device-apspu.yml b/docker-compose/device-apspu.yml
index 5f325b19fb357e83ab3d35e3acfa1a5cbbb2896a..9fbdda2ce469803b9feb9ab8b56945565ffe06ed 100644
--- a/docker-compose/device-apspu.yml
+++ b/docker-compose/device-apspu.yml
@@ -22,6 +22,11 @@ services:
         args:
             SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-apspu
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     ports:
diff --git a/docker-compose/device-beam.yml b/docker-compose/device-beam.yml
index b7572dda6db2dbebcff62fc90bbb1e99b04b8000..4b765f96ae8706706dd4faf863fd43a2b32fd470 100644
--- a/docker-compose/device-beam.yml
+++ b/docker-compose/device-beam.yml
@@ -17,6 +17,11 @@ services:
         args:
             SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-beam
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     ports:
diff --git a/docker-compose/device-boot.yml b/docker-compose/device-boot.yml
index 330cb723ed3bb5ee8ccd50bf4cb933da4e1fe09c..abbce963a91efa5d86eac045e9c8746db475af72 100644
--- a/docker-compose/device-boot.yml
+++ b/docker-compose/device-boot.yml
@@ -21,6 +21,11 @@ services:
         args:
             SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-boot
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     ports:
diff --git a/docker-compose/device-docker.yml b/docker-compose/device-docker.yml
index a9e4ccfdd6f66eda66f05ea5244fcf0fd732a382..ae73d963de393e3aa3ba1e0520134acc5dfa1aa1 100644
--- a/docker-compose/device-docker.yml
+++ b/docker-compose/device-docker.yml
@@ -22,6 +22,11 @@ services:
         args:
             SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-docker
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     ports:
diff --git a/docker-compose/device-observation_control.yml b/docker-compose/device-observation_control.yml
index d4f6f15d1f4eb80d02cd0c5738dc0a011b9dfc72..2a77d0e5cdc6657260deabfb36f40c646ebf8809 100644
--- a/docker-compose/device-observation_control.yml
+++ b/docker-compose/device-observation_control.yml
@@ -21,6 +21,11 @@ services:
         args:
             SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-observation_control
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     ports:
diff --git a/docker-compose/device-recv.yml b/docker-compose/device-recv.yml
index 25e767726f139ff532dbe649ccb230fabbec0602..0a42904ce8637322df33622a7952cdbb0de746ba 100644
--- a/docker-compose/device-recv.yml
+++ b/docker-compose/device-recv.yml
@@ -22,6 +22,11 @@ services:
         args:
             SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-recv
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     ports:
diff --git a/docker-compose/device-sdp.yml b/docker-compose/device-sdp.yml
index 06a523f606d67811986bd7a13b9a3202cb74e91d..1f3967d6507a4be7eef77e88b5a9aeecd4bfb461 100644
--- a/docker-compose/device-sdp.yml
+++ b/docker-compose/device-sdp.yml
@@ -22,6 +22,11 @@ services:
         args:
             SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-sdp
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     ports:
diff --git a/docker-compose/device-sst.yml b/docker-compose/device-sst.yml
index 86651c7878d844646528b41fb0969dfd19af6eea..4e600ef419fd6149e4ab7a0a7e527acac53369d0 100644
--- a/docker-compose/device-sst.yml
+++ b/docker-compose/device-sst.yml
@@ -22,6 +22,11 @@ services:
         args:
             SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-sst
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
         - control
         - data
diff --git a/docker-compose/device-unb2.yml b/docker-compose/device-unb2.yml
index 2b9b47146a405440ebd36fd84162935fb6b8a56d..3939635b26a9dade6caadb5bcdc4f5136ae40866 100644
--- a/docker-compose/device-unb2.yml
+++ b/docker-compose/device-unb2.yml
@@ -22,6 +22,11 @@ services:
         args:
             SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-unb2
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     ports:
diff --git a/docker-compose/device-xst.yml b/docker-compose/device-xst.yml
index 54ca5a21f911084160d2cec772df06da55ef5cf1..e2cfd8c7d2e38a6a22b5a20868ee7bea72ef9165 100644
--- a/docker-compose/device-xst.yml
+++ b/docker-compose/device-xst.yml
@@ -22,6 +22,11 @@ services:
         args:
             SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-xst
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
         - control
         - data
diff --git a/docker-compose/elk.yml b/docker-compose/elk.yml
index 25bb1b218669baebff50ddc830b049b691349f71..78345420e3a7ce677096cfc22f2fdca33b5756d8 100644
--- a/docker-compose/elk.yml
+++ b/docker-compose/elk.yml
@@ -18,6 +18,11 @@ services:
     build:
         context: elk-configure-host
     container_name: ${CONTAINER_NAME_PREFIX}elk-configure-host
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     privileged: true
@@ -27,6 +32,11 @@ services:
     build:
         context: elk
     container_name: ${CONTAINER_NAME_PREFIX}elk
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     volumes:
diff --git a/docker-compose/itango.yml b/docker-compose/itango.yml
index 02d6801bd8a2f748a4b3d3336352891c78d4882b..3e3df5a537e022a3ea4f82f55bf16e9264db7bb3 100644
--- a/docker-compose/itango.yml
+++ b/docker-compose/itango.yml
@@ -19,6 +19,11 @@ services:
         args:
             SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}itango
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     extra_hosts:
diff --git a/docker-compose/jupyter.yml b/docker-compose/jupyter.yml
index bbc20f269f8a44acff3ce9f36bf11eeef17cea8f..8bc6eb8cefd72b56dda08af054ea1fa7716939f1 100644
--- a/docker-compose/jupyter.yml
+++ b/docker-compose/jupyter.yml
@@ -18,6 +18,11 @@ services:
             CONTAINER_EXECUTION_UID: ${CONTAINER_EXECUTION_UID}
             SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}jupyter
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     volumes:
diff --git a/docker-compose/lofar-device-base.yml b/docker-compose/lofar-device-base.yml
index f01faac2d2f41647708229106a895d3dad23c3e4..dddd50c9a01e2e928a1d36c1723a703afd5deaeb 100644
--- a/docker-compose/lofar-device-base.yml
+++ b/docker-compose/lofar-device-base.yml
@@ -23,6 +23,11 @@ services:
     container_name: ${CONTAINER_NAME_PREFIX}lofar-device-base
     # These parameters are just visual queues, you have to define them again
     # in derived docker-compose files!
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     extra_hosts:
diff --git a/docker-compose/prometheus.yml b/docker-compose/prometheus.yml
index e7924c1a7219adc16e1a3c1780b0bcc43773b3c0..8029e9ba6d83e4af824ca307b8aae17af919333e 100644
--- a/docker-compose/prometheus.yml
+++ b/docker-compose/prometheus.yml
@@ -16,6 +16,11 @@ services:
     build:
         context: prometheus
     container_name: ${CONTAINER_NAME_PREFIX}prometheus
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     volumes:
diff --git a/docker-compose/recv-sim.yml b/docker-compose/recv-sim.yml
index 8fd795be60ef89b23491895dd9809ff67b1c67ae..91d1a13cd696ccc604a896496ad086333375e110 100644
--- a/docker-compose/recv-sim.yml
+++ b/docker-compose/recv-sim.yml
@@ -14,6 +14,11 @@ services:
          - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
          - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}recv-sim
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     entrypoint: python3 pypcc2.py --simulator --port 4840 --config RECVTR
diff --git a/docker-compose/sdptr-sim.yml b/docker-compose/sdptr-sim.yml
index badf707e37621c8b3030121424bacd1393910b87..112b6d4e5b41dff3cddc481d80539c02c39c62ba 100644
--- a/docker-compose/sdptr-sim.yml
+++ b/docker-compose/sdptr-sim.yml
@@ -14,6 +14,11 @@ services:
          - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
          - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}sdptr-sim
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     restart: unless-stopped
diff --git a/docker-compose/sdptr-sim/Dockerfile b/docker-compose/sdptr-sim/Dockerfile
index 4e64ca2a67229e602a705c9e61b0de999e64fad4..678b79ecef5d9425f2993fb26a2163b7c50036f7 100644
--- a/docker-compose/sdptr-sim/Dockerfile
+++ b/docker-compose/sdptr-sim/Dockerfile
@@ -3,7 +3,5 @@ ARG LOCAL_DOCKER_REGISTRY_LOFAR
 
 FROM ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_LOFAR}/sdptr:latest
 
-COPY simulator.conf /sdptr/src/simulator.conf
-
 WORKDIR /sdptr/src
-CMD ["sdptr", "--type=simulator", "--configfile=simulator.conf", "--nodaemon"]
+CMD ["sdptr", "--ip_prefix=127.0.", "--nodaemon"]
diff --git a/docker-compose/sdptr-sim/simulator.conf b/docker-compose/sdptr-sim/simulator.conf
deleted file mode 100644
index 5ad69a8aed4807b815eeea18993d2b06e747b29f..0000000000000000000000000000000000000000
--- a/docker-compose/sdptr-sim/simulator.conf
+++ /dev/null
@@ -1,19 +0,0 @@
-# sdptr.conf
-# configuration file for the SDP Translator.
-#
-# this config file holds settings for all [type].
-#
-# # settings per type
-# [LB_CORE]						  # [ant_band_station_type]
-# n_fpgas = 16                    # 8 or 16
-# first_pfga_nr = 0               # 0 for LB or 16 for HB
-# ip_prefix = 10.99.              # first part of ip (last part is hardware dependent)
-# n_beamsets = 1                  # 1 for 'LB', 'HB Remote' and 'HB International' and 2 for 'HB Core'
-
-
-[simulator]
-n_fpgas = 16
-first_fpga_nr = 0
-ip_prefix = 127.0.
-n_beamsets = 1
-
diff --git a/docker-compose/tango-prometheus-exporter.yml b/docker-compose/tango-prometheus-exporter.yml
index bc43a6777b5595a9d94c13e55322a7adc0a8d84f..87ca7109f6bdd512b97c81e5e32685f7029d9748 100644
--- a/docker-compose/tango-prometheus-exporter.yml
+++ b/docker-compose/tango-prometheus-exporter.yml
@@ -8,6 +8,11 @@ services:
     build:
         context: tango-prometheus-exporter
     container_name: ${CONTAINER_NAME_PREFIX}tango-prometheus-exporter
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     environment:
diff --git a/docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter b/docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter
index 6e48f0fddf5541bc66d9f57e31297c0027ea97b7..e313399d197d266e49d6da0442ea983c6f92adad 160000
--- a/docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter
+++ b/docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter
@@ -1 +1 @@
-Subproject commit 6e48f0fddf5541bc66d9f57e31297c0027ea97b7
+Subproject commit e313399d197d266e49d6da0442ea983c6f92adad
diff --git a/docker-compose/unb2-sim.yml b/docker-compose/unb2-sim.yml
index b01802cd0526abe325c710f08fe965d6244cb2ba..f7bff91fe19aecbb9e38cd9573237acf4756f99e 100644
--- a/docker-compose/unb2-sim.yml
+++ b/docker-compose/unb2-sim.yml
@@ -14,6 +14,11 @@ services:
          - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
          - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}unb2-sim
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
     networks:
       - control
     entrypoint: python3 pypcc2.py --simulator --port 4841 --config UNB2
diff --git a/jupyter-notebooks/Beamforming_Test.ipynb b/jupyter-notebooks/Beamforming_Test.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..0ae1c7631d123aafe85e5bc4c91ffa0c200f8c0f
--- /dev/null
+++ b/jupyter-notebooks/Beamforming_Test.ipynb
@@ -0,0 +1,276 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "id": "87c924c0",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import time, sys, datetime\n",
+    "import numpy\n",
+    "sys.path.append('/hosthome/tango/tangostationcontrol/tangostationcontrol')\n",
+    "from beam.delays import delay_calculator"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "438a5eb9",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "STAT/RECV/1 : OFF\n"
+     ]
+    }
+   ],
+   "source": [
+    "# RECV device\n",
+    "device_name = 'STAT/RECV/1'\n",
+    "d=DeviceProxy(device_name) \n",
+    "state = str(d.state())\n",
+    "print(device_name + ' : ' + state)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "3df10bdf",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Device STAT/RECV/1 is now in ON state\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Start RECV device\n",
+    "if state == \"OFF\":\n",
+    "    time.sleep(1)\n",
+    "    d.initialise()\n",
+    "    time.sleep(1)\n",
+    "state = str(d.state())\n",
+    "if state == \"STANDBY\":\n",
+    "    d.set_defaults()\n",
+    "    d.on()\n",
+    "state = str(d.state())\n",
+    "if state == \"ON\":\n",
+    "    print(f\"Device {device_name} is now in ON state\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "id": "8d90ad2c",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "True\n",
+      "True\n",
+      "True\n",
+      "True\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Test RECV properties\n",
+    "recv = d\n",
+    "print(32==len(recv.get_hbat_bf_delay_step_delays()))\n",
+    "print(288==len(recv.get_hbat_reference_itrf()))\n",
+    "print(4608==len(recv.get_hbat_antenna_itrf()))\n",
+    "print(3072==len(recv.get_hbat_signal_input_delays()))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 23,
+   "id": "58a52064",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "STAT/Beam/1 : OFF\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Beam device\n",
+    "b_name = 'STAT/Beam/1'\n",
+    "b = DeviceProxy(b_name)\n",
+    "state = str(b.state())\n",
+    "print(b_name + ' : ' + state)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 24,
+   "id": "79028ac4",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Device STAT/Beam/1 is now in ON state\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Start Beam device\n",
+    "if state == \"OFF\":\n",
+    "    time.sleep(1)\n",
+    "    b.initialise()\n",
+    "    time.sleep(1)\n",
+    "state = str(b.state())\n",
+    "if state == \"STANDBY\":\n",
+    "    b.set_defaults()\n",
+    "    b.on()\n",
+    "state = str(b.state())\n",
+    "if state == \"ON\":\n",
+    "    print(f\"Device {b_name} is now in ON state\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 30,
+   "id": "eaf2716c",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "True\n",
+      "True\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Test Beam attribute\n",
+    "beam = b\n",
+    "print(96==len(beam.HBAT_pointing_direction_R))\n",
+    "print(96==len(beam.HBAT_pointing_timestamp_R))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 38,
+   "id": "b7336c9f",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[1.05088616e-07 1.05088616e-07 1.05088616e-07 ... 1.05088616e-07\n",
+      " 1.05088616e-07 1.05088616e-07]\n",
+      "True\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Test HBAT delays\n",
+    "pointing_direction = numpy.array([[\"J2000\",\"0deg\",\"0deg\"]] * 96).flatten()\n",
+    "delays = b.HBAT_delays(pointing_direction)\n",
+    "print(delays)\n",
+    "print(96*16==len(delays))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 40,
+   "id": "8cdc47c6",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[31. 31. 31. ... 31. 31. 31.]\n",
+      "True\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Test calculate HBAT beamforming delays\n",
+    "HBAT_bf_delays = recv.calculate_HBAT_bf_delays(delays)\n",
+    "print(HBAT_bf_delays)\n",
+    "print(96*32==len(HBAT_bf_delays))"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 43,
+   "id": "294b46a6",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[[0 0 0 ... 0 0 0]\n",
+      " [0 0 0 ... 0 0 0]\n",
+      " [0 0 0 ... 0 0 0]\n",
+      " ...\n",
+      " [0 0 0 ... 0 0 0]\n",
+      " [0 0 0 ... 0 0 0]\n",
+      " [0 0 0 ... 0 0 0]]\n",
+      "[[31 31 31 ... 31 31 31]\n",
+      " [31 31 31 ... 31 31 31]\n",
+      " [31 31 31 ... 31 31 31]\n",
+      " ...\n",
+      " [31 31 31 ... 31 31 31]\n",
+      " [31 31 31 ... 31 31 31]\n",
+      " [31 31 31 ... 31 31 31]]\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Test whole main function\n",
+    "print(recv.read_attribute('HBAT_BF_delays_RW').value)\n",
+    "beam.HBAT_set_pointing(pointing_direction)\n",
+    "print(recv.read_attribute('HBAT_BF_delays_RW').value)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d5c2f175",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "StationControl",
+   "language": "python",
+   "name": "stationcontrol"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.7.3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/sbin/run_integration_test.sh b/sbin/run_integration_test.sh
index d2dbf03ec7e7920d1dfe72ec45084b3fee900239..7375bce8acb469bf3cabb13def038dfef268c56d 100755
--- a/sbin/run_integration_test.sh
+++ b/sbin/run_integration_test.sh
@@ -29,6 +29,7 @@ sleep 60
 # Do not remove `bash`, otherwise statement ignored by gitlab ci shell!
 bash "${LOFAR20_DIR}"/sbin/update_ConfigDb.sh "${LOFAR20_DIR}"/CDB/LOFAR_ConfigDb.json
 bash "${LOFAR20_DIR}"/sbin/update_ConfigDb.sh "${LOFAR20_DIR}"/CDB/stations/simulators_ConfigDb.json
+bash "${LOFAR20_DIR}"/sbin/update_ConfigDb.sh "${LOFAR20_DIR}"/CDB/stations/dummy_positions_ConfigDb.json
 #bash "${LOFAR20_DIR}"/sbin/update_ConfigDb.sh "${LOFAR20_DIR}"/CDB/integration_ConfigDb.json
 
 cd "$LOFAR20_DIR/docker-compose" || exit 1
diff --git a/tangostationcontrol/tangostationcontrol/clients/attribute_wrapper.py b/tangostationcontrol/tangostationcontrol/clients/attribute_wrapper.py
index e9239a834a248af32886df651a5f2463b4764489..b8e829ede0b5857cc5cc6c4da5d6052118cf57b7 100644
--- a/tangostationcontrol/tangostationcontrol/clients/attribute_wrapper.py
+++ b/tangostationcontrol/tangostationcontrol/clients/attribute_wrapper.py
@@ -72,7 +72,7 @@ class attribute_wrapper(attribute):
         if access == AttrWriteType.READ_WRITE:
             """ if the attribute is of READ_WRITE type, assign the write function to it"""
 
-            @only_in_states([DevState.STANDBY, DevState.ON], log=False)
+            @only_in_states([DevState.STANDBY, DevState.ON, DevState.ALARM], log=False)
             @fault_on_error()
             def write_func_wrapper(device, value):
                 """
@@ -86,7 +86,7 @@ class attribute_wrapper(attribute):
 
         """ Assign the read function to the attribute"""
 
-        @only_in_states([DevState.STANDBY, DevState.ON], log=False)
+        @only_in_states([DevState.STANDBY, DevState.ON, DevState.ALARM], log=False)
         @fault_on_error()
         def read_func_wrapper(device):
             """
diff --git a/tangostationcontrol/tangostationcontrol/devices/beam.py b/tangostationcontrol/tangostationcontrol/devices/beam.py
index 62580130acc586b6c9f1e8da5313e1e2682c114b..1ca5631b4d5d7e65981512fada14a3e398ec6556 100644
--- a/tangostationcontrol/tangostationcontrol/devices/beam.py
+++ b/tangostationcontrol/tangostationcontrol/devices/beam.py
@@ -9,8 +9,7 @@
 
 import numpy
 import datetime
-from functools import partial
-from tango.server import attribute, command, device_property
+from tango.server import attribute, command
 from tango import AttrWriteType, DebugIt, DevState, DeviceProxy, DevVarStringArray, DevVarDoubleArray
 
 # Additional import
@@ -36,23 +35,6 @@ class Beam(lofar_device):
     # -----------------
     # Device Properties
     # -----------------
-    reference_itrf =  device_property(
-        dtype='DevVarFloatArray',
-        mandatory=False,
-        default_value = numpy.tile(numpy.array([3826577.066, 461022.948, 5064892.786]),(96,1)) # CS002LBA, in ITRF2005 timestamp 2012.5
-    )
-
-    antenna_itrf = device_property(
-        dtype='DevVarFloatArray',
-        mandatory=False,
-        default_value = numpy.tile(numpy.array([3826923.546, 460915.441, 5064643.489]),(96,16,1)) # CS001LBA, in ITRF2005 timestamp 2012.5
-    )
-
-    HBAT_signal_input_delays = device_property(
-        dtype='DevVarFloatArray',
-        mandatory=False,
-        default_value = numpy.zeros((96,32), dtype=numpy.float64)
-    )
 
     # ----------
     # Attributes
@@ -83,6 +65,16 @@ class Beam(lofar_device):
         # Set a reference of RECV device
         self.recv_proxy = DeviceProxy("STAT/RECV/1")
 
+        # Retrieve positions from RECV device
+        HBAT_reference_itrf = self.recv_proxy.HBAT_reference_itrf_R
+        HBAT_antenna_itrf_offsets = self.recv_proxy.HBAT_antenna_itrf_offsets_R
+
+        # a delay calculator for each tile
+        self.HBAT_delay_calculators = [delay_calculator(reference_itrf) for reference_itrf in HBAT_reference_itrf]
+
+        # absolute positions of each antenna element
+        self.HBAT_antenna_positions = [reference_itrf + HBAT_antenna_itrf_offsets for reference_itrf in HBAT_reference_itrf]
+
     # --------
     # internal functions
     # --------
@@ -97,36 +89,14 @@ class Beam(lofar_device):
 
         for tile in range(96):
             # initialise delay calculator
-            d = delay_calculator(self.reference_itrf[tile])
+            d = self.HBAT_delay_calculators[tile]
             d.set_measure_time(timestamp)
 
             # calculate the delays based on the set reference position, the set time and now the set direction and antenna positions
-            delays[tile] = d.convert(pointing_direction[tile], self.antenna_itrf[tile])    
+            delays[tile] = d.convert(pointing_direction[tile], self.HBAT_antenna_positions[tile])
 
         return delays
 
-    @staticmethod
-    def _calculate_HBAT_bf_delays(delays: numpy.ndarray, HBAT_signal_input_delays: numpy.ndarray, HBAT_bf_delay_step_delays: numpy.ndarray):
-        """
-        Helper function that converts a signal path delay (in seconds) to an analog beam weight,
-        which is a value per tile per dipole per polarisation.
-        """
-        # Duplicate delay values per polarisation
-        polarised_delays = numpy.tile(delays, 2)                      # output dims -> 96x32           
-
-        # Add signal input delay
-        calibrated_delays = numpy.add(polarised_delays, HBAT_signal_input_delays)
-
-        # Find the right delay step by looking for the closest match in property RECV-> HBAT_bf_delay_step_delays
-        HBAT_bf_delays = numpy.zeros((96,32), dtype=numpy.int64)
-        distance = lambda x , y : numpy.absolute(x-y)
-        for tile in range(96):
-            for at in range(32):
-                delay = calibrated_delays[tile,at]
-                step = min(HBAT_bf_delay_step_delays,key=partial(distance,delay))
-                HBAT_bf_delays[tile,at] = numpy.where(HBAT_bf_delay_step_delays==step)[0][0]
-        return HBAT_bf_delays
-
     def _HBAT_set_pointing(self, pointing_direction: numpy.array, timestamp: datetime.datetime = datetime.datetime.now()):
         """
         Uploads beam weights based on a given pointing direction 2D array (96 tiles x 3 parameters)
@@ -135,11 +105,12 @@ class Beam(lofar_device):
         delays = self._HBAT_delays(pointing_direction, timestamp)
         
         # Convert delays into beam weights
-        HBAT_bf_delay_step_delays = self.recv_proxy.get_hbat_bf_delay_step_delays()
-        HBAT_bf_delays = self._calculate_HBAT_bf_delays(delays, self.HBAT_signal_input_delays, HBAT_bf_delay_step_delays)
+        delays = delays.flatten()
+        HBAT_bf_delay_steps = self.recv_proxy.calculate_HBAT_bf_delay_steps(delays)
+        HBAT_bf_delay_steps = numpy.array(HBAT_bf_delay_steps, dtype=numpy.int64).reshape(96,32)
 
         # Write weights to RECV
-        self.recv_proxy.HBAT_BF_delays_RW = HBAT_bf_delays
+        self.recv_proxy.HBAT_BF_delay_steps_RW = HBAT_bf_delay_steps
 
         # Record where we now point to, now that we've updated the weights.
         # Only the entries within the mask have been updated
@@ -206,6 +177,7 @@ class Beam(lofar_device):
         """
         Uploads beam weights based on a given pointing direction 2D array (96 tiles x 3 parameters)
         """
+        # Reshape the flattened input array
         pointing_direction = numpy.array(pointing_direction).reshape(96,3)
 
         self._HBAT_set_pointing(pointing_direction, timestamp)
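
With these changes the antenna positions and the delay-to-step conversion both live on the RECV device; Beam only computes geometric delays and forwards them. A client-side sketch of the resulting flow, using the device names, commands and attributes from the diff above (HBAT_set_pointing wraps the same steps internally):

    import numpy
    from tango import DeviceProxy

    beam = DeviceProxy("STAT/Beam/1")
    recv = DeviceProxy("STAT/RECV/1")

    # 96 tiles, all pointed at (J2000, 0deg, 0deg)
    pointing = numpy.array([["J2000", "0deg", "0deg"]] * 96).flatten()

    # 1. Beam computes the geometric delays per tile and element (96 x 16, flattened)
    delays = beam.HBAT_delays(pointing)

    # 2. RECV converts the delays (seconds) into hardware delay steps (96 x 32, flattened)
    steps = recv.calculate_HBAT_bf_delay_steps(delays)

    # 3. HBAT_set_pointing performs both steps and writes HBAT_BF_delay_steps_RW on RECV
    beam.HBAT_set_pointing(pointing)
    print(recv.read_attribute("HBAT_BF_delay_steps_RW").value)   # the 96x32 step table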
diff --git a/tangostationcontrol/tangostationcontrol/devices/lofar_device.py b/tangostationcontrol/tangostationcontrol/devices/lofar_device.py
index 6fed370c4e714813b1a7ce6768cc93209cff1c5e..5ca671d4c73b37c1035c69ed75ce82f08def2095 100644
--- a/tangostationcontrol/tangostationcontrol/devices/lofar_device.py
+++ b/tangostationcontrol/tangostationcontrol/devices/lofar_device.py
@@ -93,15 +93,6 @@ class lofar_device(Device, metaclass=DeviceMeta):
         # trigger a write_{name} call. See https://www.tango-controls.org/community/forum/c/development/c/accessing-own-deviceproxy-class/?page=1#post-2021
         self.proxy = DeviceProxy(self.get_name())
 
-        # register a proxy to ourselves, to interact with
-        # our attributes and commands as a client would.
-        #
-        # this is required to get/set attributes.
-        #
-        # we cannot write directly to our attribute, as that would not
-        # trigger a write_{name} call. See https://www.tango-controls.org/community/forum/c/development/c/accessing-own-deviceproxy-class/?page=1#post-2021
-        self.proxy = DeviceProxy(self.get_name())
-
     @log_exceptions()
     def delete_device(self):
         """Hook to delete resources allocated in init_device.
diff --git a/tangostationcontrol/tangostationcontrol/devices/recv.py b/tangostationcontrol/tangostationcontrol/devices/recv.py
index 5eb0362640c54fddc3c34ae28485e9308886260c..22f2f84831eb218c67733026d0271d086e004f33 100644
--- a/tangostationcontrol/tangostationcontrol/devices/recv.py
+++ b/tangostationcontrol/tangostationcontrol/devices/recv.py
@@ -12,6 +12,7 @@
 """
 
 # PyTango imports
+from functools import partial
 from tango import DebugIt
 from tango.server import command
 from tango.server import device_property, attribute
@@ -36,7 +37,6 @@ class RECV(opcua_device):
     # -----------------
     # Device Properties
     # -----------------
-
     ANT_mask_RW_default = device_property(
         dtype='DevVarBooleanArray',
         mandatory=False,
@@ -62,6 +62,34 @@ class RECV(opcua_device):
             14.9781E-9, 15.5063E-9
         ],dtype=numpy.float64)
     )
+    
+    HBAT_reference_itrf = device_property(
+        dtype='DevVarFloatArray',
+        mandatory=False
+    )
+
+    HBAT_antenna_itrf_offsets = device_property(
+        dtype='DevVarFloatArray',
+        mandatory=False
+    )
+
+    HBAT_signal_input_delays = device_property(
+        dtype='DevVarFloatArray',
+        mandatory=False,
+        default_value = numpy.zeros((96,32), dtype=numpy.float64)
+    )
+
+    ITRF_Reference_Frame = device_property(
+        dtype='DevString',
+        mandatory=False,
+        default_value = "ITRF2005"
+    )
+
+    ITRF_Reference_Epoch = device_property(
+        dtype='DevFloat',
+        mandatory=False,
+        default_value = 2015.5
+    )
 
     first_default_settings = [
         # set the masks first, as those filter any subsequent settings
@@ -79,8 +107,8 @@ class RECV(opcua_device):
     
     # The HBAT beamformer delays represent 32 delays for each of the 96 inputs.
     # The 32 delays deconstruct as delays[polarisation][dipole], and each delay is the number of 'delay steps' to apply (0.5ns for HBAT1).
-    HBAT_BF_delays_R             = attribute_wrapper(comms_annotation=["HBAT_BF_delays_R"          ],datatype=numpy.int64  , dims=(32,96))
-    HBAT_BF_delays_RW            = attribute_wrapper(comms_annotation=["HBAT_BF_delays_RW"         ],datatype=numpy.int64  , dims=(32,96), access=AttrWriteType.READ_WRITE)
+    HBAT_BF_delay_steps_R        = attribute_wrapper(comms_annotation=["HBAT_BF_delay_steps_R"     ],datatype=numpy.int64  , dims=(32,96))
+    HBAT_BF_delay_steps_RW       = attribute_wrapper(comms_annotation=["HBAT_BF_delay_steps_RW"    ],datatype=numpy.int64  , dims=(32,96), access=AttrWriteType.READ_WRITE)
     HBAT_LED_on_R                = attribute_wrapper(comms_annotation=["HBAT_LED_on_R"             ],datatype=numpy.bool_  , dims=(32,96))
     HBAT_LED_on_RW               = attribute_wrapper(comms_annotation=["HBAT_LED_on_RW"            ],datatype=numpy.bool_  , dims=(32,96), access=AttrWriteType.READ_WRITE)
     HBAT_PWR_LNA_on_R            = attribute_wrapper(comms_annotation=["HBAT_PWR_LNA_on_R"         ],datatype=numpy.bool_  , dims=(32,96))
@@ -119,19 +147,57 @@ class RECV(opcua_device):
     RECVTR_monitor_rate_RW       = attribute_wrapper(comms_annotation=["RECVTR_monitor_rate_RW"    ],datatype=numpy.int64  , access=AttrWriteType.READ_WRITE)
     RECVTR_translator_busy_R     = attribute_wrapper(comms_annotation=["RECVTR_translator_busy_R"  ],datatype=numpy.bool_  )
 
+    HBAT_antenna_itrf_offsets_R = attribute(access=AttrWriteType.READ,
+        dtype=((numpy.float,),), max_dim_x=3, max_dim_y=16,
+        fget=lambda self: numpy.array(self.HBAT_antenna_itrf_offsets).reshape(16,3))
+
+    HBAT_reference_itrf_R = attribute(access=AttrWriteType.READ,
+        dtype=((numpy.float,),), max_dim_x=3, max_dim_y=96,
+        fget=lambda self: numpy.array(self.HBAT_reference_itrf).reshape(96,3))
+
     # --------
     # overloaded functions
     # --------
 
+    # --------
+    # internal functions
+    # --------
+    def _calculate_HBAT_bf_delay_steps(self, delays: numpy.ndarray):
+        """
+        Helper function that converts a signal path delay (in seconds) to an analog beam weight,
+        which is a value per tile per dipole per polarisation.
+        """
+        # Duplicate delay values per polarisation
+        polarised_delays = numpy.tile(delays, 2)                      # output dims -> 96x32
+
+        # Add signal input delay
+        calibrated_delays = numpy.add(polarised_delays, self.HBAT_signal_input_delays)
+
+        # Find the right delay step by looking for the closest match in the RECV property HBAT_bf_delay_step_delays
+        HBAT_bf_delay_steps = numpy.zeros((96,32), dtype=numpy.int64)
+        distance = lambda x , y : numpy.absolute(x-y)
+        for tile in range(96):
+            for at in range(32):
+                delay = calibrated_delays[tile,at]
+                step = min(self.HBAT_bf_delay_step_delays,key=partial(distance,delay))
+                HBAT_bf_delay_steps[tile,at] = numpy.where(self.HBAT_bf_delay_step_delays==step)[0][0]
+        return HBAT_bf_delay_steps
+
     # --------
     # Commands
     # --------
-    @command(dtype_out=DevVarFloatArray)
-    @DebugIt()
-    @only_in_states([DevState.ON])
-    def get_hbat_bf_delay_step_delays(self):
-        """ Return the property HBAT_bf_delay_step_delays """
-        return self.HBAT_bf_delay_step_delays
+    
+    @command(dtype_in=DevVarFloatArray, dtype_out=DevVarFloatArray)
+    def calculate_HBAT_bf_delay_steps(self, delays: numpy.ndarray):
+        """ converts a signal path delay (in seconds) to an analog beam weight """
+        
+        # Reshape the flattened input array
+        delays = numpy.array(delays).reshape(96,16)
+        
+        # Calculate the beam weight array
+        HBAT_bf_delay_steps = self._calculate_HBAT_bf_delay_steps(delays)
+        
+        return HBAT_bf_delay_steps.flatten()
 
     @command()
     @DebugIt()
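
The nearest-step search in _calculate_HBAT_bf_delay_steps loops over all 96x32 inputs with min() and a distance key. A hedged alternative sketch, not the repository code, expressing the same lookup as a single vectorised argmin over the step-delay table:

    import numpy

    def nearest_step_indices(calibrated_delays, step_delays):
        """Per delay, return the index of the closest entry in step_delays."""
        # broadcast to (96, 32, n_steps) distances, then pick the minimum per delay
        distances = numpy.abs(calibrated_delays[..., numpy.newaxis] - step_delays)
        return numpy.argmin(distances, axis=-1).astype(numpy.int64)

    # illustrative inputs; the device uses its HBAT_bf_delay_step_delays property
    calibrated_delays = numpy.zeros((96, 32))
    step_delays = numpy.arange(32) * 0.5e-9          # 0.5 ns steps, as noted for HBAT1
    print(nearest_step_indices(calibrated_delays, step_delays).shape)   # (96, 32)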
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py b/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py
index c0b6f7a98894b7c7d660828231935b63c5b0f5da..3b0f59d242c7d29ac707b7e7361d2e9dce706424 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py
@@ -108,8 +108,8 @@ class XST(Statistics):
     FPGA_xst_subband_select_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_subband_select_RW"], datatype=numpy.uint32, dims=(8,16), access=AttrWriteType.READ_WRITE)
     FPGA_xst_subband_select_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_subband_select_R"], datatype=numpy.uint32, dims=(8,16))
 
-     FPGA_xst_offload_nof_crosslets_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_nof_crosslets_RW"], datatype=numpy.uint32, dims=(16,), access=AttrWriteType.READ_WRITE)
-     FPGA_xst_offload_nof_crosslets_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_nof_crosslets_R"], datatype=numpy.uint32, dims=(16,))
+    FPGA_xst_offload_nof_crosslets_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_nof_crosslets_RW"], datatype=numpy.uint32, dims=(16,), access=AttrWriteType.READ_WRITE)
+    FPGA_xst_offload_nof_crosslets_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_nof_crosslets_R"], datatype=numpy.uint32, dims=(16,))
 
     # number of packets with valid payloads
     nof_valid_payloads_R    = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_valid_payloads"}, dims=(XSTCollector.MAX_FPGAS,), datatype=numpy.uint64)
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_beam.py b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_beam.py
index 19b578a75111eb9a8922082ef00fb2a217d9e4e0..b7e8bc2a732801fdf80cb978e5b558bfc3909623 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_beam.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_beam.py
@@ -7,54 +7,66 @@
 # Distributed under the terms of the APACHE license.
 # See LICENSE.txt for more info.
 
-import time
 import numpy
+from tango import DevState
 from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
 
 from .base import AbstractTestBases
 
 class TestDeviceBeam(AbstractTestBases.TestDeviceBase):
 
+    pointing_direction = numpy.array([["J2000","0deg","0deg"]] * 96).flatten()
+
     def setUp(self):
         super().setUp("STAT/Beam/1")
     
-    def test_write_HBAT_delays(self):
-        """ Test whether the delay values are correctly saved into the relative RECV attribute"""
-
-        self.proxy.initialise()
-        self.proxy.on()
-
-        # setup RECV as well
+    def setup_recv_proxy(self):
+        # setup RECV
         recv_proxy = TestDeviceProxy("STAT/RECV/1")
         recv_proxy.off()
         recv_proxy.initialise()
+        self.assertEqual(DevState.STANDBY, recv_proxy.state())
         recv_proxy.set_defaults()
         recv_proxy.on()
+        self.assertEqual(DevState.ON, recv_proxy.state())
+        return recv_proxy
+    
+    def test_HBAT_delays_dims(self):
+        """Verify HBAT delays are retrieved with correct dimensions"""
+        self.setup_recv_proxy()
 
-        self.proxy.recv_proxy = recv_proxy
+        # setup BEAM
+        self.proxy.init()
+        self.proxy.Initialise()
+        self.assertEqual(DevState.STANDBY, self.proxy.state())
+        self.proxy.set_defaults()
+        self.proxy.on()
+        self.assertEqual(DevState.ON, self.proxy.state())
+
+        # verify HBAT_delays method returns the correct dimensions
+        HBAT_delays = self.proxy.HBAT_delays(self.pointing_direction)
+        self.assertEqual(1536, len(HBAT_delays))    # 96*16
+    
+    def test_set_pointing(self):
+        """Verify if set pointing procedure is correctly executed"""
+        recv_proxy = self.setup_recv_proxy()
+
+        # setup BEAM
+        self.proxy.init()
+        self.proxy.Initialise()
+        self.assertEqual(DevState.STANDBY, self.proxy.state())
+        self.proxy.set_defaults()
+        self.proxy.on()
+        self.assertEqual(DevState.ON, self.proxy.state())
 
         # Verify attribute is present (all zeros if never used before)
-        HBAT_delays_r1 = numpy.array(recv_proxy.read_attribute('HBAT_BF_delays_RW').value)
+        HBAT_delays_r1 = numpy.array(recv_proxy.read_attribute('HBAT_BF_delay_steps_RW').value)
         self.assertIsNotNone(HBAT_delays_r1)
 
-        # Unable to independently test '_calculate_HBAT_bf_delays' because it is not a Tango command, 
-        # thus DeviceProxy cannot access it. On the other hand, the method cannot be unit-tested because it
-        # requires access to a DeviceRecv property from DeviceBeam. This last requirement should change with L2SS-574
-        # 
-        # verify if values are actually transformed
-        # HBAT_delays_flat = self.proxy.HBAT_delays(numpy.array([["J2000","0deg","0deg"]] * 96).flatten()) 
-        # HBAT_delays = numpy.array(HBAT_delays_flat).reshape(96,16)
-        # HBAT_signal_input_delays = numpy.zeros((96,32), dtype=numpy.float64)    # Property of Beam-device
-        # HBAT_bf_delay_step_delays = recv_proxy.get_hbat_bf_delay_step_delays()
-        # HBAT_bf_delays = self.proxy._calculate_HBAT_bf_delays(HBAT_delays, HBAT_signal_input_delays, HBAT_bf_delay_step_delays)
-        # self.assertNotEqual(HBAT_delays, HBAT_bf_delays)
-
-        time.sleep(3)
-        
-        # Verify writing operation does not lead to errors
-        self.proxy.HBAT_set_pointing(numpy.array([["J2000","0deg","0deg"]] * 96).flatten())  # write values to RECV
-        HBAT_delays_r2 = numpy.array(recv_proxy.read_attribute('HBAT_BF_delays_RW').value)
+        self.proxy.HBAT_set_pointing(self.pointing_direction)  # write values to RECV
+        HBAT_delays_r2 = numpy.array(recv_proxy.read_attribute('HBAT_BF_delay_steps_RW').value)
         self.assertIsNotNone(HBAT_delays_r2)
 
         # Verify delays changed (to be discussed)
-        #self.assertFalse((HBAT_delays_r1==HBAT_delays_r2).all()) 
+        #self.assertFalse((HBAT_delays_r1==HBAT_delays_r2).all())
+
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_beam_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_beam_device.py
index 795f9317ffe155a62fb712f74768edc26e1f5f77..c071dddfe8570f987079807fc76c7ef530132069 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_beam_device.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_beam_device.py
@@ -7,17 +7,14 @@
 # Distributed under the terms of the APACHE license.
 # See LICENSE.txt for more info.
 
-from tango import DevState
 from tango.test_context import DeviceTestContext
 
 from tangostationcontrol.devices import beam, lofar_device
 
-import numpy
 import mock
 
 from tangostationcontrol.test import base
 
-
 class TestBeamDevice(base.TestCase):
 
     def setUp(self):
@@ -33,41 +30,12 @@ class TestBeamDevice(base.TestCase):
     
     def test_get_pointing_directions(self):
         """Verify can read pointings attribute and length matches without err"""
-        with DeviceTestContext(beam.Beam, process=True) as proxy:
+        with DeviceTestContext(beam.Beam, process=True, timeout=10) as proxy:
             self.assertEqual(96, len(proxy.read_attribute(
                 "HBAT_pointing_direction_R").value))
 
     def test_get_pointing_timestamps(self):
         """Verify can read timestamps attribute and length matches without err"""
-
-        with DeviceTestContext(beam.Beam, process=True) as proxy:
+        with DeviceTestContext(beam.Beam, process=True, timeout=10) as proxy:
             self.assertEqual(96, len(proxy.read_attribute(
                 "HBAT_pointing_timestamp_R").value))
-
-    def test_HBAT_delays_dims(self):
-        """Verify HBAT delays are retrieved with correct dimensions"""
-        with DeviceTestContext(beam.Beam, process=True) as proxy:
-            proxy.init()
-            proxy.Initialise()
-            self.assertEqual(DevState.STANDBY, proxy.state())
-            proxy.set_defaults()
-            proxy.on()
-            self.assertEqual(DevState.ON, proxy.state())
-
-            # verify HBAT_delays method returns the correct dimensions
-            HBAT_delays = proxy.HBAT_delays(numpy.array([["J2000","0deg","0deg"]] * 96).flatten())
-            self.assertEqual((96*16,), HBAT_delays.shape)
-    
-    def test_HBAT_delays_calculations(self):
-        """Verify the calculations from delays to weights"""
-        with DeviceTestContext(beam.Beam, process=True) as proxy:
-            proxy.init()
-            proxy.Initialise()
-            self.assertEqual(DevState.STANDBY, proxy.state())
-            proxy.set_defaults()
-            proxy.on()
-            self.assertEqual(DevState.ON, proxy.state())
-        
-            # verify property is retrieved (workaround)
-            HBAT_signal_input_delays = beam.Beam.HBAT_signal_input_delays.default_value
-            self.assertTrue((HBAT_signal_input_delays==numpy.zeros((96,32), dtype=numpy.float64)).all())
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_recv_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_recv_device.py
new file mode 100644
index 0000000000000000000000000000000000000000..1fde8856cf55a79a03aaec2cc53ba34a4ad818f0
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_recv_device.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+from tango.test_context import DeviceTestContext
+
+from tangostationcontrol.devices import recv, lofar_device
+
+import mock
+import numpy
+
+from tangostationcontrol.test import base
+
+class TestRecvDevice(base.TestCase):
+
+    # some dummy values for mandatory properties
+    recv_properties = {'OPC_Server_Name': 'example.com', 'OPC_Server_Port': 4840, 'OPC_Time_Out': 5.0}
+
+    def setUp(self):
+        super(TestRecvDevice, self).setUp()     
+
+        # Patch DeviceProxy to allow making the proxies during initialisation
+        # that we otherwise avoid using
+        for device in [lofar_device]:
+            proxy_patcher = mock.patch.object(
+                device, 'DeviceProxy')
+            proxy_patcher.start()
+            self.addCleanup(proxy_patcher.stop)
+
+    def test_calculate_HBAT_bf_delay_steps(self):
+        """Verify HBAT beamforming calculations are correctly executed"""
+        with DeviceTestContext(recv.RECV, properties=self.recv_properties, process=True) as proxy:
+            delays = numpy.random.rand(96,16).flatten()
+            HBAT_bf_delay_steps = proxy.calculate_HBAT_bf_delay_steps(delays)
+            self.assertEqual(3072, len(HBAT_bf_delay_steps))    # 96 tiles x 32 signal inputs = 3072
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver.py
index 8f3e7a25046764eeedaf9fa2380b2fdbe76b0b6b..0f54a423502981dd782879a8adc64b0764309540 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/archiver.py
@@ -3,59 +3,14 @@
 import logging
 
 from tango import DeviceProxy, AttributeProxy, DevState, DevFailed
+from tangostationcontrol.toolkit.archiver_util import get_db_config, attribute_name_from_url, device_name_url
 
 import time
 import json
 import pkg_resources
+from functools import wraps
 
 logger = logging.getLogger()
-
-def attribute_name_from_url(attribute_name:str):
-    """
-    For some operations Tango attribute must be transformed from the form 'tango://db:port/domain/family/name/attribute'
-    to canonical 'domain/family/name/attribute'
-    """
-    if attribute_name.startswith('tango://'):
-        return '/'.join(attribute_name.split('/')[3:])
-
-    if len(attribute_name.split('/')) != 4:
-        raise ValueError(f"Expected attribute of format 'domain/family/name/attribute', got {attribute_name}")
-
-    return attribute_name
-
-def device_name_url(device_name:str, tango_host:str = 'databaseds:10000'):
-    """
-    For some operations Tango devices must be transformed from the form 'domain/family/name'
-    to 'tango://db:port/domain/family/name'
-    """
-    if device_name.startswith('tango://'):
-        return device_name
-
-    if len(device_name.split('/')) != 3:
-        raise ValueError(f"Expected device name of format 'domain/family/name', got {device_name}")
-
-    return f"tango://{tango_host}/{device_name}"
-
-def split_tango_name(tango_fqname:str, tango_type:str):
-    """
-    Helper function to split device or attribute Tango full qualified names
-    into its components
-    """
-    if tango_type.lower() == 'device':
-        try:
-            domain, family, member = tango_fqname.split('/')
-            return domain, family, member
-        except ValueError as e:
-            raise ValueError(f"Could not parse device name {tango_fqname}. Please provide FQDN, e.g. STAT/Device/1") from e
-    elif tango_type.lower() == 'attribute':
-        try:
-            domain, family, member, name = tango_fqname.split('/')
-            return domain, family, member, name
-        except ValueError as e:
-            raise ValueError(f"Could not parse attribute name {tango_fqname}. Please provide FQDN, e.g. STAT/Device/1/Attribute") from e
-    else:
-        raise ValueError(f"Invalid value: {tango_type}. Please provide 'device' or 'attribute'.")
-
  
 def warn_if_attribute_not_found():
     """
@@ -64,9 +19,9 @@ def warn_if_attribute_not_found():
     """
     def inner(func):
         @wraps(func)
-        def warn_wrapper(self, *args, **kwargs):
+        def warn_wrapper(self, attribute_name, *args, **kwargs):
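+            # the wrapped method is assumed to take the attribute name as its first
+            # positional argument, so it can be referenced in the warning below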
             try:
-                return func(self, *args, **kwargs)
+                return func(self, attribute_name, *args, **kwargs)
             except DevFailed as e:
                 if e.args[0].reason == 'Attribute not found':
                     logger.warning(f"Attribute {attribute_name} not found!")
@@ -97,24 +52,12 @@ class Archiver():
         self.es_list = [es_name for es_name in self.get_subscribers(from_db=False)]
         self.cm.write_attribute('Context',context)    # Set default Context Archiving for all the subscribers
     
-    def get_db_config(self, device_name:str) -> dict:
-        """
-        Retrieve the DB credentials from the Tango properties of Configuration Manager or EventSubscribers
-        """
-        device = DeviceProxy(device_name)
-        # example LibConfiguration property value:
-        # ['connect_string= user=postgres password=password host=archiver-timescale port=5432 dbname=hdb', 'host=archiver-timescale', 'libname=libhdb++timescale.so', 'dbname=hdb', 'port=5432', 'user=postgres', 'password=password']
-        config_strs = device.get_property('LibConfiguration')['LibConfiguration']
-
-        config = dict(config_str.split("=",1) for config_str in config_strs)
-        return config
-    
     def get_hdbpp_libname(self, device_name:str):
         """
         Get the hdbpp library name used by the Configuration Manager or by the EventSubscribers
         Useful in the case of different DBMS architectures (e.g. MySQL, TimescaleDB)
         """
-        config = self.get_db_config(device_name)
+        config = get_db_config(device_name)
         return config["libname"]
     
     def get_subscribers(self, from_db:bool=False):
@@ -245,15 +188,14 @@ class Archiver():
         for a in attrs_list:
             attr_fullname = f"{device_name}/{a}".lower()
             attr_proxy = AttributeProxy(attr_fullname)
-            if attr_proxy.is_polled() is True:   # if not polled attribute is also not archived
+            if attr_proxy.is_polled() and not self.is_attribute_archived(attr_fullname):   # unpolled attributes cannot be archived; skip attributes that are already archived
                 try:
                     es = DeviceProxy(es_name or self.get_next_subscriber()) # choose an e.s. or get the first one available
-                    if es.AttributeList is None or not(self.cm.AttributeSearch(a)):
-                        polling_period = attr_proxy.get_poll_period() or self.dev_polling_time  
-                        archive_period = global_archive_period or int(attr_proxy.get_property('archive_period')['archive_period'][0]) or self.dev_archive_time                 
-                        self.add_attribute_to_archiver(attr_fullname,polling_period=polling_period,
-                            event_period=archive_period, es_name = es.name())
-                        #time.sleep(0.5)
+                    polling_period = attr_proxy.get_poll_period() or self.dev_polling_time  
+                    archive_period = global_archive_period or int(attr_proxy.get_property('archive_period')['archive_period'][0]) or self.dev_archive_time                 
+                    self.add_attribute_to_archiver(attr_fullname,polling_period=polling_period,
+                        event_period=archive_period, es_name = es.name())
+                    #time.sleep(0.5)
                 except IndexError as e:
                     logger.warning(f"Attribute {attr_fullname} will not be archived because archive event period is not defined!")
                 except Exception as e:
@@ -284,7 +226,8 @@ class Archiver():
         for a in attrs_list:
             try:
                 attr_fullname = f"{device_name}/{a}".lower()
-                self.remove_attribute_from_archiver(attr_fullname)
+                if self.is_attribute_archived(attr_fullname):
+                    self.remove_attribute_from_archiver(attr_fullname)
             except Exception as e:
                 raise Exception from e
     
@@ -331,16 +274,10 @@ class Archiver():
         """
         attribute_name = attribute_name_from_url(attribute_name)
         attributes = self.cm.AttributeSearch(attribute_name.lower())
-        if len(attributes)>1:
-            # Handle case same attribute_name r/rw 
-            if len(attributes)==2 and (attributes[0].endswith(attributes[1]+'w') or attributes[1].endswith(attributes[0]+'w')):
-                return True
-            else:
-                raise Exception(f"Multiple Attributes Matched: {attributes}")
-        elif len(attributes)==1:
-            return True
-        else:
-            return False
+
+        # AttributeSearch returns every attribute in which attribute_name occurs as a
+        # substring, so check whether an exact (canonical) match is included.
+        return attribute_name.lower() in [attribute_name_from_url(a).lower() for a in attributes]
     
     def update_archiving_attribute(self, attribute_name: str, polling_period: int, event_period: int, strategy: str = 'RUN'):
         """
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_util.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..6cb885c613d5de34178747828cffc589f12c3643
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/archiver_util.py
@@ -0,0 +1,63 @@
+"""
+   Utility functions for the Archiver functionality.
+"""
+
+from tango import DeviceProxy
+
+def get_db_config(device_name:str) -> dict:
+    """
+    Retrieve the DB credentials from the Tango properties of Configuration Manager or EventSubscribers
+    """
+    device = DeviceProxy(device_name)
+    # example LibConfiguration property value:
+    # ['connect_string= user=postgres password=password host=archiver-timescale port=5432 dbname=hdb', 'host=archiver-timescale', 'libname=libhdb++timescale.so', 'dbname=hdb', 'port=5432', 'user=postgres', 'password=password']
+    config_strs = device.get_property('LibConfiguration')['LibConfiguration']
+
+    config = dict(config_str.split("=",1) for config_str in config_strs)
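+    # For the example above this yields entries such as:
+    #   {'host': 'archiver-timescale', 'libname': 'libhdb++timescale.so', 'dbname': 'hdb',
+    #    'port': '5432', 'user': 'postgres', 'password': 'password', ...}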
+    return config
+
+def attribute_name_from_url(attribute_name:str):
+    """
+    For some operations Tango attribute must be transformed from the form 'tango://db:port/domain/family/name/attribute'
+    to canonical 'domain/family/name/attribute'
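+
+    Example (doctest; illustrative attribute name):
+
+    >>> attribute_name_from_url('tango://databaseds:10000/stat/device/1/some_attribute_r')
+    'stat/device/1/some_attribute_r'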
+    """
+    if attribute_name.startswith('tango://'):
+        return '/'.join(attribute_name.split('/')[3:])
+
+    if len(attribute_name.split('/')) != 4:
+        raise ValueError(f"Expected attribute of format 'domain/family/name/attribute', got {attribute_name}")
+
+    return attribute_name
+
+def device_name_url(device_name:str, tango_host:str = 'databaseds:10000'):
+    """
+    For some operations Tango devices must be transformed from the form 'domain/family/name'
+    to 'tango://db:port/domain/family/name'
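+
+    Example (doctest; illustrative device name, default tango_host):
+
+    >>> device_name_url('stat/device/1')
+    'tango://databaseds:10000/stat/device/1'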
+    """
+    if device_name.startswith('tango://'):
+        return device_name
+
+    if len(device_name.split('/')) != 3:
+        raise ValueError(f"Expected device name of format 'domain/family/name', got {device_name}")
+
+    return f"tango://{tango_host}/{device_name}"
+
+def split_tango_name(tango_fqname:str, tango_type:str):
+    """
+    Helper function to split device or attribute Tango full qualified names
+    into its components
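+
+    Example (doctest; illustrative device name):
+
+    >>> split_tango_name('stat/device/1', 'device')
+    ('stat', 'device', '1')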
+    """
+    if tango_type.lower() == 'device':
+        try:
+            domain, family, member = tango_fqname.split('/')
+            return domain, family, member
+        except ValueError as e:
+            raise ValueError(f"Could not parse device name {tango_fqname}. Please provide FQDN, e.g. STAT/Device/1") from e
+    elif tango_type.lower() == 'attribute':
+        try:
+            domain, family, member, name = tango_fqname.split('/')
+            return domain, family, member, name
+        except ValueError as e:
+            raise ValueError(f"Could not parse attribute name {tango_fqname}. Please provide FQDN, e.g. STAT/Device/1/Attribute") from e
+    else:
+        raise ValueError(f"Invalid value: {tango_type}. Please provide 'device' or 'attribute'.")
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/retriever.py b/tangostationcontrol/tangostationcontrol/toolkit/retriever.py
index 6506ca3c79939ee9fea6c3ab0240938234d83cd4..d62975fd4d3e83108e604905aa4eaac71335afe2 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/retriever.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/retriever.py
@@ -1,7 +1,6 @@
 #! /usr/bin/env python3
 
-from tango import DeviceProxy
-from tangostationcontrol.toolkit.archiver import split_tango_name
+from tangostationcontrol.toolkit.archiver_util import get_db_config, split_tango_name
 
 from abc import ABC, abstractmethod
 from datetime import datetime, timedelta
@@ -15,26 +14,23 @@ class Retriever(ABC):
     """
     The Retriever abstract class implements retrieve operations on a given DBMS
     """
-    
-    def get_db_credentials(self):
-        """
-        Retrieves the DB credentials from the Tango properties of Configuration Manager
-        """
-        cm = DeviceProxy(self.cm_name)
-        config_list = list(cm.get_property('LibConfiguration')['LibConfiguration']) # dictionary {'LibConfiguration': list of strings}
-        if 'connect_string=' in config_list[0]: config_list.pop(0)  # possibly remove connect string because it causes errors
-        host = str([s for s in config_list if "host" in s][0].split('=')[1])
-        dbname = str([s for s in config_list if "dbname" in s][0].split('=')[1])
-        port = str([s for s in config_list if "port" in s][0].split('=')[1])
-        user = str([s for s in config_list if "user" in s][0].split('=')[1])
-        pw = str([s for s in config_list if "password" in s][0].split('=')[1])
-        return host,dbname,port,user,pw
 
-    def create_session(self,libname:str,user:str,pw:str,host:str,port:str,dbname:str):
+    def __init__(self):
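+        # NOTE: subclasses are expected to set self.cm_name and to provide
+        # connect_to_archiving_db()/set_archiver_base() before calling this constructor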
+        self.session = self.connect_to_archiving_db()
+        self.ab = self.set_archiver_base()
+    
+    def create_session(self, creds):
         """
-        Returns a session to a DBMS using default credentials.
+        Returns a session to a DBMS using the given credentials.
         """
-        connection_string = f"{libname}://{user}:{pw}@{host}:{port}/{dbname}"
+        libname = creds["libname"]
+        user = creds["user"]
+        password = creds["password"]
+        host = creds["host"]
+        port = creds["port"]
+        dbname = creds["dbname"]
+
+        connection_string = f"{libname}://{user}:{password}@{host}:{port}/{dbname}"
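+        # e.g. 'postgresql+psycopg2://postgres:password@archiver-timescale:5432/hdb'
+        # (illustrative values; the actual ones come from the LibConfiguration property)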
         engine = create_engine(connection_string)
         Session = sessionmaker(bind=engine)
         return Session
@@ -74,10 +70,8 @@ class Retriever(ABC):
             result = self.session.query(self.ab.Attribute.att_conf_id).filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, \
                                     self.ab.Attribute.member == member, self.ab.Attribute.name == name)).one()
             return result[0]
-        except TypeError as e:
-            raise Exception(f"Attribute {attribute_fqname} not found!") from e
-        except NoResultFound as e:
-            raise Exception(f"No records of attribute {attribute_fqname} found in DB") from e
+        except (TypeError, NoResultFound) as e:
+            raise ValueError(f"Attribute {attribute_fqname} not found!") from e
     
     @abstractmethod
     def get_attribute_datatype(self,attribute_fqname: str):
@@ -103,8 +97,8 @@ class Retriever(ABC):
                     join(self.ab.Attribute,self.ab.Attribute.att_conf_id==base_class.att_conf_id).\
                     filter(and_(self.ab.Attribute.att_conf_id == attr_id,base_class.data_time >= time_delta_db, \
                             base_class.data_time <= time_now_db)).order_by(base_class.data_time).all()
-        except AttributeError as e:
-            raise Exception(f"Empty result: Attribute {attribute_fqname} not found") from e
+        except (AttributeError, TypeError, NoResultFound) as e:
+            raise ValueError(f"Attribute {attribute_fqname} not found!") from e
         return result
 
     def get_attribute_value_by_interval(self,attribute_fqname: str, start_time: datetime, stop_time: datetime, tablename:str):
@@ -121,28 +115,30 @@ class Retriever(ABC):
                     join(self.ab.Attribute,self.ab.Attribute.att_conf_id==base_class.att_conf_id).\
                         filter(and_(self.ab.Attribute.att_conf_id == attr_id,base_class.data_time >= str(start_time), \
                                 base_class.data_time <= str(stop_time))).order_by(base_class.data_time).all()
-        except AttributeError as e:
-            raise Exception(f"Empty result: Attribute {attribute_fqname} not found") from e
+        except (AttributeError, TypeError, NoResultFound) as e:
+            raise ValueError(f"Attribute {attribute_fqname} not found!") from e
         return result
 
 class RetrieverMySQL(Retriever):
     
     def __init__(self, cm_name: str = 'archiving/hdbpp/confmanager01'):
         self.cm_name = cm_name
-        self.session = self.connect_to_archiving_db()
-        self.ab = self.set_archiver_base()
+
+        super().__init__()
     
     def connect_to_archiving_db(self):
         """
-        Returns a session to a MySQL DBMS using default credentials.
+        Returns a session to a MySQL DBMS using the credentials from the Configuration Manager.
         """
-        host,dbname,port,user,pw = super().get_db_credentials()
+        creds = get_db_config(self.cm_name)
+
         # Set sqlalchemy library connection
-        if host=='archiver-maria-db':
-            libname = 'mysql+pymysql'         
+        if creds["host"] == 'archiver-maria-db':
+            creds["libname"] = 'mysql+pymysql'         
         else:
-            raise ValueError(f"Invalid hostname: {host}")
-        Session = super().create_session(libname,user,pw,host,port,dbname)
+            raise ValueError(f"Invalid hostname: {creds['host']}, we only support 'archiver-maria-db'")
+
+        Session = self.create_session(creds)
         return Session()
     
     def set_archiver_base(self):
@@ -162,10 +158,8 @@ class RetrieverMySQL(Retriever):
             result = self.session.query(self.ab.DataType.data_type).join(self.ab.Attribute,self.ab.Attribute.att_conf_data_type_id==self.ab.DataType.att_conf_data_type_id).\
                         filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, self.ab.Attribute.member == member, self.ab.Attribute.name == name)).one()
             return result[0]
-        except TypeError as e:
-            raise Exception(f"Attribute not {attribute_fqname} found!") from e
-        except NoResultFound as e:
-            raise Exception(f"No records of attribute {attribute_fqname} found in DB") from e
+        except (AttributeError, TypeError, NoResultFound) as e:
+            raise ValueError(f"Attribute {attribute_fqname} not found!") from e
     
     def get_attribute_value_by_hours(self,attribute_fqname: str, hours: float = 1.0):
         """
@@ -225,20 +219,22 @@ class RetrieverTimescale(Retriever):
     
     def __init__(self, cm_name: str = 'archiving/hdbppts/confmanager01'):
         self.cm_name = cm_name
-        self.session = self.connect_to_archiving_db()
-        self.ab = self.set_archiver_base()
+
+        super().__init__()
     
     def connect_to_archiving_db(self):
         """
-        Returns a session to a MySQL DBMS using default credentials.
+        Returns a session to a TimescaleDB (PostgreSQL) DBMS using the credentials from the Configuration Manager.
         """
-        host,dbname,port,user,pw = super().get_db_credentials()
+        creds = get_db_config(self.cm_name)
+
         # Set sqlalchemy library connection        
-        if host=='archiver-timescale':
-            libname = 'postgresql+psycopg2'
+        if creds["host"] == 'archiver-timescale':
+            creds["libname"] = 'postgresql+psycopg2'
         else:
-            raise ValueError(f"Invalid hostname: {host}")
-        Session = super().create_session(libname,user,pw,host,port,dbname)
+            raise ValueError(f"Invalid hostname: {creds['host']}, we only support 'archiver-timescale'")
+
+        Session = self.create_session(creds)
         return Session()
     
     def set_archiver_base(self):
@@ -258,10 +254,8 @@ class RetrieverTimescale(Retriever):
             result = self.session.query(self.ab.DataType.type).join(self.ab.Attribute,self.ab.Attribute.att_conf_type_id==self.ab.DataType.att_conf_type_id).\
                             filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, self.ab.Attribute.member == member, self.ab.Attribute.name == name)).one()
             return result[0]
-        except TypeError as e:
-            raise Exception(f"Attribute not {attribute_fqname} found!") from e
-        except NoResultFound as e:
-            raise Exception(f"No records of attribute {attribute_fqname} found in DB") from e
+        except (AttributeError, TypeError, NoResultFound) as e:
+            raise ValueError(f"Attribute {attribute_fqname} not found!") from e
     
     def get_attribute_format(self,attribute_fqname: str):
         """
@@ -274,10 +268,8 @@ class RetrieverTimescale(Retriever):
             result = self.session.query(self.ab.Format.format).join(self.ab.Attribute,self.ab.Attribute.att_conf_format_id==self.ab.Format.att_conf_format_id).\
                 filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, self.ab.Attribute.member == member, self.ab.Attribute.name == name)).one()
             return result[0]
-        except TypeError as e:
-            raise Exception("Attribute not found!") from e
-        except NoResultFound as e:
-            raise Exception(f"No records of attribute {attribute_fqname} found in DB") from e
+        except (AttributeError, TypeError, NoResultFound) as e:
+            raise ValueError(f"Attribute {attribute_fqname} not found!") from e
     
     def get_attribute_tablename(self,attribute_fqname: str):
         """
@@ -289,10 +281,8 @@ class RetrieverTimescale(Retriever):
             result = self.session.query(self.ab.Attribute.table_name).filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, \
                                     self.ab.Attribute.member == member, self.ab.Attribute.name == name)).one()
             return result[0]
-        except TypeError as e:
-            raise Exception("Attribute not found!") from e
-        except NoResultFound as e:
-            raise Exception(f"No records of attribute {attribute_fqname} found in DB") from e
+        except (AttributeError, TypeError, NoResultFound) as e:
+            raise ValueError(f"Attribute {attribute_fqname} not found!") from e
     
     def get_attribute_value_by_hours(self, attribute_fqname: str, hours: float = 1.0):
         """
@@ -311,4 +301,4 @@ class RetrieverTimescale(Retriever):
         """
         tablename = self.get_attribute_tablename(attribute_fqname)
         return super().get_attribute_value_by_interval(attribute_fqname,start_time,stop_time,tablename)
-    
\ No newline at end of file
+    
diff --git a/tangostationcontrol/test-requirements.txt b/tangostationcontrol/test-requirements.txt
index 16a9033db5279a246609fdc0f9c941d26c74792a..1c18f0949ae6a3059fa0dcce9059e016a9a76e22 100644
--- a/tangostationcontrol/test-requirements.txt
+++ b/tangostationcontrol/test-requirements.txt
@@ -7,6 +7,7 @@ bandit>=1.6.0 # Apache-2.0
 coverage>=5.2.0 # Apache-2.0
 doc8>=0.8.0 # Apache-2.0
 flake8>=3.8.0 # MIT
+flake8-bugbear>=22.1.11 # MIT
 flake8-breakpoint>=1.1.0 # MIT
 flake8-debugger>=4.0.0 #MIT
 flake8-mock>=0.3 #GPL
diff --git a/tangostationcontrol/tox.ini b/tangostationcontrol/tox.ini
index fc688b430a9cdaa3805f5340e22826003e553080..c33316109926f6be052c0bd99b4ef5cd51a1217c 100644
--- a/tangostationcontrol/tox.ini
+++ b/tangostationcontrol/tox.ini
@@ -76,5 +76,5 @@ commands =
 
 [flake8]
 filename = *.py,.stestr.conf,.txt
-select = W292,B601,B602,T100,M001,F401
+select = W292,B601,B602,T100,M001,F401,B001,B002,B003,B004,B005,B006,B007,B008,B009,B010,B011,B012,B013,B014,B015,B016,B017,B018
 exclude=.tox,.egg-info