diff --git a/.gitignore b/.gitignore
index 60c6519f7724a7ca08cac3263b595400dba9fdd2..6841e3ee2682fb0d5660e2491c5b03ac34f6957e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,6 +12,7 @@
 **/.project
 **/.pydevproject
 **/.settings/org.eclipse.core.resources.prefs
+docs/build
 tangostationcontrol/dist
 tangostationcontrol/build
 **/.ipynb_checkpoints
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index f458c94bb5c002a69aac660d28736b4b0aa65f8a..24124fc8a01456cb9cd10eddee0070db77b3f82e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,6 +1,4 @@
-# TODO(Corne): Update this image to use our own registry once building
-#              images is in place.
-image: artefact.skao.int/ska-tango-images-tango-itango:9.3.7
+image: git.astron.nl:5000/lofar2.0/tango/tango-itango:9.3.7
 variables:
   GIT_SUBMODULE_STRATEGY: recursive
   PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
@@ -8,12 +6,291 @@ cache:
   paths:
     - .cache/pip
 stages:
+  - images
   - building
   - linting
   - static-analysis
   - unit-tests
   - integration-tests
   - packaging
+# See docker-compose/README.md for an explanation of the docker image behavior
+.base_docker_images:
+  stage: images
+  image: docker:latest
+  tags:
+    - privileged
+  services:
+    - name: docker:dind
+  variables:
+    DOCKER_TLS_CERTDIR: "/certs"
+  before_script:
+    - |
+      if [[ "$CI_COMMIT_BRANCH" == "$CI_DEFAULT_BRANCH" && -z "$CI_COMMIT_TAG" ]]; then
+        tag="latest"
+        echo "Running on tagged default branch '$CI_DEFAULT_BRANCH': tag = 'latest'"
+      else
+        tag="$CI_COMMIT_REF_SLUG"
+        echo "Running on branch '$CI_COMMIT_BRANCH': tag = $tag"
+      fi
+    - apk add --update make bash docker-compose
+    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
+    - touch /root/.Xauthority
+#    Hack BASH_SOURCE into sourced files; docker's sh shell won't set it
+    - export BASH_SOURCE=$(pwd)/bootstrap/etc/lofar20rc.sh
+#    source the lofar20rc file and mask its non-zero exit code
+    - . bootstrap/etc/lofar20rc.sh || true
+##    Allow docker image script to execute
+#    - chmod u+x $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh
+.base_docker_images_except:
+  extends: .base_docker_images
+  except:
+    refs:
+      - tags
+      - master
+.base_docker_store_images:
+  extends: .base_docker_images
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh
+docker_store_images_master_tag:
+  extends: .base_docker_store_images
+  only:
+    refs:
+      - tags
+      - master
+docker_store_images_changes:
+  extends: .base_docker_store_images
+  only:
+    changes:
+      - docker-compose/.env
+  except:
+    refs:
+      - tags
+      - master
+docker_build_image_all:
+  extends: .base_docker_images
+  only:
+    refs:
+      - tags
+      - master
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh elk latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh elk-configure-host latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh lofar-device-base latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh prometheus latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh itango latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh grafana latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh jupyter latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh apsct-sim latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh apspu-sim latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh recv-sim latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh sdptr-sim latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh unb2-sim latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-apsct latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-apspu latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-boot latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-docker latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-observation_control latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-recv latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sdp latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sst latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-unb2 latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-xst latest
+docker_build_image_elk:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/elk.yml
+      - docker-compose/elk/*
+      - docker-compose/elk-configure-host/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh elk $tag
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh elk-configure-host $tag
+docker_build_image_lofar_device_base:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/lofar-device-base.yml
+      - docker-compose/lofar-device-base/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh lofar-device-base $tag
+docker_build_image_prometheus:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/prometheus.yml
+      - docker-compose/prometheus/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh prometheus $tag
+docker_build_image_itango:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/itango.yml
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh itango $tag
+docker_build_image_grafana:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/grafana.yml
+      - docker-compose/grafana/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh grafana $tag
+docker_build_image_jupyter:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/jupyter.yml
+      - docker-compose/jupyter/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh jupyter $tag
+docker_build_image_apsct_sim:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/apsct-sim.yml
+      - docker-compose/pypcc-sim-base/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh apsct-sim $tag
+docker_build_image_apspu_sim:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/apspu-sim.yml
+      - docker-compose/pypcc-sim-base/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh apspu-sim $tag
+docker_build_image_recv_sim:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/recv-sim.yml
+      - docker-compose/pypcc-sim-base/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh recv-sim $tag
+docker_build_image_sdptr_sim:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/sdptr-sim.yml
+      - docker-compose/sdptr-sim/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh sdptr-sim $tag
+docker_build_image_unb2_sim:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/unb2-sim.yml
+      - docker-compose/pypcc-sim-base/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh unb2-sim $tag
+docker_build_image_device_apsct:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/device-apsct.yml
+      - docker-compose/lofar-device-base/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-apsct $tag
+docker_build_image_device_apspu:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/device-apspu.yml
+      - docker-compose/lofar-device-base/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-apspu $tag
+docker_build_image_device_boot:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/device-boot.yml
+      - docker-compose/lofar-device-base/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-boot $tag
+docker_build_image_device_docker:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/device-docker.yml
+      - docker-compose/lofar-device-base/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-docker $tag
+docker_build_image_device_observation_control:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/device-observation_control.yml
+      - docker-compose/lofar-device-base/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-observation_control $tag
+docker_build_image_device_recv:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/device-recv.yml
+      - docker-compose/lofar-device-base/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-recv $tag
+docker_build_image_device_sdp:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/device-sdp.yml
+      - docker-compose/lofar-device-base/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sdp $tag
+docker_build_image_device_sst:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/device-sst.yml
+      - docker-compose/lofar-device-base/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sst $tag
+docker_build_image_device_unb2:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/device-unb2.yml
+      - docker-compose/lofar-device-base/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-unb2 $tag
+docker_build_image_device_xst:
+  extends: .base_docker_images_except
+  only:
+    changes:
+      - docker-compose/device-xst.yml
+      - docker-compose/lofar-device-base/*
+  script:
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-xst $tag
 newline_at_eof:
   stage: linting
   before_script:
@@ -63,6 +340,14 @@ integration_test_docker:
   variables:
     DOCKER_TLS_CERTDIR: "/certs"
   before_script:
+    - |
+      if [[ "$CI_COMMIT_BRANCH" == "$CI_DEFAULT_BRANCH" && -z "$CI_COMMIT_TAG" ]]; then
+        tag="latest"
+        echo "Running on tagged default branch '$CI_DEFAULT_BRANCH': tag = 'latest'"
+      else
+        tag="$CI_COMMIT_REF_SLUG"
+        echo "Running on branch '$CI_COMMIT_BRANCH': tag = $tag"
+      fi
     - apk add --update make bash docker-compose
     - apk add --update bind-tools
     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
@@ -78,8 +363,12 @@ integration_test_docker:
     - . bootstrap/etc/lofar20rc.sh || true
 #    TANGO_HOST must be unset our databaseds will be unreachable
     - unset TANGO_HOST
-#    Allow integration test to execute
-    - chmod u+x $CI_PROJECT_DIR/sbin/run_integration_test.sh
+##    Allow docker image script to execute
+#    - chmod u+x $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh
+#    Do not remove 'bash' or the statement will be ignored by the primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh pull $tag
+##    Allow integration test to execute
+#    - chmod u+x $CI_PROJECT_DIR/sbin/run_integration_test.sh
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/run_integration_test.sh
 wheel_packaging:
diff --git a/CDB/LOFAR_ConfigDb.json b/CDB/LOFAR_ConfigDb.json
index 6886437ce7cf02ee741b5b70f07ebfbf71b08b4d..60b0115925b2ccf01bd27b3e0d072a17c78d580f 100644
--- a/CDB/LOFAR_ConfigDb.json
+++ b/CDB/LOFAR_ConfigDb.json
@@ -21,6 +21,13 @@
                 }
             }
         },
+        "Beam": {
+            "STAT": {
+                "Beam": {
+                    "STAT/Beam/1": {}
+                }
+            }
+        },
         "boot": {
             "STAT": {
                 "Boot": {
diff --git a/bin/dump_ConfigDb.sh b/bin/dump_ConfigDb.sh
index 0dc634c458b76cd5d3c13e2d7dab6e905f66248a..c1f6dc214e32458af1f1d555332ecb40c2b71601 100755
--- a/bin/dump_ConfigDb.sh
+++ b/bin/dump_ConfigDb.sh
@@ -1,4 +1,4 @@
 #!/bin/bash
 
 # writes the JSON dump to stdout, Do not change -i into -it incompatible with gitlab ci!
-docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig python -m dsconfig.
+docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig python -m dsconfig.dump
diff --git a/bin/start-ds.sh b/bin/start-ds.sh
index 7b601c4c8f5e24ac56755ad08a1203a6cbba62d2..a48b0b4554cd2ef9380cbc482b07edfc38203043 100755
--- a/bin/start-ds.sh
+++ b/bin/start-ds.sh
@@ -2,6 +2,11 @@
 
 # Serves as entrypoint script for docker containers
 
+if [[ ! -d "/opt/lofar/tango" ]]; then
+  >&2 echo "/opt/lofar/tango volume does not exist!"
+  exit 1
+fi
+
 # Check required support file exists
 if [[ ! -f "/usr/local/bin/wait-for-it.sh" ]]; then
     >&2 echo "/usr/local/bin/wait-for-it.sh file does not exist!"
@@ -14,6 +19,11 @@ if [[ ! $TANGO_HOST ]]; then
   exit 1
 fi
 
+# Store directory so we can return to it after installation
+CWD=$(pwd)
+
+cd /opt/lofar/tango || exit 1
+
 # Check if configured for specific version
 if [[ $TANGOSTATIONCONTROL ]]; then
   # TODO (Corne): Download version from artifacts or pypi.
@@ -28,4 +38,8 @@ else
   sudo pip install --force-reinstall "$(ls -Art /tmp/tangostationcontrol/*.whl | tail -n 1)"
 fi
 
+# Return to the stored directory; this preserves the working_dir argument in
+# docker-compose files.
+cd "$CWD" || exit 1
+
 /usr/local/bin/wait-for-it.sh "$TANGO_HOST" --timeout=30 --strict -- "$@"
diff --git a/bootstrap/etc/lofar20rc.sh b/bootstrap/etc/lofar20rc.sh
index 6e4a5c9bc8d6a78c1b61cca02159ee01291d3805..4b9d806d819816a86c5fea3ab8eb59135d8edcfc 100755
--- a/bootstrap/etc/lofar20rc.sh
+++ b/bootstrap/etc/lofar20rc.sh
@@ -16,6 +16,7 @@ if [ ! -f "${LOFAR20_DIR}/.git/hooks/post-checkout" ]; then
   alias git="cp ${LOFAR20_DIR}/bin/update_submodules.sh ${LOFAR20_DIR}/.git/hooks/post-checkout; cp ${LOFAR20_DIR}/bin/update_submodules.sh ${LOFAR20_DIR}/.git/hooks/post-merge; unalias git; git"
 fi
 
+# CI_BUILD_ID does not exist; see https://docs.gitlab.com/ee/ci/variables/predefined_variables.html
 if [ ! -z ${CI_BUILD_ID+x} ]; then
     export CONTAINER_NAME_PREFIX=${CI_BUILD_ID}-
 elif [ ! -z ${CI_JOB_ID+x} ]; then
diff --git a/docker-compose/.env b/docker-compose/.env
index c1956e315f8cde0d48b4b5279807025bede69261..53937727c24d398a5d82b24c31f205db50064163 100644
--- a/docker-compose/.env
+++ b/docker-compose/.env
@@ -1,6 +1,7 @@
 DOCKER_REGISTRY_HOST=artefact.skao.int
 DOCKER_REGISTRY_USER=ska-tango-images
 LOCAL_DOCKER_REGISTRY_HOST=git.astron.nl:5000
+LOCAL_DOCKER_REGISTRY_LOFAR=lofar2.0
 LOCAL_DOCKER_REGISTRY_USER=lofar2.0/tango
 
 TANGO_ARCHIVER_VERSION=2021-05-28
diff --git a/docker-compose/Makefile b/docker-compose/Makefile
index d85ff1df88d91db097bdd22b060cfc03b681a04f..6c6e3c888eaa77b7c23ad0fca6ec0d94a8099bf7 100644
--- a/docker-compose/Makefile
+++ b/docker-compose/Makefile
@@ -33,6 +33,8 @@ else ifeq (stop,$(firstword $(MAKECMDGOALS)))
     SERVICE_TARGET = true
 else ifeq (restart,$(firstword $(MAKECMDGOALS)))
     SERVICE_TARGET = true
+else ifeq (up,$(firstword $(MAKECMDGOALS)))
+    SERVICE_TARGET = true
 else ifeq (build,$(firstword $(MAKECMDGOALS)))
     SERVICE_TARGET = true
 else ifeq (build-nocache,$(firstword $(MAKECMDGOALS)))
@@ -143,8 +145,8 @@ build-nocache: ## rebuild images from scratch
 	$(DOCKER_COMPOSE_ARGS) docker-compose -f lofar-device-base.yml -f networks.yml build --progress=plain
 	$(DOCKER_COMPOSE_ARGS) docker-compose $(COMPOSE_FILE_ARGS) build --no-cache --progress=plain $(SERVICE)
 
-up: minimal  ## start the base TANGO system and prepare all services
-	$(DOCKER_COMPOSE_ARGS) docker-compose $(COMPOSE_FILE_ARGS) up --no-start --no-recreate
+up: minimal  ## start the base TANGO system and prepare requested services
+	$(DOCKER_COMPOSE_ARGS) docker-compose $(COMPOSE_FILE_ARGS) up --no-start --no-recreate $(SERVICE)
 
 down:  ## stop all services and tear down the system
 	$(DOCKER_COMPOSE_ARGS) docker-compose $(COMPOSE_FILE_ARGS) down
diff --git a/docker-compose/README.md b/docker-compose/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..d76a75b0c79dad1574d9d24912a671a39998abee
--- /dev/null
+++ b/docker-compose/README.md
@@ -0,0 +1,69 @@
+# Docker Compose
+
+Documentation on how the LOFAR station control software utilizes docker-compose.
+It is intended for developers: it lists strategies with their respective
+advantages and disadvantages, and describes the conventions developers are
+expected to uphold.
+
+## Image tagging and change detection
+
+Preventing unnecessary builds of docker images reduces build times and increases
+iteration speed. To achieve this, the project requires properly tagged images
+and mechanisms for change detection.
+
+For change detection the system relies on git, which is used to determine the
+directories and files that have changed between the current and previous
+commit. All image-related change detection mechanisms are based on this
+difference, as sketched below.
+
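+A minimal sketch of such a check, assuming a linear history in which HEAD~1 is
+the previous commit:
+
+```bash
+# List files changed between the previous and the current commit, limited to
+# the docker-compose directory; a non-empty result means images may need a
+# rebuild.
+git diff --name-only HEAD~1 HEAD -- docker-compose/
+```
+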
+Using docker cache within the dind service is impractical; see:
+https://gitlab.com/gitlab-org/gitlab-foss/-/issues/17861
+
+### Types of containers and specific strategies
+
+- Devices
+- Simulators
+- Base images
+- Services
+
+Devices: changes are detected through the .yml file or the directory of the
+respective service inside the docker-compose directory.
+
+Simulators: since the source code for simulators is maintained by other teams,
+we cannot accurately determine from our repository whether a simulator has
+changed. Instead, the images are built by those teams' respective CI
+pipelines; we simply pull them as base images.
+
+Base images: changes are detected through the .env file in the docker-compose
+directory. When it changes, the images are downloaded from the remote registry
+and uploaded to our own using matching tags.
+
+Services: same mechanism as devices; a sketch of this detection follows below.
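+
+As a hedged illustration, the per-service detection expressed by the
+`only: changes` rules in .gitlab-ci.yml could be approximated in shell as
+follows (`service` is an illustrative placeholder):
+
+```bash
+# Approximate the only:changes rule for a device job: rebuild when either the
+# service's compose file or the shared base-image context has changed.
+service=device-sdp
+if git diff --name-only HEAD~1 HEAD \
+    | grep -qE "^docker-compose/($service\.yml|lofar-device-base/)"; then
+  echo "$service needs a rebuild"
+fi
+```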
+
+### Setup and maintenance
+
+All behavioral logic that orchestrates change detection and image pushing can
+be found in the sbin/tag_and_push_docker_image.sh script as well as in
+.gitlab-ci.yml. The shell script relies on the fact that each .yml file in the
+docker-compose directory corresponds to one image, as sketched below.
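+
+A minimal sketch of that convention (illustrative only; the real logic lives
+in the script itself):
+
+```bash
+# Derive the image name from each compose file: one .yml file, one image.
+for compose_file in docker-compose/*.yml; do
+  image="$(basename "$compose_file" .yml)"
+  echo "compose file $compose_file -> image $image"
+done
+```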
+
+### Gitlab CI phases
+
+Docker images are managed in three phases: first remote image storing, then
+image building with change detection, and finally image pulling.
+
+Remote images are downloaded and stored in the local registry when the .env
+file for docker-compose has changed.
+
+Local images are built when either the files in the base context directory
+change or the docker-compose file itself changes. See .gitlab-ci.yml for how
+these changes are detected. All local images are rebuilt and tagged latest
+when a tagged commit is pushed to master.
+
+Local image builds download the latest image from the registry as cache,
+unless building for a tagged commit on master.
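+
+A hedged sketch of this pull-as-cache pattern, using the registry variables
+from docker-compose/.env (`image` is an illustrative placeholder, and the
+build context is assumed to be the current directory):
+
+```bash
+# Pull the latest image as build cache (tolerate a missing cache on the first
+# build), then build and tag the result for this pipeline.
+registry="$LOCAL_DOCKER_REGISTRY_HOST/$LOCAL_DOCKER_REGISTRY_USER"
+docker pull "$registry/$image:latest" || true
+docker build --cache-from "$registry/$image:latest" -t "$registry/$image:$tag" .
+```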
+
+Finally, the integration test downloads all images from the registry, either
+tagged with the current pipeline or with latest. Should both tags be
+unavailable, the integration test fails. Not all images are needed for the
+integration test; see sbin/tag_and_push_docker_image.sh for how these images
+are differentiated.
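+
+A hedged sketch of that tag fallback when pulling an image for the integration
+test (`registry` and `image` are illustrative placeholders, as above):
+
+```bash
+# Prefer the image built for this pipeline, fall back to latest, and fail if
+# neither tag is available.
+docker pull "$registry/$image:$tag" \
+  || docker pull "$registry/$image:latest" \
+  || { echo "no usable image for $image" >&2; exit 1; }
+```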
diff --git a/docker-compose/apsct-sim.yml b/docker-compose/apsct-sim.yml
index d30f5a026f734bb72ee91c7bf533df677f37ca88..b9742fdb97ec3f30026d441c668a13732013201e 100644
--- a/docker-compose/apsct-sim.yml
+++ b/docker-compose/apsct-sim.yml
@@ -10,6 +10,9 @@ services:
   apsct-sim:
     build:
         context: pypcc-sim-base
+        args:
+         - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
+         - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}apsct-sim
     networks:
       - control
diff --git a/docker-compose/apspu-sim.yml b/docker-compose/apspu-sim.yml
index d3fc5fa04f6ce0d6ddfe4c8f87887ab7500720e3..f5677048fbe1fe28082b219177bc67a2986c31fe 100644
--- a/docker-compose/apspu-sim.yml
+++ b/docker-compose/apspu-sim.yml
@@ -10,6 +10,9 @@ services:
   apspu-sim:
     build:
         context: pypcc-sim-base
+        args:
+         - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
+         - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}apspu-sim
     networks:
       - control
diff --git a/docker-compose/archiver.yml b/docker-compose/archiver.yml
index f1f2a1ec65dd4259b99e675c43cb7500862049f4..12ec2d88959fca75b047cff6004dd6e2b22c294a 100644
--- a/docker-compose/archiver.yml
+++ b/docker-compose/archiver.yml
@@ -94,7 +94,7 @@ services:
           tag: "{{.Name}}"
 
   dsconfig:
-    image: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-dsconfig:${TANGO_DSCONFIG_VERSION}
+    image: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-dsconfig:${TANGO_DSCONFIG_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}dsconfig
     networks:
       - control
diff --git a/docker-compose/astor.yml b/docker-compose/astor.yml
index 7010a82afa2fbcf5cb3dd797bda384bb516354f8..502472fc4eedd022388cd13d76e74135e00ff3db 100644
--- a/docker-compose/astor.yml
+++ b/docker-compose/astor.yml
@@ -13,7 +13,7 @@ version: '2'
 
 services:
   astor:
-    image: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-java:${TANGO_JAVA_VERSION}
+    image: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-java:${TANGO_JAVA_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}astor
     networks:
       - control
diff --git a/docker-compose/device-apsct.yml b/docker-compose/device-apsct.yml
index 60f65fc47ed81822242282fc743846221acec2d9..0e258fecdb3a96c3a73714ae2c28cf2e847457a1 100644
--- a/docker-compose/device-apsct.yml
+++ b/docker-compose/device-apsct.yml
@@ -20,7 +20,7 @@ services:
     build:
         context: lofar-device-base
         args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-apsct
     networks:
       - control
diff --git a/docker-compose/device-apspu.yml b/docker-compose/device-apspu.yml
index b694b09518215e293d19e1ff551f4f608e6f818d..5f325b19fb357e83ab3d35e3acfa1a5cbbb2896a 100644
--- a/docker-compose/device-apspu.yml
+++ b/docker-compose/device-apspu.yml
@@ -20,7 +20,7 @@ services:
     build:
         context: lofar-device-base
         args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-apspu
     networks:
       - control
diff --git a/docker-compose/device-beam.yml b/docker-compose/device-beam.yml
new file mode 100644
index 0000000000000000000000000000000000000000..97385f16492ec123044033713d0c7b835d2062fd
--- /dev/null
+++ b/docker-compose/device-beam.yml
@@ -0,0 +1,33 @@
+#
+# Requires:
+#   - lofar-device-base.yml
+#
+version: '2'
+
+services:
+  device-beam:
+    image: device-beam
+    # build explicitly, as docker-compose does not understand a local image
+    # being shared among services.
+    build:
+        context: lofar-device-base
+        args:
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+    container_name: ${CONTAINER_NAME_PREFIX}device-beam
+    networks:
+      - control
+    ports:
+      - "5711:5711" # unique port for this DS
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
+    volumes:
+      - ..:/opt/lofar/tango:rw
+    environment:
+      - TANGO_HOST=${TANGO_HOST}
+    working_dir: /opt/lofar/tango
+    entrypoint:
+      - bin/start-ds.sh
+      # configure CORBA to _listen_ on 0:port, but tell others we're _reachable_ through ${HOSTNAME}:port, since CORBA
+      # can't know about our Docker port forwarding
+      - l2ss-beam Beam STAT -v -ORBendPoint giop:tcp:0:5711 -ORBendPointPublish giop:tcp:${HOSTNAME}:5711
+    restart: unless-stopped
diff --git a/docker-compose/device-boot.yml b/docker-compose/device-boot.yml
index 3db111410fafde9901fd8f91cb40a1c3560e4242..330cb723ed3bb5ee8ccd50bf4cb933da4e1fe09c 100644
--- a/docker-compose/device-boot.yml
+++ b/docker-compose/device-boot.yml
@@ -19,7 +19,7 @@ services:
     build:
         context: lofar-device-base
         args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-boot
     networks:
       - control
diff --git a/docker-compose/device-docker.yml b/docker-compose/device-docker.yml
index 5a2641e9871f163f27ed7a60d872d30d4fe855e1..a9e4ccfdd6f66eda66f05ea5244fcf0fd732a382 100644
--- a/docker-compose/device-docker.yml
+++ b/docker-compose/device-docker.yml
@@ -20,7 +20,7 @@ services:
     build:
         context: lofar-device-base
         args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-docker
     networks:
       - control
diff --git a/docker-compose/device-observation_control.yml b/docker-compose/device-observation_control.yml
index 33fb0d066fd76b8eb4a9c7753266f16d04157726..d4f6f15d1f4eb80d02cd0c5738dc0a011b9dfc72 100644
--- a/docker-compose/device-observation_control.yml
+++ b/docker-compose/device-observation_control.yml
@@ -19,7 +19,7 @@ services:
     build:
         context: lofar-device-base
         args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-observation_control
     networks:
       - control
diff --git a/docker-compose/device-recv.yml b/docker-compose/device-recv.yml
index a08f566e7b39e095403f00cb5b086420b689d66b..25e767726f139ff532dbe649ccb230fabbec0602 100644
--- a/docker-compose/device-recv.yml
+++ b/docker-compose/device-recv.yml
@@ -20,7 +20,7 @@ services:
     build:
         context: lofar-device-base
         args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-recv
     networks:
       - control
diff --git a/docker-compose/device-sdp.yml b/docker-compose/device-sdp.yml
index f32c34394475c6a7483cb98cd03def1f62cf9ff0..06a523f606d67811986bd7a13b9a3202cb74e91d 100644
--- a/docker-compose/device-sdp.yml
+++ b/docker-compose/device-sdp.yml
@@ -20,7 +20,7 @@ services:
     build:
         context: lofar-device-base
         args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-sdp
     networks:
       - control
diff --git a/docker-compose/device-sst.yml b/docker-compose/device-sst.yml
index 7464cb01f45e584ab705fe9098e0229a1b762295..86651c7878d844646528b41fb0969dfd19af6eea 100644
--- a/docker-compose/device-sst.yml
+++ b/docker-compose/device-sst.yml
@@ -20,7 +20,7 @@ services:
     build:
         context: lofar-device-base
         args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-sst
     networks:
         - control
diff --git a/docker-compose/device-unb2.yml b/docker-compose/device-unb2.yml
index af1329d21a905f3c150c092529978e17f0c0ee37..2b9b47146a405440ebd36fd84162935fb6b8a56d 100644
--- a/docker-compose/device-unb2.yml
+++ b/docker-compose/device-unb2.yml
@@ -20,7 +20,7 @@ services:
     build:
         context: lofar-device-base
         args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-unb2
     networks:
       - control
diff --git a/docker-compose/device-xst.yml b/docker-compose/device-xst.yml
index c4ea684fd94e34fcaaa857a5717ca47745eccc72..54ca5a21f911084160d2cec772df06da55ef5cf1 100644
--- a/docker-compose/device-xst.yml
+++ b/docker-compose/device-xst.yml
@@ -20,7 +20,7 @@ services:
     build:
         context: lofar-device-base
         args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-xst
     networks:
         - control
diff --git a/docker-compose/elk.yml b/docker-compose/elk.yml
index 67f13baee061a74ebd08320f1e9f2f9f3e72f646..25bb1b218669baebff50ddc830b049b691349f71 100644
--- a/docker-compose/elk.yml
+++ b/docker-compose/elk.yml
@@ -6,6 +6,7 @@
 #   - elk-configure-host: Configures the hosts's kernel to be able to use the ELK stack
 #   - elk: ELK stack
 #
+
 version: '2'
 
 volumes:
diff --git a/docker-compose/grafana.yml b/docker-compose/grafana.yml
index 29c93c52c4dc05849aad10fabac12712c12dd4d7..f298db2746961b7d30d2e147192d0dfc58530725 100644
--- a/docker-compose/grafana.yml
+++ b/docker-compose/grafana.yml
@@ -4,6 +4,7 @@
 # Defines:
 #   - grafana: Grafana
 #
+
 version: '2'
 
 #volumes:
diff --git a/docker-compose/grafana/dashboards/archiver_mariadb.json b/docker-compose/grafana/dashboards/archiver_mariadb.json
new file mode 100644
index 0000000000000000000000000000000000000000..73b4aa07650e9c8cac3a4bcf12537cd2fe381c6a
--- /dev/null
+++ b/docker-compose/grafana/dashboards/archiver_mariadb.json
@@ -0,0 +1,799 @@
+{
+  "annotations": {
+    "list": [
+      {
+        "builtIn": 1,
+        "datasource": "-- Grafana --",
+        "enable": true,
+        "hide": true,
+        "iconColor": "rgba(0, 211, 255, 1)",
+        "name": "Annotations & Alerts",
+        "target": {
+          "limit": 100,
+          "matchAny": false,
+          "tags": [],
+          "type": "dashboard"
+        },
+        "type": "dashboard"
+      }
+    ]
+  },
+  "editable": true,
+  "fiscalYearStartMonth": 0,
+  "gnetId": null,
+  "graphTooltip": 0,
+  "links": [],
+  "liveNow": false,
+  "panels": [
+    {
+      "datasource": null,
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "custom": {
+            "align": "auto",
+            "displayMode": "auto"
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 80
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 12,
+        "x": 0,
+        "y": 0
+      },
+      "id": 4,
+      "options": {
+        "showHeader": true
+      },
+      "pluginVersion": "8.2.3",
+      "targets": [
+        {
+          "format": "table",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "select name as \"Attribute\"\r\nfrom att_conf ac \r\nwhere ac.domain ='stat' and ac.family = 'sdp' and ac.member ='1'\r\norder by ac.name",
+          "refId": "A",
+          "select": [
+            [
+              {
+                "params": [
+                  "name"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_conf",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": []
+        }
+      ],
+      "title": "STAT/SDP/1 archived attributes",
+      "type": "table"
+    },
+    {
+      "datasource": null,
+      "description": "",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "custom": {
+            "align": "auto",
+            "displayMode": "auto"
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 80
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 12,
+        "x": 12,
+        "y": 0
+      },
+      "id": 6,
+      "options": {
+        "showHeader": true
+      },
+      "pluginVersion": "8.2.3",
+      "targets": [
+        {
+          "format": "table",
+          "group": [],
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "select name as \"Attribute\" \r\nfrom att_conf  \r\nwhere domain ='stat' and family = 'recv' and member ='1'\r\norder by name",
+          "refId": "A",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        }
+      ],
+      "title": "STAT/RECV/1  archived attributes",
+      "type": "table"
+    },
+    {
+      "datasource": null,
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "barAlignment": 0,
+            "drawStyle": "points",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 4,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "auto",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "normal"
+            },
+            "thresholdsStyle": {
+              "mode": "off"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              }
+            ]
+          }
+        },
+        "overrides": [
+          {
+            "__systemRef": "hideSeriesFrom",
+            "matcher": {
+              "id": "byNames",
+              "options": {
+                "mode": "exclude",
+                "names": [
+                  "temperature [1]"
+                ],
+                "prefix": "All except:",
+                "readOnly": true
+              }
+            },
+            "properties": [
+              {
+                "id": "custom.hideFrom",
+                "value": {
+                  "legend": false,
+                  "tooltip": false,
+                  "viz": true
+                }
+              }
+            ]
+          }
+        ]
+      },
+      "gridPos": {
+        "h": 9,
+        "w": 12,
+        "x": 0,
+        "y": 8
+      },
+      "id": 2,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "list",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "pluginVersion": "8.2.1",
+      "targets": [
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": true,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT\ndata_time as \"time\",\nvalue_r as \"value\"\nFROM att_array_devdouble_ro\nJOIN att_conf\nON att_array_devdouble_ro.att_conf_id = att_conf.att_conf_id \nWHERE\natt_conf.domain = 'lts'\nand att_conf.family = 'sdp'\nand att_conf.member = 1\nand att_conf.name = 'fpga_temp_r'\nORDER BY data_time,idx\n",
+          "refId": "A",
+          "select": [
+            [
+              {
+                "params": [
+                  "value"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "timeColumn": "time",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": true,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT\ndata_time as \"time\",\nvalue_r as \"mask\"\nFROM att_array_devboolean_ro\nJOIN att_conf\nON att_array_devboolean_ro.att_conf_id = att_conf.att_conf_id \nWHERE\natt_conf.domain = 'lts'\nand att_conf.family = 'sdp'\nand att_conf.member = 1\nand att_conf.name = 'tr_fpga_mask_r'\nORDER BY data_time,idx\n",
+          "refId": "B",
+          "select": [
+            [
+              {
+                "params": [
+                  "value"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "timeColumn": "time",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [0]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 0\r\nORDER BY t.`time`, t.`index`",
+          "refId": "Masked Values",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [1]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 1\r\nORDER BY t.`time`, t.`index`",
+          "refId": "C",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [2]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 2\r\nORDER BY t.`time`, t.`index`",
+          "refId": "D",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [3]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 3\r\nORDER BY t.`time`, t.`index`",
+          "refId": "E",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [4]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 4\r\nORDER BY t.`time`, t.`index`",
+          "refId": "F",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [5]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 5\r\nORDER BY t.`time`, t.`index`",
+          "refId": "G",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [6]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 6\r\nORDER BY t.`time`, t.`index`",
+          "refId": "H",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [7]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 7\r\nORDER BY t.`time`, t.`index`",
+          "refId": "I",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [8]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 8\r\nORDER BY t.`time`, t.`index`",
+          "refId": "J",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [9]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 9\r\nORDER BY t.`time`, t.`index`",
+          "refId": "K",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [10]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 10\r\nORDER BY t.`time`, t.`index`",
+          "refId": "L",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [11]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 11\r\nORDER BY t.`time`, t.`index`",
+          "refId": "M",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [12]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 12\r\nORDER BY t.`time`, t.`index`",
+          "refId": "N",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [13]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 13\r\nORDER BY t.`time`, t.`index`",
+          "refId": "O",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [14]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 14\r\nORDER BY t.`time`, t.`index`",
+          "refId": "P",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        },
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT t.`time` as \"time\",\r\nt.value as \"temperature [15]\"\r\nFROM SDP_FPGA_temp as t INNER JOIN SDP_TR_FPGA_mask as m\r\nON t.`time`=m.`time` and t.`index`=m.`index`\r\nWHERE m.value = 1\r\nAND t.`time` > $__timeFrom()\r\nAND t.`time` < $__timeTo()\r\nAND t.`index` = 15\r\nORDER BY t.`time`, t.`index`",
+          "refId": "Q",
+          "select": [
+            [
+              {
+                "params": [
+                  "att_conf_id"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_array_devfloat_rw",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        }
+      ],
+      "title": "SDP - FPGA Temperature",
+      "type": "timeseries"
+    }
+  ],
+  "refresh": "",
+  "schemaVersion": 31,
+  "style": "dark",
+  "tags": [],
+  "templating": {
+    "list": []
+  },
+  "time": {
+    "from": "now-24h",
+    "to": "now"
+  },
+  "timepicker": {},
+  "timezone": "",
+  "title": "Archiver-MariaDB",
+  "uid": "2zuNvev7z",
+  "version": 1
+}
diff --git a/docker-compose/grafana/dashboards/archiver_timescale.json b/docker-compose/grafana/dashboards/archiver_timescale.json
new file mode 100644
index 0000000000000000000000000000000000000000..bec06ef691e2a89e8e75e264a7f3d9f120a82ea4
--- /dev/null
+++ b/docker-compose/grafana/dashboards/archiver_timescale.json
@@ -0,0 +1,273 @@
+{
+  "annotations": {
+    "list": [
+      {
+        "builtIn": 1,
+        "datasource": "-- Grafana --",
+        "enable": true,
+        "hide": true,
+        "iconColor": "rgba(0, 211, 255, 1)",
+        "name": "Annotations & Alerts",
+        "target": {
+          "limit": 100,
+          "matchAny": false,
+          "tags": [],
+          "type": "dashboard"
+        },
+        "type": "dashboard"
+      }
+    ]
+  },
+  "editable": true,
+  "fiscalYearStartMonth": 0,
+  "gnetId": null,
+  "graphTooltip": 0,
+  "links": [],
+  "liveNow": false,
+  "panels": [
+    {
+      "datasource": "TimescaleDB",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "align": "auto",
+            "displayMode": "auto"
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 80
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 12,
+        "x": 0,
+        "y": 0
+      },
+      "id": 6,
+      "options": {
+        "showHeader": true
+      },
+      "pluginVersion": "8.2.5",
+      "targets": [
+        {
+          "format": "table",
+          "group": [],
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "select name as \"Attribute\",\r\ntable_name as \"Type\"\r\nfrom att_conf ac \r\nwhere \"domain\" ='stat' and \"family\" = 'sdp' and \"member\" ='1'\r\norder by name",
+          "refId": "A",
+          "select": [
+            [
+              {
+                "params": [
+                  "value_r"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_scalar_devdouble",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        }
+      ],
+      "title": "STAT/SDP/1 archived attributes",
+      "type": "table"
+    },
+    {
+      "datasource": "TimescaleDB",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "custom": {
+            "align": "auto",
+            "displayMode": "auto"
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 80
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 12,
+        "x": 12,
+        "y": 0
+      },
+      "id": 8,
+      "options": {
+        "showHeader": true
+      },
+      "pluginVersion": "8.2.5",
+      "targets": [
+        {
+          "format": "table",
+          "group": [],
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "select name as \"Attribute\",\r\ntable_name as \"Type\" \r\nfrom att_conf ac \r\nwhere \"domain\" ='stat' and \"family\" = 'recv' and \"member\" ='1'\r\norder by name",
+          "refId": "A",
+          "select": [
+            [
+              {
+                "params": [
+                  "value_r"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "att_scalar_devdouble",
+          "timeColumn": "data_time",
+          "timeColumnType": "timestamp",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        }
+      ],
+      "title": "LTS/RECV/1 archived attributes",
+      "type": "table"
+    },
+    {
+      "datasource": "TimescaleDB",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "continuous-BlYlRd"
+          },
+          "custom": {
+            "fillOpacity": 70,
+            "lineWidth": 1
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 80
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 9,
+        "w": 12,
+        "x": 0,
+        "y": 8
+      },
+      "id": 9,
+      "interval": "10s",
+      "maxDataPoints": 1000,
+      "options": {
+        "colWidth": 0.3,
+        "legend": {
+          "displayMode": "list",
+          "placement": "bottom"
+        },
+        "rowHeight": 0.78,
+        "showValue": "auto",
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "pluginVersion": "8.2.4",
+      "targets": [
+        {
+          "format": "time_series",
+          "group": [],
+          "hide": false,
+          "metricColumn": "none",
+          "rawQuery": true,
+          "rawSql": "SELECT time_bucket('00:00:01'::interval, q1.\"time\") AS time,\r\n    mask(array[q1.value[1]], array[q2.\"value\"[1]]) AS \"temperature[1]\",\r\n    mask(array[q1.value[2]], array[q2.\"value\"[2]]) AS \"temperature[2]\",\r\n    mask(array[q1.value[3]], array[q2.\"value\"[3]]) AS \"temperature[3]\",\r\n    mask(array[q1.value[4]], array[q2.\"value\"[4]]) AS \"temperature[4]\",\r\n    mask(array[q1.value[5]], array[q2.\"value\"[5]]) AS \"temperature[5]\",\r\n    mask(array[q1.value[6]], array[q2.\"value\"[6]]) AS \"temperature[6]\",\r\n    mask(array[q1.value[7]], array[q2.\"value\"[7]]) AS \"temperature[7]\",\r\n    mask(array[q1.value[8]], array[q2.\"value\"[8]]) AS \"temperature[8]\",\r\n    mask(array[q1.value[9]], array[q2.\"value\"[9]]) AS \"temperature[9]\",\r\n    mask(array[q1.value[10]], array[q2.\"value\"[10]]) AS \"temperature[10]\",\r\n    mask(array[q1.value[11]], array[q2.\"value\"[11]]) AS \"temperature[11]\",\r\n    mask(array[q1.value[12]], array[q2.\"value\"[12]]) AS \"temperature[12]\",\r\n    mask(array[q1.value[13]], array[q2.\"value\"[13]]) AS \"temperature[13]\",\r\n    mask(array[q1.value[14]], array[q2.\"value\"[14]]) AS \"temperature[14]\",\r\n    mask(array[q1.value[15]], array[q2.\"value\"[15]]) AS \"temperature[15]\"\r\n   FROM ( SELECT aad.data_time AS \"time\",\r\n            aad.value_r AS value\r\n           FROM att_array_devdouble aad\r\n             JOIN att_conf ac ON aad.att_conf_id = ac.att_conf_id\r\n          WHERE aad.value_r IS NOT NULL AND ac.domain = 'stat'::text AND ac.family = 'sdp'::text AND ac.member = '1'::text AND ac.name = 'fpga_temp_r'::text\r\n          ORDER BY aad.data_time) q1\r\n     JOIN ( SELECT aab.data_time AS \"time\",\r\n            aab.value_r AS value\r\n           FROM att_array_devboolean aab\r\n             JOIN att_conf ac ON aab.att_conf_id = ac.att_conf_id\r\n          WHERE aab.value_r IS NOT NULL AND ac.domain = 'stat'::text AND ac.family = 'sdp'::text AND ac.member = '1'::text AND ac.name = 'tr_fpga_mask_r'::text\r\n          ORDER BY aab.data_time) q2 ON time_bucket('00:00:01'::interval, q1.\"time\") = time_bucket('00:00:01'::interval, q2.\"time\")\r\n  WHERE $__timeFilter(q1.\"time\")\r\n  ORDER BY q1.\"time\";",
+          "refId": "B",
+          "select": [
+            [
+              {
+                "params": [
+                  "temperature[1]"
+                ],
+                "type": "column"
+              }
+            ]
+          ],
+          "table": "sdp_masked_temp_values",
+          "timeColumn": "temp_time",
+          "timeColumnType": "timestamptz",
+          "where": [
+            {
+              "name": "$__timeFilter",
+              "params": [],
+              "type": "macro"
+            }
+          ]
+        }
+      ],
+      "title": "SDP - FPGA Temperature",
+      "type": "status-history"
+    }
+  ],
+  "refresh": "",
+  "schemaVersion": 32,
+  "style": "dark",
+  "tags": [],
+  "templating": {
+    "list": []
+  },
+  "time": {
+    "from": "now-5m",
+    "to": "now"
+  },
+  "timepicker": {},
+  "timezone": "",
+  "title": "Archiver-Timescale",
+  "uid": "M7zA7Hc7k",
+  "version": 1
+}
diff --git a/docker-compose/grafana/datasources/archiver-timescale.yaml b/docker-compose/grafana/datasources/archiver-timescale.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..69a896dae776283d41283c33688088700f85cadf
--- /dev/null
+++ b/docker-compose/grafana/datasources/archiver-timescale.yaml
@@ -0,0 +1,45 @@
+apiVersion: 1
+
+datasources:
+  # <string, required> name of the datasource. Required
+  - name: TimescaleDB
+    # <string, required> datasource type. Required
+    type: postgres
+    # <string, required> access mode. proxy or direct (Server or Browser in the UI). Required
+    access: proxy
+    # <int> org id. will default to orgId 1 if not specified
+    orgId: 1
+    # <string> custom UID which can be used to reference this datasource in other parts of the configuration, if not specified will be generated automatically
+    uid: ZqArtG97z
+    # <string> url
+    url: archiver-timescale
+    # <string> Deprecated, use secureJsonData.password
+    password:
+    # <string> database user, if used
+    user: postgres
+    # <string> database name, if used
+    database: hdb
+    # <bool> enable/disable basic auth
+    basicAuth: false
+    # <string> basic auth username
+    basicAuthUser:
+    # <string> Deprecated, use secureJsonData.basicAuthPassword
+    basicAuthPassword:
+    # <bool> enable/disable with credentials headers
+    withCredentials:
+    # <bool> mark as default datasource. Max one per org
+    isDefault: false
+    # <map> fields that will be converted to json and stored in jsonData
+    jsonData:
+      # <string> determines whether or with what priority a secure TLS/SSL TCP/IP connection will be negotiated with the server.
+      sslmode: "disable"
+      # <bool> enable TimescaleDB
+      timescaledb: true
+    # <string> json object of data that will be encrypted.
+    secureJsonData:
+      # <string> database password, if used
+      password: password
+    version: 1
+    # <bool> allow users to edit datasources from the UI.
+    editable: false
diff --git a/docker-compose/integration-test.yml b/docker-compose/integration-test.yml
index defb45e3c3183516131795b283372ca784635d8c..e2be9144ef7d73b7108609a917529c019e109c62 100644
--- a/docker-compose/integration-test.yml
+++ b/docker-compose/integration-test.yml
@@ -11,7 +11,7 @@ services:
     build:
         context: itango
         args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}integration-test
     networks:
       - control
diff --git a/docker-compose/itango.yml b/docker-compose/itango.yml
index 9b01c4ea25e2abc5849c9a98c29cc7601ba1115f..02d6801bd8a2f748a4b3d3336352891c78d4882b 100644
--- a/docker-compose/itango.yml
+++ b/docker-compose/itango.yml
@@ -17,7 +17,7 @@ services:
     build:
         context: itango
         args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}itango
     networks:
       - control
diff --git a/docker-compose/jive.yml b/docker-compose/jive.yml
index 456ae1fc96771bad1ab6b99e52e3b0c9c046c20c..5a2caea9a1d9d6fb19d235781abc33a3230412e8 100644
--- a/docker-compose/jive.yml
+++ b/docker-compose/jive.yml
@@ -18,7 +18,7 @@ version: '2'
 
 services:
   jive:
-    image: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-java:${TANGO_JAVA_VERSION}
+    image: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-java:${TANGO_JAVA_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}jive
     network_mode: host
     volumes:
diff --git a/docker-compose/jupyter.yml b/docker-compose/jupyter.yml
index 1e1deea6f0e22299544f988602efc676bbe6200c..bbc20f269f8a44acff3ce9f36bf11eeef17cea8f 100644
--- a/docker-compose/jupyter.yml
+++ b/docker-compose/jupyter.yml
@@ -7,6 +7,7 @@
 # Defines:
 #   - jupyter: Jupyter Notebook with iTango support
 #
+
 version: '2'
 
 services:
@@ -15,7 +16,7 @@ services:
         context: jupyter
         args:
             CONTAINER_EXECUTION_UID: ${CONTAINER_EXECUTION_UID}
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}jupyter
     networks:
       - control
@@ -30,10 +31,6 @@ services:
     user: ${CONTAINER_EXECUTION_UID}
     working_dir: /jupyter-notebooks
     entrypoint:
-      - /usr/local/bin/wait-for-it.sh
-      - ${TANGO_HOST}
-      - --timeout=30
-      - --strict
-      - --
+      - /opt/lofar/tango/bin/start-ds.sh
       - /usr/bin/tini -- /usr/local/bin/jupyter-notebook --port=8888 --no-browser --ip=0.0.0.0 --allow-root --NotebookApp.token= --NotebookApp.password=
     restart: unless-stopped
diff --git a/docker-compose/jupyter/Dockerfile b/docker-compose/jupyter/Dockerfile
index 5393cece6a74ff1de85e9c37ce6a8307e3a66cf5..cc1652e4a45bc14805632ec1d4056beaab1fd34c 100644
--- a/docker-compose/jupyter/Dockerfile
+++ b/docker-compose/jupyter/Dockerfile
@@ -10,23 +10,13 @@ ENV HOME=/home/user
 RUN sudo mkdir -p ${HOME}
 RUN sudo chown ${CONTAINER_EXECUTION_UID} -R ${HOME}
 
-# ipython 7.28 is broken in combination with Jupyter, it causes connection errors with notebooks
-RUN sudo pip3 install ipython==7.27.0
-
-RUN sudo pip3 install jupyter
-RUN sudo pip3 install ipykernel
-RUN sudo pip3 install jupyter_bokeh
-# Install matplotlib, jupyterplot
-RUN sudo pip3 install matplotlib jupyterplot
-
-# Allow Download as -> PDF via html
-RUN sudo pip3 install nbconvert
-RUN sudo pip3 install notebook-as-pdf
+COPY requirements.txt ./
+RUN sudo pip3 install -r requirements.txt
 
 # see https://github.com/jupyter/nbconvert/issues/1434
 RUN sudo bash -c "echo DEFAULT_ARGS += [\\\"--no-sandbox\\\"] >> /usr/local/lib/python3.7/dist-packages/pyppeteer/launcher.py"
 RUN sudo apt-get update -y
-RUN sudo apt-get install -y gconf-service libasound2 libatk1.0-0 libatk-bridge2.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget libcairo-gobject2 libxinerama1 libgtk2.0-0 libpangoft2-1.0-0 libthai0 libpixman-1-0 libxcb-render0 libharfbuzz0b libdatrie1 libgraphite2-3 libgbm1
+RUN sudo apt-get install -y git gconf-service libasound2 libatk1.0-0 libatk-bridge2.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget libcairo-gobject2 libxinerama1 libgtk2.0-0 libpangoft2-1.0-0 libthai0 libpixman-1-0 libxcb-render0 libharfbuzz0b libdatrie1 libgraphite2-3 libgbm1
 
 # Allow Download as -> PDF via LaTeX
 RUN sudo apt-get install -y texlive-xetex texlive-fonts-recommended texlive-latex-recommended
@@ -43,15 +33,8 @@ RUN sudo chown ${CONTAINER_EXECUTION_UID} -R /opt/ipython-profiles
 COPY jupyter-kernels /usr/local/share/jupyter/kernels/
 
 # Install patched jupyter executable
-RUN sudo pip3 install python-logstash-async
 COPY jupyter-notebook /usr/local/bin/jupyter-notebook
 
-#Install further python modules
-RUN sudo pip3 install PyMySQL[rsa] sqlalchemy
-
-# Packages to interface with testing hardware directly
-RUN sudo pip3 install pyvisa pyvisa-py opcua
-
 # Add Tini. Tini operates as a process subreaper for jupyter. This prevents kernel crashes.
 ENV TINI_VERSION v0.6.0
 ENV JUPYTER_RUNTIME_DIR=/tmp
diff --git a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
index 74692f10fff62684c21047eb373aebd70a876155..03e7b75b4649c79647c5bb573b22e41eb330c159 100644
--- a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
+++ b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
@@ -7,7 +7,8 @@ sst = DeviceProxy("STAT/SST/1")
 xst = DeviceProxy("STAT/XST/1")
 unb2 = DeviceProxy("STAT/UNB2/1")
 boot = DeviceProxy("STAT/Boot/1")
+beam = DeviceProxy("STAT/Beam/1")
 docker = DeviceProxy("STAT/Docker/1")
 
 # Put them in a list in case one wants to iterate
-devices = [apsct, apspu, recv, sdp, sst, xst, unb2, boot, docker]
+devices = [apsct, apspu, recv, sdp, sst, xst, unb2, boot, beam, docker]
diff --git a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/02-stationcontrol.py b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/02-stationcontrol.py
new file mode 100644
index 0000000000000000000000000000000000000000..d21ed1cf013d73b700cbc72e3d89ef9541efcacc
--- /dev/null
+++ b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/02-stationcontrol.py
@@ -0,0 +1 @@
+import tangostationcontrol
diff --git a/docker-compose/jupyter/requirements.txt b/docker-compose/jupyter/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..7d8be5631439e6b5dbae530ce06ac64cdd6cc542
--- /dev/null
+++ b/docker-compose/jupyter/requirements.txt
@@ -0,0 +1,16 @@
+GitPython >= 3.1.24 # BSD
+ipython >=7.27.0,!=7.28.0 # BSD
+jupyter
+ipykernel
+jupyter_bokeh
+matplotlib
+jupyterplot
+nbconvert
+notebook-as-pdf
+python-logstash-async
+PyMySQL[rsa]
+psycopg2-binary >= 2.9.2 # LGPL
+sqlalchemy
+pyvisa
+pyvisa-py
+opcua
diff --git a/docker-compose/lofar-device-base.yml b/docker-compose/lofar-device-base.yml
index ce110ed85ba0cfb20b607ab7d08e70505d2392e8..f01faac2d2f41647708229106a895d3dad23c3e4 100644
--- a/docker-compose/lofar-device-base.yml
+++ b/docker-compose/lofar-device-base.yml
@@ -10,6 +10,7 @@
 # Requires:
 #   - tango.yml
 #
+
 version: '2'
 
 services:
@@ -18,7 +19,7 @@ services:
     build:
         context: lofar-device-base
         args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}lofar-device-base
     # These parameters are just visual cues, you have to define them again
     # in derived docker-compose files!
diff --git a/docker-compose/lofar-device-base/lofar-requirements.txt b/docker-compose/lofar-device-base/lofar-requirements.txt
index 10ad55d977c97793a352c13da323d84d3c826c0e..95ed439cd121c0dc72b0c9a1c69d409e0bacc57e 100644
--- a/docker-compose/lofar-device-base/lofar-requirements.txt
+++ b/docker-compose/lofar-device-base/lofar-requirements.txt
@@ -1,5 +1,2 @@
-# Do not put tangostationcontrol dependencies here
-astropy
-
-# requirements to build tangocontrol 
+# Do not put tangostationcontrol dependencies here, only setup.py / __init__.py
 GitPython >= 3.1.24 # BSD
diff --git a/docker-compose/logviewer.yml b/docker-compose/logviewer.yml
index bf0c9b2d51cbdb7334a579184114de6925fd37a1..08da4000b23925980e1683465fa4fdd4c05f04ae 100644
--- a/docker-compose/logviewer.yml
+++ b/docker-compose/logviewer.yml
@@ -12,7 +12,7 @@ version: '2'
 
 services:
   logviewer:
-    image: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-java:${TANGO_JAVA_VERSION}
+    image: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-java:${TANGO_JAVA_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}logviewer
     networks:
       - control
diff --git a/docker-compose/pogo.yml b/docker-compose/pogo.yml
index 826daac9fbd6ef3226a690832eedab505bbeaba3..954841746b9f0338d4a84fdae7e043fde04be460 100644
--- a/docker-compose/pogo.yml
+++ b/docker-compose/pogo.yml
@@ -20,7 +20,7 @@ volumes:
 
 services:
   pogo:
-    image: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-pogo:${TANGO_POGO_VERSION}
+    image: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-pogo:${TANGO_POGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}pogo
     networks:
       - control
diff --git a/docker-compose/prometheus.yml b/docker-compose/prometheus.yml
index 604f4bf4bde93dd6d68aaf7f3b1da2fd3f884e83..e7924c1a7219adc16e1a3c1780b0bcc43773b3c0 100644
--- a/docker-compose/prometheus.yml
+++ b/docker-compose/prometheus.yml
@@ -4,8 +4,12 @@
 # Defines:
 #   - prometheus: Prometheus
 #
+
 version: '2'
 
+volumes:
+  prometheus-data: {}
+
 services:
   prometheus:
     image: prometheus
@@ -14,6 +18,8 @@ services:
     container_name: ${CONTAINER_NAME_PREFIX}prometheus
     networks:
       - control
+    volumes:
+      - prometheus-data:/prometheus
     ports:
       - "9090:9090"
     logging:
diff --git a/docker-compose/prometheus/Dockerfile b/docker-compose/prometheus/Dockerfile
index cc1494f98dbce6c66e437b001af2a88320ca0ffa..ad8e5165b06b55a3ca1e273d09ee2fbf6c69db1c 100644
--- a/docker-compose/prometheus/Dockerfile
+++ b/docker-compose/prometheus/Dockerfile
@@ -1,3 +1,5 @@
 FROM prom/prometheus
 
 COPY prometheus.yml /etc/prometheus/prometheus.yml
+
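+# Restate the image's default flags (config file, data directory, console
+# assets), and add a 31-day retention period for the time-series database.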
+CMD ["--config.file=/etc/prometheus/prometheus.yml", "--storage.tsdb.path=/prometheus", "--web.console.libraries=/usr/share/prometheus/console_libraries", "--web.console.templates=/usr/share/prometheus/consoles", "--storage.tsdb.retention.time=31d"]
diff --git a/docker-compose/pypcc-sim-base/Dockerfile b/docker-compose/pypcc-sim-base/Dockerfile
index c65c5b6f836e889f9b3c364ceace5f7b9b821628..f0f37dec5613b988ba3c471428aa426606cf9d5a 100644
--- a/docker-compose/pypcc-sim-base/Dockerfile
+++ b/docker-compose/pypcc-sim-base/Dockerfile
@@ -1,10 +1,6 @@
-FROM ubuntu:20.04
+ARG LOCAL_DOCKER_REGISTRY_HOST
+ARG LOCAL_DOCKER_REGISTRY_LOFAR
 
-COPY requirements.txt /requirements.txt
+FROM ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_LOFAR}/pypcc:latest
 
-RUN apt-get update && apt-get install -y python3 python3-pip python3-yaml git && \
-    pip3 install -r requirements.txt && \
-    git clone --depth 1 --branch master https://git.astron.nl/lofar2.0/pypcc
-
-WORKDIR /pypcc
-CMD ["python3","pypcc2.py","--simulator","--port","4843"]
+CMD ["python3", "pypcc2.py", "--simulator", "--port","4843"]
diff --git a/docker-compose/pypcc-sim-base/requirements.txt b/docker-compose/pypcc-sim-base/requirements.txt
deleted file mode 100644
index 2cd015945c044fcd1e39a823f49a807fc519ac67..0000000000000000000000000000000000000000
--- a/docker-compose/pypcc-sim-base/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-opcua
-numpy
-recordclass>=0.16,<0.16.1
\ No newline at end of file
diff --git a/docker-compose/recv-sim.yml b/docker-compose/recv-sim.yml
index effee8b298b855e2007e50c379fa3df45010bd05..8fd795be60ef89b23491895dd9809ff67b1c67ae 100644
--- a/docker-compose/recv-sim.yml
+++ b/docker-compose/recv-sim.yml
@@ -10,6 +10,9 @@ services:
   recv-sim:
     build:
         context: pypcc-sim-base
+        args:
+         - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
+         - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}recv-sim
     networks:
       - control
diff --git a/docker-compose/rest.yml b/docker-compose/rest.yml
index 467319399d6f3fec12a74068fea182195014b59e..94e1168455ddfefa20796c352e92d27e07f9a115 100644
--- a/docker-compose/rest.yml
+++ b/docker-compose/rest.yml
@@ -13,7 +13,7 @@ version: '2'
 
 services:
   rest:
-    image: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-rest:${TANGO_REST_VERSION}
+    image: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-rest:${TANGO_REST_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}tango-rest
     networks:
       - control
diff --git a/docker-compose/sdptr-sim.yml b/docker-compose/sdptr-sim.yml
index c81c3db9ae4744e013b5a92f1ebb5e9bdaa6e92c..badf707e37621c8b3030121424bacd1393910b87 100644
--- a/docker-compose/sdptr-sim.yml
+++ b/docker-compose/sdptr-sim.yml
@@ -10,6 +10,9 @@ services:
   sdptr-sim:
     build:
         context: sdptr-sim
+        args:
+         - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
+         - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}sdptr-sim
     networks:
       - control
diff --git a/docker-compose/sdptr-sim/Dockerfile b/docker-compose/sdptr-sim/Dockerfile
index 57fe98141f180a4d15a1e2d87c2c67be8f5894ff..4e64ca2a67229e602a705c9e61b0de999e64fad4 100644
--- a/docker-compose/sdptr-sim/Dockerfile
+++ b/docker-compose/sdptr-sim/Dockerfile
@@ -1,20 +1,7 @@
-FROM ubuntu:20.04
+ARG LOCAL_DOCKER_REGISTRY_HOST
+ARG LOCAL_DOCKER_REGISTRY_LOFAR
 
-# Install build tools for sdptr and the C language OPC-UA lib
-RUN apt-get update && \
-    DEBIAN_FRONTEND=noninteractive apt-get install -y software-properties-common && \
-    DEBIAN_FRONTEND=noninteractive add-apt-repository ppa:open62541-team/ppa && \
-    apt-get update && \
-    DEBIAN_FRONTEND=noninteractive apt-get install -y autoconf automake git make g++ build-essential pkg-config libboost-dev libboost-regex-dev libboost-system-dev libboost-program-options-dev libopen62541-1-dev libopen62541-1-tools && \
-    apt-get clean
-
-# Install SDPTR
-RUN cd / && git clone --depth 1 --branch master https://git.astron.nl/lofar2.0/sdptr
-
-RUN cd /sdptr && \
-    autoreconf -v -f -i && \
-    ./configure && \
-    bash -c "make -j `nproc` install"
+FROM ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_LOFAR}/sdptr:latest
 
 COPY simulator.conf /sdptr/src/simulator.conf
 
diff --git a/docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter b/docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter
index 774d39a40ca19c9d979ad22565e57b4af3e9a831..6e48f0fddf5541bc66d9f57e31297c0027ea97b7 160000
--- a/docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter
+++ b/docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter
@@ -1 +1 @@
-Subproject commit 774d39a40ca19c9d979ad22565e57b4af3e9a831
+Subproject commit 6e48f0fddf5541bc66d9f57e31297c0027ea97b7
diff --git a/docker-compose/tango.yml b/docker-compose/tango.yml
index 937cc5c8ecbe00b553d4692988e6cc2e5d7c51ef..19500fca1eeba859f74e7ba54fc3cbb021ea0ce6 100644
--- a/docker-compose/tango.yml
+++ b/docker-compose/tango.yml
@@ -15,7 +15,7 @@ volumes:
 
 services:
   tangodb:
-    image: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-db:${TANGO_DB_VERSION}
+    image: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-db:${TANGO_DB_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}tangodb
     networks:
       - control
@@ -37,7 +37,7 @@ services:
     restart: unless-stopped
 
   databaseds:
-    image: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-cpp:${TANGO_CPP_VERSION}
+    image: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-cpp:${TANGO_CPP_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}databaseds
     networks:
       - control
diff --git a/docker-compose/tangotest.yml b/docker-compose/tangotest.yml
index 357c91df487b51379db221f7cb984bc05018f5e3..a97290d48f437b1c65b0bef01f6788fb525b2275 100644
--- a/docker-compose/tangotest.yml
+++ b/docker-compose/tangotest.yml
@@ -11,7 +11,7 @@ version: '2'
 
 services:
   tangotest:
-    image: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-java:${TANGO_JAVA_VERSION}
+    image: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-java:${TANGO_JAVA_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}tangotest
     networks:
       - control
diff --git a/docker-compose/timescaledb/Dockerfile b/docker-compose/timescaledb/Dockerfile
index 86e7a820c3fc4e4ba0eaa8edaa9c7e421c87231f..4ca5dd07826a14bc17d7d982cdd1198fbe657980 100644
--- a/docker-compose/timescaledb/Dockerfile
+++ b/docker-compose/timescaledb/Dockerfile
@@ -5,5 +5,5 @@ COPY resources/01_admin.sh docker-entrypoint-initdb.d/002_admin.sh
 COPY resources/02_hdb_schema.sql docker-entrypoint-initdb.d/003_hdb_schema.sql
 COPY resources/03_hdb_roles.sql docker-entrypoint-initdb.d/004_hdb_roles.sql
 COPY resources/04_hdb_ext_aggregates.sql docker-entrypoint-initdb.d/005_hdb_ext_aggregates.sql
-COPY resources/05_lofar_views.sql docker-entrypoint-initdb.d/006_lofar_views.sql
+COPY resources/05_lofar_func.sh docker-entrypoint-initdb.d/006_lofar_func.sh
 COPY resources/06_cleanup.sql docker-entrypoint-initdb.d/007_cleanup.sql
diff --git a/docker-compose/timescaledb/resources/05_lofar_func.sh b/docker-compose/timescaledb/resources/05_lofar_func.sh
new file mode 100644
index 0000000000000000000000000000000000000000..4797c7a6df2acf11e3709221a9bd4fd335128264
--- /dev/null
+++ b/docker-compose/timescaledb/resources/05_lofar_func.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+psql << EOF
+\c hdb
+CREATE OR REPLACE FUNCTION mask(double precision[], boolean[])
+RETURNS double precision[] LANGUAGE sql
+AS \$function\$ SELECT ARRAY(SELECT
+case when \$2[i] then \$1[i]
+else '0'::double precision end
+FROM generate_subscripts(\$1,1) g(i)) \$function\$;
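+-- e.g. SELECT mask(ARRAY[1.5, 2.5], ARRAY[true, false]) yields {1.5,0}:
+-- values for which the mask is false are replaced by 0.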
+EOF
diff --git a/docker-compose/timescaledb/resources/05_lofar_views.sql b/docker-compose/timescaledb/resources/05_lofar_views.sql
deleted file mode 100644
index 82aa8e5de360b434ecf081d1b02c000dff5d50f7..0000000000000000000000000000000000000000
--- a/docker-compose/timescaledb/resources/05_lofar_views.sql
+++ /dev/null
@@ -1,54 +0,0 @@
--- LOFAR 2.0 CUSTOMIZED VIEWS
-
-\c hdb
--- SDP FPGA Temperature
-create or replace view "sdp_fpga_temp" as
-select
-  ac.att_name as "attribute",
-  aad.data_time AS "time",
-  aad.value_r
-FROM att_array_devdouble aad join att_conf ac 
-on aad.att_conf_id = ac.att_conf_id 
-where aad.value_R is not null
-and ac."domain" ='stat' and ac."family" ='sdp' and ac."member" ='1'
-ORDER BY aad.data_time; 
-
--- SDP FPGA Mask 
-create or replace view "sdp_tr_fpga_mask" as
-select
-  ac.att_name as "attribute",
-  aab.data_time AS "time",
-  aab.value_r
-FROM att_array_devboolean aab join att_conf ac 
-on aab.att_conf_id = ac.att_conf_id 
-where aab.value_R is not null
-and ac."domain" ='stat' and ac."family" ='sdp' and ac."member" ='1'
-ORDER BY aab.data_time;
-
--- SDP Masked values (rounded to 1 second)
-create or replace view "sdp_masked_temp_values" as 
-select time_bucket('1 second',t.time) as "temp_time",
-time_bucket('1 second',m.time) as "mask_time",
-t.value_r as "temperature",
-m.value_r as "mask"
-from sdp_fpga_temp as t
-inner join sdp_tr_fpga_mask as m
-on time_bucket('1 second',t.time) = time_bucket('1 second',m.time) 
-/* Replace if possible with SQL loop */
-where m.value_r[1]=true and
-m.value_r[2]=true and
-m.value_r[3]=true and
-m.value_r[4]=true and
-m.value_r[5]=true and
-m.value_r[6]=true and
-m.value_r[7]=true and
-m.value_r[8]=true and
-m.value_r[9]=true and
-m.value_r[10]=true and
-m.value_r[11]=true and
-m.value_r[12]=true and
-m.value_r[13]=true and
-m.value_r[14]=true and
-m.value_r[15]=true and
-m.value_r[16]=true
-order by t."time" ;
diff --git a/docker-compose/unb2-sim.yml b/docker-compose/unb2-sim.yml
index d1ecaaa70a3c1e52f39ab1453d2ec8eb191f8831..b01802cd0526abe325c710f08fe965d6244cb2ba 100644
--- a/docker-compose/unb2-sim.yml
+++ b/docker-compose/unb2-sim.yml
@@ -10,6 +10,9 @@ services:
   unb2-sim:
     build:
         context: pypcc-sim-base
+        args:
+         - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
+         - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}unb2-sim
     networks:
       - control
diff --git a/docs/source/devices/beam.rst b/docs/source/devices/beam.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c469162dfe25fafc1c79cecae15c7e1b36cf47e4
--- /dev/null
+++ b/docs/source/devices/beam.rst
@@ -0,0 +1,8 @@
+Beam
+====================
+
+The ``beam == DeviceProxy("STAT/Beam/1")`` device sets up the beamforming on the station:
+
+- The HBA tiles in RECV need *analog beamforming* to combine their 16 antennas into a single input for the RCU,
+- The LBAs or HBA tiles need *digital beamforming* in SDP to combine their signals into beamlets.
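+
+For example, to obtain a proxy to the device (the Jupyter startup profile already provides the ``beam`` alias)::
+
+  from tango import DeviceProxy
+
+  beam = DeviceProxy("STAT/Beam/1")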
+
diff --git a/docs/source/devices/sdp.rst b/docs/source/devices/sdp.rst
index c4d4032f47e7e4988437adb54dc7778bd53dc4b1..2ca1ea6fa95e7295a21041d12242e15cec3b8001 100644
--- a/docs/source/devices/sdp.rst
+++ b/docs/source/devices/sdp.rst
@@ -62,6 +62,8 @@ Waveform Generator
 
 The antenna input of SDP can be replaced by an internal waveform generator for debugging and testing purposes. The generator is configured per antenna per FPGA:
 
+.. note:: The Waveform Generator needs to be toggled off and on using ``FPGA_wg_enable_RW`` for new settings to become active on the station.
+
 :FPGA_wg_enable_RW: Whether the waveform generator is enabled for each input.
 
   :type: ``bool[N_fpgas][N_ants_per_fpga]``
@@ -91,5 +93,6 @@ For example, the following code inserts a wave on LBA subband 102 on FPGAs 8 - 1
   sdp.FPGA_wg_amplitude_RW = [[0.1] * 12] * 16
   sdp.FPGA_wg_frequency_RW = [[102 * 200e6/1024] * 12] * 16
 
-  # enable waveform generator
+  # toggle and enable waveform generator
+  sdp.FPGA_wg_enable_RW = [[False] * 12] * 16
   sdp.FPGA_wg_enable_RW = [[True] * 12] * 16
diff --git a/docs/source/devices/sst-xst.rst b/docs/source/devices/sst-xst.rst
index cdb689e457dc2d6abebcfc1391f057135f30b722..cce03f691cc488c8c06d8910db9f3bd8da63a828 100644
--- a/docs/source/devices/sst-xst.rst
+++ b/docs/source/devices/sst-xst.rst
@@ -8,6 +8,8 @@ The statistics are exposed in two ways, as:
 - *Attributes*, representing the most recently received values,
 - *TCP stream*, to allow the capture and recording of the statistics over any period of time.
 
+If the statistics are not received, or are zero, see :ref:`statistics-debugging`.
+
 See the following links for a full description of the SST and XST monitoring and control points:
 
 - https://support.astron.nl/confluence/pages/viewpage.action?spaceKey=L2M&title=L2+STAT+Decision%3A+SC+-+SDP+OPC-UA+interface
@@ -63,7 +65,7 @@ Typically, ``N_ant == 192``, and ``N_blocks == 136``.
 
 The metadata refers to the *blocks*, which are emitted by the FPGAs to represent the XSTs between 12 x 12 consecutive antennas. The following code converts block numbers to the indices of the first antenna pair in a block::
 
-  from common.baselines import baseline_from_index
+  from tangostationcontrol.common.baselines import baseline_from_index
 
   def first_antenna_pair(block_nr: int) -> int:
       coarse_a, coarse_b = baseline_from_index(block_nr)
@@ -71,11 +73,30 @@ The metadata refers to the *blocks*, which are emitted by the FPGAs to represent
 
 Conversely, to calculate the block index for an antenna pair ``(a,b)``, use::
 
-  from common.baselines import baseline_index
+  from tangostationcontrol.common.baselines import baseline_index
 
   def block_nr(a: int, b: int) -> int:
       return baseline_index(a // 12, b // 12)
 
+Configuring the XSTs
+`````````````````````````````
+
+The XSTs can be configured with several settings:
+
+.. note:: The XST processing needs to be toggled off and on using ``FPGA_xst_processing_enable_RW`` for new settings to become active on the station.
+
+:FPGA_xst_processing_enable_RW: Whether XSTs are computed on each FPGA.
+
+  :type: ``bool[N_fpgas]``
+
+:FPGA_xst_integration_interval_RW: The time interval to integrate over, per FPGA, in seconds.
+
+  :type: ``float[N_fpgas]``
+
+:FPGA_xst_subband_select_RW: The subband to cross correlate, per FPGA. Note: only the entries ``[x][1]`` should be set, the rest should be zero.
+
+  :type: ``uint32[N_fpgas][8]``
+
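+For example, the following code configures the XSTs to cross correlate subband 102. This is a minimal sketch in the style of the waveform-generator example in the SDP documentation; it assumes ``N_fpgas == 16``, and the subband and interval values are illustrative::
+
+  # select the subband to cross correlate; only entry [x][1] is used
+  xst.FPGA_xst_subband_select_RW = [[0, 102, 0, 0, 0, 0, 0, 0]] * 16
+
+  # integrate over 1 second
+  xst.FPGA_xst_integration_interval_RW = [1.0] * 16
+
+  # toggle and enable the XST processing to activate the new settings
+  xst.FPGA_xst_processing_enable_RW = [False] * 16
+  xst.FPGA_xst_processing_enable_RW = [True] * 16
+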
 Subscribe to statistics streams
 ---------------------------------
 
diff --git a/docs/source/devices/using.rst b/docs/source/devices/using.rst
index b5c41bd8089bc88d606ee0fe7bf8c442ff259475..e328467f6bb01b17577bc8b5e1b99b5caeed7090 100644
--- a/docs/source/devices/using.rst
+++ b/docs/source/devices/using.rst
@@ -50,7 +50,9 @@ FAULT
 ``````````
 
 If a device enters the ``FAULT`` state, it means an error occurred that is fundamental to the operation of the software device. For example, the connection
-to the hardware was lost.
+to the hardware was lost. To see the reason for the error, use
+
+:status(): The verbose status of the device, e.g. the reason why the device went to ``FAULT``.
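+
+For example (a sketch; the exact message depends on the device and the error)::
+
+  print(device.status())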
 
 Interaction with the device in the ``FAULT`` state is undefined, and attributes cannot be read or written. The device needs to be reinitialised, which
 typically involves the following sequence of commands::
diff --git a/docs/source/faq.rst b/docs/source/faq.rst
index 631c6fa3a3f17571970c2debd638c6c0bf5c7624..05022cd81032316038876a788d4bac0ea8e645c7 100644
--- a/docs/source/faq.rst
+++ b/docs/source/faq.rst
@@ -147,6 +147,17 @@ Let's see where the packets get stuck. Let us assume your MTU=9000 network inter
 
 - If still no error was found, you've likely hit a bug in our software.
 
+Inspecting SST/XST packets
+``````````````````````````````````````````````````````````````````````````````````````````````````````````````
+
+The fields ``sst.last_packet_R`` and ``xst.last_packet_R`` contain a raw dump of the last received packet for that statistic. Parsing these packets is aided greatly by using our packet parser::
+
+  from tangostationcontrol.devices.sdp.statistics_packet import SSTPacket, XSTPacket
+
+  # print the headers of the last received packets
+  print(SSTPacket(bytes(sst.last_packet_R)).header())
+  print(XSTPacket(bytes(xst.last_packet_R)).header())
+
 Other containers
 --------------------------------------------------------------------------------------------------------------
 
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 119d95164313538606e9f310f2e65ca6aca233d8..7baab29343d3f7c4015e6ed60e6ea4692e6073e3 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -19,6 +19,7 @@ Even without having access to any LOFAR2.0 hardware, you can install the full st
    installation
    interfaces/overview
    devices/using
+   devices/beam
    devices/boot
    devices/docker
    devices/recv
diff --git a/sbin/run_integration_test.sh b/sbin/run_integration_test.sh
index c3c37983ae63688658211337ebc17e83fa7ff546..05e076ed00f79858fa44be6e23f3b94370672cb9 100755
--- a/sbin/run_integration_test.sh
+++ b/sbin/run_integration_test.sh
@@ -11,11 +11,15 @@ fi
 
 cd "$LOFAR20_DIR/docker-compose" || exit 1
 
-# Make sure builds are recent, and use our building parameters.
-make build
+# Build only the required images. Building everything makes CI take really
+# long to finish, especially grafana / jupyter / prometheus. The jupyter
+# image alone is > 2.5 GB, and overlayfs is really slow.
+make build device-sdp device-recv device-sst device-unb2 device-xst
+make build sdptr-sim recv-sim unb2-sim apsct-sim apspu-sim
+make build databaseds dsconfig elk integration-test
 
 # Start and stop sequence
-make stop device-boot device-docker device-apsct device-apspu device-sdp device-recv device-sst device-unb2 device-xst sdptr-sim recv-sim unb2-sim apsct-sim apspu-sim
+make stop device-boot device-docker device-apsct device-apspu device-sdp device-recv device-sst device-unb2 device-xst device-beam sdptr-sim recv-sim unb2-sim apsct-sim apspu-sim
 make start databaseds dsconfig elk
 
 # Give dsconfig and databaseds time to start
@@ -32,7 +36,7 @@ make start sdptr-sim recv-sim unb2-sim apsct-sim apspu-sim
 # Give the simulators time to start
 sleep 5
 
-make start device-boot device-apsct device-apspu device-sdp device-recv device-sst device-unb2 device-xst
+make start device-boot device-apsct device-apspu device-sdp device-recv device-sst device-unb2 device-xst device-beam
 
 # Give devices time to restart
 # TODO(Corne Lukken): Use a nicer more reliable mechanism
diff --git a/sbin/tag_and_push_docker_image.sh b/sbin/tag_and_push_docker_image.sh
index 799ab1cd779bb5caf840685f339080b57916063b..631413235d4ba9b2f39b9b0e216d31cd21913bdf 100755
--- a/sbin/tag_and_push_docker_image.sh
+++ b/sbin/tag_and_push_docker_image.sh
@@ -1,40 +1,177 @@
 #!/bin/bash -e
 
-# Tag and push which image version?
-DOCKER_TAG=latest
+function usage {
+    echo "./$(basename "$0")
+      no arguments: downloads the remote images and pushes them to the ASTRON
+      registry. The versions downloaded are controlled by the
+      docker-compose/.env file"
+    echo ""
+    echo "./$(basename "$0") -h
+      displays this help message"
+    echo ""
+    echo "./$(basename "$0") <docker service name> <tag>
+      downloads the latest version of the image from the ASTRON registry,
+      builds the specified service, and pushes the image with the specified
+      tag to the ASTRON registry"
+    echo ""
+    echo "./$(basename "$0") pull <tag>
+      downloads all images for the integration test with the specified tag,
+      falling back to 'latest' if unavailable. If neither exists on the
+      ASTRON registry, the script exits with 1. The images are retagged to
+      match the output of docker-compose."
+}
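+
+# Example invocations (assumptions: lofar20rc.sh has been sourced, and
+# 'my-branch' is a hypothetical image tag):
+#   ./sbin/tag_and_push_docker_image.sh                    # mirror remote images
+#   ./sbin/tag_and_push_docker_image.sh device-sdp latest  # build and push one service
+#   ./sbin/tag_and_push_docker_image.sh pull my-branch     # pull integration test cache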
 
-# Change to git tag or git hash if no tag
-VERSION=$(date +"%Y-%M-%d")
+# list of options accepted by this script
+optstring=":h"
 
-SKA_REPO="nexus.engageska-portugal.pt/ska-docker"
-LOFAR_REPO="git.astron.nl:5000/lofar2.0/tango"
+while getopts ${optstring} arg; do
+  case ${arg} in
+    h)
+      usage
+      exit 0
+      ;;
+    :)
+      echo "$0: Must supply an argument to -$OPTARG." >&2
+      exit 1
+      ;;
+    ?)
+      echo "Invalid option: -${OPTARG}."
+      exit 2
+      ;;
+  esac
+done
 
-# Compile a list of the SKA images
-SKA_IMAGES=$(for i in $(docker images | grep -E ${DOCKER_TAG} | grep -E ${SKA_REPO} | cut -d' ' -f1); do printf "%s " "${i}"; done)
+if [ -z "${LOFAR20_DIR+x}" ]; then
+  echo "LOFAR20_DIR not set, did you forget to source lofar20rc.sh?"
+  exit 1
+fi
 
-# Compile a list of LOFAR2.0 images
-LOFAR_IMAGES=$(for i in $(docker images | grep -E ${DOCKER_TAG} | grep -E -v "${SKA_REPO}|${LOFAR_REPO}" | cut -d' ' -f1); do printf "%s " "${i}"; done)
+# shellcheck disable=SC1090
+. "${LOFAR20_DIR}/docker-compose/.env" || exit 1
 
-function tag_and_push()
-{
-    (
-        docker tag "${1}" "${2}"
-        docker push "${2}"
-    ) &
-}
+# List of images and their tag
+REMOTE_IMAGES=(
+  "tango-dsconfig:${TANGO_DSCONFIG_VERSION}" "tango-java:${TANGO_JAVA_VERSION}"
+  "tango-itango:${TANGO_ITANGO_VERSION}" "tango-pogo:${TANGO_POGO_VERSION}"
+  "tango-cpp:${TANGO_CPP_VERSION}" "tango-db:${TANGO_DB_VERSION}"
+  "tango-dsconfig:${TANGO_DSCONFIG_VERSION}" "tango-rest:${TANGO_REST_VERSION}"
+)
 
-# Rename the SKA images for the LOFAR2.0 repo
-# and push them to the LOFAR2.0 repo
-for IMAGE in ${SKA_IMAGES}; do
-    PUSH_IMAGE=${IMAGE//${SKA_REPO}/${LOFAR_REPO}}:${VERSION}
-    tag_and_push "${IMAGE}" "${PUSH_IMAGE}"
-done
+# If first argument of bash script not set run first stage
+if [ -z "${1+x}" ]; then
+  echo "Pulling and retagging remote images"
 
-# Rename the LOFAR2.0 images for the LOFAR2.0 repo
-# and push them to the LOFAR2.0 repo
-for IMAGE in ${LOFAR_IMAGES}; do
-    PUSH_IMAGE=${LOFAR_REPO}/${IMAGE}:${VERSION}
-    tag_and_push "${IMAGE}" "${PUSH_IMAGE}"
-done
+  # Iterate over all the REMOTE_IMAGES: pull each from the remote registry and push it to the local one
+  for image in "${REMOTE_IMAGES[@]}"; do
+    remote_url="${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-${image}"
+    local_url="${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/${image}"
+    docker pull "${remote_url}"
+    docker tag "${remote_url}" "${local_url}"
+    docker push "${local_url}"
+  done
+
+  exit 0
+fi
+
+# Triples of the docker-compose service name, the image name, and whether the
+# image is needed for the integration tests.
+# TODO(Corne): Have this list generated from the .yml files
+LOCAL_IMAGES=(
+  "elk elk y" "elk-configure-host elk-configure-host y"
+  "lofar-device-base lofar-device-base y"
+
+  "apsct-sim docker-compose_apsct-sim y" "apspu-sim docker-compose_apspu-sim y"
+  "recv-sim docker-compose_recv-sim y" "sdptr-sim docker-compose_sdptr-sim y"
+  "unb2-sim docker-compose_unb2-sim y"
+
+  "device-apsct device-apsct y" "device-apspu device-apspu y"
+  "device-boot device-boot y" "device-docker device-docker y"
+  "device-observation_control device-observation_control y"
+  "device-recv device-recv y" "device-sdp device-sdp y"
+  "device-sst device-sst y" "device-unb2 device-unb2 y"
+  "device-xst device-xst y"
+
+  "itango docker-compose_itango y"
+
+  "grafana grafana n" "prometheus prometheus n"
+  "jupyter docker-compose_jupyter n"
+  "integration-test docker-compose_integration-test n"
+  "tango-prometheus-exporter docker-compose_tango-prometheus-exporter n"
+)
+
+
+# If the first argument is set and is not 'pull', run the second stage:
+# determine which LOCAL_IMAGE to build and push from the argument
+if [ ! -z "${1+x}" ] && [ "${1}" != "pull" ]; then
+
+  # The second argument, the tag, must be set
+  if [ -z "${2+x}" ]; then
+    echo "Error: the second argument (the tag) must be set"
+    exit 1
+  fi
+
+  # Set the tag and image variable, variables $1 and $2 are shadowed later
+  local_image="${1}"
+  tag="${2}"
+
+  cd "${LOFAR20_DIR}/docker-compose" || exit 1
+
+  # Loop through images and find the specified one
+  for image in "${LOCAL_IMAGES[@]}"; do
+    # 'set' splits the tuple into $1 and $2; this shadows the previous variables
+    # shellcheck disable=SC2086
+    set -- $image
+    if [ "${local_image}" == "${1}" ]; then
+      echo "Building image for ${1} container"
+      local_url="${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/${2}"
+
+      # If the tag is not 'latest', then this is not a tagged master build and
+      # we can pull the latest image to use as cache.
+      if [ "${tag}" != "latest" ]; then
+        docker pull "${local_url}:latest"
+      fi
+
+      make build "${1}"
+      docker tag "${2}" "${local_url}:${tag}"
+      docker push "${local_url}:${tag}"
+    fi
+  done
+
+  exit 0
+fi
+
+# Final stage: pull images for the integration test cache. Try the image with
+# the specified tag first; if that fails, download 'latest' instead
+if [ ! -z "${1+x}" ] && [ "${1}" == "pull" ]; then
+  echo "Pulling images for integration test cache"
+
+  # The second argument, the tag, must be set
+  if [ -z "${2+x}" ]; then
+    echo "Error: the second argument (the tag) must be set"
+    exit 1
+  fi
+
+  # Set the tag variable
+  tag="${2}"
+
+  for image in "${LOCAL_IMAGES[@]}"; do
+    # 'set' splits the tuple into $1 and $2; this shadows the previous variables
+    # shellcheck disable=SC2086
+    set -- $image
+
+    # Only download images which are needed for integration test
+    if [ "${3}" == "y" ]; then
+      local_url="${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/${2}"
+      # Pull images, at least one of the two images must succeed
+      echo "docker pull ${local_url}:${tag}"
+      docker pull "${local_url}:${tag}" || docker pull "${local_url}:latest" || exit 1
+      # Ensure the images will have the same tags as generated by docker-compose
+      docker tag "${local_url}:${tag}" "${2}" || docker tag "${local_url}:latest" "${2}" || exit 1
+    fi
+  done
+
+  exit 0
+fi
 
-wait
+# Somehow nothing ran; that is an error, so do not fail silently
+exit 1
diff --git a/tangostationcontrol/requirements.txt b/tangostationcontrol/requirements.txt
index b1620255b5a45c9e3f653661e65de6732fd93a07..f2d3743cd07dcea9c3d88255f9a583dfe67f714b 100644
--- a/tangostationcontrol/requirements.txt
+++ b/tangostationcontrol/requirements.txt
@@ -4,10 +4,10 @@
 
 asyncua >= 0.9.90 # LGPLv3
 PyMySQL[rsa] >= 1.0.2 # MIT
+psycopg2-binary >= 2.9.2 # LGPL
 sqlalchemy >= 1.4.26 #MIT
-GitPython >= 3.1.24 # BSD
 snmp >= 0.1.7 # GPL3
 h5py >= 3.5.0 # BSD
 psutil >= 5.8.0 # BSD
 docker >= 5.0.3 # Apache 2
-python-logstash-async >= 2.3.0 # MIT
\ No newline at end of file
+python-logstash-async >= 2.3.0 # MIT
diff --git a/tangostationcontrol/setup.cfg b/tangostationcontrol/setup.cfg
index e6101c890893c961e1eff27ce3d3ab4e019ea312..31662e40d639c2af2de417e734e794dc5679d4a4 100644
--- a/tangostationcontrol/setup.cfg
+++ b/tangostationcontrol/setup.cfg
@@ -36,6 +36,7 @@ where=./
 console_scripts =
     l2ss-apsct = tangostationcontrol.devices.apsct:main
     l2ss-apspu = tangostationcontrol.devices.apspu:main
+    l2ss-beam = tangostationcontrol.devices.beam:main
     l2ss-boot = tangostationcontrol.devices.boot:main
     l2ss-docker-device = tangostationcontrol.devices.docker_device:main
     l2ss-observation = tangostationcontrol.devices.observation:main
diff --git a/tangostationcontrol/tangostationcontrol/common/lofar_logging.py b/tangostationcontrol/tangostationcontrol/common/lofar_logging.py
index 673af8f959edbdc748545bdcf6f08388c3fa944b..f6c6457d2d002276b91e6115026f892b6cb22fdf 100644
--- a/tangostationcontrol/tangostationcontrol/common/lofar_logging.py
+++ b/tangostationcontrol/tangostationcontrol/common/lofar_logging.py
@@ -125,6 +125,9 @@ def configure_logger(logger: logging.Logger=None, log_extra=None, debug=False):
     # don't spam errors for git, as we use it in our log handler, which would result in an infinite loop
     logging.getLogger("git").setLevel(logging.ERROR)
 
+    # don't spam debug messages when fetching URLs
+    logging.getLogger("urllib3").setLevel(logging.INFO)
+
     # for now, also log to stderr
     # Set up logging in a way that it can be understood by a human reader, be
     # easily grep'ed, be parsed with a couple of shell commands and
diff --git a/tangostationcontrol/tangostationcontrol/devices/README.md b/tangostationcontrol/tangostationcontrol/devices/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..f546fb339077c328e55dfd359c8cb23ccfbd64d1
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/devices/README.md
@@ -0,0 +1,18 @@
+# Devices
+
+This directory contains the sources for our custom Tango devices.
+
+## Adding a new device
+
+If a new device is added, it will (likely) need to be referenced in several places. Adjust or add the following files (referenced from the repository root), following the pattern shown by the devices already there:
+
+- Adjust `CDB/LOFAR_ConfigDb.json` to create the device in the Tango device database,
+- Adjust `docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py` to make an alias for it available in Jupyter,
+- Adjust `tangostationcontrol/tangostationcontrol/devices/boot.py` to add the device to the station initialisation sequence,
+- Add to `docker-compose/` to create a YAML file to start the device in a docker container. NOTE: it needs a unique 57xx port assigned,
+- Adjust `tangostationcontrol/setup.cfg` to add an entry point for the device in the package installation,
+- Add to `tangostationcontrol/tangostationcontrol/integration_test/devices/` to add an integration test,
+- Adjust `sbin/run_integration_test.sh` to have the device started when running the integration tests,
+- Add to `docs/source/devices/` to mention the device in the end-user documentation,
+- Adjust `docs/source/index.rst` to include the newly created file in `docs/source/devices/`.
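+
+For example, a hypothetical device `foo` would get the console script entry `l2ss-foo = tangostationcontrol.devices.foo:main` under `console_scripts` in `tangostationcontrol/setup.cfg`, and would become reachable as `DeviceProxy("STAT/Foo/1")` once added to the configuration database.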
+
diff --git a/tangostationcontrol/tangostationcontrol/devices/beam.py b/tangostationcontrol/tangostationcontrol/devices/beam.py
new file mode 100644
index 0000000000000000000000000000000000000000..1711a8a89f9db0e73cc4bf2fd38e81a7a95ded01
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/devices/beam.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+""" Beam Device Server for LOFAR2.0
+
+"""
+
+# PyTango imports
+from tango import AttrWriteType
+import numpy
+# Additional import
+
+from tangostationcontrol.devices.device_decorators import *
+from tangostationcontrol.common.entrypoint import entry
+from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper
+from tangostationcontrol.devices.lofar_device import lofar_device
+from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
+
+__all__ = ["Beam", "main"]
+
+
+@device_logging_to_python()
+class Beam(lofar_device):
+    # -----------------
+    # Device Properties
+    # -----------------
+
+    # ----------
+    # Attributes
+    # ----------
+
+    pass
+
+    # --------
+    # overloaded functions
+    # --------
+
+
+    # --------
+    # Commands
+    # --------
+
+
+# ----------
+# Run server
+# ----------
+def main(**kwargs):
+    """Main function of the ObservationControl module."""
+    return entry(Beam, **kwargs)
diff --git a/tangostationcontrol/tangostationcontrol/devices/boot.py b/tangostationcontrol/tangostationcontrol/devices/boot.py
index c4df70bf5027a8068a1e58326c5ad442e63ad4fc..5f2e9b76e47a9ed61207a07fbc0584ec355b6620 100644
--- a/tangostationcontrol/tangostationcontrol/devices/boot.py
+++ b/tangostationcontrol/tangostationcontrol/devices/boot.py
@@ -251,6 +251,7 @@ class Boot(lofar_device):
                        "STAT/SDP/1",    # SDP controls the mask for SST/XST/BST, so initialise it first
                        "STAT/SST/1",
                        "STAT/XST/1",
+                       "STAT/Beam/1",   # Accesses RECV and SDP
                       ],
     )
 
diff --git a/tangostationcontrol/tangostationcontrol/devices/recv.py b/tangostationcontrol/tangostationcontrol/devices/recv.py
index 5070b8cef0f54e5d4165701ccaefe0a637faf2d7..e6f0d1f36ed16790f5532b7f9cafce3419ff8579 100644
--- a/tangostationcontrol/tangostationcontrol/devices/recv.py
+++ b/tangostationcontrol/tangostationcontrol/devices/recv.py
@@ -61,6 +61,9 @@ class RECV(opcua_device):
     RCU_LED_colour_R = attribute(dtype=(numpy.uint32,), max_dim_x=32, fget=lambda self: (2 * self.proxy.RCU_LED_green_on_R + 4 * self.proxy.RCU_LED_red_on_R).astype(numpy.uint32))
 
     ANT_mask_RW                  = attribute_wrapper(comms_annotation=["ANT_mask_RW"               ],datatype=numpy.bool_  , dims=(3,32), access=AttrWriteType.READ_WRITE)
+
+    # The HBAT beamformer delays represent 32 delays for each of the 96 inputs.
+    # The 32 delays deconstruct as delays[polarisation][dipole], and each delay is the number of 'delay steps' to apply (0.5ns for HBAT1).
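+    # For example, with 0.5ns steps, a desired delay of 2.5ns is encoded as the value 5 (= 2.5ns / 0.5ns).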
     HBAT_BF_delays_R             = attribute_wrapper(comms_annotation=["HBAT_BF_delays_R"          ],datatype=numpy.int64  , dims=(32,96))
     HBAT_BF_delays_RW            = attribute_wrapper(comms_annotation=["HBAT_BF_delays_RW"         ],datatype=numpy.int64  , dims=(32,96), access=AttrWriteType.READ_WRITE)
     HBAT_LED_on_R                = attribute_wrapper(comms_annotation=["HBAT_LED_on_R"             ],datatype=numpy.bool_  , dims=(32,96))
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/statistics_packet.py b/tangostationcontrol/tangostationcontrol/devices/sdp/statistics_packet.py
index c98ae9b5bdc604e8a55480cc5473e658b10cefa1..59c74e296c2eebdd677d448bdae523be8d149934 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/statistics_packet.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/statistics_packet.py
@@ -75,13 +75,13 @@ class StatisticsPacket(object):
                 "Invalid SDP statistics packet: packet marker (first byte) is {}, not one of 'SBX'.".format(
                     self.marker))
 
+    # format string for the header, see unpack below
+    header_format = ">cBL HHB BHL BBH HQ"
+    header_size = struct.calcsize(header_format)
+
     def unpack(self):
         """ Unpack the packet into properties of this object. """
 
-        # format string for the header, see unpack below
-        self.header_format = ">cBL HHB BHL BBH HQ"
-        self.header_size = struct.calcsize(self.header_format)
-
         # unpack fields
         try:
             (self.marker_raw,
@@ -335,11 +335,35 @@ def main(args=None, **kwargs):
     import sys
     import pprint
 
-    # read all of stdin, even though we only parse the first packet. we're too lazy to intelligently decide when
-    # the packet is complete and can stop reading.
-    data = sys.stdin.buffer.read()
-    packet = SSTPacket(data)
-
-    # print header & payload
-    pprint.pprint(packet.header())
-    pprint.pprint(packet.payload)
+    # packet counter
+    nr = 0
+
+    # byte offset in the stream
+    offset = 0
+
+    while True:
+        # read just the header
+        header = sys.stdin.buffer.read(StatisticsPacket.header_size)
+        if not header:
+            break
+
+        # read the payload
+        packet = StatisticsPacket(header)
+        payload_size = packet.expected_size() - len(header)
+        payload = sys.stdin.buffer.read(payload_size)
+
+        # construct the packet based on type
+        if packet.marker == 'S':
+            packet = SSTPacket(header + payload)
+        elif packet.marker == 'X':
+            packet = XSTPacket(header + payload)
+        elif packet.marker == 'B':
+            packet = BSTPacket(header + payload)
+
+        # print header
+        print(f"# Packet {nr} starting at offset {offset}")
+        pprint.pprint(packet.header())
+
+        # increment counters
+        nr += 1
+        offset += len(header) + len(payload)
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/device_proxy.py b/tangostationcontrol/tangostationcontrol/integration_test/device_proxy.py
index 00ba0904c7bd01fa2ce1453a4c6701d6a4246e14..25c92411ecaababad20007868cfd19bdc3e9e18a 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/device_proxy.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/device_proxy.py
@@ -1,8 +1,25 @@
+import logging
+import time
+
 from tango import DeviceProxy
 
+logger = logging.getLogger()
+
 
 class TestDeviceProxy(DeviceProxy):
 
     def __init__(self, *args, **kwargs):
         super(TestDeviceProxy, self).__init__(*args, **kwargs)
         self.set_timeout_millis(10000)
+
+    @staticmethod
+    def test_device_turn_off(endpoint):
+        d = TestDeviceProxy(endpoint)
+        try:
+            d.Off()
+        except Exception as e:
+            # Failing to turn Off devices should not raise errors here
+            logger.error(f"Failed to turn device off in teardown: {e}")
+
+            # Wait for 1 second to prevent propagating reconnection errors
+            time.sleep(1)
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/devices/base.py b/tangostationcontrol/tangostationcontrol/integration_test/devices/base.py
index 555f7256ea49d68465b1c45bf038d47b39beeb25..ef4854a8241aaee7e6099ae7d417d7b94acfa21e 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/devices/base.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/devices/base.py
@@ -34,11 +34,19 @@ class AbstractTestBases:
             # make sure the device starts in Off
             self.proxy.Off()
 
+            self.addCleanup(TestDeviceProxy.test_device_turn_off, self.name)
+
             super().setUp()
 
-        def tearDown(self):
-            """Turn device Off in teardown to prevent blocking tests"""
-            self.proxy.Off()
+        def test_device_fetch_state(self):
+            """Test if we can successfully fetch state"""
+
+            self.assertEqual(DevState.OFF, self.proxy.state())
+
+        def test_device_ping(self):
+            """Test if we can successfully ping the device server"""
+
+            self.assertGreater(self.proxy.ping(), 0)
 
         def test_device_initialize(self):
             """Test if we can transition to standby"""
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_apsct.py b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_apsct.py
index ca73fc236a7486b858298e863663cf997c70ccc8..d973581e88cceb7510d2d257b006aa81ebbfbdfb 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_apsct.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_apsct.py
@@ -9,6 +9,7 @@
 
 from .base import AbstractTestBases
 
+
 class TestDeviceAPSCT(AbstractTestBases.TestDeviceBase):
 
     def setUp(self):
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_apspu.py b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_apspu.py
index b9d2bc3d44acf0d4c2d3e1dd9b0adc095c86037b..5ebadc24029ce4a8e4a8b56805685d69aec73f2b 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_apspu.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_apspu.py
@@ -9,6 +9,7 @@
 
 from .base import AbstractTestBases
 
+
 class TestDeviceAPSPU(AbstractTestBases.TestDeviceBase):
 
     def setUp(self):
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_beam.py b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_beam.py
new file mode 100644
index 0000000000000000000000000000000000000000..b6be958cfde3b9f236947e9b1b76195c844bad40
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_beam.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+from .base import AbstractTestBases
+
+class TestDeviceBeam(AbstractTestBases.TestDeviceBase):
+
+    def setUp(self):
+        super().setUp("STAT/Beam/1")
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_boot.py b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_boot.py
index cc39c45d0a35da050aa041f0e5f2063df6312169..6d93080cc59ca176aea226893927f3321f503d89 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_boot.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_boot.py
@@ -11,6 +11,7 @@ import time
 
 from .base import AbstractTestBases
 
+
 class TestDeviceBoot(AbstractTestBases.TestDeviceBase):
 
     def setUp(self):
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_recv.py b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_recv.py
index 26e02ef312214df4a9304eafd050ea1438002da2..e96c385a7f976bc3ecb76d48b509b61d80454819 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_recv.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_recv.py
@@ -9,6 +9,7 @@
 
 from .base import AbstractTestBases
 
+
 class TestDeviceRECV(AbstractTestBases.TestDeviceBase):
 
     def setUp(self):
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_sdp.py b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_sdp.py
index 2df399ed4607ea802abcf647c02078193f1b7f03..7de27c34b9746c1541c2b7091c977ed32ce9e535 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_sdp.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_sdp.py
@@ -7,11 +7,14 @@
 # Distributed under the terms of the APACHE license.
 # See LICENSE.txt for more info.
 
+from tango._tango import DevState
 from .base import AbstractTestBases
 
+
 class TestDeviceSDP(AbstractTestBases.TestDeviceBase):
 
     def setUp(self):
+        """Intentionally recreate the device object in each test"""
         super().setUp("STAT/SDP/1")
 
     def test_device_sdp_read_attribute(self):
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_sst.py b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_sst.py
index 38f3528f531660704baa00f70a7073974256a19f..60675e121364b52fb692b2f8461001bfdc78b50a 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_sst.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_sst.py
@@ -15,24 +15,13 @@ from tango._tango import DevState
 
 from .base import AbstractTestBases
 
+
 class TestDeviceSST(AbstractTestBases.TestDeviceBase):
 
     def setUp(self):
+        """Intentionally recreate the device object in each test"""
         super().setUp("STAT/SST/1")
 
-    def test_device_on(self):
-        """Test if we can transition to on"""
-
-        port_property = {"Statistics_Client_TCP_Port": "4999"}
-        self.proxy.put_property(port_property)
-        self.proxy.initialise()
-
-        self.assertEqual(DevState.STANDBY, self.proxy.state())
-
-        self.proxy.on()
-
-        self.assertEqual(DevState.ON, self.proxy.state())
-
     def test_device_sst_send_udp(self):
         port_property = {"Statistics_Client_TCP_Port": "4998"}
         self.proxy.put_property(port_property)
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_unb2.py b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_unb2.py
index a35f70adde7af58408882e3b5ee3256a4838db84..d5731630188879e5f79f94f98951f8d6c1637ace 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_unb2.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/devices/test_device_unb2.py
@@ -9,7 +9,9 @@
 
 from .base import AbstractTestBases
 
+
 class TestDeviceUNB2(AbstractTestBases.TestDeviceBase):
 
     def setUp(self):
+        """Intentionally recreate the device object in each test"""
         super().setUp("STAT/UNB2/1")
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/automatic_polling_performance_test/Tango_Controls-Automatic_polling_performance_test.md b/tangostationcontrol/tangostationcontrol/test/devices/automatic_polling_performance_test/Tango_Controls-Automatic_polling_performance_test.md
new file mode 100644
index 0000000000000000000000000000000000000000..3078d28472daf767475b070ad85492e785167b1e
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/test/devices/automatic_polling_performance_test/Tango_Controls-Automatic_polling_performance_test.md
@@ -0,0 +1,236 @@
+# Test purpose
+
+Tango Controls Device Servers (DS) can automatically poll device attribute values. By default the polling is performed by a single thread that runs in the DS process and calls the `read` function of every attribute in all devices that run in the DS.
+The highest polling rate among all the attributes in all devices determines how often the polling thread runs. This can lead to a situation where the single polling thread is unable to finish executing all attribute `read` functions. When that happens, the attributes whose `read` function was not executed will not have updated values. Since the polling thread always visits the attributes in the same order, this leads to some attribute values never being updated.
+
+We investigate whether using more polling threads can alleviate the situation for a single device or for multiple devices that run in the same DS.
+
+# References
+
+Please refer to the reference documents for in-depth information about the automatic polling and how to configure a DS to use a dedicated thread per device for polling.
+
+[Attribute polling in Tango Controls](https://tango-controls.readthedocs.io/en/latest/development/device-api/ds-guideline/device-server-guidelines.html#tango-polling-mechanism)
+
+[Device polling in Tango Controls](https://tango-controls.readthedocs.io/en/latest/development/device-api/device-polling.html)
+
+[Configuring a DS to use per Device threads in polling](https://tango-controls.readthedocs.io/en/latest/development/advanced/reference.html#dserver-class-device-properties)
+
+# Test set-up
+
+- Two devices run in the same DS.
+    - Tango DB is modified to have a DS named `monitoring_performance_test/1`.
+    
+    - Tango DB is modified to have the DS named `monitoring_performance_test/1` run two devices named `test/monitoring_performance/1` and `test/monitoring_performance/2`. Both devices instantiate the same class `Monitoring_Performance_Device`.
+    
+    - Execute the DS like this: `bin/start-DS.sh devices/test/devices/automatic_polling_performance_test/monitoring_performance_test.py 1`
+    
+    - Get a DeviceProxy object to both devices like this:
+    
+        ```python
+        d1 = DeviceProxy('test/monitoring_performance/1')
+        d2 = DeviceProxy('test/monitoring_performance/2')
+        ```
+    
+        This will execute the device code and perform the automatic polling.
+    
+    - Devices in the appended data (section Data) are labelled d1 and d2.
+- Each device has 4 read-only attributes (MPs) that are arrays with 2e6 doubles.
+    - In Tango DB automatic polling every 1s is enabled for each array.
+- Two scenarios:
+    1. On read, a random number is generated and the entire array is populated with it. Populating the array with a new random number on every read access prevents caching.
+    2. A 0-filled array is created in init_device and copied to the attribute when read is called. (A sketch of both read variants follows this list.)
+- The number of polling threads is adjusted in init_device. The number of polling threads is a DS setting; due to inconsistencies in how Tango Controls handles input parameters, it is not possible to pass parameters to devices.
+- Number of polling threads: 1, 10, 100
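+
+A minimal sketch of the two read variants (plain functions here; in the actual device they are attribute `fget` methods):
+
+```python
+import numpy
+from numpy import random
+
+ARRAY_SIZE = 2000000
+
+# Scenario 1: a fresh random value on every read defeats caching,
+# so every poll pays the full cost of building the array.
+def read_array_random():
+    return numpy.full(ARRAY_SIZE, random.random())
+
+# Scenario 2: the array is built once (in init_device in the real
+# device) and the read function merely returns the cached object.
+ZERO_ARRAY = numpy.zeros(ARRAY_SIZE)
+
+def read_array_zero():
+    return ZERO_ARRAY
+```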
+
+# Test execution
+
+- The DS source code is modified for the number of polling threads according to the test set-up outlined above.
+- The DS is started manually.
+- The attribute polling resumes automatically as soon as the device state is set to ON in init_device.
+- The test script creates two Python processes that are assigned to one of the two devices each.
+    - The process creates a `DeviceProxy` object for its device.
+    - The process executes `attribute_polling_stats` and prints the results (see the sketch after this list).
+    - The process exits.
+- The DS is allowed to run for approximately 10 polling iterations.
+    - The DS processes will print statistics about the polling. 
+- The test is manually stopped and the output copied.
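+
+A minimal sketch of such a test driver; it uses the stock `DeviceProxy.polling_status()` call instead of the project's `attribute_polling_stats` helper:
+
+```python
+from multiprocessing import Process
+from tango import DeviceProxy
+
+def report(device_name):
+    proxy = DeviceProxy(device_name)
+    # polling_status() returns one status string per polled attribute,
+    # including the duration of the last read and the polling delay
+    for status in proxy.polling_status():
+        print(f"{device_name}:\n{status}")
+
+processes = [Process(target=report, args=(name,))
+             for name in ("test/monitoring_performance/1",
+                          "test/monitoring_performance/2")]
+for p in processes:
+    p.start()
+for p in processes:
+    p.join()
+```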
+
+Test results are in the attached file.
+
+# Findings
+
+The tests have shown that polling gets significantly delayed under certain circumstances:
+
+- When the read function takes a long time to return an attribute's value.
+    Examples that were tried out in this test (without excluding other causes):
+    - Creating a numpy array of size 2e6 on the fly with the same random number in every value.
+    - Reading an array of 2e5 values from an OPC-UA server and converting it to a numpy array.
+
+From this finding, other causes that negatively impact polling become immediately obvious and need to be avoided no matter what:
+
+- Fetching data on the fly over a slow communication link in an attribute's `read` function.
+- Fetching a large amount of data on the fly over a fast communication link in an attribute's `read` function.
+- Computing data on the fly in an attribute's `read` function.
+
+Adding more polling threads to a DS does not alleviate this situation. The reason lies in how Tango Controls polling works. By default the polling is performed by one thread per DS or, if the polling thread pool size is increased, by at most one thread per device. As the data suggests, even twice as many polling threads as devices running in a DS does not change the situation.
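+
+For reference, this is how the pool size is enlarged in this test (the same calls appear in the test device's init_device):
+
+```python
+from tango import Util
+
+# must run inside the DS process, e.g. in init_device
+util = Util.instance()
+print("Current polling thread pool size =", util.get_polling_threads_pool_size())
+util.set_polling_threads_pool_size(100)
+```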
+
+# Recommendation
+
+For devices that contain attributes with values that are large in byte size, i.e. arrays of significant size, it is strongly recommended to assess the situation, i.e. to measure how long reading the data over a communication link takes. If it is essential to poll a high-volume attribute at a rate that exceeds the performance capabilities of the DS's polling thread, several options are viable:
+
+- Distribute high-volume attributes among separate devices which each run in their own DS.
+    If necessary, create more and more devices with fewer and fewer attributes until the desired polling rate can be accomplished, even if this means that each high-volume attribute ends up in its own device. To Tango Controls or to device clients it does not matter whether a device contains one or many attributes.
+- Distribute high-volume attributes among separate devices but continue running them in the same DS. It is then necessary to increase the number of polling threads to at least the number of devices in the DS.
+
+The two solutions above are mutually exclusive. Other solutions that alleviate the load on single polling threads can be added to either of the above as well:
+
+- Lower the polling rate so that the polling thread has more time to call the attribute `read` functions. This means that an attribute's `read` function is allowed to take longer to perform its tasks. Note that this does not solve the original problem that the `read` function is simply too slow when it is called.
+- Move the updating of attribute values to a thread or process of their own. Then the `read` function can return the current value immediately because the value gets updated independently (see the sketch below).
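+
+A minimal sketch of the cached-value approach (class and attribute names are illustrative, not taken from the station code):
+
+```python
+import threading
+import time
+import numpy
+from numpy import random
+from tango.server import Device, attribute
+
+ARRAY_SIZE = 2000000
+
+class CachedDevice(Device):
+    array_r = attribute(dtype=(numpy.double,), max_dim_x=ARRAY_SIZE, fget="read_array")
+
+    def init_device(self):
+        Device.init_device(self)
+        self._array = numpy.zeros(ARRAY_SIZE)
+        # refresh the cached value in the background, so the polled
+        # read function only ever returns a precomputed array
+        threading.Thread(target=self._update_loop, daemon=True).start()
+
+    def _update_loop(self):
+        while True:
+            self._array = numpy.full(ARRAY_SIZE, random.random())
+            time.sleep(1)
+
+    def read_array(self):
+        return self._array
+```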
+
+There is an entirely different way to lower the pressure on the automatic polling: manual polling, i.e. performing everything that the polling thread does for a selected set of attributes. Tango Controls allows events to be sent manually. This opens the possibility of reading values over communication links, checking for value changes and sending out archive or on-change events from separate threads or processes whenever it is convenient.
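+
+A minimal sketch of manually pushed events, assuming the attribute is not polled and the names are illustrative:
+
+```python
+from tango.server import Device
+
+class ManualEventDevice(Device):
+    def init_device(self):
+        Device.init_device(self)
+        # tell Tango that we push these events ourselves,
+        # without value checking by the polling thread
+        self.set_change_event("array_r", True, False)
+        self.set_archive_event("array_r", True, False)
+
+    def publish(self, value):
+        # called from our own thread/process whenever convenient
+        self.push_change_event("array_r", value)
+        self.push_archive_event("array_r", value)
+```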
+
+# Executive summary
+
+Even Tango Controls cannot perform magic. High-volume attributes cannot be polled at infinite velocity. One has to distribute the polling load either over more DSs, more threads within a DS or more processes within a DS.
+
+# Data
+
+Filling the array with the same newly created random number on every read.
+
+threads = 1
+d1
+	iterations = 10
+
+	Polling duration
+	min = 0.7053400000000001[s]
+	max = 0.7174940000000001[s]
+	median = 0.7123280000000001[s]
+	mean = 0.7121181[s]
+	stddev = 0.004004320403014733[s]
+	
+	Polling delay
+	min = 0.792[s]
+	max = 2.207[s]
+	median = 0.8115[s]
+	mean = 1.2221[s]
+	stddev = 0.6406804897919087[s]
+d2
+	iterations = 10
+
+	Polling duration
+	min = 0.689903[s]
+	max = 0.715033[s]
+	median = 0.7069909999999999[s]
+	mean = 0.7061663000000001[s]
+	stddev = 0.00792590103458277[s]
+	
+	Polling delay
+	min = 0.744[s]
+	max = 2.245[s]
+	median = 0.758[s]
+	mean = 1.2010999999999998[s]
+	stddev = 0.681659805181441[s]
+
+threads = 10
+d1
+	iterations = 10
+
+	Polling duration
+	min = 0.700119[s]
+	max = 0.7102459999999999[s]
+	median = 0.710067[s]
+	mean = 0.7068808[s]
+	stddev = 0.004127314376201529[s]
+	
+	Polling delay
+	min = 0.802[s]
+	max = 2.196[s]
+	median = 0.806[s]
+	mean = 1.2213[s]
+	stddev = 0.6370044034384692[s]
+d2
+	iterations = 10
+
+	Polling duration
+	min = 0.6984130000000001[s]
+	max = 0.706296[s]
+	median = 0.7044239999999999[s]
+	mean = 0.7036658000000001[s]
+	stddev = 0.0025871636902213896[s]
+	
+	Polling delay
+	min = 0.758[s]
+	max = 2.24[s]
+	median = 0.759[s]
+	mean = 1.3504[s]
+	stddev = 0.7247257688256988[s]
+
+threads = 100
+d1
+	iterations = 10
+
+	Polling duration
+	min = 0.690158[s]
+	max = 0.720522[s]
+	median = 0.7119365[s]
+	mean = 0.7107762[s]
+	stddev = 0.008783150821886167[s]
+	
+	Polling delay
+	min = 0.79[s]
+	max = 2.209[s]
+	median = 0.8[s]
+	mean = 1.2176000000000002[s]
+	stddev = 0.6462041782594724[s]
+d2
+	iterations = 10
+
+	Polling duration
+	min = 0.702939[s]
+	max = 0.724869[s]
+	median = 0.7119840000000001[s]
+	mean = 0.7122735[s]
+	stddev = 0.006137572716473502[s]
+	
+	Polling delay
+	min = 0.749[s]
+	max = 2.25[s]
+	median = 0.755[s]
+	mean = 1.2005[s]
+	stddev = 0.6824934065615579[s]
+
+
+Returning a 0-filled array that was created in init_device
+threads = 100
+d1
+	iterations = 10
+
+	Polling duration
+	min = 0.005712[s]
+	max = 0.008997999999999999[s]
+	median = 0.0065065[s]
+	mean = 0.006732[s]
+	stddev = 0.0009050982267135427[s]
+	
+	Polling delay
+	min = 0.998[s]
+	max = 1.001[s]
+	median = 1.0[s]
+	mean = 0.9997[s]
+	stddev = 0.0007810249675906477[s]
+d2
+	iterations = 10
+
+	Polling duration
+	min = 0.0062759999999999995[s]
+	max = 0.008672000000000001[s]
+	median = 0.0069180000000000005[s]
+	mean = 0.0070902[s]
+	stddev = 0.0007260824746542229[s]
+	
+	Polling delay
+	min = 0.996[s]
+	max = 1.003[s]
+	median = 0.999[s]
+	mean = 0.9997[s]
+	stddev = 0.002491987158875375[s]
\ No newline at end of file
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/automatic_polling_performance_test/automatic_polling_performance_test.json b/tangostationcontrol/tangostationcontrol/test/devices/automatic_polling_performance_test/automatic_polling_performance_test.json
new file mode 100644
index 0000000000000000000000000000000000000000..0fa271289f3254df9c61158dd1a94b9884945b09
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/test/devices/automatic_polling_performance_test/automatic_polling_performance_test.json
@@ -0,0 +1,20 @@
+{
+    "servers":
+    {
+        "monitoring_performance_test":
+        {
+            "1":
+            {
+                "Monitoring_Performance_Device":
+                {
+                    "test/monitoring_performance/1":
+                    {
+                    },
+                    "test/monitoring_performance/2":
+                    {
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/automatic_polling_performance_test/monitoring_performance_test.py b/tangostationcontrol/tangostationcontrol/test/devices/automatic_polling_performance_test/monitoring_performance_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..e5360f26d97ab84564116b1eb98a6f5df58a972e
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/test/devices/automatic_polling_performance_test/monitoring_performance_test.py
@@ -0,0 +1,120 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR2.0 project
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+# TODO(Corne): Remove sys.path.append hack once packaging is in place!
+import os, sys
+currentdir = os.path.dirname(os.path.realpath(__file__))
+parentdir = os.path.dirname(currentdir)
+parentdir = os.path.dirname(parentdir)
+sys.path.append(parentdir)
+
+import time
+import numpy
+from tango import DevState, Util
+from tango.server import run, Device, attribute
+from numpy import random
+
+__all__ = ["Monitoring_Performance_Device", "main"]
+
+POLLING_THREADS = 100
+ARRAY_SIZE = 2000000
+
+class Monitoring_Performance_Device(Device):
+    def read_array(self):
+        print("{} {}".format(time.time(), self.get_name()))
+        return self._array
+
+    array1_r = attribute(
+        dtype = (numpy.double,),
+        max_dim_x = ARRAY_SIZE,
+        period = 1000,
+        rel_change = 0.1,
+        archive_period = 1000,
+        archive_rel_change = 0.1,
+        max_value = 1.0,
+        min_value = 0.0,
+        fget = read_array,
+    )
+
+    array2_r = attribute(
+        dtype = (numpy.double,),
+        max_dim_x = ARRAY_SIZE,
+        period = 1000,
+        rel_change = 0.1,
+        archive_period = 1000,
+        archive_rel_change = 0.1,
+        max_value = 1.0,
+        min_value = 0.0,
+        fget = read_array,
+    )
+
+    array3_r = attribute(
+        dtype = (numpy.double,),
+        max_dim_x = ARRAY_SIZE,
+        period = 1000,
+        rel_change = 0.1,
+        archive_period = 1000,
+        archive_rel_change = 0.1,
+        max_value = 1.0,
+        min_value = 0.0,
+        fget = read_array,
+    )
+
+    array4_r = attribute(
+        dtype = (numpy.double,),
+        max_dim_x = ARRAY_SIZE,
+        period = 1000,
+        rel_change = 0.1,
+        archive_period = 1000,
+        archive_rel_change = 0.1,
+        max_value = 1.0,
+        min_value = 0.0,
+        fget = read_array,
+    )
+
+    def init_device(self):
+        Device.init_device(self)
+
+        util = Util.instance()
+        print("Current polling thread pool size = {}".format(util.get_polling_threads_pool_size()))
+        util.set_polling_threads_pool_size(POLLING_THREADS)
+        print("New polling thread pool size = {}".format(util.get_polling_threads_pool_size()))
+        print("Array size = {}".format(ARRAY_SIZE))
+
+        self.set_state(DevState.OFF)
+
+        self._array = numpy.zeros(ARRAY_SIZE)
+
+        self.array1_r.set_data_ready_event(True)
+        self.set_change_event("array1_r", True, True)
+        self.set_archive_event("array1_r", True, True)
+
+        self.array2_r.set_data_ready_event(True)
+        self.set_change_event("array2_r", True, True)
+        self.set_archive_event("array2_r", True, True)
+
+        self.array3_r.set_data_ready_event(True)
+        self.set_change_event("array3_r", True, True)
+        self.set_archive_event("array3_r", True, True)
+
+        self.array4_r.set_data_ready_event(True)
+        self.set_change_event("array4_r", True, True)
+        self.set_archive_event("array4_r", True, True)
+
+        self.set_state(DevState.ON)
+
+    def delete_device(self):
+        self.set_state(DevState.OFF)
+
+def main(args = None, **kwargs):
+    return run((Monitoring_Performance_Device, ), args = args, **kwargs)
+
+if __name__ == '__main__':
+    main()
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver.py
index 771fab87a0431defec19ce755b07a5eaa22ca2d6..7562e88620c897bbc09c35fce92f87540d9bb04b 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/archiver.py
@@ -1,47 +1,59 @@
 #! /usr/bin/env python3
 
-#from logging import raiseExceptions
 import logging
 
 from tango import DeviceProxy, AttributeProxy
-from datetime import datetime, timedelta
 
 import time
 import json, os
-from sqlalchemy import create_engine, and_
-from sqlalchemy.orm import sessionmaker
-from sqlalchemy.orm.exc import NoResultFound
-from .archiver_base import *
 
 logger = logging.getLogger()
 
-def parse_attribute_name(attribute_name:str):
+def attribute_name_from_url(attribute_name:str):
     """
     For some operations Tango attribute must be transformed from the form 'tango://db:port/domain/family/name/attribute'
     to canonical 'domain/family/name/attribute'
     """
-    chunk_num = len(attribute_name.split('/'))
-    if (chunk_num==7 and attribute_name.split('/')[0]=='tango:'): 
+    if attribute_name.startswith('tango://'):
         return '/'.join(attribute_name.split('/')[3:])
-    else:
-        if (chunk_num!=4):
-            raise AttributeFormatException
-        else:
-            return attribute_name
 
-def parse_device_name(device_name:str, tango_host:str = 'databaseds:10000'):
+    if len(attribute_name.split('/')) != 4:
+        raise ValueError(f"Expected attribute of format 'domain/family/name/attribute', got {attribute_name}")
+
+    return attribute_name
+
+def device_name_url(device_name:str, tango_host:str = 'databaseds:10000'):
     """
     For some operations Tango devices must be transformed from the form 'domain/family/name'
     to 'tango://db:port/domain/family/name'
     """
-    chunk_num = len(device_name.split('/'))
-    if (chunk_num==3):
-        return 'tango://'+tango_host+'/'+device_name
-    elif (chunk_num==6 and device_name.split('/')[0]=='tango:'):
+    if device_name.startswith('tango://'):
         return device_name
+
+    if len(device_name.split('/')) != 3:
+        raise ValueError(f"Expected device name of format 'domain/family/name', got {device_name}")
+
+    return f"tango://{tango_host}/{device_name}"
+
+def split_tango_name(tango_fqname:str, tango_type:str):
+    """
+    Helper function to split device or attribute Tango full qualified names
+    into its components
+    """
+    if tango_type.lower() == 'device':
+        try:
+            domain, family, member = tango_fqname.split('/')
+            return domain, family, member
+        except ValueError as e:
+            raise ValueError(f"Could not parse device name {tango_fqname}. Please provide FQDN, e.g. STAT/Device/1") from e
+    elif tango_type.lower() == 'attribute':
+        try:
+            domain, family, member, name = tango_fqname.split('/')
+            return domain, family, member, name
+        except ValueError as e:
+            raise ValueError(f"Could not parse attribute name {tango_fqname}. Please provide FQDN, e.g. STAT/Device/1/Attribute") from e
     else:
-        raise Exception(f'{device_name} is a wrong device name')
-    
+        raise ValueError(f"Invalid value: {tango_type}. Please provide 'device' or 'attribute'.")
 
 class Archiver():
     """
@@ -52,7 +64,7 @@ class Archiver():
     dev_polling_time = None
     dev_archive_time = None
 
-    def __init__(self, selector_filename:str = None, cm_name: str = 'archiving/hdbpp/confmanager01', context: str = 'RUN'):
+    def __init__(self, selector_filename:str = None, cm_name: str = 'archiving/hdbppts/confmanager01', context: str = 'RUN'):
         self.cm_name = cm_name
         self.cm = DeviceProxy(cm_name)
         try: 
@@ -67,7 +79,7 @@ class Archiver():
         try:
             self.apply_selector()
         except Exception as e:
-            raise Exception("Error in selecting configuration! Archiving framework will not be updated!") from e
+            raise Exception("Error in selecting configuration. Archiving framework will not be updated.") from e
     
     def get_db_config(self, device_name:str):
         """
@@ -177,7 +189,7 @@ class Archiver():
             es_state = es.state() # ping the device server
             if 'FAULT' in str(es_state):
                 raise Exception(f"{es_name} is in FAULT state")
-            self.cm.ArchiverAdd(parse_device_name(es_name))     
+            self.cm.ArchiverAdd(device_name_url(es_name))
         except Exception as e:
             if 'already_present' in str(e):
                 logger.warning(f"Subscriber {es_name} already present in Configuration Manager")
@@ -191,7 +203,7 @@ class Archiver():
         The ConfigurationManager and EventSubscriber devices must be already up and running.
         The archiving-DBMS must be already properly configured.
         """
-        attribute_name = parse_attribute_name(attribute_name) 
+        attribute_name = attribute_name_from_url(attribute_name)
         try:
             self.cm.write_attribute('SetAttributeName', attribute_name)
             self.cm.write_attribute('SetArchiver', es_name or self.get_next_subscriber())
@@ -237,7 +249,7 @@ class Archiver():
         """
         Stops the data archiving of the attribute passed as input, and remove it from the subscriber's list. 
         """
-        attribute_name = parse_attribute_name(attribute_name)
+        attribute_name = attribute_name_from_url(attribute_name)
         try:
             self.cm.AttributeStop(attribute_name)
             self.cm.AttributeRemove(attribute_name)
@@ -278,7 +290,7 @@ class Archiver():
             exclude_list = [a.lower() for a in exclude]
             attrs_list = [a.lower() for a in list(attributes_nok) if a.lower() not in exclude_list]
             for a in attrs_list:
-                attr_fullname = parse_attribute_name(a)
+                attr_fullname = attribute_name_from_url(a)
                 self.remove_attribute_from_archiver(attr_fullname)
 
     def start_archiving_attribute(self, attribute_name:str):
@@ -286,7 +298,7 @@ class Archiver():
         Starts the archiving of the attribute passed as input.
         The attribute must be already present in the subscriber's list
         """
-        attribute_name = parse_attribute_name(attribute_name)
+        attribute_name = attribute_name_from_url(attribute_name)
         try:
             self.cm.AttributeStart(attribute_name)
         except Exception as e:
@@ -300,7 +312,7 @@ class Archiver():
         Stops the archiving of the attribute passed as input.
         The attribute must be already present in the subscriber's list
         """
-        attribute_name = parse_attribute_name(attribute_name)
+        attribute_name = attribute_name_from_url(attribute_name)
         try:
             self.cm.AttributeStop(attribute_name)
         except Exception as e:
@@ -313,14 +325,14 @@ class Archiver():
         """
         Check if an attribute is in the archiving list
         """
-        attribute_name = parse_attribute_name(attribute_name)
+        attribute_name = attribute_name_from_url(attribute_name)
         attributes = self.cm.AttributeSearch(attribute_name.lower())
         if len(attributes)>1:
             # Handle case same attribute_name r/rw 
             if len(attributes)==2 and (attributes[0].endswith(attributes[1]+'w') or attributes[1].endswith(attributes[0]+'w')):
                 return True
             else:
-                raise Exception(f"Multiple Attributes Matched! {attributes}")
+                raise Exception(f"Multiple Attributes Matched: {attributes}")
         elif len(attributes)==1:
             return True
         else:
@@ -371,7 +383,7 @@ class Archiver():
         """
         Return the error related to the attribute
         """
-        attribute_name = parse_attribute_name(attribute_name)
+        attribute_name = attribute_name_from_url(attribute_name)
         errs_dict = self.get_subscriber_errors()
         for e in errs_dict:
             if attribute_name in e:
@@ -396,7 +408,7 @@ class Archiver():
         """
         Given an attribute name, return the event subscriber associated with it
         """
-        attribute_name = parse_attribute_name(attribute_name)
+        attribute_name = attribute_name_from_url(attribute_name)
         # If the ConfManager manages more than one subscriber
         if len(self.get_subscribers())>1:
             for es_name in self.get_subscribers():
@@ -411,7 +423,7 @@ class Archiver():
         """
         Return the attribute archiving frequency in events/minute 
         """
-        attribute_name = parse_attribute_name(attribute_name)
+        attribute_name = attribute_name_from_url(attribute_name)
         if self.is_attribute_archived(attribute_name):
             es = DeviceProxy(self.get_attribute_subscriber(attribute_name))
             freq_dict = dict((a,r) for a,r in zip(es.AttributeList,es.AttributeRecordFreqList))
@@ -425,7 +437,7 @@ class Archiver():
         """
         Return the attribute failure archiving frequency in events/minute 
         """
-        attribute_name = parse_attribute_name(attribute_name)
+        attribute_name = attribute_name_from_url(attribute_name)
         if self.is_attribute_archived(attribute_name):
             es = DeviceProxy(self.get_attribute_subscriber(attribute_name))
             fail_dict = dict((a,r) for a,r in zip(es.AttributeList,es.AttributeFailureFreqList))
@@ -461,139 +473,5 @@ class Selector():
             data = json.load(f)
             f.close()
         except FileNotFoundError as e:
-            raise Exception("JSON configuration file not found!") from e
+            raise
         return data
-
-class Retriever():
-    """
-    The Retriever class implements retrieve operations on a given DBMS
-    """
-    def __init__(self, cm_name: str = 'archiving/hdbpp/confmanager01'):
-        self.cm_name = cm_name
-        self.session = self.connect_to_archiving_db()
-    
-    def get_db_credentials(self):
-        """
-        Retrieves the DB credentials from the Tango properties of Configuration Manager
-        """
-        cm = DeviceProxy(self.cm_name)
-        config_list = cm.get_property('LibConfiguration')['LibConfiguration'] # dictionary {'LibConfiguration': list of strings}
-        host = str([s for s in config_list if "host" in s][0].split('=')[1])
-        dbname = str([s for s in config_list if "dbname" in s][0].split('=')[1])
-        port = str([s for s in config_list if "port" in s][0].split('=')[1])
-        user = str([s for s in config_list if "user" in s][0].split('=')[1])
-        pw = str([s for s in config_list if "password" in s][0].split('=')[1])
-        return host,dbname,port,user,pw
-
-    def connect_to_archiving_db(self):
-        """
-        Returns a session to a MySQL DBMS using default credentials.
-        """
-        host,dbname,port,user,pw = self.get_db_credentials()
-        engine = create_engine('mysql+pymysql://'+user+':'+pw+'@'+host+':'+port+'/'+dbname)
-        Session = sessionmaker(bind=engine)
-        return Session()
-
-    def get_all_archived_attributes(self):
-        """
-        Returns a list of the archived attributes in the DB.
-        """
-        attrs = self.session.query(Attribute).order_by(Attribute.att_conf_id).all()
-        # Returns the representation as set in __repr__ method of the mapper class
-        return attrs
-
-    def get_archived_attributes_by_device(self,device_fqname: str):
-        """
-        Takes as input the fully-qualified name of a device and returns a list of its archived attributes
-        """
-        try:
-            [domain, family, member] = device_fqname.split('/')
-        except:
-            raise AttributeFormatException(f"Could not parse device name {device_fqname}. Please provide FQDN, e.g. STAT/Device/1")
-        attrs = self.session.query(Attribute).filter(and_(Attribute.domain == domain, Attribute.family == family, \
-                                Attribute.member == member)).all()
-        # Returns the representation as set in __repr__ method of the mapper class
-        return attrs
-
-    def get_attribute_id(self,attribute_fqname: str):
-        """
-        Takes as input the fully-qualified name of an attribute and returns its id.
-        """
-        try:
-            [domain, family, member, name] = attribute_fqname.split('/')
-        except:
-            raise AttributeFormatException(f"Could not parse attribute name {attribute_fqname}. Please provide FQDN, e.g. STAT/Device/1/Attribute")
-        try:
-            result = self.session.query(Attribute.att_conf_id).filter(and_(Attribute.domain == domain, Attribute.family == family, \
-                                    Attribute.member == member, Attribute.name == name)).one()
-            return result[0]
-        except TypeError as e:
-            raise Exception("Attribute not found!") from e
-        except NoResultFound as e:
-            raise Exception(f"No records of attribute {attribute_fqname} found in DB") from e
-
-    def get_attribute_datatype(self,attribute_fqname: str):
-        """
-        Takes as input the fully-qualified name of an attribute and returns its Data-Type.
-        Data Type name indicates the type (e.g. string, int, ...) and the read/write property. The name is used
-        as DB table name suffix in which values are stored.
-        """
-        try:
-            [domain, family, member, name] = attribute_fqname.split('/')
-        except:
-            raise AttributeFormatException(f"Could not parse attribute name {attribute_fqname}. Please provide FQDN, e.g. STAT/Device/1/Attribute")
-        try:
-            result = self.session.query(DataType.data_type).join(Attribute,Attribute.att_conf_data_type_id==DataType.att_conf_data_type_id).\
-                        filter(and_(Attribute.domain == domain, Attribute.family == family, Attribute.member == member, Attribute.name == name)).one()
-            return result[0]
-        except TypeError as e:
-            raise Exception("Attribute not found!") from e
-        except NoResultFound as e:
-            raise Exception(f"No records of attribute {attribute_fqname} found in DB") from e
-
-    def get_attribute_value_by_hours(self,attribute_fqname: str, hours: float = 1.0):
-        """
-        Takes as input the attribute fully-qualified name and the number of past hours since the actual time 
-        (e.g. hours=1 retrieves values in the last hour, hours=8.5 retrieves values in the last eight hours and half).
-        Returns a list of timestamps and a list of values
-        """
-        attr_id = self.get_attribute_id(attribute_fqname)
-        attr_datatype = self.get_attribute_datatype(attribute_fqname)
-        attr_table_name = 'att_'+str(attr_datatype)
-        # Retrieves the class that maps the DB table given the tablename
-        base_class = get_class_by_tablename(attr_table_name)
-        # Retrieves the timestamp 
-        time_now = datetime.now()
-        time_delta = time_now - timedelta(hours=hours)
-        # Converts the timestamps in the right format for the query
-        time_now_db = str(time_now.strftime("%Y-%m-%d %X"))
-        time_delta_db = str(time_delta.strftime("%Y-%m-%d %X"))
-        try:
-            result = self.session.query(base_class).\
-                    join(Attribute,Attribute.att_conf_id==base_class.att_conf_id).\
-                    filter(and_(Attribute.att_conf_id == attr_id,base_class.data_time >= time_delta_db, \
-                            base_class.data_time <= time_now_db)).order_by(base_class.data_time).all()
-        except AttributeError as e:
-            raise Exception(f"Empty result! Attribute {attribute_fqname} not found") from e
-        return result
-
-    def get_attribute_value_by_interval(self,attribute_fqname: str, start_time: datetime, stop_time: datetime):
-        '''
-        Takes as input the attribute name and a certain starting and ending point-time. 
-        The datetime format is pretty flexible (e.g. "YYYY-MM-dd hh:mm:ss").
-        Returns a list of timestamps and a list of values
-        '''
-        attr_id = self.get_attribute_id(attribute_fqname)
-        attr_datatype = self.get_attribute_datatype(attribute_fqname)
-        attr_table_name = 'att_'+str(attr_datatype)
-        # Retrieves the class that maps the DB table given the tablename
-        base_class = get_class_by_tablename(attr_table_name)
-        try:
-            result = self.session.query(base_class).\
-                    join(Attribute,Attribute.att_conf_id==base_class.att_conf_id).\
-                        filter(and_(Attribute.att_conf_id == attr_id,base_class.data_time >= str(start_time), \
-                                base_class.data_time <= str(stop_time))).order_by(base_class.data_time).all()
-        except AttributeError as e:
-            raise Exception(f"Empty result! Attribute {attribute_fqname} not found") from e
-        return result
-        
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_base.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_mysql.py
similarity index 98%
rename from tangostationcontrol/tangostationcontrol/toolkit/archiver_base.py
rename to tangostationcontrol/tangostationcontrol/toolkit/archiver_base_mysql.py
index 4440957bb8546a9e42638a0a6b441e43119fa601..4224b2349e7554d51a0a918fc22f70a01c022cdf 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver_base.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_mysql.py
@@ -7,7 +7,7 @@ from sqlalchemy.dialects.mysql import DOUBLE,TIMESTAMP,BLOB, FLOAT, BIGINT
 from sqlalchemy.sql.expression import table
 from typing import List
 from itertools import groupby
-import numpy as np
+import numpy
 
 #Declarative system used to define classes mapped to relational DB tables
 Base = declarative_base()
@@ -28,10 +28,9 @@ class Attribute(Base):
     family = Column(String)
     member = Column(String)
     name = Column(String)
-    
-    
+
     def __repr__(self):
-        return f"<Attribute(fullname='{self.att_name}',data_type ='{self.att_conf_data_type_id}',ttl='{self.att_ttl}',facility ='{elf.facility}',domain ='{self.domain}',family ='{self.family}',member ='{self.member}',name ='{self.name}')>"
+        return f"<Attribute(fullname='{self.att_name}',data_type ='{self.att_conf_data_type_id}',ttl='{self.att_ttl}',facility ='{self.facility}',domain ='{self.domain}',family ='{self.family}',member ='{self.member}',name ='{self.name}')>"
     
 class DataType(Base):
     """
@@ -906,14 +905,14 @@ def build_array_from_record(rows: List[Array], dim_x: int):
     """
     Converts Array database items in Python lists
     """
-    matrix = np.array([])
+    matrix = numpy.array([])
     for i in range(0,dim_x):
-        x = np.array([item for item in rows if item.idx==i]) #group records by array index
+        x = numpy.array([item for item in rows if item.idx==i]) #group records by array index
         if i==0:
-            matrix = np.append(matrix,x)    #append first row
+            matrix = numpy.append(matrix,x)    #append first row
         else:
-            matrix = np.vstack([matrix,x])  #stack vertically
-    result = np.transpose(matrix)   #transpose -> each row is a distinct array of value
+            matrix = numpy.vstack([matrix,x])  #stack vertically
+    result = numpy.transpose(matrix)   #transpose -> each row is a distinct array of value
     list_result = result.tolist()
     return list_result
     
@@ -921,8 +920,8 @@ def get_values_from_record(data_matrix: List[Array]):
     """
     Returns a matrix of values from a matrix of Array records
     """
-    array_matrix = np.matrix(data_matrix)
-    value_matrix = np.empty(array_matrix.shape)
+    array_matrix = numpy.matrix(data_matrix)
+    value_matrix = numpy.empty(array_matrix.shape)
     for index in range(array_matrix.size):    # for each object element
         value_matrix.itemset(index,array_matrix.item(index).value_r) # extract the value from object and put in the matrix
     return value_matrix
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_ts.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_ts.py
new file mode 100644
index 0000000000000000000000000000000000000000..0480777234333dc7e8e36e2f0cd6db519a06453c
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_ts.py
@@ -0,0 +1,473 @@
+#! /usr/bin/env python3
+
+from sqlalchemy.dialects.postgresql import ARRAY,TIMESTAMP,FLOAT, JSON
+from sqlalchemy.dialects.postgresql.base import BYTEA
+from sqlalchemy.dialects.postgresql.ranges import INT4RANGE, INT8RANGE
+from sqlalchemy.sql.sqltypes import INTEGER, TEXT, Boolean
+from sqlalchemy.orm import declarative_base
+from sqlalchemy import Column, Integer, String
+from sqlalchemy.sql.expression import table
+from typing import List
+from itertools import groupby
+import numpy
+
+#Declarative system used to define classes mapped to relational DB tables
+Base = declarative_base()
+
+class Attribute(Base):
+    """
+    Class that represents a Tango Attribute mapped to table 'att_conf'
+    """
+    __tablename__ = 'att_conf'
+    __table_args__ = {'extend_existing': True}
+    
+    att_conf_id = Column(Integer, primary_key=True)
+    att_name = Column(String)
+    att_conf_type_id = Column(Integer)
+    att_conf_format_id = Column(Integer)
+    table_name = Column(String)
+    cs_name = Column(String)
+    domain = Column(String)
+    family = Column(String)
+    member = Column(String)
+    name = Column(String)
+    ttl = Column(Integer)
+     
+    def __repr__(self):
+        return f"<Attribute(fullname='{self.att_name}',data_type ='{self.att_conf_type_id}',format='{self.att_conf_format_id}',table_name='{self.table_name}',cs_name ='{self.cs_name}',domain ='{self.domain}',family ='{self.family}',member ='{self.member}',name ='{self.name}'),ttl='{self.ttl}'>"
+    
+class DataType(Base):
+    """
+    Class that represents a Tango Data Type mapped to table 'att_conf_data_type'
+    """
+    __tablename__ = 'att_conf_type'
+    __table_args__ = {'extend_existing': True}
+    
+    att_conf_type_id = Column(Integer, primary_key=True)
+    type = Column(String)
+    
+    def __repr__(self):
+        return f"<DataType(type='{self.type}')>"
+
+class Format(Base):
+    """
+    Class that represents a Tango Format mapped to table 'att_conf_format'
+    """
+    __tablename__ = 'att_conf_format'
+    __table_args__ = {'extend_existing': True}
+
+    att_conf_format_id = Column(Integer, primary_key=True)
+    format = Column(String)
+    format_num = Column(Integer)
+
+    def __repr__(self):
+        return f"<Format(format='{self.format}', format_num='{self.format_num}')>"
+
+class Scalar(Base):
+    """
+    Abstract class that represents Super-class of Scalar mapper classes
+    """
+    # In the concrete inheritance use case, it is common that the base class is not represented 
+    # within the database, only the subclasses. In other words, the base class is abstract.
+    __abstract__ = True 
+
+    # Primary key is not defined for tables which store values, but SQLAlchemy requires a mandatory 
+    # primary key definition. Anyway, this definition is on Python-side and does not compromise
+    # DBMS architecture
+    att_conf_id = Column(Integer, primary_key=True)
+    data_time = Column(TIMESTAMP, primary_key=True)
+    quality = Column(Integer)
+    att_error_desc_id = Column(Integer)
+    details = Column(JSON)
+
+class Scalar_Boolean(Scalar):
+    """
+    Class that represents a Tango Boolean mapped to table 'att_scalar_devboolean'
+    """
+    __tablename__ = 'att_scalar_devboolean'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(Boolean)
+    value_w = Column(Boolean)
+
+    def __repr__(self):
+        return f"<Scalar_Boolean(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Scalar_Double(Scalar):
+    """
+    Class that represents a Tango Double mapped to table 'att_scalar_devdouble'
+    """
+    __tablename__ = 'att_scalar_devdouble'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(FLOAT)
+    value_w = Column(FLOAT)
+
+    def __repr__(self):
+        return f"<Scalar_Double(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Scalar_Encoded(Scalar):
+    """
+    Class that represents a Tango Encoded mapped to table 'att_scalar_devencoded'
+    """
+    __tablename__ = 'att_scalar_devencoded'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(BYTEA)
+    value_w = Column(BYTEA)
+
+    def __repr__(self):
+        return f"<Scalar_Encoded(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Scalar_Enum(Scalar):
+    """
+    Class that represents a Tango Enum mapped to table 'att_scalar_devenum'
+    """
+    __tablename__ = 'att_scalar_devenum'
+    __table_args__ = {'extend_existing': True}
+    value_r_label = Column(TEXT)
+    value_r = Column(INTEGER)
+    value_w_label = Column(TEXT)
+    value_w = Column(INTEGER)
+
+    def __repr__(self):
+        return f"<Scalar_Enum(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r_label='{self.value_r_label}',value_r='{self.value_r}',value_w_label='{self.value_w_label}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Scalar_Float(Scalar):
+    """
+    Class that represents a Tango Float mapped to table 'att_scalar_devfloat'
+    """
+    __tablename__ = 'att_scalar_devfloat'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(FLOAT)
+    value_w = Column(FLOAT)
+
+    def __repr__(self):
+        return f"<Scalar_Float(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Scalar_Long(Scalar):
+    """
+    Class that represents a Tango Long mapped to table 'att_scalar_devlong'
+    """
+    __tablename__ = 'att_scalar_devlong'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(INT4RANGE)
+    value_w = Column(INT4RANGE)
+
+    def __repr__(self):
+        return f"<Scalar_Long(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Scalar_Long64(Scalar):
+    """
+    Class that represents a Tango Long64 mapped to table 'att_scalar_devlong64'
+    """
+    __tablename__ = 'att_scalar_devlong64'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(INT8RANGE)
+    value_w = Column(INT8RANGE)
+
+    def __repr__(self):
+        return f"<Scalar_Long64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Scalar_Short(Scalar):
+    """
+    Class that represents a Tango Short mapped to table 'att_scalar_devshort'
+    """
+    __tablename__ = 'att_scalar_devshort'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(INTEGER)
+    value_w = Column(INTEGER)
+
+    def __repr__(self):
+        return f"<Scalar_Short(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Scalar_State(Scalar):
+    """
+    Class that represents a Tango State mapped to table 'att_scalar_devstate'
+    """
+    __tablename__ = 'att_scalar_devstate'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(INTEGER)
+    value_w = Column(INTEGER)
+
+    def __repr__(self):
+        return f"<Scalar_State(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Scalar_String(Scalar):
+    """
+    Class that represents a Tango String mapped to table 'att_scalar_devstring'
+    """
+    __tablename__ = 'att_scalar_devstring'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(TEXT)
+    value_w = Column(TEXT)
+
+    def __repr__(self):
+        return f"<Scalar_String(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Scalar_UChar(Scalar):
+    """
+    Class that represents a Tango UChar mapped to table 'att_scalar_devuchar'
+    """
+    __tablename__ = 'att_scalar_devuchar'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(INTEGER)
+    value_w = Column(INTEGER)
+
+    def __repr__(self):
+        return f"<Scalar_UChar(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Scalar_ULong(Scalar):
+    """
+    Class that represents a Tango ULong mapped to table 'att_scalar_devulong'
+    """
+    __tablename__ = 'att_scalar_devulong'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(INTEGER)
+    value_w = Column(INTEGER)
+
+    def __repr__(self):
+        return f"<Scalar_ULong(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Scalar_ULong64(Scalar):
+    """
+    Class that represents a Tango ULong64 mapped to table 'att_scalar_devulong64'
+    """
+    __tablename__ = 'att_scalar_devulong64'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(INTEGER)
+    value_w = Column(INTEGER)
+
+    def __repr__(self):
+        return f"<Scalar_ULong64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Scalar_UShort(Scalar):
+    """
+    Class that represents a Tango UShort mapped to table 'att_scalar_devushort'
+    """
+    __tablename__ = 'att_scalar_devushort'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(INTEGER)
+    value_w = Column(INTEGER)
+
+    def __repr__(self):
+        return f"<Scalar_UShort(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
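+# Usage sketch (illustrative, assuming a SQLAlchemy session bound to the archiver
+# DB and a known att_conf_id): the latest readings of a float attribute can be
+# fetched directly through its mapper class, e.g.:
+#
+#   records = (session.query(Scalar_Float)
+#                     .filter(Scalar_Float.att_conf_id == some_att_conf_id)
+#                     .order_by(Scalar_Float.data_time.desc())
+#                     .limit(10).all())
+#   values = [r.value_r for r in records]
+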
+class Array(Base):
+    """
+    Abstract super-class of the Array mapper classes
+    """
+    __abstract__ = True
+    # The tables that store values do not define a primary key, but SQLAlchemy requires
+    # one. This primary key exists only on the Python side and does not affect the
+    # DBMS schema.
+    att_conf_id = Column(Integer, primary_key=True)
+    data_time = Column(TIMESTAMP, primary_key=True)
+    quality = Column(Integer)
+    att_error_desc_id = Column(Integer)
+    details = Column(JSON)
+
+class Array_Boolean(Array):
+    """
+    Class that represents a Tango Boolean Array mapped to table 'att_array_devboolean'
+    """
+    __tablename__ = 'att_array_devboolean'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(ARRAY(Boolean))
+    value_w = Column(ARRAY(Boolean))
+
+    def __repr__(self):
+        return f"<Array_Boolean(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Array_Double(Array):
+    """
+    Class that represents a Tango Double Array mapped to table 'att_array_devdouble'
+    """
+    __tablename__ = 'att_array_devdouble'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(ARRAY(FLOAT))
+    value_w = Column(ARRAY(FLOAT))
+
+    def __repr__(self):
+        return f"<Array_Double(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Array_Encoded(Array):
+    """
+    Class that represents a Tango Encoded Array mapped to table 'att_array_devencoded'
+    """
+    __tablename__ = 'att_array_devencoded'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(ARRAY(BYTEA))
+    value_w = Column(ARRAY(BYTEA))
+
+    def __repr__(self):
+        return f"<Array_Encoded(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Array_Enum(Array):
+    """
+    Class that represents a Tango Enum Array mapped to table 'att_array_devenum'
+    """
+    __tablename__ = 'att_array_devenum'
+    __table_args__ = {'extend_existing': True}
+    value_r_label = Column(ARRAY(TEXT))
+    value_r = Column(ARRAY(INTEGER))
+    value_w_label = Column(ARRAY(TEXT))
+    value_w = Column(ARRAY(INTEGER))
+
+    def __repr__(self):
+        return f"<Array_Enum(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r_label='{self.value_r_label}',value_r='{self.value_r}',value_w_label='{self.value_w_label}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Array_Float(Array):
+    """
+    Class that represents a Tango Float Array mapped to table 'att_array_devfloat'
+    """
+    __tablename__ = 'att_array_devfloat'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(ARRAY(FLOAT))
+    value_w = Column(ARRAY(FLOAT))
+
+    def __repr__(self):
+        return f"<Array_Float(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Array_Long(Array):
+    """
+    Class that represents a Tango Long Array mapped to table 'att_array_devlong'
+    """
+    __tablename__ = 'att_array_devlong'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(ARRAY(INTEGER))
+    value_w = Column(ARRAY(INTEGER))
+
+    def __repr__(self):
+        return f"<Array_Long(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Array_Long64(Array):
+    """
+    Class that represents a Tango Long64 Array mapped to table 'att_array_devlong64'
+    """
+    __tablename__ = 'att_array_devlong64'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(ARRAY(INTEGER))
+    value_w = Column(ARRAY(INTEGER))
+
+    def __repr__(self):
+        return f"<Array_Long64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Array_Short(Array):
+    """
+    Class that represents a Tango Short Array mapped to table 'att_array_devshort'
+    """
+    __tablename__ = 'att_array_devshort'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(ARRAY(INTEGER))
+    value_w = Column(ARRAY(INTEGER))
+
+    def __repr__(self):
+        return f"<Array_Short(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Array_State(Array):
+    """
+    Class that represents a Tango State Array mapped to table 'att_array_devstate'
+    """
+    __tablename__ = 'att_array_devstate'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(ARRAY(INTEGER))   # consistent with Scalar_State
+    value_w = Column(ARRAY(INTEGER))
+
+    def __repr__(self):
+        return f"<Array_State(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Array_String(Array):
+    """
+    Class that represents a Tango String Array mapped to table 'att_array_devstring'
+    """
+    __tablename__ = 'att_array_devstring'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(ARRAY(TEXT))
+    value_w = Column(ARRAY(TEXT))
+
+    def __repr__(self):
+        return f"<Array_String(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Array_UChar(Array):
+    """
+    Class that represents a Tango UChar Array mapped to table 'att_array_devuchar'
+    """
+    __tablename__ = 'att_array_devuchar'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(ARRAY(INTEGER))
+    value_w = Column(ARRAY(INTEGER))
+
+    def __repr__(self):
+        return f"<Array_UChar(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Array_ULong(Array):
+    """
+    Class that represents a Tango ULong Array mapped to table 'att_array_devulong'
+    """
+    __tablename__ = 'att_array_devulong'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(ARRAY(INTEGER))
+    value_w = Column(ARRAY(INTEGER))
+
+    def __repr__(self):
+        return f"<Array_ULong(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Array_ULong64(Array):
+    """
+    Class that represents a Tango ULong64 Array mapped to table 'att_array_devulong64'
+    """
+    __tablename__ = 'att_array_devulong64'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(ARRAY(INTEGER))
+    value_w = Column(ARRAY(INTEGER))
+
+    def __repr__(self):
+        return f"<Array_ULong64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+class Array_UShort(Array):
+    """
+    Class that represents a Tango UShort Array mapped to table 'att_array_devushort'
+    """
+    __tablename__ = 'att_array_devushort'
+    __table_args__ = {'extend_existing': True}
+    value_r = Column(ARRAY(INTEGER))
+    value_w = Column(ARRAY(INTEGER))
+
+    def __repr__(self):
+        return f"<Array_UShort(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
+
+def get_class_by_tablename(tablename: str):
+    """
+    Returns class reference mapped to a table.    
+    """
+    for mapper in Base.registry.mappers:
+        c = mapper.class_
+        classname = c.__name__
+        if not classname.startswith('_'):
+            if hasattr(c, '__tablename__') and c.__tablename__ == tablename:
+                return c
+    return None
+
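+# For example (illustrative), the table name of a scalar float attribute maps back
+# to its mapper class:
+#
+#   cls = get_class_by_tablename('att_scalar_devfloat')   # -> Scalar_Float
+#   rows = session.query(cls).limit(5).all()
+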
+def build_array_from_record(rows: List[Array], dim_x: int):
+    """
+    Converts Array database items in Python lists
+    """
+    matrix = numpy.array([])
+    for i in range(0,dim_x):
+        x = numpy.array([item for item in rows if item.idx==i]) #group records by array index
+        if i==0:
+            matrix = numpy.append(matrix,x)    #append first row
+        else:
+            matrix = numpy.vstack([matrix,x])  #stack vertically
+    result = numpy.transpose(matrix)   #transpose -> each row is a distinct array of value
+    list_result = result.tolist()
+    return list_result
+    
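+# Sketch of the record layout this helper assumes (as in the MySQL archiver schema,
+# where every record carries an element index 'idx'): for dim_x=2 and timestamps
+# t0,t1, the rows (t0,idx=0),(t1,idx=0),(t0,idx=1),(t1,idx=1) are regrouped into
+# [[r(t0,0), r(t0,1)], [r(t1,0), r(t1,1)]], i.e. one row per timestamp holding the
+# full array.
+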
+def get_values_from_record(data_matrix: List[Array]):
+    """
+    Returns a matrix of values from a matrix of Array records
+    """
+    array_matrix = numpy.matrix(data_matrix)
+    value_matrix = numpy.empty(array_matrix.shape)
+    for index in range(array_matrix.size):    # for each object element
+        value_matrix.itemset(index,array_matrix.item(index).value_r) # extract the value from object and put in the matrix
+    return value_matrix
+
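+# Usage sketch combining the two helpers above (assumes records that expose an
+# 'idx' element index and a 'dim_x_r' array length, and an open session):
+#
+#   rows = session.query(Array_Float).filter(Array_Float.att_conf_id == some_id).all()
+#   grouped = build_array_from_record(rows, rows[0].dim_x_r)
+#   values = get_values_from_record(grouped)   # numpy matrix of the value_r fields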
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/__init__.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/lofar2.json b/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/lofar2.json
index 90533e18a0fd28b26286cff70be8d5f190b14a6e..6c30be0698175ef914841fa165a4f2a97543d12a 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/lofar2.json
+++ b/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/lofar2.json
@@ -11,7 +11,7 @@
         },
         "STAT/SDP/1": {
             "environment": "development",
-            "include": [],
+            "include": ["FPGA_temp_R","TR_fpga_mask_R"],
             "exclude": ["FPGA_scrap_R","FPGA_scrap_RW"]      
         },
         "STAT/SST/1": {
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/attribute_polling_stats.py b/tangostationcontrol/tangostationcontrol/toolkit/attribute_polling_stats.py
new file mode 100644
index 0000000000000000000000000000000000000000..fcfb502320d4de084f219244289f6ddfa064e996
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/attribute_polling_stats.py
@@ -0,0 +1,39 @@
+import numpy
+import tango
+from time import sleep
+
+def attribute_polling_stats(dp: tango.DeviceProxy = None, iterations: int = 10, polling_time: float = 1.0, quiet: bool = False):
+    """
+    Samples a device server's polling status a number of times and prints statistics
+    (min/max/median/mean/stddev) of the polling duration and the polling delay.
+    """
+    if dp is None:
+        print('A DeviceProxy object is needed!')
+        return
+    print('Will sample the device server\'s polling time {} times with a pause of {}s between each sampling.'.format(iterations, polling_time))
+    polling_durations = []
+    polling_delays = []
+    iterations_left = iterations
+    while iterations_left > 0:
+        iterations_left -= 1
+        # Parse the polling duration and delay (reported in ms) from the polling status and convert to seconds
+        status_lines = dp.polling_status()[0].split('\n')
+        polling_duration = numpy.double(status_lines[3].split('=')[-1].strip()) / 1e3
+        polling_delay = numpy.double(status_lines[5].split('=')[-1].split(',')[0].strip()) / 1e3
+        polling_durations.append(polling_duration)
+        polling_delays.append(polling_delay)
+        if not quiet:
+            print('Iteration #{}, {} iterations left, polling duration = {}s, polling delay = {}s.'.format(iterations - iterations_left, iterations_left, polling_duration, polling_delay))
+        sleep(polling_time)
+    durations = numpy.array(polling_durations)
+    delays = numpy.array(polling_delays)
+    def compute_and_print(result):
+        minimum = numpy.min(result)
+        maximum = numpy.max(result)
+        median = numpy.median(result)
+        mean = numpy.mean(result)
+        std = numpy.std(result)
+        print("\tmin = {}[s]\n\tmax = {}[s]\n\tmedian = {}[s]\n\tmean = {}[s]\n\tstddev = {}[s]".format(minimum, maximum, median, mean, std))
+    print("\n\titerations = {}\n\n\tPolling duration".format(iterations))
+    compute_and_print(durations)
+    print("\n\tPolling delay")
+    compute_and_print(delays)
+    return (durations, delays)
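+
+# Usage sketch (device name is illustrative):
+#
+#   import tango
+#   dp = tango.DeviceProxy('STAT/SDP/1')
+#   durations, delays = attribute_polling_stats(dp, iterations=5, polling_time=0.5)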
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/retriever.py b/tangostationcontrol/tangostationcontrol/toolkit/retriever.py
new file mode 100644
index 0000000000000000000000000000000000000000..b84802cde128ca2f256f5f18ffb78dec3b9ea29f
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/retriever.py
@@ -0,0 +1,314 @@
+#! /usr/bin/env python3
+
+from tango import DeviceProxy, AttributeProxy
+from tangostationcontrol.toolkit.archiver import split_tango_name
+
+from abc import ABC, abstractmethod
+from datetime import datetime, timedelta
+from sqlalchemy import create_engine, and_
+from sqlalchemy.orm import sessionmaker
+from sqlalchemy.orm.exc import NoResultFound
+import importlib
+import numpy
+
+class Retriever(ABC):
+    """
+    The Retriever abstract class implements retrieve operations on a given DBMS
+    """
+    
+    def get_db_credentials(self):
+        """
+        Retrieves the DB credentials from the Tango properties of Configuration Manager
+        """
+        cm = DeviceProxy(self.cm_name)
+        config_list = list(cm.get_property('LibConfiguration')['LibConfiguration']) # dictionary {'LibConfiguration': list of strings}
+        if 'connect_string=' in config_list[0]: config_list.pop(0)  # possibly remove connect string because it causes errors
+        host = str([s for s in config_list if "host" in s][0].split('=')[1])
+        dbname = str([s for s in config_list if "dbname" in s][0].split('=')[1])
+        port = str([s for s in config_list if "port" in s][0].split('=')[1])
+        user = str([s for s in config_list if "user" in s][0].split('=')[1])
+        pw = str([s for s in config_list if "password" in s][0].split('=')[1])
+        return host,dbname,port,user,pw
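+
+    # The LibConfiguration property is a list of 'key=value' strings; a sketch of
+    # the content the parsing above expects (values other than the host are illustrative):
+    #   ['connect_string=...', 'host=archiver-timescale', 'dbname=...',
+    #    'port=...', 'user=...', 'password=...']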
+
+    def create_session(self,libname:str,user:str,pw:str,host:str,port:str,dbname:str):
+        """
+        Returns a session to a DBMS using default credentials.
+        """
+        connection_string = f"{libname}://{user}:{pw}@{host}:{port}/{dbname}"
+        engine = create_engine(connection_string)
+        Session = sessionmaker(bind=engine)
+        return Session
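+
+    # For example (illustrative), the TimescaleDB settings used further below produce
+    # a connection string of the form:
+    #   postgresql+psycopg2://user:password@archiver-timescale:port/dbname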
+    
+    @abstractmethod
+    def set_archiver_base(self):
+        return
+   
+    @abstractmethod
+    def connect_to_archiving_db(self):
+        return
+
+    def get_all_archived_attributes(self):
+        """
+        Returns a list of the archived attributes in the DB.
+        """
+        attrs = self.session.query(self.ab.Attribute).order_by(self.ab.Attribute.att_conf_id).all()
+        # Returns the representation as defined in the __repr__ method of the mapper class
+        return attrs
+
+    def get_archived_attributes_by_device(self,device_fqname: str):
+        """
+        Takes as input the fully-qualified name of a device and returns a list of its archived attributes
+        """
+        domain, family, member = split_tango_name(device_fqname,"device")
+        attrs = self.session.query(self.ab.Attribute).filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, \
+                                self.ab.Attribute.member == member)).all()
+        # Returns the representation as defined in the __repr__ method of the mapper class
+        return attrs
+
+    def get_attribute_id(self,attribute_fqname: str):
+        """
+        Takes as input the fully-qualified name of an attribute and returns its id.
+        """
+        domain, family, member, name = split_tango_name(attribute_fqname,"attribute")
+        try:
+            result = self.session.query(self.ab.Attribute.att_conf_id).filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, \
+                                    self.ab.Attribute.member == member, self.ab.Attribute.name == name)).one()
+            return result[0]
+        except TypeError as e:
+            raise Exception(f"Attribute {attribute_fqname} not found!") from e
+        except NoResultFound as e:
+            raise Exception(f"No records of attribute {attribute_fqname} found in DB") from e
+    
+    @abstractmethod
+    def get_attribute_datatype(self,attribute_fqname: str):
+        return
+    
+    def get_attribute_value_by_hours(self, attribute_fqname: str, hours: float, tablename:str):
+        """
+        Takes as input the attribute fully-qualified name and the number of past hours since the actual time 
+        (e.g. hours=1 retrieves values in the last hour, hours=8.5 retrieves values in the last eight hours and half).
+        Returns a list of timestamps and a list of values
+        """
+        attr_id = self.get_attribute_id(attribute_fqname)
+        # Retrieves the class that maps the DB table given the tablename
+        base_class = self.ab.get_class_by_tablename(tablename)    
+        # Computes the time window boundaries
+        time_now = datetime.now()
+        time_delta = time_now - timedelta(hours=hours)
+        # Converts the timestamps into the format expected by the query
+        time_now_db = str(time_now.strftime("%Y-%m-%d %X"))
+        time_delta_db = str(time_delta.strftime("%Y-%m-%d %X"))
+        try:
+            result = self.session.query(base_class).\
+                    join(self.ab.Attribute,self.ab.Attribute.att_conf_id==base_class.att_conf_id).\
+                    filter(and_(self.ab.Attribute.att_conf_id == attr_id,base_class.data_time >= time_delta_db, \
+                            base_class.data_time <= time_now_db)).order_by(base_class.data_time).all()
+        except AttributeError as e:
+            raise Exception(f"Empty result: Attribute {attribute_fqname} not found") from e
+        return result
+
+    def get_attribute_value_by_interval(self,attribute_fqname: str, start_time: datetime, stop_time: datetime, tablename:str):
+        """
+        Takes as input the attribute name and a certain starting and ending point-time. 
+        The datetime format is pretty flexible (e.g. "YYYY-MM-dd hh:mm:ss").
+        Returns a list of timestamps and a list of values
+        """
+        attr_id = self.get_attribute_id(attribute_fqname)
+        # Retrieves the class that maps the DB table given the tablename
+        base_class = self.ab.get_class_by_tablename(tablename)
+        try:
+            result = self.session.query(base_class).\
+                    join(self.ab.Attribute,self.ab.Attribute.att_conf_id==base_class.att_conf_id).\
+                        filter(and_(self.ab.Attribute.att_conf_id == attr_id,base_class.data_time >= str(start_time), \
+                                base_class.data_time <= str(stop_time))).order_by(base_class.data_time).all()
+        except AttributeError as e:
+            raise Exception(f"Empty result: Attribute {attribute_fqname} not found") from e
+        return result
+
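+# Usage sketch of the concrete retrievers defined below (time window and attribute
+# name are illustrative; the attribute must be archived):
+#
+#   retriever = RetrieverTimescale()
+#   records = retriever.get_attribute_value_by_hours('stat/sdp/1/fpga_temp_r', hours=2.0)
+#   records = retriever.get_attribute_value_by_interval('stat/sdp/1/fpga_temp_r',
+#                                                       '2021-09-01 10:00:00',
+#                                                       '2021-09-01 12:00:00')
+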
+class RetrieverMySQL(Retriever):
+    
+    def __init__(self, cm_name: str = 'archiving/hdbpp/confmanager01'):
+        self.cm_name = cm_name
+        self.session = self.connect_to_archiving_db()
+        self.ab = self.set_archiver_base()
+    
+    def connect_to_archiving_db(self):
+        """
+        Returns a session to a MySQL DBMS using default credentials.
+        """
+        host,dbname,port,user,pw = super().get_db_credentials()
+        # Set sqlalchemy library connection
+        if host=='archiver-maria-db':
+            libname = 'mysql+pymysql'         
+        else:
+            raise ValueError(f"Invalid hostname: {host}")
+        Session = super().create_session(libname,user,pw,host,port,dbname)
+        return Session()
+    
+    def set_archiver_base(self):
+        """
+        Sets the right mapper class following the DBMS connection
+        """    
+        return importlib.import_module('.archiver_base_mysql', package=__package__)
+    
+    def get_attribute_datatype(self,attribute_fqname: str):
+        """
+        Takes as input the fully-qualified name of an attribute and returns its Data-Type.
+        Data Type name indicates the type (e.g. string, int, ...) and the read/write property. The name is used
+        as DB table name suffix in which values are stored.
+        """
+        domain, family, member, name = split_tango_name(attribute_fqname,"attribute")
+        try:
+            result = self.session.query(self.ab.DataType.data_type).join(self.ab.Attribute,self.ab.Attribute.att_conf_data_type_id==self.ab.DataType.att_conf_data_type_id).\
+                        filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, self.ab.Attribute.member == member, self.ab.Attribute.name == name)).one()
+            return result[0]
+        except TypeError as e:
+            raise Exception(f"Attribute not {attribute_fqname} found!") from e
+        except NoResultFound as e:
+            raise Exception(f"No records of attribute {attribute_fqname} found in DB") from e
+    
+    def get_attribute_value_by_hours(self,attribute_fqname: str, hours: float = 1.0):
+        """
+        Takes as input the attribute fully-qualified name and the number of past hours since the actual time 
+        (e.g. hours=1 retrieves values in the last hour, hours=8.5 retrieves values in the last eight hours and half).
+        Returns a list of timestamps and a list of values
+        """
+        attr_datatype = self.get_attribute_datatype(attribute_fqname)
+        # Builds the table name from the datatype
+        tablename = f"att_{attr_datatype}"
+        return super().get_attribute_value_by_hours(attribute_fqname,hours,tablename) 
+
+    def get_attribute_value_by_interval(self,attribute_fqname: str, start_time: datetime, stop_time: datetime):
+        """
+        Takes as input the attribute name and a certain starting and ending point-time. 
+        The datetime format is pretty flexible (e.g. "YYYY-MM-dd hh:mm:ss").
+        Returns a list of timestamps and a list of values
+        """
+        attr_datatype = self.get_attribute_datatype(attribute_fqname)
+        # Builds the table name from the datatype
+        tablename = f"att_{attr_datatype}"
+        return super().get_attribute_value_by_interval(attribute_fqname,start_time,stop_time,tablename)
+    
+    # DRAFT #
+    def get_masked_fpga_temp(self,start_time: datetime, stop_time: datetime,temp_attr_name:str='stat/sdp/1/fpga_temp_r',
+                    mask_attr_name:str='stat/sdp/1/tr_fpga_mask_r'):
+        """
+        Returns a list of SDP/fpga_temp_r values, but only if SDP/tr_fpga_mask_r values are TRUE
+        """
+        mask_values = self.get_attribute_value_by_interval(mask_attr_name,start_time,stop_time)
+        temp_values = self.get_attribute_value_by_interval(temp_attr_name,start_time,stop_time)
+        # Since the timestamps may not be synchronized, drop the first or last set of
+        # records so that both lists cover the same time window
+        if len(mask_values)==len(temp_values):
+            first_mask_data_time = mask_values[0].data_time
+            first_temp_data_time = temp_values[0].data_time
+            if first_mask_data_time > first_temp_data_time:
+                mask_values = mask_values[:-int(mask_values[0].dim_x_r)]
+                temp_values = temp_values[int(temp_values[0].dim_x_r):]
+            elif first_mask_data_time < first_temp_data_time:
+                mask_values = mask_values[int(mask_values[0].dim_x_r):]
+                temp_values = temp_values[:-int(temp_values[0].dim_x_r)]
+        else:
+            raise Exception("Mask and temperature record lists have different lengths")
+        # Convert DB Array records into Python lists
+        mask_data = self.ab.build_array_from_record(mask_values,mask_values[0].dim_x_r)
+        temp_data = self.ab.build_array_from_record(temp_values,temp_values[0].dim_x_r)
+        # Extract only the value from the array 
+        mask_array_values = self.ab.get_values_from_record(mask_data)
+        temp_array_values = self.ab.get_values_from_record(temp_data)
+        # Mask out the temperatures of the FPGAs whose mask value is FALSE
+        masked_values = numpy.ma.masked_array(temp_array_values, mask=numpy.invert(mask_array_values.astype(bool)))
+        return masked_values, mask_values, temp_values
+
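+# Usage sketch for the draft above (time window is illustrative):
+#
+#   retriever = RetrieverMySQL()
+#   masked, mask_records, temp_records = retriever.get_masked_fpga_temp(
+#       '2021-09-01 10:00:00', '2021-09-01 12:00:00')
+#   masked.mean()   # mean temperature over the non-masked FPGAs
+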
+class RetrieverTimescale(Retriever):
+    
+    def __init__(self, cm_name: str = 'archiving/hdbppts/confmanager01'):
+        self.cm_name = cm_name
+        self.session = self.connect_to_archiving_db()
+        self.ab = self.set_archiver_base()
+    
+    def connect_to_archiving_db(self):
+        """
+        Returns a session to a MySQL DBMS using default credentials.
+        """
+        host,dbname,port,user,pw = super().get_db_credentials()
+        # Set sqlalchemy library connection        
+        if host=='archiver-timescale':
+            libname = 'postgresql+psycopg2'
+        else:
+            raise ValueError(f"Invalid hostname: {host}")
+        Session = super().create_session(libname,user,pw,host,port,dbname)
+        return Session()
+    
+    def set_archiver_base(self):
+        """
+        Sets the right mapper class following the DBMS connection
+        """
+        return importlib.import_module('.archiver_base_ts', package=__package__)
+    
+    def get_attribute_datatype(self,attribute_fqname: str):
+        """
+        Takes as input the fully-qualified name of an attribute and returns its Data-Type.
+        Data Type name indicates the type (e.g. string, int, ...) and the read/write property. The name is used
+        as DB table name suffix in which values are stored.
+        """
+        domain, family, member, name = split_tango_name(attribute_fqname,"attribute")
+        try:
+            result = self.session.query(self.ab.DataType.type).join(self.ab.Attribute,self.ab.Attribute.att_conf_type_id==self.ab.DataType.att_conf_type_id).\
+                            filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, self.ab.Attribute.member == member, self.ab.Attribute.name == name)).one()
+            return result[0]
+        except TypeError as e:
+            raise Exception(f"Attribute not {attribute_fqname} found!") from e
+        except NoResultFound as e:
+            raise Exception(f"No records of attribute {attribute_fqname} found in DB") from e
+    
+    def get_attribute_format(self,attribute_fqname: str):
+        """
+        Takes as input the fully-qualified name of an attribute and returns its format.
+        Formats are basically three: Scalar, Spectrum and Image.
+        * Works only for POSTGRESQL * 
+        """
+        domain, family, member, name = split_tango_name(attribute_fqname,"attribute")
+        try:
+            result = self.session.query(self.ab.Format.format).join(self.ab.Attribute,self.ab.Attribute.att_conf_format_id==self.ab.Format.att_conf_format_id).\
+                filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, self.ab.Attribute.member == member, self.ab.Attribute.name == name)).one()
+            return result[0]
+        except TypeError as e:
+            raise Exception("Attribute not found!") from e
+        except NoResultFound as e:
+            raise Exception(f"No records of attribute {attribute_fqname} found in DB") from e
+    
+    def get_attribute_tablename(self,attribute_fqname: str):
+        """
+        Takes as input the fully-qualified name of an attribute and returns the tablename where it is stored.
+        * Works only for POSTGRESQL * 
+        """
+        domain, family, member, name = split_tango_name(attribute_fqname,"attribute")
+        try:
+            result = self.session.query(self.ab.Attribute.table_name).filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, \
+                                    self.ab.Attribute.member == member, self.ab.Attribute.name == name)).one()
+            return result[0]
+        except TypeError as e:
+            raise Exception("Attribute not found!") from e
+        except NoResultFound as e:
+            raise Exception(f"No records of attribute {attribute_fqname} found in DB") from e
+    
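+    # For example (illustrative), a scalar float attribute would report
+    # 'att_scalar_devfloat' here, matching the mapper classes of archiver_base_ts.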
+    def get_attribute_value_by_hours(self, attribute_fqname: str, hours: float = 1.0):
+        """
+        Takes as input the attribute fully-qualified name and the number of past hours since the actual time 
+        (e.g. hours=1 retrieves values in the last hour, hours=8.5 retrieves values in the last eight hours and half).
+        Returns a list of timestamps and a list of values
+        """
+        tablename = self.get_attribute_tablename(attribute_fqname)
+        return super().get_attribute_value_by_hours(attribute_fqname,hours,tablename)
+    
+    def get_attribute_value_by_interval(self,attribute_fqname: str, start_time: datetime, stop_time: datetime):
+        """
+        Takes as input the attribute name and a certain starting and ending point-time. 
+        The datetime format is pretty flexible (e.g. "YYYY-MM-dd hh:mm:ss").
+        Returns a list of timestamps and a list of values
+        """
+        tablename = self.get_attribute_tablename(attribute_fqname)
+        return super().get_attribute_value_by_interval(attribute_fqname,start_time,stop_time,tablename)
+    
\ No newline at end of file