diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index aa04562e240eb10b6ba86c82a87d80b3f68685d8..a08fe46bc6d697b670f030d8c8f72530e8eb6d68 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -48,9 +48,9 @@ trigger_prepare:
     - apk add --update make bash docker-compose
     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
     - touch /root/.Xauthority
-#    Hack BASH_SOURCE into sourced files, docker its sh shell won't set this
+    #    Hack BASH_SOURCE into sourced files; docker's sh shell won't set this
     - export BASH_SOURCE=$(pwd)/bootstrap/etc/lofar20rc.sh
-#    source the lofarrc file and mask its non zero exit code
+    #    source the lofar20rc file and mask its non-zero exit code
     - . bootstrap/etc/lofar20rc.sh || true
 ##    Allow docker image script to execute
 #    - chmod u+x $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh
@@ -68,7 +68,7 @@ trigger_prepare:
 .base_docker_store_images:
   extends: .base_docker_images
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh
 
 # Download all remote images and store them on our image registry for tagged
@@ -98,7 +98,7 @@ docker_build_image_all:
   rules:
     - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH) || $CI_COMMIT_TAG
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh lofar-device-base latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh prometheus latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh itango latest
@@ -148,7 +148,7 @@ docker_build_image_lofar_device_base:
       - docker-compose/lofar-device-base.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh lofar-device-base $tag
 docker_build_image_prometheus:
   extends: .base_docker_images_except
@@ -159,7 +159,7 @@ docker_build_image_prometheus:
       - docker-compose/prometheus.yml
       - docker-compose/prometheus/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh prometheus $tag
 docker_build_image_itango:
   extends: .base_docker_images_except
@@ -170,7 +170,7 @@ docker_build_image_itango:
       - docker-compose/itango.yml
       - docker-compose/itango/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh itango $tag
 docker_build_image_grafana:
   extends: .base_docker_images_except
@@ -181,7 +181,7 @@ docker_build_image_grafana:
       - docker-compose/grafana.yml
       - docker-compose/grafana/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh grafana $tag
 docker_build_image_loki:
   extends: .base_docker_images_except
@@ -192,7 +192,7 @@ docker_build_image_loki:
       - docker-compose/loki.yml
       - docker-compose/loki/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh loki $tag
 docker_build_image_logstash:
   extends: .base_docker_images_except
@@ -203,7 +203,7 @@ docker_build_image_logstash:
       - docker-compose/logstash.yml
       - docker-compose/logstash/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh logstash $tag
 docker_build_image_jupyter:
   extends: .base_docker_images_except
@@ -214,7 +214,7 @@ docker_build_image_jupyter:
       - docker-compose/jupyter-lab.yml
       - docker-compose/jupyterlab/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh jupyter-lab $tag
 docker_build_image_apsct_sim:
   extends: .base_docker_images_except
@@ -225,7 +225,7 @@ docker_build_image_apsct_sim:
       - docker-compose/aspct-sim.yml
       - docker-compose/pypcc-sim-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh apsct-sim $tag
 docker_build_image_ccd_sim:
   extends: .base_docker_images_except
@@ -236,7 +236,7 @@ docker_build_image_ccd_sim:
       - docker-compose/ccd-sim.yml
       - docker-compose/pypcc-sim-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh ccd-sim $tag
 docker_build_image_apspu_sim:
   extends: .base_docker_images_except
@@ -247,7 +247,7 @@ docker_build_image_apspu_sim:
       - docker-compose/apspu-sim.yml
       - docker-compose/pypcc-sim-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh apspu-sim $tag
 docker_build_image_recv_sim:
   extends: .base_docker_images_except
@@ -258,7 +258,7 @@ docker_build_image_recv_sim:
       - docker-compose/recv-sim.yml
       - docker-compose/pypcc-sim-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh recv-sim $tag
 docker_build_image_sdptr_sim:
   extends: .base_docker_images_except
@@ -269,7 +269,7 @@ docker_build_image_sdptr_sim:
       - docker-compose/sdptr-sim.yml
       - docker-compose/sdptr-sim/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh sdptr-sim $tag
 docker_build_image_unb2_sim:
   extends: .base_docker_images_except
@@ -280,7 +280,7 @@ docker_build_image_unb2_sim:
       - docker-compose/unb2-sim.yml
       - docker-compose/pypcc-sim-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh unb2-sim $tag
 docker_build_image_device_apsct:
   extends: .base_docker_images_except
@@ -291,7 +291,7 @@ docker_build_image_device_apsct:
       - docker-compose/device-aspct.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-aspct $tag
 docker_build_image_device_ccd:
   extends: .base_docker_images_except
@@ -302,7 +302,7 @@ docker_build_image_device_ccd:
       - docker-compose/device-ccd.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-ccd $tag
 docker_build_image_device_configuration:
   extends: .base_docker_images_except
@@ -313,7 +313,7 @@ docker_build_image_device_configuration:
       - docker-compose/device-configuration.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-configuration $tag
 docker_build_image_device_apspu:
   extends: .base_docker_images_except
@@ -324,7 +324,7 @@ docker_build_image_device_apspu:
       - docker-compose/device-apspu.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-apspu $tag
 docker_build_image_device_psoc:
   extends: .base_docker_images_except
@@ -335,7 +335,7 @@ docker_build_image_device_psoc:
       - docker-compose/device-psoc.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-psoc $tag
 docker_build_image_device_pcon:
   extends: .base_docker_images_except
@@ -346,7 +346,7 @@ docker_build_image_device_pcon:
       - docker-compose/device-pcon.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-pcon $tag
 docker_build_image_device_tilebeam:
   extends: .base_docker_images_except
@@ -357,7 +357,7 @@ docker_build_image_device_tilebeam:
       - docker-compose/device-tilebeam.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-tilebeam $tag
 docker_build_image_device_beamlet:
   extends: .base_docker_images_except
@@ -368,7 +368,7 @@ docker_build_image_device_beamlet:
       - docker-compose/device-beamlet.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-beamlet $tag
 docker_build_image_device_digitalbeam:
   extends: .base_docker_images_except
@@ -379,7 +379,7 @@ docker_build_image_device_digitalbeam:
       - docker-compose/device-digitalbeam.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-digitalbeam $tag
 docker_build_image_device_boot:
   extends: .base_docker_images_except
@@ -390,7 +390,7 @@ docker_build_image_device_boot:
       - docker-compose/device-boot.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-boot $tag
 docker_build_image_device_docker:
   extends: .base_docker_images_except
@@ -401,7 +401,7 @@ docker_build_image_device_docker:
       - docker-compose/device-docker.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-docker $tag
 docker_build_image_device_observation:
   extends: .base_docker_images_except
@@ -412,7 +412,7 @@ docker_build_image_device_observation:
       - docker-compose/device-observation.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-observation $tag
 docker_build_image_device_observation_control:
   extends: .base_docker_images_except
@@ -423,7 +423,7 @@ docker_build_image_device_observation_control:
       - docker-compose/device-observation-control.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-observation-control $tag
 docker_build_image_device_antennafield:
   extends: .base_docker_images_except
@@ -434,7 +434,7 @@ docker_build_image_device_antennafield:
       - docker-compose/device-antennafield.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-antennafield $tag
 docker_build_image_device_recv:
   extends: .base_docker_images_except
@@ -445,7 +445,7 @@ docker_build_image_device_recv:
       - docker-compose/device-recv.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-recv $tag
 docker_build_image_device_sdp:
   extends: .base_docker_images_except
@@ -456,7 +456,7 @@ docker_build_image_device_sdp:
       - docker-compose/device-sdp.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sdp $tag
 docker_build_image_device_bst:
   extends: .base_docker_images_except
@@ -467,7 +467,7 @@ docker_build_image_device_bst:
       - docker-compose/device-bst.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-bst $tag
 docker_build_image_device_sst:
   extends: .base_docker_images_except
@@ -478,7 +478,7 @@ docker_build_image_device_sst:
       - docker-compose/device-sst.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sst $tag
 docker_build_image_device_unb2:
   extends: .base_docker_images_except
@@ -489,7 +489,7 @@ docker_build_image_device_unb2:
       - docker-compose/device-unb2.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-unb2 $tag
 docker_build_image_device_xst:
   extends: .base_docker_images_except
@@ -500,7 +500,7 @@ docker_build_image_device_xst:
       - docker-compose/device-xst.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-xst $tag
 docker_build_image_device_temperature_manager:
   extends: .base_docker_images_except
@@ -511,7 +511,7 @@ docker_build_image_device_temperature_manager:
       - docker-compose/device-temperature-manager.yml
       - docker-compose/lofar-device-base/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-temperature-manager $tag
 docker_build_image_archiver_timescale:
   extends: .base_docker_images_except
@@ -522,7 +522,7 @@ docker_build_image_archiver_timescale:
       - docker-compose/archiver-timescale.yml
       - docker-compose/timescaledb/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh archiver-timescale $tag
 docker_build_image_hdbpp:
   extends: .base_docker_images_except
@@ -533,7 +533,7 @@ docker_build_image_hdbpp:
       - docker-compose/archiver-timescale.yml
       - docker-compose/hdbpp/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbpp $tag
 docker_build_image_hdbppts_cm:
   extends: .base_docker_images_except
@@ -544,7 +544,7 @@ docker_build_image_hdbppts_cm:
       - docker-compose/archiver-timescale.yml
       - docker-compose/hdbppts-cm/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-cm $tag
 docker_build_image_hdbppts_es:
   extends: .base_docker_images_except
@@ -555,20 +555,35 @@ docker_build_image_hdbppts_es:
       - docker-compose/archiver-timescale.yml
       - docker-compose/hdbppts-es/*
   script:
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-es $tag
 newline_at_eof:
   stage: linting
   before_script:
     - pip3 install -r tangostationcontrol/test-requirements.txt
   script:
-#     TODO(Corne): Ignore shell files in submodules more cleanly
+    #     TODO(Corne): Ignore shell files in submodules more cleanly
     - flake8 --filename *.sh,*.conf,*.md,*.yml --select=W292 --exclude docker-compose/tango-prometheus-exporter,.tox,.egg-info,docker
-python_linting:
+
+run_black:
+  stage: linting
+  script:
+    - cd tangostationcontrol
+    - tox -e black
+
+run_flake8:
   stage: linting
   script:
     - cd tangostationcontrol
     - tox -e pep8
+
+run_pylint:
+  stage: linting
+  allow_failure: true
+  script:
+    - cd tangostationcontrol
+    - tox -e pylint
+
 bandit:
   stage: static-analysis
   script:
@@ -578,12 +593,12 @@ xenon:
   stage: static-analysis
   allow_failure: true
   script:
-   - cd tangostationcontrol
-   - tox -e xenon
+    - cd tangostationcontrol
+    - tox -e xenon
 shellcheck:
   stage: static-analysis
   script:
-#     TODO(Corne): L2SS-962: Ignore shell files in submodules
+    #     TODO(Corne): L2SS-962: Ignore shell files in submodules
     - shellcheck --version
     - shellcheck **/*.sh
 sphinx-documentation:
@@ -639,20 +654,20 @@ integration_test_docker:
     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
   script:
     - touch /root/.Xauthority
-#    Hack BASH_SOURCE into sourced files, docker its sh shell won't set this
+    #    Hack BASH_SOURCE into sourced files; docker's sh shell won't set this
     - export BASH_SOURCE=$(pwd)/bootstrap/etc/lofar20rc.sh
-#    Hack HOSTNAME env variable into host.docker.internal, set in docker-compose
+    #    Hack HOSTNAME env variable into host.docker.internal, set in docker-compose
     - export HOSTNAME=host.docker.internal
-#    source the lofarrc file and mask its non zero exit code
+    #    source the lofar20rc file and mask its non-zero exit code
     - . bootstrap/etc/lofar20rc.sh || true
-#    TANGO_HOST must be unset our databaseds will be unreachable
+    #    TANGO_HOST must be unset or our databaseds will be unreachable
     - unset TANGO_HOST
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh pull $tag
-#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash -e $CI_PROJECT_DIR/sbin/run_integration_test.sh
   after_script:
-#    Collect output of all containers
+    #    Collect output of all containers
     - |
       mkdir -p log
       for container in $(docker ps -a --format "{{.Names}}")
@@ -674,7 +689,7 @@ wheel_packaging:
   image: ubuntu:bionic
   when: manual
   rules:
-  - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH) || $CI_COMMIT_TAG
+    - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH) || $CI_COMMIT_TAG
   before_script:
     - apt-get update
     - apt-get install ansible -y
diff --git a/bin/dump_ConfigDb.sh b/bin/dump_ConfigDb.sh
index 2532b8e275a3c4a609dc9b618fb143f8815f94a6..c2273438abfe09873c8f89cd9f61ecf65e1efdc2 100755
--- a/bin/dump_ConfigDb.sh
+++ b/bin/dump_ConfigDb.sh
@@ -1,4 +1,6 @@
 #!/bin/bash
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 # writes the JSON dump to stdout, Do not change -i into -it incompatible with gitlab ci!
 docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig bash -c '
diff --git a/bin/itango_console.sh b/bin/itango_console.sh
index c2474781787257c6ab3ee0656659415f09380b29..69b28589673af23659e5e069bf979ce4fb5d57d3 100755
--- a/bin/itango_console.sh
+++ b/bin/itango_console.sh
@@ -1,2 +1,5 @@
 #!/bin/bash
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 exec docker exec -it "${CONTAINER_NAME_PREFIX}"itango itango3
diff --git a/bin/itango_shell.sh b/bin/itango_shell.sh
index 334953de3cca29ff459f6c861637c0859b1da97b..cbd4679e3a7b1d17a9a84b60e117381378d34ed7 100755
--- a/bin/itango_shell.sh
+++ b/bin/itango_shell.sh
@@ -1,2 +1,5 @@
 #!/bin/bash
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 exec docker exec -it "${CONTAINER_NAME_PREFIX}"itango /bin/bash
diff --git a/bin/start-ds.sh b/bin/start-ds.sh
index fcbbb2a5e921ccb6ead741f535e1c670b3a1bdba..d1989bf0a32af2eb8b6289cea76f56a7bff65e10 100755
--- a/bin/start-ds.sh
+++ b/bin/start-ds.sh
@@ -1,4 +1,6 @@
 #!/bin/bash
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 set -e
 
diff --git a/bin/start-jive.sh b/bin/start-jive.sh
index 04260afc4a6a1578ba5d6c8ee9299ec1b48dcd7e..d4a41e5af628ad05d2fc1fa0c7023bb206dcdf49 100755
--- a/bin/start-jive.sh
+++ b/bin/start-jive.sh
@@ -1,4 +1,7 @@
 #!/bin/bash
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 OS=$(uname)
 
 case ${OS} in
diff --git a/bin/update_submodules.sh b/bin/update_submodules.sh
index 9dcb9745849c01bbf61b9ffae92c5c7cc21a5a8f..be2eb2761a3e7ef12c0248be5e1667dfaa55c4f5 100755
--- a/bin/update_submodules.sh
+++ b/bin/update_submodules.sh
@@ -1,2 +1,5 @@
 #!/bin/bash
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 git submodule update --init
diff --git a/bootstrap/bin/updatePythonEnv.sh b/bootstrap/bin/updatePythonEnv.sh
index 0f0d1becdcc2db08a6a7c3f93682df63124a9c67..112fb66adfec65cd31a1f36c10cca28c0976cf09 100755
--- a/bootstrap/bin/updatePythonEnv.sh
+++ b/bootstrap/bin/updatePythonEnv.sh
@@ -1,4 +1,6 @@
 #! /usr/bin/env bash
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 venv=${VIRTUAL_ENV:?This is currently not a Python3 venv!  Will not upgrade venv packages.}
 echo -e "\nFound a Python3 venv in \"${VIRTUAL_ENV}\".\nWill now proceed with package upgrades.\n"
diff --git a/bootstrap/etc/lofar20rc.sh b/bootstrap/etc/lofar20rc.sh
index 4b9d806d819816a86c5fea3ab8eb59135d8edcfc..fecf24bf44448aa6524fe8f1c0d60a41e89b1c1d 100755
--- a/bootstrap/etc/lofar20rc.sh
+++ b/bootstrap/etc/lofar20rc.sh
@@ -1,4 +1,7 @@
 #! /usr/bin/env bash -e
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 # Set up the LOFAR2.0 environment.
 # For the time being it is assumend that the LOFAR2.0 environment has to
 # co-exist with a LOFAR1 environment.
diff --git a/bootstrap/sbin/checkout_shallow_copy_lofar_repos.sh b/bootstrap/sbin/checkout_shallow_copy_lofar_repos.sh
index eec4919f44e0b2c36faae7ebc796b4ce41c12315..03c66b6b8891c130cefc844ff067dbca98350c18 100755
--- a/bootstrap/sbin/checkout_shallow_copy_lofar_repos.sh
+++ b/bootstrap/sbin/checkout_shallow_copy_lofar_repos.sh
@@ -1,4 +1,6 @@
 #! /usr/bin/env bash
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 # Clean out local dirs and then clone a shallow copy
 # of LOFAR2.0 repos.
diff --git a/bootstrap/sbin/delete_all_docker_images.sh b/bootstrap/sbin/delete_all_docker_images.sh
index 7cccb90c3ede668f9e97eeb9956eac82176ac9a9..6af68c5f7ff6e599c0ecdfe8f13494e38840f56f 100755
--- a/bootstrap/sbin/delete_all_docker_images.sh
+++ b/bootstrap/sbin/delete_all_docker_images.sh
@@ -1,4 +1,6 @@
 #! /usr/bin/env bash
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 function help()
 {
diff --git a/bootstrap/sbin/rebuild_system_from_scratch.sh b/bootstrap/sbin/rebuild_system_from_scratch.sh
index bfbb52361032b49dd8333b3ec9704804630db7bb..ff363f21a10e0e01160d1a7460273de433a2c135 100755
--- a/bootstrap/sbin/rebuild_system_from_scratch.sh
+++ b/bootstrap/sbin/rebuild_system_from_scratch.sh
@@ -1,4 +1,6 @@
 #! /usr/bin/env bash
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 HOME_DIR=${LOFAR20_DIR:-${PWD}}
 if [ ! -d ${HOME_DIR}/bootstrap ]; then
diff --git a/docker-compose/Makefile b/docker-compose/Makefile
index 6961532bf0002e1379744ea1ed9f177e7fdd89fe..a4438da8c505e1fdfb0bdc040457a69aba6df1f2 100644
--- a/docker-compose/Makefile
+++ b/docker-compose/Makefile
@@ -1,3 +1,6 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 # Set dir of Makefile to a variable to use later
 MAKEPATH := $(abspath $(lastword $(MAKEFILE_LIST)))
 BASEDIR := $(notdir $(patsubst %/,%,$(dir $(MAKEPATH))))
diff --git a/docker-compose/apsct-sim.yml b/docker-compose/apsct-sim.yml
index 9dc5c14a9e0a6a13521e1fe442b6b5eef848155a..1f0bd32eea02dee2f072da68ff4f1833308ed1c1 100644
--- a/docker-compose/apsct-sim.yml
+++ b/docker-compose/apsct-sim.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an APSCT simulator
 #
@@ -9,10 +11,10 @@ version: '2.1'
 services:
   apsct-sim:
     build:
-        context: pypcc-sim-base
-        args:
-         - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
-         - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
+      context: pypcc-sim-base
+      args:
+        - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
+        - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}apsct-sim
     logging:
       driver: "json-file"
diff --git a/docker-compose/apspu-sim.yml b/docker-compose/apspu-sim.yml
index 0f164a5a3321ab25d7083ce896a56bcc4e6af2e0..5f35e510bb98627ebb82ed57ef89169c3f6c5195 100644
--- a/docker-compose/apspu-sim.yml
+++ b/docker-compose/apspu-sim.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an APSPU simulator
 #
@@ -9,10 +11,10 @@ version: '2.1'
 services:
   apspu-sim:
     build:
-        context: pypcc-sim-base
-        args:
-         - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
-         - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
+      context: pypcc-sim-base
+      args:
+        - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
+        - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}apspu-sim
     logging:
       driver: "json-file"
diff --git a/docker-compose/archiver-timescale.yml b/docker-compose/archiver-timescale.yml
index 303256f13d7a013590d7b3d121537a572acefb22..1cfbc598e20db62208a6c5071da251770195c656 100644
--- a/docker-compose/archiver-timescale.yml
+++ b/docker-compose/archiver-timescale.yml
@@ -1,15 +1,18 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 version: '2.1'
 
 volumes:
-  archiver-timescale-data: {}
+  archiver-timescale-data: { }
 
 services:
   archiver-timescale:
     image: timescaledb
     build:
-        context: timescaledb
-        args: 
-            SOURCE_IMAGE: timescale/timescaledb:${PG_TIMESCALEDB_VERSION}
+      context: timescaledb
+      args:
+        SOURCE_IMAGE: timescale/timescaledb:${PG_TIMESCALEDB_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}archiver-timescale
     networks:
       - control
@@ -38,13 +41,13 @@ services:
         syslog-format: rfc3164
         tag: "{{.Name}}"
     restart: unless-stopped
-  
+
   hdbpp:
     image: hdbpp
     build:
-        context: hdbpp
-        args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-cpp:${TANGO_CPP_VERSION}
+      context: hdbpp
+      args:
+        SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-cpp:${TANGO_CPP_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}hdbpp
     networks:
       - control
@@ -56,16 +59,16 @@ services:
       - "host.docker.internal:host-gateway"
     environment:
       - TANGO_HOST=${TANGO_HOST}
-  
+
   hdbppts-cm:
     image: hdbppts-cm
     build:
-        context: hdbppts-cm
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/hdbpp:latest
+      context: hdbppts-cm
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/hdbpp:latest
     container_name: ${CONTAINER_NAME_PREFIX}hdbppts-cm
     networks:
-      - control    
+      - control
     depends_on:
       - databaseds
       - dsconfig
@@ -81,21 +84,21 @@ services:
       wait-for-it.sh ${TANGO_HOST} --timeout=30 --strict --
             hdbppcm-srv 01"
     logging:
-        driver: syslog
-        options:
-          syslog-address: udp://${LOG_HOSTNAME}:1514
-          syslog-format: rfc3164
-          tag: "{{.Name}}"
-  
+      driver: syslog
+      options:
+        syslog-address: udp://${LOG_HOSTNAME}:1514
+        syslog-format: rfc3164
+        tag: "{{.Name}}"
+
   hdbppts-es:
     image: hdbppts-es
     build:
-        context: hdbppts-es
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/hdbpp:latest
+      context: hdbppts-es
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/hdbpp:latest
     container_name: ${CONTAINER_NAME_PREFIX}hdbppts-es
     networks:
-      - control    
+      - control
     depends_on:
       - hdbppts-cm
       - databaseds
@@ -112,9 +115,9 @@ services:
       wait-for-it.sh ${TANGO_HOST} --timeout=30 --strict --
             hdbppes-srv 01"
     logging:
-        driver: syslog
-        options:
-          syslog-address: udp://${LOG_HOSTNAME}:1514
-          syslog-format: rfc3164
-          tag: "{{.Name}}"
+      driver: syslog
+      options:
+        syslog-address: udp://${LOG_HOSTNAME}:1514
+        syslog-format: rfc3164
+        tag: "{{.Name}}"
     restart: unless-stopped
diff --git a/docker-compose/ccd-sim.yml b/docker-compose/ccd-sim.yml
index b02d3693abc28ad95fde8b57515613c8cae09204..11bf6358f93dc33d6e4c8628eb23233fbd19eda9 100644
--- a/docker-compose/ccd-sim.yml
+++ b/docker-compose/ccd-sim.yml
@@ -1,18 +1,20 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an APSCT simulator
 #
 # Defines:
-#   - apsct-sim 
+#   - apsct-sim
 #
-version: '2.1' 
+version: '2.1'
 
 services:
   ccd-sim:
     build:
-        context: pypcc-sim-base
-        args:
-         - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
-         - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
+      context: pypcc-sim-base
+      args:
+        - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
+        - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}ccd-sim
     logging:
       driver: "json-file"
diff --git a/docker-compose/device-antennafield.yml b/docker-compose/device-antennafield.yml
index 32bf15bf4d60d1d0f7d38f8cdec0592e79fe58d7..ba7ae6b00d99d70b0c3547da889bd792309223e1 100644
--- a/docker-compose/device-antennafield.yml
+++ b/docker-compose/device-antennafield.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an interactive iTango session.
 #
@@ -19,10 +21,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-antennafield
     logging:
       driver: "json-file"
@@ -38,7 +40,7 @@ services:
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
-        - ..:/opt/lofar/tango:rw
+      - ..:/opt/lofar/tango:rw
     environment:
       - TANGO_HOST=${TANGO_HOST}
       - TANGO_ZMQ_EVENT_PORT=5815
diff --git a/docker-compose/device-apsct.yml b/docker-compose/device-apsct.yml
index e90644f83c1013ba75e39efbc198437594c80cdc..8b139d654185407747bbefc4b47e031b1e176564 100644
--- a/docker-compose/device-apsct.yml
+++ b/docker-compose/device-apsct.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an interactive iTango session.
 #
@@ -18,10 +20,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-apsct
     logging:
       driver: "json-file"
@@ -37,7 +39,7 @@ services:
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
-        - ..:/opt/lofar/tango:rw
+      - ..:/opt/lofar/tango:rw
     environment:
       - TANGO_HOST=${TANGO_HOST}
       - TANGO_ZMQ_EVENT_PORT=5809
diff --git a/docker-compose/device-apspu.yml b/docker-compose/device-apspu.yml
index 3d6dcff86c8d5d5a24b91c814cf47e5f011457d9..f809bcd3ee5a1611b283e5ceb3952f0e276148a6 100644
--- a/docker-compose/device-apspu.yml
+++ b/docker-compose/device-apspu.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an interactive iTango session.
 #
@@ -18,10 +20,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-apspu
     logging:
       driver: "json-file"
@@ -37,7 +39,7 @@ services:
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
-        - ..:/opt/lofar/tango:rw
+      - ..:/opt/lofar/tango:rw
     environment:
       - TANGO_HOST=${TANGO_HOST}
       - TANGO_ZMQ_EVENT_PORT=5810
diff --git a/docker-compose/device-beamlet.yml b/docker-compose/device-beamlet.yml
index ab27d681dab70a3155d6f25adb43df7e72c5dfec..42a372188c4832e9f19764c4c84b43acf9886887 100644
--- a/docker-compose/device-beamlet.yml
+++ b/docker-compose/device-beamlet.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an interactive iTango session.
 #
@@ -18,10 +20,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-beamlet
     logging:
       driver: "json-file"
@@ -37,7 +39,7 @@ services:
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
-        - ..:/opt/lofar/tango:rw
+      - ..:/opt/lofar/tango:rw
     environment:
       - TANGO_HOST=${TANGO_HOST}
       - TANGO_ZMQ_EVENT_PORT=5812
diff --git a/docker-compose/device-boot.yml b/docker-compose/device-boot.yml
index 9e2579a2cef9fe2b73cb90ba033e238304e31abd..8c06b9ddd2cd0327d6578e7e46b4f7d542bb3f36 100644
--- a/docker-compose/device-boot.yml
+++ b/docker-compose/device-boot.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches a LOFAR2.0 station's
 # ObservationControl device. It also runs the dynamically
@@ -17,10 +19,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-boot
     logging:
       driver: "json-file"
diff --git a/docker-compose/device-bst.yml b/docker-compose/device-bst.yml
index 9d66573dfbdd8bb9dccb4ede695be71517199850..8c4812c5e04b782bd549f9aa7b3fcaa9df21b1b4 100644
--- a/docker-compose/device-bst.yml
+++ b/docker-compose/device-bst.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an interactive iTango session.
 #
@@ -18,10 +20,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-bst
     logging:
       driver: "json-file"
@@ -29,18 +31,18 @@ services:
         max-size: "100m"
         max-file: "10"
     networks:
-        - control
-        - data
+      - control
+      - data
     ports:
-        - "5003:5003/udp" # port to receive SST UDP packets on
-        - "5103:5103/tcp" # port to emit SST TCP packets on
-        - "5717:5717" # unique port for this DS
-        - "5817:5817" # ZeroMQ event port
-        - "5917:5917" # ZeroMQ heartbeat port
+      - "5003:5003/udp" # port to receive SST UDP packets on
+      - "5103:5103/tcp" # port to emit SST TCP packets on
+      - "5717:5717" # unique port for this DS
+      - "5817:5817" # ZeroMQ event port
+      - "5917:5917" # ZeroMQ heartbeat port
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
-        - ..:/opt/lofar/tango:rw
+      - ..:/opt/lofar/tango:rw
     environment:
       - TANGO_HOST=${TANGO_HOST}
       - TANGO_ZMQ_EVENT_PORT=5817
diff --git a/docker-compose/device-ccd.yml b/docker-compose/device-ccd.yml
index 314d6398e7eedbc72b3b0b09c5b140c4c07168c6..542f80000c85457b8d7a10268197b1a59bfb65dc 100644
--- a/docker-compose/device-ccd.yml
+++ b/docker-compose/device-ccd.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an interactive iTango session.
 #
@@ -18,10 +20,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-ccd
     logging:
       driver: "json-file"
@@ -37,7 +39,7 @@ services:
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
-        - ..:/opt/lofar/tango:rw
+      - ..:/opt/lofar/tango:rw
     environment:
       - TANGO_HOST=${TANGO_HOST}
       - TANGO_ZMQ_EVENT_PORT=5821
diff --git a/docker-compose/device-configuration.yml b/docker-compose/device-configuration.yml
index 64e175f1b273ea6bf258ba923b9a7ea869181f9b..2bcdeafa9f6a20c1b3bfb42b83e085f92c8c7347 100644
--- a/docker-compose/device-configuration.yml
+++ b/docker-compose/device-configuration.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an interactive iTango session.
 #
@@ -18,10 +20,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-configuration
     logging:
       driver: "json-file"
@@ -39,9 +41,9 @@ services:
     volumes:
       - ..:/opt/lofar/tango:rw
     environment:
-    - TANGO_HOST=${TANGO_HOST}
-    - TANGO_ZMQ_EVENT_PORT=5822
-    - TANGO_ZMQ_HEARTBEAT_PORT=5922
+      - TANGO_HOST=${TANGO_HOST}
+      - TANGO_ZMQ_EVENT_PORT=5822
+      - TANGO_ZMQ_HEARTBEAT_PORT=5922
     healthcheck:
       test: l2ss-health STAT/Configuration/1
       interval: 1m
diff --git a/docker-compose/device-digitalbeam.yml b/docker-compose/device-digitalbeam.yml
index 0a2d9583ad54880f48304ff10f39249223b3f020..2cfe7eea9db95b5e56607d202a9e7a5403a9168f 100644
--- a/docker-compose/device-digitalbeam.yml
+++ b/docker-compose/device-digitalbeam.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an interactive iTango session.
 #
@@ -18,10 +20,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-digitalbeam
     logging:
       driver: "json-file"
@@ -37,7 +39,7 @@ services:
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
-        - ..:/opt/lofar/tango:rw
+      - ..:/opt/lofar/tango:rw
     environment:
       - TANGO_HOST=${TANGO_HOST}
       - TANGO_ZMQ_EVENT_PORT=5813
diff --git a/docker-compose/device-docker.yml b/docker-compose/device-docker.yml
index 1be8c1e830f6f7012e4eab2718e7fa01ef903c4e..99c53633151037f89368d9a766eae8df30aa5cb4 100644
--- a/docker-compose/device-docker.yml
+++ b/docker-compose/device-docker.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an interactive iTango session.
 #
@@ -18,10 +20,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-docker
     logging:
       driver: "json-file"
diff --git a/docker-compose/device-observation-control.yml b/docker-compose/device-observation-control.yml
index e4398e88e18e39b18ec371192de9af027e904590..95cc0f6d882a4f7d8858440ec15026f34e0c02c1 100644
--- a/docker-compose/device-observation-control.yml
+++ b/docker-compose/device-observation-control.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches a LOFAR2.0 station's
 # ObservationControl device. It also runs the dynamically
@@ -17,10 +19,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-observation-control
     logging:
       driver: "json-file"
@@ -36,7 +38,7 @@ services:
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
-        - ..:/opt/lofar/tango:rw
+      - ..:/opt/lofar/tango:rw
     environment:
       - TANGO_HOST=${TANGO_HOST}
       - TANGO_ZMQ_EVENT_PORT=5803
diff --git a/docker-compose/device-observation.yml b/docker-compose/device-observation.yml
index c0ac58f6c26b2252e5c653a00021a4ceb95c75e9..096214271e4b5ae3e67af2ce9ee36ae6936d350d 100644
--- a/docker-compose/device-observation.yml
+++ b/docker-compose/device-observation.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches a LOFAR2.0 station's
 # Observation device.
@@ -16,10 +18,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-observation
     logging:
       driver: "json-file"
@@ -35,7 +37,7 @@ services:
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
-        - ..:/opt/lofar/tango:rw
+      - ..:/opt/lofar/tango:rw
     environment:
       - TANGO_HOST=${TANGO_HOST}
       - TANGO_ZMQ_EVENT_PORT=5818
diff --git a/docker-compose/device-pcon.yml b/docker-compose/device-pcon.yml
index 4f1c8bd4ca44b952d8e4a876ef3c9ea42f4f175b..88f3f417f839693d21a03ebf1d2c927ed51ca89c 100644
--- a/docker-compose/device-pcon.yml
+++ b/docker-compose/device-pcon.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Requires:
 #   - lofar-device-base.yml
@@ -5,7 +7,7 @@
 version: '2.1'
 
 volumes:
-  iers-data: {}
+  iers-data: { }
 
 services:
   device-pcon:
@@ -13,10 +15,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-pcon
     logging:
       driver: "json-file"
diff --git a/docker-compose/device-psoc.yml b/docker-compose/device-psoc.yml
index a98c1195b71d2567974cc41d6a3a80daed3e3f88..1e8379e056c88e6d74e92171c910d2ea45d9b115 100644
--- a/docker-compose/device-psoc.yml
+++ b/docker-compose/device-psoc.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Requires:
 #   - lofar-device-base.yml
@@ -5,7 +7,7 @@
 version: '2.1'
 
 volumes:
-  iers-data: {}
+  iers-data: { }
 
 services:
   device-psoc:
@@ -13,10 +15,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-psoc
     logging:
       driver: "json-file"
diff --git a/docker-compose/device-recv.yml b/docker-compose/device-recv.yml
index e34592d6cfb3ddd3fa1ab4a20304ded6f855c1ed..bbd378861e12c645b0ce846536edc66ecd114365 100644
--- a/docker-compose/device-recv.yml
+++ b/docker-compose/device-recv.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an interactive iTango session.
 #
@@ -18,10 +20,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-recv
     logging:
       driver: "json-file"
@@ -37,7 +39,7 @@ services:
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
-        - ..:/opt/lofar/tango:rw
+      - ..:/opt/lofar/tango:rw
     environment:
       - TANGO_HOST=${TANGO_HOST}
       - TANGO_ZMQ_EVENT_PORT=5807
diff --git a/docker-compose/device-sdp.yml b/docker-compose/device-sdp.yml
index 7d68d7f15b90be1d4d8b871720b5a0426e1e800c..79bf6fc215b9eaed663f53e04339d4e4f93ed2e4 100644
--- a/docker-compose/device-sdp.yml
+++ b/docker-compose/device-sdp.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an interactive iTango session.
 #
@@ -18,10 +20,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-sdp
     logging:
       driver: "json-file"
@@ -37,7 +39,7 @@ services:
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
-        - ..:/opt/lofar/tango:rw
+      - ..:/opt/lofar/tango:rw
     environment:
       - TANGO_HOST=${TANGO_HOST}
       - TANGO_ZMQ_EVENT_PORT=5801
diff --git a/docker-compose/device-sst.yml b/docker-compose/device-sst.yml
index 7892f3e2a2775ccde17d0a9fcc3c8fe010898b31..4e8c41bb7e526de3fb7c4795a481ab1af2603e85 100644
--- a/docker-compose/device-sst.yml
+++ b/docker-compose/device-sst.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an interactive iTango session.
 #
@@ -18,10 +20,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-sst
     logging:
       driver: "json-file"
@@ -29,18 +31,18 @@ services:
         max-size: "100m"
         max-file: "10"
     networks:
-        - control
-        - data
+      - control
+      - data
     ports:
-        - "5001:5001/udp" # port to receive SST UDP packets on
-        - "5101:5101/tcp" # port to emit SST TCP packets on
-        - "5702:5702" # unique port for this DS
-        - "5802:5802" # ZeroMQ event port
-        - "5902:5902" # ZeroMQ heartbeat port
+      - "5001:5001/udp" # port to receive SST UDP packets on
+      - "5101:5101/tcp" # port to emit SST TCP packets on
+      - "5702:5702" # unique port for this DS
+      - "5802:5802" # ZeroMQ event port
+      - "5902:5902" # ZeroMQ heartbeat port
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
-        - ..:/opt/lofar/tango:rw
+      - ..:/opt/lofar/tango:rw
     environment:
       - TANGO_HOST=${TANGO_HOST}
       - TANGO_ZMQ_EVENT_PORT=5802
diff --git a/docker-compose/device-temperature-manager.yml b/docker-compose/device-temperature-manager.yml
index d2dfc0f5b55c3cc9d153fd03f0c2c4d68d184ee9..a8d8bf61401f71693d74f4b7cb2f62e0a6d82145 100644
--- a/docker-compose/device-temperature-manager.yml
+++ b/docker-compose/device-temperature-manager.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Requires:
 #   - lofar-device-base.yml
@@ -5,7 +7,7 @@
 version: '2.1'
 
 volumes:
-  iers-data: {}
+  iers-data: { }
 
 services:
   device-temperature-manager:
@@ -13,10 +15,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-temperature-manager
     logging:
       driver: "json-file"
diff --git a/docker-compose/device-tilebeam.yml b/docker-compose/device-tilebeam.yml
index e7e4b75411b5db50659a13b0965087dddfae432e..4b5e780d5366aee465d9fb50d1ed44ecb855af9b 100644
--- a/docker-compose/device-tilebeam.yml
+++ b/docker-compose/device-tilebeam.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Requires:
 #   - lofar-device-base.yml
@@ -5,7 +7,7 @@
 version: '2.1'
 
 volumes:
-  iers-data: {}
+  iers-data: { }
 
 services:
   device-tilebeam:
@@ -13,10 +15,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-tilebeam
     logging:
       driver: "json-file"
diff --git a/docker-compose/device-unb2.yml b/docker-compose/device-unb2.yml
index 223e3e5042246e382a4f33ea3747daf4966ebfe4..1a9e3870beef22ee1bf5ffaec16237a5310b93c3 100644
--- a/docker-compose/device-unb2.yml
+++ b/docker-compose/device-unb2.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an interactive iTango session.
 #
@@ -18,10 +20,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-unb2
     logging:
       driver: "json-file"
@@ -37,7 +39,7 @@ services:
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
-        - ..:/opt/lofar/tango:rw
+      - ..:/opt/lofar/tango:rw
     environment:
       - TANGO_HOST=${TANGO_HOST}
       - TANGO_ZMQ_EVENT_PORT=5804
diff --git a/docker-compose/device-xst.yml b/docker-compose/device-xst.yml
index fc07c7463afb06674006b49689ecb950b8b2483c..a58ac172edbe0e66caf4d8463cbec60c30cd9749 100644
--- a/docker-compose/device-xst.yml
+++ b/docker-compose/device-xst.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an interactive iTango session.
 #
@@ -18,10 +20,10 @@ services:
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}device-xst
     logging:
       driver: "json-file"
@@ -29,18 +31,18 @@ services:
         max-size: "100m"
         max-file: "10"
     networks:
-        - control
-        - data
+      - control
+      - data
     ports:
-        - "5002:5002/udp" # port to receive XST UDP packets on
-        - "5102:5102/tcp" # port to emit XST TCP packets on
-        - "5706:5706" # unique port for this DS
-        - "5806:5806" # ZeroMQ event port
-        - "5906:5906" # ZeroMQ heartbeat port
+      - "5002:5002/udp" # port to receive XST UDP packets on
+      - "5102:5102/tcp" # port to emit XST TCP packets on
+      - "5706:5706" # unique port for this DS
+      - "5806:5806" # ZeroMQ event port
+      - "5906:5906" # ZeroMQ heartbeat port
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
-        - ..:/opt/lofar/tango:rw
+      - ..:/opt/lofar/tango:rw
     environment:
       - TANGO_HOST=${TANGO_HOST}
       - TANGO_ZMQ_EVENT_PORT=5806
diff --git a/docker-compose/grafana.yml b/docker-compose/grafana.yml
index 16462fcd66e7973604f0fa97e338d114247b72d7..b1c99e497b173958a36ee8683493b8d02ac13b50 100644
--- a/docker-compose/grafana.yml
+++ b/docker-compose/grafana.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches Grafana
 #
@@ -15,7 +17,7 @@ services:
   grafana:
     image: grafana
     build:
-        context: grafana
+      context: grafana
     container_name: ${CONTAINER_NAME_PREFIX}grafana
     networks:
       - control
diff --git a/docker-compose/integration-test.yml b/docker-compose/integration-test.yml
index 8b143e9cd85d0f7eecdfbdf8ebb4b57fe2e5a2ef..363b997cf82d593be67a4052070db9cdf44ba1fb 100644
--- a/docker-compose/integration-test.yml
+++ b/docker-compose/integration-test.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches integration tests
 #
@@ -9,17 +11,17 @@ version: '2.1'
 services:
   integration-test:
     build:
-        context: .
-        dockerfile: ci-runner/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/ci-runner:latest
+      context: .
+      dockerfile: ci-runner/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/ci-runner:latest
     container_name: ${CONTAINER_NAME_PREFIX}integration-test
     networks:
       - control
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
-        - ..:/opt/lofar/tango:rw
+      - ..:/opt/lofar/tango:rw
     environment:
       - TANGO_HOST=${TANGO_HOST}
       - TEST_MODULE=${TEST_MODULE}
diff --git a/docker-compose/itango.yml b/docker-compose/itango.yml
index cfa658da78fa9350e4ef482c027ab3a17f464eb4..e338cf5ee614836080bb393dfb764eb834a75ec1 100644
--- a/docker-compose/itango.yml
+++ b/docker-compose/itango.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches an interactive iTango session.
 #
diff --git a/docker-compose/jupyter-lab.yml b/docker-compose/jupyter-lab.yml
index 6c4b6e75047a84bdc5268d37f2ca3cef15a76977..c02fae77acec328e9431c95cae3f1efbf4bd54f2 100644
--- a/docker-compose/jupyter-lab.yml
+++ b/docker-compose/jupyter-lab.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches Jupyter Lab for interactive iTango sessions over HTTP.
 #
@@ -13,10 +15,10 @@ version: '2.1'
 services:
   jupyter-lab:
     build:
-        context: jupyterlab
-        args:
-            CONTAINER_EXECUTION_UID: ${CONTAINER_EXECUTION_UID}
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: jupyterlab
+      args:
+        CONTAINER_EXECUTION_UID: ${CONTAINER_EXECUTION_UID}
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}jupyter-lab
     logging:
       driver: "json-file"
diff --git a/docker-compose/lofar-device-base.yml b/docker-compose/lofar-device-base.yml
index 43bbcf635b594298e56081c021095fd07e8b6488..60cfb3bafb563fe3ec06d174089635718337f09f 100644
--- a/docker-compose/lofar-device-base.yml
+++ b/docker-compose/lofar-device-base.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that forms the basis for LOFAR tango devices
 #
@@ -17,10 +19,10 @@ services:
   lofar-device-base:
     image: lofar-device-base
     build:
-        context: .
-        dockerfile: lofar-device-base/Dockerfile
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+      context: .
+      dockerfile: lofar-device-base/Dockerfile
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}lofar-device-base
     # These parameters are just visual queues, you have to define them again
     # in derived docker-compose files!
diff --git a/docker-compose/logstash.yml b/docker-compose/logstash.yml
index 0a19ad3df76c3bddaa91e12d263fa2f3443ce579..15882a7521b7baad206c6774da5aae38c7926e53 100644
--- a/docker-compose/logstash.yml
+++ b/docker-compose/logstash.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches Logstash-output-loki
 #
@@ -9,9 +11,9 @@ services:
   logstash:
     image: logstash
     build:
-        context: logstash
-        args: 
-            SOURCE_IMAGE: grafana/logstash-output-loki:main
+      context: logstash
+      args:
+        SOURCE_IMAGE: grafana/logstash-output-loki:main
     container_name: ${CONTAINER_NAME_PREFIX}logstash
     logging:
       driver: "json-file"
diff --git a/docker-compose/loki.yml b/docker-compose/loki.yml
index 317b2e3ff67fcc9316f5895509deebf22ecc3e66..bc92cea394b3258ad0247276f763dd0f03e6eaae 100644
--- a/docker-compose/loki.yml
+++ b/docker-compose/loki.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches a LOKI instance.
 # See https://grafana.com/docs/loki/latest/installation/docker/
diff --git a/docker-compose/networks.yml b/docker-compose/networks.yml
index 0799fa1c0e10f903d8ee8c566455f7bbaaaee343..387161f50fa007a72eef5008ab65beb99cc9f509 100644
--- a/docker-compose/networks.yml
+++ b/docker-compose/networks.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that describes our docker networks.
 #
diff --git a/docker-compose/prometheus-node-exporter.yml b/docker-compose/prometheus-node-exporter.yml
index a6d5b00f9b173380ef548566c5c8333826e7f031..b0b97cf04332af45182c7fc1ad0b37bec0789e53 100644
--- a/docker-compose/prometheus-node-exporter.yml
+++ b/docker-compose/prometheus-node-exporter.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches Prometheus Node Exporter
 #
diff --git a/docker-compose/prometheus.yml b/docker-compose/prometheus.yml
index 0ccc8adb3501c1ac161df9bf418474ba1eac5ee4..736becdd81b5786b2cf44544d4b20f08aff3deed 100644
--- a/docker-compose/prometheus.yml
+++ b/docker-compose/prometheus.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches Prometheus
 #
@@ -8,13 +10,13 @@
 version: '2.1'
 
 volumes:
-  prometheus-data: {}
+  prometheus-data: { }
 
 services:
   prometheus:
     image: prometheus
     build:
-        context: prometheus
+      context: prometheus
     container_name: ${CONTAINER_NAME_PREFIX}prometheus
     logging:
       driver: "json-file"
diff --git a/docker-compose/recv-sim.yml b/docker-compose/recv-sim.yml
index 2dfeb4837cb00d891ec0213418698fba61b13c84..f4a91c1b3899ee789d026afac578ab5c0078f21a 100644
--- a/docker-compose/recv-sim.yml
+++ b/docker-compose/recv-sim.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches a RECV simulator
 #
@@ -9,10 +11,10 @@ version: '2.1'
 services:
   recv-sim:
     build:
-        context: pypcc-sim-base
-        args:
-         - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
-         - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
+      context: pypcc-sim-base
+      args:
+        - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
+        - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}recv-sim
     logging:
       driver: "json-file"
diff --git a/docker-compose/rest.yml b/docker-compose/rest.yml
index a1d09ecb3b65e2577b028bb319d920c896281474..82092a26e60c26a33bdea18997049d6333244bd2 100644
--- a/docker-compose/rest.yml
+++ b/docker-compose/rest.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches Astor, sending the display to a remote X11
 # display.
@@ -21,18 +23,18 @@ services:
     # time the hostname changes as the container is restarted.
     hostname: tango-rest
     environment:
-    - TANGO_HOST=${TANGO_HOST}
+      - TANGO_HOST=${TANGO_HOST}
     ports:
-    - 8080:8080
+      - 8080:8080
     entrypoint:
-    - /usr/local/bin/wait-for-it.sh
-    - ${TANGO_HOST}
-    - --timeout=30
-    - --strict
-    - --
-    - /usr/bin/supervisord
-    - --configuration
-    - /etc/supervisor/supervisord.conf
+      - /usr/local/bin/wait-for-it.sh
+      - ${TANGO_HOST}
+      - --timeout=30
+      - --strict
+      - --
+      - /usr/bin/supervisord
+      - --configuration
+      - /etc/supervisor/supervisord.conf
     logging:
       driver: syslog
       options:
diff --git a/docker-compose/schemas.yml b/docker-compose/schemas.yml
index dc4cb4aa80947c97eab1579be98434c7bd651bcf..434122f63b6f6c84a35cd308c34f8a8114424a6e 100644
--- a/docker-compose/schemas.yml
+++ b/docker-compose/schemas.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches a LOFAR2.0 station's
 # ObservationControl device. It also runs the dynamically
@@ -19,13 +21,13 @@ services:
     # time the hostname changes as the container is restarted.
     hostname: schemas
     environment:
-    - NGINX_HOST=schemas
-    - NGINX_PORT=80
+      - NGINX_HOST=schemas
+      - NGINX_PORT=80
     ports:
-    - 9999:80
+      - 9999:80
     logging:
       driver: "json-file"
       options:
         max-size: "100m"
         max-file: "10"
-    restart: unless-stopped
\ No newline at end of file
+    restart: unless-stopped
diff --git a/docker-compose/sdptr-sim.yml b/docker-compose/sdptr-sim.yml
index e9fbdd7a09bbdd29df1a1d0e1ae94f73dec2bb65..f914eea6023a74a364a5d6acb4e2ab9239091162 100644
--- a/docker-compose/sdptr-sim.yml
+++ b/docker-compose/sdptr-sim.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches a SDP translator simulator
 #
@@ -9,10 +11,10 @@ version: '2.1'
 services:
   sdptr-sim:
     build:
-        context: sdptr-sim
-        args:
-         - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
-         - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
+      context: sdptr-sim
+      args:
+        - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
+        - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}sdptr-sim
     logging:
       driver: "json-file"
diff --git a/docker-compose/tango-prometheus-exporter.yml b/docker-compose/tango-prometheus-exporter.yml
index 5eca2be50ac8d1c1e88a4018c6de7354b02a695e..6f4b6dc79f50bd3732127a3aa10d5497e48bbfb1 100644
--- a/docker-compose/tango-prometheus-exporter.yml
+++ b/docker-compose/tango-prometheus-exporter.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches the Tango -> Prometheus adapter
 #
@@ -6,7 +8,7 @@ version: '2.1'
 services:
   tango-prometheus-exporter:
     build:
-        context: tango-prometheus-exporter
+      context: tango-prometheus-exporter
     container_name: ${CONTAINER_NAME_PREFIX}tango-prometheus-exporter
     logging:
       driver: "json-file"
diff --git a/docker-compose/tango.yml b/docker-compose/tango.yml
index 65cda98f8b1c045ed67673129c39417f78a8c2d3..456a99e1d37ffc565c1a6969cd9b79b6f89ba70b 100644
--- a/docker-compose/tango.yml
+++ b/docker-compose/tango.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file for TANGO database and database device server
 #
@@ -11,7 +13,7 @@
 version: '2.1'
 
 volumes:
-  tangodb: {}
+  tangodb: { }
 
 services:
   tangodb:
@@ -71,9 +73,9 @@ services:
 
   dsconfig:
     build:
-        context: dsconfig
-        args:
-            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-dsconfig:${TANGO_DSCONFIG_VERSION}
+      context: dsconfig
+      args:
+        SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-dsconfig:${TANGO_DSCONFIG_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}dsconfig
     networks:
       - control
diff --git a/docker-compose/unb2-sim.yml b/docker-compose/unb2-sim.yml
index e29c805010f559f5254fd116f5a75b497710367c..06ec924bd024698c66014abdbd50a210027b3156 100644
--- a/docker-compose/unb2-sim.yml
+++ b/docker-compose/unb2-sim.yml
@@ -1,3 +1,5 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 #
 # Docker compose file that launches a UNB2 simulator
 #
@@ -9,10 +11,10 @@ version: '2.1'
 services:
   unb2-sim:
     build:
-        context: pypcc-sim-base
-        args:
-         - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
-         - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
+      context: pypcc-sim-base
+      args:
+        - LOCAL_DOCKER_REGISTRY_HOST=${LOCAL_DOCKER_REGISTRY_HOST}
+        - LOCAL_DOCKER_REGISTRY_LOFAR=${LOCAL_DOCKER_REGISTRY_LOFAR}
     container_name: ${CONTAINER_NAME_PREFIX}unb2-sim
     logging:
       driver: "json-file"
diff --git a/sbin/load_ConfigDb.sh b/sbin/load_ConfigDb.sh
index 0fe57087a89dc08ebef51d4679a687ebbf6a144a..a36bfb99d6b97b2944d78f6b44249a950dcd4a61 100755
--- a/sbin/load_ConfigDb.sh
+++ b/sbin/load_ConfigDb.sh
@@ -1,4 +1,6 @@
 #!/bin/bash
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 if [ ${#} -eq 1 ]; then
     file=${1}
diff --git a/sbin/run_integration_test.sh b/sbin/run_integration_test.sh
index 4906d14f3689ae946b8d49a7367ac7412ec17696..e38259880426f5734e77f2ff6cbc080096aae893 100755
--- a/sbin/run_integration_test.sh
+++ b/sbin/run_integration_test.sh
@@ -1,4 +1,6 @@
 #!/bin/bash -e
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 # Usage function explains how parameters are parsed
 function usage {
diff --git a/sbin/tag_and_push_docker_image.sh b/sbin/tag_and_push_docker_image.sh
index f7649ba048907862704c4622d784a0da0a4b83ea..934681d216b3f5f67142dec1b37e03cb5a06fb97 100755
--- a/sbin/tag_and_push_docker_image.sh
+++ b/sbin/tag_and_push_docker_image.sh
@@ -1,4 +1,6 @@
 #!/bin/bash -e
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 function usage {
     echo "./$(basename "$0")
@@ -66,7 +68,7 @@ LOCAL_IMAGES=(
   "apsct-sim docker-compose_apsct-sim y" "apspu-sim docker-compose_apspu-sim y"
   "ccd-sim docker-compose_ccd-sim y"
   "recv-sim docker-compose_recv-sim y" "sdptr-sim docker-compose_sdptr-sim y"
-  "unb2-sim docker-compose_unb2-sim y" 
+  "unb2-sim docker-compose_unb2-sim y"
 
   "device-antennafield device-antennafield y"
   "device-apsct device-apsct y" "device-apspu device-apspu y"
diff --git a/sbin/update_ConfigDb.sh b/sbin/update_ConfigDb.sh
index f1401d9c6e40601036449553d2919c434c7f8bf1..f3216f53a26297cbddd13ef1a821dc14f77e96c6 100755
--- a/sbin/update_ConfigDb.sh
+++ b/sbin/update_ConfigDb.sh
@@ -1,4 +1,6 @@
 #!/bin/bash
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 if [ ${#} -eq 1 ]; then
     file=${1}
diff --git a/tangostationcontrol/docs/source/conf.py b/tangostationcontrol/docs/source/conf.py
index 90b156ff1ac73a80f5a2dbc2c12d0931113eb744..f1cd59b63a2fdc92f7f2840dd4c12d9a6f83e24a 100644
--- a/tangostationcontrol/docs/source/conf.py
+++ b/tangostationcontrol/docs/source/conf.py
@@ -10,16 +10,16 @@
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
 #
-#import os
-#import sys
-#sys.path.insert(0, os.path.abspath('../../devices'))
+# import os
+# import sys
+# sys.path.insert(0, os.path.abspath('../../devices'))
 
 
 # -- Project information -----------------------------------------------------
 
-project = 'LOFAR2.0 Station Control'
-copyright = '2021, Stichting ASTRON'
-author = 'Stichting ASTRON'
+project = "LOFAR2.0 Station Control"
+copyright = "2021, Stichting ASTRON"
+author = "Stichting ASTRON"
 
 
 # -- General configuration ---------------------------------------------------
@@ -27,12 +27,10 @@ author = 'Stichting ASTRON'
 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
-extensions = [
-    "sphinx.ext.graphviz"
-]
+extensions = ["sphinx.ext.graphviz"]
 
 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
@@ -45,13 +43,13 @@ exclude_patterns = []
 # The theme to use for HTML and HTML Help pages.  See the documentation for
 # a list of builtin themes.
 #
-html_theme = 'sphinx_rtd_theme'
+html_theme = "sphinx_rtd_theme"
 
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['static']
+html_static_path = ["static"]
 
 html_css_files = [
-    'css/custom.css',
+    "css/custom.css",
 ]
diff --git a/tangostationcontrol/setup.py b/tangostationcontrol/setup.py
index b908cbe55cb344569d32de1dfc10ca7323828dc5..50e272e92058ab05ab9454d8e78ada8de374fcc4 100644
--- a/tangostationcontrol/setup.py
+++ b/tangostationcontrol/setup.py
@@ -1,3 +1,6 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import setuptools
 
 setuptools.setup()
diff --git a/tangostationcontrol/tangostationcontrol/__init__.py b/tangostationcontrol/tangostationcontrol/__init__.py
index 2e6117676ba3a0a077b76c09a71eb313efb9a53d..4693821e362ad74f3bc0997232dc8f806aedbb02 100644
--- a/tangostationcontrol/tangostationcontrol/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/__init__.py
@@ -1,3 +1,6 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 try:
     from importlib import metadata
 except ImportError:  # for Python<3.8
diff --git a/tangostationcontrol/tangostationcontrol/beam/__init__.py b/tangostationcontrol/tangostationcontrol/beam/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/beam/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/beam/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/beam/delays.py b/tangostationcontrol/tangostationcontrol/beam/delays.py
index 4bbbe78968cdc7292ae1b271d31e866886f6e47d..f7d5d68054b9256b65e249ef8ce84944e9fbf967 100644
--- a/tangostationcontrol/tangostationcontrol/beam/delays.py
+++ b/tangostationcontrol/tangostationcontrol/beam/delays.py
@@ -1,15 +1,20 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
+import datetime
+
 import casacore.measures
 import numpy
-import datetime
+
 
 def subtract(a, b) -> numpy.ndarray:
     return numpy.array([x - y for x, y in zip(a, b)])
 
 
 class Delays:
-    def __init__(self, itrf: list([float])):
+    def __init__(self, itrf: list[float]):
 
-        """ Create a measure object, configured for the specified terrestrial location. """
+        """Create a measure object, configured for the specified terrestrial location."""
 
         measure = casacore.measures.measures()
         frame_location = measure.position("ITRF", *[f"{x}m" for x in itrf])
@@ -22,20 +27,20 @@ class Delays:
         self.measure_time = None
 
     def set_measure_time(self, utc_time: datetime.datetime):
-        """ Configure the measure object for the specified time. """
-        utc_time_str = utc_time.isoformat(' ')
+        """Configure the measure object for the specified time."""
+        utc_time_str = utc_time.isoformat(" ")
         frame_time = self.measure.epoch("UTC", utc_time_str)
 
         if not self.measure.do_frame(frame_time):
             raise ValueError(f"measure.do_frame failed for UTC time {utc_time_str}")
 
     def get_direction_vector(self, pointing: numpy.ndarray) -> numpy.ndarray:
-        """ Compute direction vector for a given pointing, relative to the measure. """
+        """Compute direction vector for a given pointing, relative to the measure."""
 
         return self.get_direction_vector_bulk([pointing]).flatten()
 
     def get_direction_vector_bulk(self, pointings: numpy.ndarray) -> numpy.ndarray:
-        """ Compute direction vectors for the given pointings, relative to the measure. """
+        """Compute direction vectors for the given pointings, relative to the measure."""
 
         angles0 = numpy.empty(len(pointings))
         angles1 = numpy.empty(len(pointings))
@@ -47,15 +52,18 @@ class Delays:
         # Convert polar to carthesian coordinates
         # see also https://github.com/casacore/casacore/blob/e793b3d5339d828a60339d16476bf688a19df3ec/casa/Quanta/MVDirection.cc#L67
         direction_vectors = numpy.array(
-               [numpy.cos(angles0) * numpy.cos(angles1),
+            [
+                numpy.cos(angles0) * numpy.cos(angles1),
                 numpy.sin(angles0) * numpy.cos(angles1),
-                numpy.sin(angles1)])
+                numpy.sin(angles1),
+            ]
+        )
 
         # Return array [directions][angles]
         return direction_vectors.T
 
     def is_valid_direction(self, direction) -> bool:
-        """ Check validity of the direction measure """
+        """Check validity of the direction measure"""
         try:
             _ = self.measure.direction(*direction)
         except (RuntimeError, TypeError) as e:
@@ -64,22 +72,26 @@ class Delays:
         return True
 
     def delays(self, direction, antenna_absolute_itrf: list([float])) -> numpy.ndarray:
-        """ Get the delays for a direction and *absolute* antenna positions.
+        """Get the delays for a direction and *absolute* antenna positions.
 
-            These are the delays that have to be applied to the signal chain in order to line up the signal.
-            Positions closer to the source will result in a positive delay.
+        These are the delays that have to be applied to the signal chain in order to line up the signal.
+        Positions closer to the source will result in a positive delay.
 
-            Returns delays[antenna]. """
+        Returns delays[antenna]."""
 
-        return self.delays_bulk([direction], numpy.array(antenna_absolute_itrf) - self.reference_itrf).flatten()
+        return self.delays_bulk(
+            [direction], numpy.array(antenna_absolute_itrf) - self.reference_itrf
+        ).flatten()
 
-    def delays_bulk(self, directions: numpy.ndarray, antenna_relative_itrfs: numpy.ndarray) -> numpy.ndarray:
-        """ Get the delays for each direction and each *relative* antenna position.
+    def delays_bulk(
+        self, directions: numpy.ndarray, antenna_relative_itrfs: numpy.ndarray
+    ) -> numpy.ndarray:
+        """Get the delays for each direction and each *relative* antenna position.
 
-            These are the delays that have to be applied to the signal chain in order to line up the signal.
-            Positions closer to the source will result in a positive delay.
+        These are the delays that have to be applied to the signal chain in order to line up the signal.
+        Positions closer to the source will result in a positive delay.
 
-            Returns delays[antenna][direction]. """
+        Returns delays[antenna][direction]."""
 
         # obtain the direction vector for each pointing
         try:
@@ -96,7 +108,8 @@ class Delays:
             return numpy.inner(relative_itrf, direction_vectors) / 299792458.0
 
         # apply for each position
-        delays = numpy.apply_along_axis(get_delay_all_directions, 1, antenna_relative_itrfs)
+        delays = numpy.apply_along_axis(
+            get_delay_all_directions, 1, antenna_relative_itrfs
+        )
 
         return delays
-
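
A minimal numeric sketch, assuming only numpy, of the geometric-delay step that delays_bulk() reformats above: delay[antenna][direction] = <relative ITRF position, unit direction vector> / c. It mirrors the numpy.inner(...) / 299792458.0 line rather than importing the casacore-backed Delays class; the positions and direction are illustrative.

import numpy

SPEED_OF_LIGHT = 299792458.0  # m/s, as used in delays_bulk()

# two antennas: one at the reference position, one 10 m towards +x
antenna_relative_itrfs = numpy.array([[0.0, 0.0, 0.0],
                                      [10.0, 0.0, 0.0]])

# a single unit direction vector pointing along +x (source in that direction)
direction_vectors = numpy.array([[1.0, 0.0, 0.0]])

# delays[antenna][direction]; the antenna closer to the source gets a positive delay
delays = numpy.inner(antenna_relative_itrfs, direction_vectors) / SPEED_OF_LIGHT
print(delays)  # ~[[0.0], [3.34e-08]] -- about 33 ns for 10 m of path difference
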
diff --git a/tangostationcontrol/tangostationcontrol/beam/geo.py b/tangostationcontrol/tangostationcontrol/beam/geo.py
index 97e964dfd2b36a9305367d496c0ed65fd0f06c22..900d4f942f63b39519370b8e31e9de211c7a3fb1 100644
--- a/tangostationcontrol/tangostationcontrol/beam/geo.py
+++ b/tangostationcontrol/tangostationcontrol/beam/geo.py
@@ -1,12 +1,16 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
+import math
+
 import etrsitrs
+import geohash
 import lofarantpos.geo
 import numpy
-import math
-import geohash
 
 """
    LOFAR station positions are measured in ETRS89, which are the coordinates of the position as it would be in 1989.
-   
+
    These coordinates are carthesian (X, Y, Z), with (0, 0, 0) being the center of the Earth.
 
    The ETRS89 positions differ from the current due to tectonic movements. Periodically, these differences are modelled
@@ -19,6 +23,7 @@ import geohash
    The ETRSitrs package does all the transformation calculations for us.
 """
 
+
 def _apply_fn_on_one_element_or_array(fn, array: numpy.array) -> numpy.array:
     if array.ndim == 1:
         # convert a single coordinate triple
@@ -27,32 +32,44 @@ def _apply_fn_on_one_element_or_array(fn, array: numpy.array) -> numpy.array:
         # convert each coordinate triple
         return numpy.apply_along_axis(fn, 1, array)
 
-def ETRS_to_ITRF(ETRS_coordinates: numpy.array, ITRF_reference_frame: str = "ITRF2005", ITRF_reference_epoch: float = 2015.5) -> numpy.array:
-    """ Convert an array of coordinate triples from ETRS to ITRF, in the given reference frame and epoch. """
+
+def ETRS_to_ITRF(
+    ETRS_coordinates: numpy.array,
+    ITRF_reference_frame: str = "ITRF2005",
+    ITRF_reference_epoch: float = 2015.5,
+) -> numpy.array:
+    """Convert an array of coordinate triples from ETRS to ITRF, in the given reference frame and epoch."""
 
     # fetch converter
-    ETRS_to_ITRF_fn = etrsitrs.convert_fn("ETRF2000", ITRF_reference_frame, ITRF_reference_epoch)
+    ETRS_to_ITRF_fn = etrsitrs.convert_fn(
+        "ETRF2000", ITRF_reference_frame, ITRF_reference_epoch
+    )
 
     return _apply_fn_on_one_element_or_array(ETRS_to_ITRF_fn, ETRS_coordinates)
 
+
 def ETRS_to_GEO(ETRS_coordinates: numpy.array) -> numpy.array:
-    """ Convert an array of coordinate triples from ETRS to latitude/longitude (degrees). """
+    """Convert an array of coordinate triples from ETRS to latitude/longitude (degrees)."""
 
     def ETRS_to_GEO_fn(etrs_coords):
         geo_coords = lofarantpos.geo.geographic_from_xyz(etrs_coords)
 
-        return numpy.array([
-            geo_coords['lat_rad'] * 180 / math.pi,
-            geo_coords['lon_rad'] * 180 / math.pi
-            ])
+        return numpy.array(
+            [
+                geo_coords["lat_rad"] * 180 / math.pi,
+                geo_coords["lon_rad"] * 180 / math.pi,
+            ]
+        )
 
     return _apply_fn_on_one_element_or_array(ETRS_to_GEO_fn, ETRS_coordinates)
 
+
 # Geo coordinates are only used for rough positioning. The difference between ITRF and ETRS matters little here
 ITRF_to_GEO = ETRS_to_GEO
 
+
 def GEO_to_GEOHASH(GEO_coordinates: numpy.array) -> numpy.array:
-    """ Convert an array of latitude/longitude (degrees) tuples to geohash strings. """
+    """Convert an array of latitude/longitude (degrees) tuples to geohash strings."""
 
     def GEO_to_GEOHASH_fn(geo_coords):
         return geohash.encode(geo_coords[0], geo_coords[1], precision=16)
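
A dependency-light sketch of the single-triple vs. array-of-triples dispatch that _apply_fn_on_one_element_or_array() provides above. The sign-flip function is purely illustrative, and the ndim == 1 branch is assumed to simply return fn(array) (that line falls outside the hunk).

import numpy

def apply_fn_on_one_element_or_array(fn, array: numpy.ndarray) -> numpy.ndarray:
    if array.ndim == 1:
        # a single coordinate triple
        return fn(array)
    # one coordinate triple per row
    return numpy.apply_along_axis(fn, 1, array)

def flip(xyz: numpy.ndarray) -> numpy.ndarray:
    # stand-in for ETRS_to_ITRF_fn / ETRS_to_GEO_fn
    return -xyz

print(apply_fn_on_one_element_or_array(flip, numpy.array([1.0, 2.0, 3.0])))
print(apply_fn_on_one_element_or_array(flip, numpy.array([[1.0, 2.0, 3.0],
                                                          [4.0, 5.0, 6.0]])))
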
diff --git a/tangostationcontrol/tangostationcontrol/beam/hba_tile.py b/tangostationcontrol/tangostationcontrol/beam/hba_tile.py
index 9ab46e5bf544f652ef5790188016c1a7fcf3c487..55afa40416917fc68536f5b1cc4df2dc34a1433c 100644
--- a/tangostationcontrol/tangostationcontrol/beam/hba_tile.py
+++ b/tangostationcontrol/tangostationcontrol/beam/hba_tile.py
@@ -1,10 +1,15 @@
-import numpy
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 from math import sin, cos
 
+import numpy
+
+
 class HBATAntennaOffsets(object):
     """
         This class helps calculate the absolute offsets of the antennas within a tile,
-        based on their relative orientation to ETRS. 
+        based on their relative orientation to ETRS.
 
         These offsets are Known in LOFAR1 as "iHBADeltas".
 
@@ -29,37 +34,60 @@ class HBATAntennaOffsets(object):
             https://git.astron.nl/RD/lofar-referentie-vlak/-/blob/master/data/dts/dts.ipynb
             https://git.astron.nl/ro/lofar/-/blob/master/MAC/Deployment/data/Coordinates/calc_hba_deltas.py
             https://github.com/brentjens/lofar-antenna-positions/blob/master/lofarantpos/db.py#L208
-        """
+    """
 
     """ Model of the HBAT1 tile, as offsets of each antenna with respect to the reference center, in metres. """
-    HBAT1_BASE_ANTENNA_OFFSETS = numpy.array(
-        [[-1.5, +1.5, 0.0], [-0.5, +1.5, 0.0], [+0.5, +1.5, 0.0], [+1.5, +1.5, 0.0],
-         [-1.5, +0.5, 0.0], [-0.5, +0.5, 0.0], [+0.5, +0.5, 0.0], [+1.5, +0.5, 0.0],
-         [-1.5, -0.5, 0.0], [-0.5, -0.5, 0.0], [+0.5, -0.5, 0.0], [+1.5, -0.5, 0.0],
-         [-1.5, -1.5, 0.0], [-0.5, -1.5, 0.0], [+0.5, -1.5, 0.0], [+1.5, -1.5, 0.0]]) * 1.25
+    HBAT1_BASE_ANTENNA_OFFSETS = (
+        numpy.array(
+            [
+                [-1.5, +1.5, 0.0],
+                [-0.5, +1.5, 0.0],
+                [+0.5, +1.5, 0.0],
+                [+1.5, +1.5, 0.0],
+                [-1.5, +0.5, 0.0],
+                [-0.5, +0.5, 0.0],
+                [+0.5, +0.5, 0.0],
+                [+1.5, +0.5, 0.0],
+                [-1.5, -0.5, 0.0],
+                [-0.5, -0.5, 0.0],
+                [+0.5, -0.5, 0.0],
+                [+1.5, -0.5, 0.0],
+                [-1.5, -1.5, 0.0],
+                [-0.5, -1.5, 0.0],
+                [+0.5, -1.5, 0.0],
+                [+1.5, -1.5, 0.0],
+            ]
+        )
+        * 1.25
+    )
 
     @staticmethod
     def rotation_matrix(rad: float) -> numpy.array:
-        """ Return a rotation matrix for coordinates for a given number of radians. """
+        """Return a rotation matrix for coordinates for a given number of radians."""
 
         rotation_matrix = numpy.array(
-            [[ cos(rad), sin(rad), 0],
-             [-sin(rad), cos(rad), 0],
-             [        0,        0, 1]])
+            [[cos(rad), sin(rad), 0], [-sin(rad), cos(rad), 0], [0, 0, 1]]
+        )
 
         return rotation_matrix
 
     @staticmethod
-    def ITRF_offsets(base_antenna_offsets: numpy.array, PQR_rotation: float, PQR_to_ETRS_rotation_matrix: numpy.array) -> numpy.array:
-        """ Return the antenna offsets in ITRF, given:
-
-            :param: base_antenna_offsets:        antenna offsets within an unrotated tile (16x3).
-            :param: PQR_rotation:                rotation of the tile(s) in PQR space (radians).
-            :param: PQR_to_ETRS_rotation_matrix: rotation matrix for PQR -> ETRS conversion (3x3).
+    def ITRF_offsets(
+        base_antenna_offsets: numpy.array,
+        PQR_rotation: float,
+        PQR_to_ETRS_rotation_matrix: numpy.array,
+    ) -> numpy.array:
+        """Return the antenna offsets in ITRF, given:
+
+        :param: base_antenna_offsets:        antenna offsets within an unrotated tile (16x3).
+        :param: PQR_rotation:                rotation of the tile(s) in PQR space (radians).
+        :param: PQR_to_ETRS_rotation_matrix: rotation matrix for PQR -> ETRS conversion (3x3).
         """
 
         # Offsets in PQR are derived by rotating the base tile by the specified number of radians
-        PQR_offsets = numpy.inner(base_antenna_offsets, HBATAntennaOffsets.rotation_matrix(PQR_rotation))
+        PQR_offsets = numpy.inner(
+            base_antenna_offsets, HBATAntennaOffsets.rotation_matrix(PQR_rotation)
+        )
 
         # The PQR->ETRS mapping is a rotation as well
         ETRS_offsets = numpy.inner(PQR_offsets, PQR_to_ETRS_rotation_matrix)
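
A quick numeric check, assuming only numpy, of the rotation_matrix() helper reformatted above: combining it with numpy.inner() the way ITRF_offsets() does, a 90-degree rotation maps the +P unit vector onto -Q and leaves the third axis untouched.

from math import cos, pi, sin

import numpy

def rotation_matrix(rad: float) -> numpy.ndarray:
    # same matrix as HBATAntennaOffsets.rotation_matrix() above
    return numpy.array([[cos(rad), sin(rad), 0], [-sin(rad), cos(rad), 0], [0, 0, 1]])

p_axis = numpy.array([1.0, 0.0, 0.0])
print(numpy.inner(p_axis, rotation_matrix(pi / 2)).round(6))  # [ 0. -1.  0.]
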
diff --git a/tangostationcontrol/tangostationcontrol/clients/__init__.py b/tangostationcontrol/tangostationcontrol/clients/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/clients/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/clients/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/clients/attribute_wrapper.py b/tangostationcontrol/tangostationcontrol/clients/attribute_wrapper.py
index 9f970cb306b6de791d756bc13b9ccc7082fb070a..501243b0351545a54bb1ecd9b2fbfd4f2a4fff7e 100644
--- a/tangostationcontrol/tangostationcontrol/clients/attribute_wrapper.py
+++ b/tangostationcontrol/tangostationcontrol/clients/attribute_wrapper.py
@@ -1,18 +1,20 @@
-from operator import mul
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
+import logging
 from functools import reduce
+from operator import mul
 
 import numpy
-from tango.server import attribute
 from tango import AttrWriteType, AttReqType
-
+from tango.server import attribute
 from tangostationcontrol.devices.device_decorators import fault_on_error
-import logging
 
 logger = logging.getLogger()
 
 
 class AttributeIO(object):
-    """ Holds the I/O functionality for an attribute for a specific device. """
+    """Holds the I/O functionality for an attribute for a specific device."""
 
     def __init__(self, device, attribute_wrapper):
         # Link to the associated device
@@ -29,8 +31,8 @@ class AttributeIO(object):
         self.write_function = lambda value: None
 
     def cached_read_function(self):
-        """ Return the last (written) value, if available. Otherwise, read
-            from the device. """
+        """Return the last (written) value, if available. Otherwise, read
+        from the device."""
 
         if self.cached_value is not None:
             return self.cached_value
@@ -39,7 +41,7 @@ class AttributeIO(object):
         return self.cached_value
 
     def cached_write_function(self, value):
-        """ Writes the given value to the device, and updates the cache. """
+        """Writes the given value to the device, and updates the cache."""
 
         # flexible array sizes are not supported by all clients. make sure we only write arrays of maximum size.
         if self.attribute_wrapper.shape != ():
@@ -54,7 +56,9 @@ class AttributeIO(object):
                     value_shape = (len(value),)
 
             if value_shape != self.attribute_wrapper.shape:
-                raise ValueError(f"Tried writing an array of shape {value_shape} into an attribute of shape {self.attribute_wrapper.shape}")
+                raise ValueError(
+                    f"Tried writing an array of shape {value_shape} into an attribute of shape {self.attribute_wrapper.shape}"
+                )
 
         self.write_function(value)
         self.cached_value = value
@@ -65,7 +69,15 @@ class AttributeWrapper(attribute):
     Wraps all the attributes in a wrapper class to manage most of the redundant code behind the scenes
     """
 
-    def __init__(self, comms_id=None, comms_annotation=None, datatype=None, dims=(1,), access=AttrWriteType.READ, **kwargs):
+    def __init__(
+        self,
+        comms_id=None,
+        comms_annotation=None,
+        datatype=None,
+        dims=(1,),
+        access=AttrWriteType.READ,
+        **kwargs,
+    ):
         """
         wraps around the tango Attribute class. Provides an easier interface for 1d or 2d arrays. Also provides a way to abstract
         managing the communications interface.
@@ -81,9 +93,11 @@ class AttributeWrapper(attribute):
         # see also https://pytango.readthedocs.io/en/stable/server_api/server.html?highlight=devlong#module-tango.server for
         # more details about type conversion Python/numpy -> PyTango
         if "numpy" not in str(datatype) and datatype != str and datatype != bool:
-            raise ValueError(f"Attribute needs to be a Tango-supported numpy, str or bool type, but has type {datatype}")
+            raise ValueError(
+                f"Attribute needs to be a Tango-supported numpy, str or bool type, but has type {datatype}"
+            )
 
-        self.comms_id = comms_id # store data that can be used to identify the comms interface to use. not used by the wrapper itself
+        self.comms_id = comms_id  # store data that can be used to identify the comms interface to use. not used by the wrapper itself
         self.comms_annotation = comms_annotation  # store data that can be used by the comms interface. not used by the wrapper itself
 
         self.datatype = datatype
@@ -120,7 +134,7 @@ class AttributeWrapper(attribute):
         self.shape = shape
 
         if access == AttrWriteType.READ_WRITE:
-            """ If the attribute is of READ_WRITE type, assign the write and read functions to it"""
+            """If the attribute is of READ_WRITE type, assign the write and read functions to it"""
 
             # we return the last written value, as we are the only ones in control,
             # and the hardware does not necessarily return what we've written
@@ -137,7 +151,9 @@ class AttributeWrapper(attribute):
 
                     return io.cached_write_function(value)
                 except Exception as e:
-                    raise e.__class__(f"Could not write attribute {comms_annotation}") from e
+                    raise e.__class__(
+                        f"Could not write attribute {comms_annotation}"
+                    ) from e
 
             @fault_on_error()
             def read_func_wrapper(device):
@@ -154,12 +170,14 @@ class AttributeWrapper(attribute):
 
                     return io.cached_read_function()
                 except Exception as e:
-                    raise e.__class__(f"Could not read attribute {comms_annotation}") from e
+                    raise e.__class__(
+                        f"Could not read attribute {comms_annotation}"
+                    ) from e
 
             self.fset = write_func_wrapper
             self.fget = read_func_wrapper
         else:
-            """ Assign the read function to the attribute"""
+            """Assign the read function to the attribute"""
 
             @fault_on_error()
             def read_func_wrapper(device):
@@ -176,7 +194,9 @@ class AttributeWrapper(attribute):
 
                     return io.read_function()
                 except Exception as e:
-                    raise e.__class__(f"Could not read attribute {comms_annotation}") from e
+                    raise e.__class__(
+                        f"Could not read attribute {comms_annotation}"
+                    ) from e
 
             self.fget = read_func_wrapper
 
@@ -186,7 +206,15 @@ class AttributeWrapper(attribute):
         #
         # NOTE: fisallowed=<callable> does not work: https://gitlab.com/tango-controls/pytango/-/issues/435
         # So we have to use fisallowed=<str> here, which causes the function device.<str> to be called.
-        super().__init__(dtype=dtype, max_dim_y=max_dim_y, max_dim_x=max_dim_x, access=access, fisallowed="is_attribute_access_allowed", format=str(dims), **kwargs)
+        super().__init__(
+            dtype=dtype,
+            max_dim_y=max_dim_y,
+            max_dim_x=max_dim_x,
+            access=access,
+            fisallowed="is_attribute_access_allowed",
+            format=str(dims),
+            **kwargs,
+        )
 
     def get_attribute_io(self, device):
         """
@@ -205,30 +233,42 @@ class AttributeWrapper(attribute):
         and return a read and write function that the wrapper will use to get/set data.
         """
         try:
-            read_attr_func, write_attr_func = client.setup_attribute(self.comms_annotation, self)
+            read_attr_func, write_attr_func = client.setup_attribute(
+                self.comms_annotation, self
+            )
 
             io = self.get_attribute_io(device)
-            io.read_function  = read_attr_func
+            io.read_function = read_attr_func
             io.write_function = write_attr_func
         except Exception as e:
-            raise Exception(f"Exception while setting {client.__class__.__name__} attribute with annotation: '{self.comms_annotation}'") from e
+            raise Exception(
+                f"Exception while setting {client.__class__.__name__} attribute with annotation: '{self.comms_annotation}'"
+            ) from e
 
     async def async_set_comm_client(self, device, client):
         """
-          Asynchronous version of set_comm_client.
+        Asynchronous version of set_comm_client.
         """
         try:
-            read_attr_func, write_attr_func = await client.setup_attribute(self.comms_annotation, self)
+            read_attr_func, write_attr_func = await client.setup_attribute(
+                self.comms_annotation, self
+            )
 
             io = self.get_attribute_io(device)
-            io.read_function  = read_attr_func
+            io.read_function = read_attr_func
             io.write_function = write_attr_func
         except Exception as e:
-            raise Exception(f"Exception while setting {client.__class__.__name__} attribute with annotation: '{self.comms_annotation}'") from e
+            raise Exception(
+                f"Exception while setting {client.__class__.__name__} attribute with annotation: '{self.comms_annotation}'"
+            ) from e
 
     def set_pass_func(self, device):
-        logger.debug("using pass function for attribute with annotation: {}".format(self.comms_annotation))
+        logger.debug(
+            "using pass function for attribute with annotation: {}".format(
+                self.comms_annotation
+            )
+        )
 
         io = self.get_attribute_io(device)
-        io.read_function  = lambda: None
+        io.read_function = lambda: None
         io.write_function = lambda value: None
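
A tango-free sketch of the shape guard that cached_write_function() wraps into a multi-line raise above. The check_shape helper and the numpy.asarray shortcut are illustrative simplifications of the list/array handling in the original.

import numpy

def check_shape(value, expected_shape: tuple):
    # simplified: the real code derives value_shape per element type (list vs. numpy array)
    value_shape = numpy.asarray(value).shape
    if expected_shape != () and value_shape != expected_shape:
        raise ValueError(
            f"Tried writing an array of shape {value_shape} into an attribute of shape {expected_shape}"
        )

check_shape(numpy.zeros((3, 2)), (3, 2))  # passes silently
try:
    check_shape([1.0, 2.0, 3.0, 4.0], (3, 2))
except ValueError as e:
    print(e)  # Tried writing an array of shape (4,) into an attribute of shape (3, 2)
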
diff --git a/tangostationcontrol/tangostationcontrol/clients/comms_client.py b/tangostationcontrol/tangostationcontrol/clients/comms_client.py
index 0188753a2f6ca1d439873903d9b8d021dcc7ee62..b757aedb5ada488825d98d359cf980e08bb36bcf 100644
--- a/tangostationcontrol/tangostationcontrol/clients/comms_client.py
+++ b/tangostationcontrol/tangostationcontrol/clients/comms_client.py
@@ -1,25 +1,29 @@
-from threading import Thread
-import time
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import asyncio
+import logging
+import time
 from abc import ABC, abstractmethod
+from threading import Thread
 
-import logging
 logger = logging.getLogger()
 
+
 class AbstractCommClient(ABC):
     @abstractmethod
     def start(self):
-        """ Start communication with the client. """
+        """Start communication with the client."""
 
     @abstractmethod
     def stop(self):
-        """ Stop communication with the client. """
+        """Stop communication with the client."""
 
     def ping(self):  # noqa: B027
-        """ Check whether the connection is still alive.
+        """Check whether the connection is still alive.
 
-            Clients that override this method must raise an Exception if the
-            connection died. """
+        Clients that override this method must raise an Exception if the
+        connection died."""
         pass
 
     @abstractmethod
@@ -42,6 +46,7 @@ class AbstractCommClient(ABC):
         convert values between Python and OPC-UA.
         """
 
+
 class CommClient(AbstractCommClient, Thread):
     """
     Abstracts communication with a client, for instance, over the network, by handling connect(), disconnect(), and ping()
@@ -49,9 +54,7 @@ class CommClient(AbstractCommClient, Thread):
     """
 
     def __init__(self, fault_func, try_interval=2):
-        """
-
-        """
+        """ """
         self.fault_func = fault_func
         self.try_interval = try_interval
         self.stopping = False
@@ -83,7 +86,9 @@ class CommClient(AbstractCommClient, Thread):
                     self.connect()
                 except Exception as e:
                     logger.exception("Fault condition in communication detected.")
-                    self.fault_func(f"FAULT: Connection failed: {e.__class__.__name__}: {e}")
+                    self.fault_func(
+                        f"FAULT: Connection failed: {e.__class__.__name__}: {e}"
+                    )
                     return
 
             # keep checking if the connection is still alive
@@ -104,15 +109,15 @@ class CommClient(AbstractCommClient, Thread):
                 time.sleep(self.try_interval)
 
     def ping(self):
-        """ Check whether the connection is still alive.
+        """Check whether the connection is still alive.
 
-            Clients that override this method must raise an Exception if the
-            connection died. """
+        Clients that override this method must raise an Exception if the
+        connection died."""
         pass
 
     def stop(self):
         """
-          Stop connecting & disconnect. Can take a few seconds for the timeouts to hit.
+        Stop connecting & disconnect. Can take a few seconds for the timeouts to hit.
         """
 
         if not self.ident:
@@ -124,6 +129,7 @@ class CommClient(AbstractCommClient, Thread):
 
         self.disconnect()
 
+
 class AsyncCommClient(object):
     """
     Abstracts communication with a client, for instance, over the network, by handling connect(), disconnect(), and ping()
@@ -134,18 +140,18 @@ class AsyncCommClient(object):
 
     def __init__(self, fault_func=lambda: None, event_loop=None):
         """
-          Create an Asynchronous communication client.
+        Create an asynchronous communication client.
 
-          fault_func: Function to call to put the device to FAULT if an error is detected.
-          event_loop: Aysncio event loop to use. If None, a new event loop is created and
-                      run in a separate thread. Only share event loops if any of the functions
-                      executed doesn't stall, as asyncio used a cooperative multitasking model.
+        fault_func: Function to call to put the device to FAULT if an error is detected.
+        event_loop: Asyncio event loop to use. If None, a new event loop is created and
+                    run in a separate thread. Only share event loops if none of the
+                    executed functions stall, as asyncio uses a cooperative multitasking model.
 
-                      If the executed functions can stall (for a bit), use a dedicated loop to avoid
-                      interfering with other users of the event loop.
+                    If the executed functions can stall (for a bit), use a dedicated loop to avoid
+                    interfering with other users of the event loop.
 
-                      All coroutines need to be executed in this loop, which wil also be stored
-                      as the `event_loop` member of this object.
+                    All coroutines need to be executed in this loop, which will also be stored
+                    as the `event_loop` member of this object.
         """
         self.fault_func = fault_func
         self.running = False
@@ -161,7 +167,12 @@ class AsyncCommClient(object):
                 loop.run_forever()
 
             self.event_loop = asyncio.new_event_loop()
-            self.event_loop_thread = Thread(target=run_loop, args=(self.event_loop,), name=f"AsyncCommClient {self.name()} event loop", daemon=True)
+            self.event_loop_thread = Thread(
+                target=run_loop,
+                args=(self.event_loop,),
+                name=f"AsyncCommClient {self.name()} event loop",
+                daemon=True,
+            )
             self.event_loop_thread.start()
         else:
             self.event_loop = event_loop
@@ -176,7 +187,7 @@ class AsyncCommClient(object):
             self.event_loop_thread.join()
 
     def name(self):
-        """ The name of this CommClient, for use in logs. """
+        """The name of this CommClient, for use in logs."""
         return self.__class__.__name__
 
     @abstractmethod
@@ -193,7 +204,7 @@ class AsyncCommClient(object):
         """
 
     async def watch_connection(self):
-        """ Notice when the connection goes down. """
+        """Notice when the connection goes down."""
 
         try:
             logger.info(f"[AsyncCommClient {self.name()}] Start watching")
@@ -203,10 +214,14 @@ class AsyncCommClient(object):
                 try:
                     await self.ping()
                 except Exception as e:
-                    logger.exception(f"[AsyncCommClient {self.name()}] Ping failed: connection considered lost")
+                    logger.exception(
+                        f"[AsyncCommClient {self.name()}] Ping failed: connection considered lost"
+                    )
 
                     # connection error, go to fault
-                    self.fault_func(f"FAULT: Connection lost: {e.__class__.__name__}: {e}")
+                    self.fault_func(
+                        f"FAULT: Connection lost: {e.__class__.__name__}: {e}"
+                    )
 
                     # disconnect will cancel us
                     await self.disconnect()
@@ -220,7 +235,9 @@ class AsyncCommClient(object):
             pass
         except Exception as e:
             # log immediately, or the exception will only be printed once this task is awaited
-            logger.exception(f"[AsyncCommClient {self.name()}] Exception raised while watching")
+            logger.exception(
+                f"[AsyncCommClient {self.name()}] Exception raised while watching"
+            )
 
             raise
         finally:
@@ -254,15 +271,16 @@ class AsyncCommClient(object):
         except asyncio.CancelledError as e:
             pass
         except Exception as e:
-            logger.exception(f"[AsyncCommClient {self.name()}] Watcher thread raised exception")
+            logger.exception(
+                f"[AsyncCommClient {self.name()}] Watcher thread raised exception"
+            )
 
             # the task stopped either way, so no need to bother our caller with this
 
         await self.disconnect()
 
     def sync_stop(self):
-        """ Synchronous version of stop(). """
+        """Synchronous version of stop()."""
 
         future = asyncio.run_coroutine_threadsafe(self.stop(), self.event_loop)
         return future.result()
-
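For reference, a minimal sketch of the event-loop-in-a-thread pattern AsyncCommClient relies on: a dedicated asyncio loop runs in a daemon thread, and synchronous callers submit coroutines to it with asyncio.run_coroutine_threadsafe(), as sync_stop() does above. Standard library only; the `ping` coroutine and the thread name are illustrative, not part of the module.

    import asyncio
    from threading import Thread

    def run_loop(loop: asyncio.AbstractEventLoop):
        # mirror of the nested run_loop() above: adopt the loop and serve it forever
        asyncio.set_event_loop(loop)
        loop.run_forever()

    loop = asyncio.new_event_loop()
    Thread(target=run_loop, args=(loop,), name="demo event loop", daemon=True).start()

    async def ping():
        # stand-in for AsyncCommClient.ping(); would raise if the connection died
        await asyncio.sleep(0.1)
        return "alive"

    # a synchronous caller (e.g. a polling thread) blocks on the coroutine's result
    future = asyncio.run_coroutine_threadsafe(ping(), loop)
    print(future.result())

    loop.call_soon_threadsafe(loop.stop)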
diff --git a/tangostationcontrol/tangostationcontrol/clients/docker_client.py b/tangostationcontrol/tangostationcontrol/clients/docker_client.py
index a7b487b66656f727b9bd794fcadf9fbe9c50e7fb..630d4c9c1bd4080648d2574b1bf073a513d11f0c 100644
--- a/tangostationcontrol/tangostationcontrol/clients/docker_client.py
+++ b/tangostationcontrol/tangostationcontrol/clients/docker_client.py
@@ -1,13 +1,18 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import logging
+
 import docker
 
 from .comms_client import AsyncCommClient
 
 logger = logging.getLogger()
 
+
 class DockerClient(AsyncCommClient):
     """
-      Controls & queries running docker containers.
+    Controls & queries running docker containers.
     """
 
     def __init__(self, base_url, fault_func, event_loop=None):
@@ -39,7 +44,7 @@ class DockerClient(AsyncCommClient):
                 return False
 
             # expected values: running, restarting, paused, exited, created
-            return container.status == 'running'
+            return container.status == "running"
 
         def write_function(value):
             container = self.client.containers.get(container_name)
diff --git a/tangostationcontrol/tangostationcontrol/clients/opcua_client.py b/tangostationcontrol/tangostationcontrol/clients/opcua_client.py
index 15261dd922d3865631402ca8c1af5144bb856f0c..5a4f7f7251c2af212b701cf73f009014a795ca09 100644
--- a/tangostationcontrol/tangostationcontrol/clients/opcua_client.py
+++ b/tangostationcontrol/tangostationcontrol/clients/opcua_client.py
@@ -1,12 +1,15 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
+import asyncio
+import logging
 import socket
-import numpy
+
 import asyncua
-import asyncio
+import numpy
 from asyncua import Client
-
 from tangostationcontrol.clients.comms_client import AsyncCommClient
 
-import logging
 logger = logging.getLogger()
 
 __all__ = ["OPCUAConnection", "ProtocolAttribute"]
@@ -27,8 +30,9 @@ numpy_to_OPCua_dict = {
     str: asyncua.ua.VariantType.String,
 }
 
+
 def numpy_to_opcua(numpy_val):
-    """ Convert a numpy type to a corresponding opcua Variant type. """
+    """Convert a numpy type to a corresponding opcua Variant type."""
 
     numpy_type = type(numpy_val)
 
@@ -37,14 +41,17 @@ def numpy_to_opcua(numpy_val):
     try:
         ua_type = numpy_to_OPCua_dict[numpy_type]
     except KeyError as e:
-        raise TypeError(f"Could not convert {numpy_val} (type {type(numpy_val).__name__}) to an OPC UA type.") from e 
+        raise TypeError(
+            f"Could not convert {numpy_val} (type {type(numpy_val).__name__}) to an OPC UA type."
+        ) from e
 
     return asyncua.ua.uatypes.Variant(Value=numpy_val, VariantType=ua_type)
 
+
 class OPCUAConnection(AsyncCommClient):
     """
-      Connects to OPC-UA in the foreground or background, and sends HELLO
-      messages to keep a check on the connection. On connection failure, reconnects once.
+    Connects to OPC-UA in the foreground or background, and sends HELLO
+    messages to keep a check on the connection. On connection failure, reconnects once.
     """
 
     def __init__(self, address, namespace, timeout, fault_func, event_loop=None):
@@ -76,17 +83,23 @@ class OPCUAConnection(AsyncCommClient):
         try:
             await self.client.connect()
         except (socket.error, IOError, OSError) as e:
-            raise IOError(f"Could not connect to OPC-UA server {self._servername()}") from e
+            raise IOError(
+                f"Could not connect to OPC-UA server {self._servername()}"
+            ) from e
 
         logger.debug(f"Connected to OPC-UA server {self._servername()}")
 
         # determine namespace used
         if type(self.namespace) is str:
-            self.name_space_index = await self.client.get_namespace_index(self.namespace)
+            self.name_space_index = await self.client.get_namespace_index(
+                self.namespace
+            )
         elif type(self.namespace) is int:
             self.name_space_index = self.namespace
         else:
-            raise TypeError(f"namespace must be of type str or int, but is of type {type(self.namespace).__name__}")
+            raise TypeError(
+                f"namespace must be of type str or int, but is of type {type(self.namespace).__name__}"
+            )
 
         self.obj = self.client.get_objects_node()
 
@@ -115,26 +128,31 @@ class OPCUAConnection(AsyncCommClient):
 
         if isinstance(annotation, dict):
             # check if required path inarg is present
-            if annotation.get('path') is None:
-                raise Exception(f"OPC-ua mapping requires a path argument in the annotation, was given: {annotation}")
+            if annotation.get("path") is None:
+                raise Exception(
+                    f"OPC-ua mapping requires a path argument in the annotation, was given: {annotation}"
+                )
 
             path = annotation.get("path")  # required
         elif isinstance(annotation, list):
             path = annotation
         else:
-            raise Exception(f"OPC-ua mapping requires either a list of the path or dict with the path. Was given {type(annotation)} type containing: {annotation}")
-
+            raise Exception(
+                f"OPC-ua mapping requires either a list of the path or dict with the path. Was given {type(annotation)} type containing: {annotation}"
+            )
 
         # add path prefix
         path = self.node_path_prefix + path
 
         # prepend namespace index for each element if none is given
-        path = [name if ':' in name else f'{self.name_space_index}:{name}' for name in path]
+        path = [
+            name if ":" in name else f"{self.name_space_index}:{name}" for name in path
+        ]
 
         return path
 
     async def get_node(self, path):
-        """ Retrieve an OPC-UA node from either the cache, or the server. """
+        """Retrieve an OPC-UA node from either the cache, or the server."""
 
         if not path:
             return self.obj
@@ -152,8 +170,12 @@ class OPCUAConnection(AsyncCommClient):
 
         for child_node in child_nodes:
             # add node to the cache
-            child_path = parent_path + [f"{self.name_space_index}:{child_node.DisplayName.Text}"]
-            self._node_cache[",".join(child_path)] = self.client.get_node(child_node.NodeId)
+            child_path = parent_path + [
+                f"{self.name_space_index}:{child_node.DisplayName.Text}"
+            ]
+            self._node_cache[",".join(child_path)] = self.client.get_node(
+                child_node.NodeId
+            )
 
         # lookup in cache again. if the name is valid, it should be in there.
         if cache_key in self._node_cache:
@@ -169,21 +191,34 @@ class OPCUAConnection(AsyncCommClient):
         try:
             node = await self.get_node(path)
         except Exception as e:
-            logger.exception(f"Could not get node: {path} on server {self._servername()}")
-            raise Exception(f"Could not get node: {path} on server {self._servername()}") from e
+            logger.exception(
+                f"Could not get node: {path} on server {self._servername()}"
+            )
+            raise Exception(
+                f"Could not get node: {path} on server {self._servername()}"
+            ) from e
 
         # get all the necessary data to set up the read/write functions from the AttributeWrapper
         dim_x = attribute.dim_x
         dim_y = attribute.dim_y
-        ua_type = numpy_to_OPCua_dict[attribute.datatype] # convert the numpy type to a corresponding UA type
+        ua_type = numpy_to_OPCua_dict[
+            attribute.datatype
+        ]  # convert the numpy type to a corresponding UA type
 
         # configure and return the read/write functions
         prot_attr = ProtocolAttribute(node, dim_x, dim_y, ua_type)
 
         try:
             # NOTE: debug statement tries to get the qualified name, this may not always work. in that case forgo the name and just print the path
-            node_name = str(node.get_browse_name())[len("QualifiedName(2:"):]
-            logger.debug("connected OPC ua node {} of type {} to attribute with dimensions: {} x {} ".format(str(node_name)[:len(node_name)-1], str(ua_type)[len("VariantType."):], dim_x, dim_y))
+            node_name = str(node.get_browse_name())[len("QualifiedName(2:") :]
+            logger.debug(
+                "connected OPC ua node {} of type {} to attribute with dimensions: {} x {} ".format(
+                    str(node_name)[: len(node_name) - 1],
+                    str(ua_type)[len("VariantType.") :],
+                    dim_x,
+                    dim_y,
+                )
+            )
         except Exception:
             "B001 Do not use bare `except:`, it also catches unexpected events"
             "like memory errors, interrupts, system exit"
@@ -196,15 +231,18 @@ class OPCUAConnection(AsyncCommClient):
 
         # Tango will call these from a separate polling thread.
         def read_function():
-            return asyncio.run_coroutine_threadsafe(prot_attr.read_function(), self.event_loop).result()
+            return asyncio.run_coroutine_threadsafe(
+                prot_attr.read_function(), self.event_loop
+            ).result()
 
         def write_function(value):
-            asyncio.run_coroutine_threadsafe(prot_attr.write_function(value), self.event_loop).result()
+            asyncio.run_coroutine_threadsafe(
+                prot_attr.write_function(value), self.event_loop
+            ).result()
 
         # return the read/write functions
         return read_function, write_function
 
-
     async def _call_method(self, method_path, *args):
         method_path = self.get_node_path(method_path)
 
@@ -221,7 +259,9 @@ class OPCUAConnection(AsyncCommClient):
         return result
 
     def call_method(self, method_path, *args):
-        return asyncio.run_coroutine_threadsafe(self._call_method(method_path, *args), self.event_loop).result()
+        return asyncio.run_coroutine_threadsafe(
+            self._call_method(method_path, *args), self.event_loop
+        ).result()
 
 
 class ProtocolAttribute:
@@ -251,7 +291,7 @@ class ProtocolAttribute:
             # have to actually decode() the result.
 
             def fix_string(s):
-                return s.encode('latin-1',errors="replace").decode('latin-1')
+                return s.encode("latin-1", errors="replace").decode("latin-1")
 
             if type(value) == list and len(value) > 0 and type(value[0]) == str:
                 value = [fix_string(v) for v in value]
@@ -263,7 +303,9 @@ class ProtocolAttribute:
                 return value
             elif self.dim_y != 0:
                 # 2D array
-                value = numpy.array(numpy.split(numpy.array(value), indices_or_sections=self.dim_y))
+                value = numpy.array(
+                    numpy.split(numpy.array(value), indices_or_sections=self.dim_y)
+                )
             else:
                 # 1D array
                 value = numpy.array(value)
@@ -271,8 +313,9 @@ class ProtocolAttribute:
             return value
         except Exception as e:
             # Log "value" that gave us this issue
-            raise ValueError(f"Failed to parse atribute value retrieved from OPC-UA: {value}") from e
-
+            raise ValueError(
+                f"Failed to parse attribute value retrieved from OPC-UA: {value}"
+            ) from e
 
     async def write_function(self, value):
         """
@@ -287,7 +330,9 @@ class ProtocolAttribute:
             value = value.tolist() if type(value) == numpy.ndarray else value
 
         try:
-            await self.node.set_data_value(asyncua.ua.uatypes.Variant(Value=value, VariantType=self.ua_type))
+            await self.node.set_data_value(
+                asyncua.ua.uatypes.Variant(Value=value, VariantType=self.ua_type)
+            )
         except (TypeError, asyncua.ua.uaerrors.BadTypeMismatch) as e:
             # A type conversion went wrong or there is a type mismatch.
             #
@@ -296,26 +341,28 @@ class ProtocolAttribute:
             if type(value) == list:
                 our_type = "list({dtype}) x ({dimensions})".format(
                     dtype=(type(value[0]).__name__ if value else ""),
-                    dimensions=len(value))
+                    dimensions=len(value),
+                )
             else:
-                our_type = "{dtype}".format(
-                    dtype=type(value))
+                our_type = "{dtype}".format(dtype=type(value))
 
             is_scalar = (self.dim_x + self.dim_y) == 1
 
             if is_scalar:
-                expected_server_type = "{dtype} (scalar)".format(
-                    dtype=self.ua_type)
+                expected_server_type = "{dtype} (scalar)".format(dtype=self.ua_type)
             else:
                 expected_server_type = "{dtype} x ({dim_x}, {dim_y})".format(
-                    dtype=self.ua_type,
-                    dim_x=self.dim_x,
-                    dim_y=self.dim_y)
+                    dtype=self.ua_type, dim_x=self.dim_x, dim_y=self.dim_y
+                )
 
             actual_server_type = "{dtype} x {dimensions}".format(
                 dtype=await self.node.read_data_type_as_variant_type(),
-                dimensions=(await self.node.read_array_dimensions()) or "(dimensions unknown)")
+                dimensions=(await self.node.read_array_dimensions())
+                or "(dimensions unknown)",
+            )
 
             attribute_name = (await self.node.read_display_name()).to_string()
 
-            raise TypeError(f"Cannot write value to OPC-UA attribute '{attribute_name}': tried to convert data type {our_type} to expected server type {expected_server_type}, server reports type {actual_server_type}") from e
+            raise TypeError(
+                f"Cannot write value to OPC-UA attribute '{attribute_name}': tried to convert data type {our_type} to expected server type {expected_server_type}, server reports type {actual_server_type}"
+            ) from e
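For reference, a minimal numpy-only sketch (the dimensions and values are made up) of the reshaping that ProtocolAttribute.read_function applies above: the OPC-UA server returns a flat sequence, and a 2D attribute is rebuilt by splitting it into dim_y rows of dim_x values.

    import numpy

    flat_value = list(range(12))  # flat sequence as returned by the OPC-UA node
    dim_x, dim_y = 4, 3           # attribute dimensions from the AttributeWrapper

    if dim_y != 0:
        # 2D attribute: split the flat array into dim_y rows
        value = numpy.array(numpy.split(numpy.array(flat_value), indices_or_sections=dim_y))
    else:
        # 1D attribute
        value = numpy.array(flat_value)

    assert value.shape == (dim_y, dim_x)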
diff --git a/tangostationcontrol/tangostationcontrol/clients/snmp_client.py b/tangostationcontrol/tangostationcontrol/clients/snmp_client.py
index b94292eb2067207695111740b668b990cda3467d..3494b3f9a3fcd4868eff33867ce3ba18f1e62fd5 100644
--- a/tangostationcontrol/tangostationcontrol/clients/snmp_client.py
+++ b/tangostationcontrol/tangostationcontrol/clients/snmp_client.py
@@ -1,13 +1,13 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from tangostationcontrol.clients.comms_client import CommClient
-
-from pysnmp import hlapi
-from pysnmp.smi import builder, compiler
+import logging
 from os import path
 
-
 import numpy
-import logging
+from pysnmp import hlapi
+from pysnmp.smi import builder, compiler
+from tangostationcontrol.clients.comms_client import CommClient
 
 logger = logging.getLogger()
 
@@ -46,29 +46,40 @@ class SNMPComm:
         # context data sets the version used. Default SNMPv2
         self.ctx_data = hlapi.ContextData()
 
-
     def getter(self, objs):
-        return next(hlapi.getCmd(self.engine, self.community, self.transport, self.ctx_data, *objs))
+        return next(
+            hlapi.getCmd(
+                self.engine, self.community, self.transport, self.ctx_data, *objs
+            )
+        )
 
     def setter(self, objs):
-        return next(hlapi.setCmd(self.engine, self.community, self.transport, self.ctx_data, *objs))
+        return next(
+            hlapi.setCmd(
+                self.engine, self.community, self.transport, self.ctx_data, *objs
+            )
+        )
 
 
 class SNMPClient(CommClient):
     """
-        messages to keep a check on the connection. On connection failure, reconnects once.
+    Sends messages to keep a check on the connection. On connection failure, reconnects once.
     """
 
     def start(self):
         super().start()
 
-    def __init__(self, community, host, timeout, version, fault_func, try_interval=2, port=161):
+    def __init__(
+        self, community, host, timeout, version, fault_func, try_interval=2, port=161
+    ):
         """
         Create the SNMP engine
         """
         super().__init__(fault_func, try_interval)
 
-        logger.debug(f"setting up SNMP engine with host: {host} and community: {community}")
+        logger.debug(
+            f"setting up SNMP engine with host: {host} and community: {community}"
+        )
         self.SNMP_comm = SNMPComm(community, host, version, port)
 
         # only sets up the engine, doesn't connect
@@ -81,14 +92,16 @@ class SNMPClient(CommClient):
             name = annotation["name"]
 
             # SNMP has tables that require an index number to access them. regular non-table variable have an index of 0
-            idx = annotation.get('index', 0)
+            idx = annotation.get("index", 0)
 
             # SNMP values like to use weird units like tenths of amps because it's all integers. We added a scaling factor to correct for that.
-            scaling_factor = annotation.get('scaling_factor', 1)
+            scaling_factor = annotation.get("scaling_factor", 1)
 
             return mib, name, idx, scaling_factor
         except KeyError:
-            raise ValueError(f"SNMP attribute annotation requires a dict argument with both a 'name' and 'mib' key. Instead got: {annotation}")        
+            raise ValueError(
+                f"SNMP attribute annotation requires a dict argument with both a 'name' and 'mib' key. Instead got: {annotation}"
+            )
 
     def setup_value_conversion(self, attribute):
         """
@@ -115,7 +128,9 @@ class SNMPClient(CommClient):
 
         # get all the necessary data to set up the read/write functions from the AttributeWrapper
         dim_x, dim_y, dtype = self.setup_value_conversion(attribute)
-        snmp_attr = SNMPAttribute(self.SNMP_comm, mib, name, idx, dtype, dim_x, dim_y, scaling_factor)
+        snmp_attr = SNMPAttribute(
+            self.SNMP_comm, mib, name, idx, dtype, dim_x, dim_y, scaling_factor
+        )
 
         # return the read/write functions
         def read_function():
@@ -126,9 +141,11 @@ class SNMPClient(CommClient):
 
         return read_function, write_function
 
-class SNMPAttribute:
 
-    def __init__(self, comm: SNMPComm, mib, name, idx, dtype, dim_x, dim_y, scaling_factor=1):
+class SNMPAttribute:
+    def __init__(
+        self, comm: SNMPComm, mib, name, idx, dtype, dim_x, dim_y, scaling_factor=1
+    ):
 
         self.comm = comm
         self.mib = mib
@@ -140,11 +157,16 @@ class SNMPAttribute:
         self.len = self.get_len(dim_x, dim_y)
         self.is_scalar = self.len == 1
 
-        self.objID = tuple(hlapi.ObjectIdentity(self.mib, self.name, self.idx + i) for i in range(self.len))
-        self.objs = tuple((hlapi.ObjectType(i) for i in self.objID), )
+        self.objID = tuple(
+            hlapi.ObjectIdentity(self.mib, self.name, self.idx + i)
+            for i in range(self.len)
+        )
+        self.objs = tuple(
+            (hlapi.ObjectType(i) for i in self.objID),
+        )
 
     def get_len(self, dim_x, dim_y):
-        """""Small helper function to not clutter the __init__"""
+        """Small helper function to not clutter the __init__."""
 
         if dim_x == 0:
             dim_x = 1
@@ -158,10 +180,14 @@ class SNMPAttribute:
         """
 
         # get all of the values
-        errorIndication, errorStatus, errorIndex, *varBinds = self.comm.getter(self.objs)
+        errorIndication, errorStatus, errorIndex, *varBinds = self.comm.getter(
+            self.objs
+        )
 
         if errorIndication is not None:
-            raise IOError(f"An error occurred while attempting to write '{self.name}'. errorIndication: {errorIndication}")
+            raise IOError(
+                f"An error occurred while attempting to write '{self.name}'. errorIndication: {errorIndication}"
+            )
 
         # get all the values in a list converted to the correct type
         val_lst = self.convert(varBinds)
@@ -177,13 +203,18 @@ class SNMPAttribute:
         if self.is_scalar and type(value) != list:
             value = [value]
 
-        write_obj = tuple(hlapi.ObjectType(self.objID[i], value[i]) for i in range(len(self.objID)))
+        write_obj = tuple(
+            hlapi.ObjectType(self.objID[i], value[i]) for i in range(len(self.objID))
+        )
 
-        errorIndication, errorStatus, errorIndex, *varBinds = self.comm.setter(write_obj)
+        errorIndication, errorStatus, errorIndex, *varBinds = self.comm.setter(
+            write_obj
+        )
 
         if errorIndication is not None:
-            raise IOError(f"An error occurred while attempting to write '{self.name}'. errorIndication: {errorIndication}")
-
+            raise IOError(
+                f"An error occurred while attempting to write '{self.name}'. errorIndication: {errorIndication}"
+            )
 
     def convert(self, var_binds):
         """
@@ -207,8 +238,16 @@ class SNMPAttribute:
             return isinstance(value, (hlapi.Integer32, hlapi.Integer))
 
         def is_an_hlapi_number_type(value):
-            return isinstance(value, (hlapi.TimeTicks, hlapi.Counter32, hlapi.Gauge32,
-                                      hlapi.Integer32, hlapi.Integer))
+            return isinstance(
+                value,
+                (
+                    hlapi.TimeTicks,
+                    hlapi.Counter32,
+                    hlapi.Gauge32,
+                    hlapi.Integer32,
+                    hlapi.Integer,
+                ),
+            )
 
         def is_an_hlapi_string_type(value):
             return isinstance(value, (hlapi.OctetString, hlapi.ObjectIdentity))
@@ -227,7 +266,7 @@ class SNMPAttribute:
 
         def convert_integer_to_str(value):
             if value.namedValues:
-                result  = value.prettyPrint()
+                result = value.prettyPrint()
             else:
                 result = numpy.int64(value)
 
@@ -242,14 +281,14 @@ class SNMPAttribute:
         elif needs_conversion_from_string_to_str(value):
             result = str(value)
         else:
-            raise TypeError(f"Error: did not find a valid snmp type. Got: {type(value)}, expected one of: '{snmp_to_numpy_dict.keys()}'")
+            raise TypeError(
+                f"Error: did not find a valid snmp type. Got: {type(value)}, expected one of: '{snmp_to_numpy_dict.keys()}'"
+            )
 
         return result
 
 
-
 class MIBLoader:
-
     def __init__(self, mib_dir: str):
         self.mibBuilder = builder.MibBuilder()
         logger.debug(mib_dir)
@@ -257,9 +296,13 @@ class MIBLoader:
         if not path.isabs(mib_dir):
             mib_dir = "/" + mib_dir
 
-        compiler.addMibCompiler(self.mibBuilder, sources=[f'file://{mib_dir}', ])
+        compiler.addMibCompiler(
+            self.mibBuilder,
+            sources=[
+                f"file://{mib_dir}",
+            ],
+        )
         logger.debug(f"mib sources: {self.mibBuilder.getMibSources()}")
 
     def load_pymib(self, mib_name):
         self.mibBuilder.loadModules(mib_name)
-
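For reference, a minimal sketch (no pysnmp involved; the example annotation is made up) of how the attribute annotation is interpreted in _setup_annotation above: 'mib' and 'name' are required, 'index' defaults to 0 for non-table variables, and 'scaling_factor' defaults to 1.

    def parse_annotation(annotation: dict):
        try:
            mib = annotation["mib"]
            name = annotation["name"]
        except KeyError:
            raise ValueError(
                f"SNMP attribute annotation requires a dict argument with both a 'name' and 'mib' key. Instead got: {annotation}"
            )

        idx = annotation.get("index", 0)                      # table index, 0 for scalars
        scaling_factor = annotation.get("scaling_factor", 1)  # e.g. tenths of amps -> amps

        return mib, name, idx, scaling_factor

    print(parse_annotation({"mib": "SNMPv2-MIB", "name": "sysDescr"}))
    # -> ('SNMPv2-MIB', 'sysDescr', 0, 1)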
diff --git a/tangostationcontrol/tangostationcontrol/clients/statistics/__init__.py b/tangostationcontrol/tangostationcontrol/clients/statistics/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/clients/statistics/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/clients/statistics/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/clients/statistics/client.py b/tangostationcontrol/tangostationcontrol/clients/statistics/client.py
index fdde3da3fc06a9cc1e900fbc2c1a067f8a248b5e..c9af9e802dca2c6c66a3880ec61249bba664d2ad 100644
--- a/tangostationcontrol/tangostationcontrol/clients/statistics/client.py
+++ b/tangostationcontrol/tangostationcontrol/clients/statistics/client.py
@@ -1,22 +1,15 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from queue import Queue
 import logging
+from queue import Queue
 from typing import Optional
 
 import numpy
-
 from tangostationcontrol.clients.comms_client import AsyncCommClient
+from tangostationcontrol.clients.statistics.consumer import StatisticsConsumer
 from tangostationcontrol.clients.tcp_replicator import TCPReplicator
 from tangostationcontrol.clients.udp_receiver import UDPReceiver
-from tangostationcontrol.clients.statistics.consumer import StatisticsConsumer
 
 logger = logging.getLogger()
 
@@ -30,8 +23,15 @@ class StatisticsClient(AsyncCommClient):
       and provides a CommClient interface to expose points to a Device Server.
     """
 
-    def __init__(self, collector, udp_options, tcp_options, fault_func, event_loop=None,
-                 queuesize=STATISTICS_CLIENT_QUEUE_SIZE):
+    def __init__(
+        self,
+        collector,
+        udp_options,
+        tcp_options,
+        fault_func,
+        event_loop=None,
+        queuesize=STATISTICS_CLIENT_QUEUE_SIZE,
+    ):
         """
         Create the statistics client and connect() to it and get the object node.
 
@@ -68,8 +68,7 @@ class StatisticsClient(AsyncCommClient):
         self.tcp = TCPReplicator(self.tcp_options, self.queuesize)
         self.statistics = StatisticsConsumer(self.collector_queue, self.collector)
 
-        self.udp = UDPReceiver([self.collector_queue, self.tcp],
-                               self.udp_options)
+        self.udp = UDPReceiver([self.collector_queue, self.tcp], self.udp_options)
 
     async def ping(self):
         if not self.statistics.is_alive():
@@ -117,21 +116,32 @@ class StatisticsClient(AsyncCommClient):
 
         # redirect to right object. this works as long as the parameter names are unique among them.
         if annotation["type"] == "statistics":
+
             def read_function():
                 return _process_statistics_annotation()
+
         elif annotation["type"] == "udp":
+
             def read_function():
                 return self.udp.parameters[parameter]
+
         elif annotation["type"] == "queue":
             if parameter == "collector_fill_percentage":
+
                 def read_function():
-                    return numpy.uint64(self._queue_fill_percentage(self.collector_queue))
+                    return numpy.uint64(
+                        self._queue_fill_percentage(self.collector_queue)
+                    )
+
             elif parameter == "replicator_fill_percentage":
+
                 def read_function():
                     return numpy.array(self.tcp.client_queue_fill_percentage)[:64]
+
             else:
                 raise ValueError(f"Unknown queue parameter requested: {parameter}")
         elif annotation["type"] == "replicator":
+
             def read_function():
                 return _process_replicator_annotation()
 
@@ -139,17 +149,21 @@ class StatisticsClient(AsyncCommClient):
             if annotation.get("reshape", False):
                 # force array into the shape of the attribute
                 if attribute.dim_y > 1:
-                    return self.collector.parameters[parameter].reshape(attribute.dim_y, attribute.dim_x)
+                    return self.collector.parameters[parameter].reshape(
+                        attribute.dim_y, attribute.dim_x
+                    )
                 else:
                     return self.collector.parameters[parameter].reshape(attribute.dim_x)
             else:
                 return self.collector.parameters[parameter]
 
         def _process_replicator_annotation():
-            parameters_dict = {"clients"          : numpy.array(self.tcp.clients(), dtype=str),
-                               "nof_bytes_sent"   : numpy.uint64(self.tcp.stats.nof_bytes_sent),
-                               "nof_packets_sent" : numpy.uint64(self.tcp.stats.nof_packets_sent),
-                               "nof_tasks_pending": numpy.uint64(self.tcp.nof_tasks_pending)}
+            parameters_dict = {
+                "clients": numpy.array(self.tcp.clients(), dtype=str),
+                "nof_bytes_sent": numpy.uint64(self.tcp.stats.nof_bytes_sent),
+                "nof_packets_sent": numpy.uint64(self.tcp.stats.nof_packets_sent),
+                "nof_tasks_pending": numpy.uint64(self.tcp.nof_tasks_pending),
+            }
             if parameter in parameters_dict:
                 return parameters_dict[parameter]
             raise ValueError(f"Unknown replicator parameter requested: {parameter}")
diff --git a/tangostationcontrol/tangostationcontrol/clients/statistics/client_thread.py b/tangostationcontrol/tangostationcontrol/clients/statistics/client_thread.py
index 04a288de676bfe02ec8e442951827d563f029438..2c199cb6ddddfab636c7f97769634f434e7e8015 100644
--- a/tangostationcontrol/tangostationcontrol/clients/statistics/client_thread.py
+++ b/tangostationcontrol/tangostationcontrol/clients/statistics/client_thread.py
@@ -1,15 +1,9 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
+import logging
 from abc import ABC
 from abc import abstractmethod
-import logging
 
 logger = logging.getLogger()
 
diff --git a/tangostationcontrol/tangostationcontrol/clients/statistics/consumer.py b/tangostationcontrol/tangostationcontrol/clients/statistics/consumer.py
index 497c7eb763c362ee3ba3c8c7854be8d03664efda..62f2838f9944f0d6541f976a47a17ad7ab67d2fb 100644
--- a/tangostationcontrol/tangostationcontrol/clients/statistics/consumer.py
+++ b/tangostationcontrol/tangostationcontrol/clients/statistics/consumer.py
@@ -1,26 +1,19 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import logging
-from threading import Thread
-from queue import Queue
 import time
+from queue import Queue
+from threading import Thread
 
 from lofar_station_client.statistics.collector import StatisticsCollector
-
 from tangostationcontrol.clients.statistics.client_thread import StatisticsClientThread
 
 logger = logging.getLogger()
 
 
 class StatisticsConsumer(Thread, StatisticsClientThread):
-    """ Base class to process statistics packets from a queue, asynchronously. """
+    """Base class to process statistics packets from a queue, asynchronously."""
 
     # Maximum time to wait for the Thread to get unstuck, if we want to stop
     DISCONNECT_TIMEOUT = 10.0
@@ -58,7 +51,9 @@ class StatisticsConsumer(Thread, StatisticsClientThread):
         if self.exception_counter == 1:
             logger.exception(f"Could not parse statistics packet")
         else:
-            logger.exception(f"Could not parse {self.exception_counter} statistics packets in the last {int(time_since_log)} seconds")
+            logger.exception(
+                f"Could not parse {self.exception_counter} statistics packets in the last {int(time_since_log)} seconds"
+            )
 
         self.last_exception_time = time.time()
         self.exception_counter = 0
@@ -100,4 +95,6 @@ class StatisticsConsumer(Thread, StatisticsClientThread):
 
         if self.is_alive():
             # there is nothing we can do except wait (stall) longer, which could be indefinitely.
-            logger.error(f"Statistics thread did not shut down after {self.DISCONNECT_TIMEOUT} seconds, just leaving it dangling. Please attach a debugger to thread ID {self.ident}.")
+            logger.error(
+                f"Statistics thread did not shut down after {self.DISCONNECT_TIMEOUT} seconds, just leaving it dangling. Please attach a debugger to thread ID {self.ident}."
+            )
diff --git a/tangostationcontrol/tangostationcontrol/clients/tcp_replicator.py b/tangostationcontrol/tangostationcontrol/clients/tcp_replicator.py
index 4462895361a2f6207e20f7c2bc3a6daf299b19aa..503c1e96ae4631eb87059f5875075260012abcdb 100644
--- a/tangostationcontrol/tangostationcontrol/clients/tcp_replicator.py
+++ b/tangostationcontrol/tangostationcontrol/clients/tcp_replicator.py
@@ -1,3 +1,6 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import asyncio
 import atexit
 import logging
@@ -70,8 +73,8 @@ class TCPReplicator(Thread, StatisticsClientThread):
     we kindly ask to not change this static variable at runtime.
     """
     _DEFAULT_OPTIONS = {
-        "tcp_bind"       : '0.0.0.0',
-        "tcp_port"       : 6666,
+        "tcp_bind": "0.0.0.0",
+        "tcp_port": 6666,
         "tcp_buffer_size": 128000000,  # In bytes
     }
 
@@ -133,10 +136,14 @@ class TCPReplicator(Thread, StatisticsClientThread):
 
         @property
         def queue_fill_percentage(self):
-            return 100 * self.queue.qsize() / self.queue.maxsize if self.queue.maxsize else 0
+            return (
+                100 * self.queue.qsize() / self.queue.maxsize
+                if self.queue.maxsize
+                else 0
+            )
 
         async def worker(self):
-            """ Take packets from the queue and transmit them across our clients. """
+            """Take packets from the queue and transmit them across our clients."""
             while True:
                 try:
                     packet = await self.queue.get()
@@ -180,7 +187,9 @@ class TCPReplicator(Thread, StatisticsClientThread):
         It manages an asyncio event loop to orchestrate our TCPServerProtocol.
         """
         try:
-            logger.info(f"Starting TCPReplicator thread for {self.options['tcp_bind']}:{self.options['tcp_port']}")
+            logger.info(
+                f"Starting TCPReplicator thread for {self.options['tcp_bind']}:{self.options['tcp_port']}"
+            )
 
             atexit.register(self.join)
 
@@ -199,13 +208,20 @@ class TCPReplicator(Thread, StatisticsClientThread):
 
     def _client_connected(self):
         async def _cb(_, writer):
-            self._connected_clients.append(TCPReplicator.TcpReplicatorClient(self.stats, writer, self.queue_size))
+            self._connected_clients.append(
+                TCPReplicator.TcpReplicatorClient(self.stats, writer, self.queue_size)
+            )
 
         return _cb
 
     async def _serve(self):
-        self._server = await asyncio.start_server(self._client_connected(), self.options['tcp_bind'],
-                                                  self.options['tcp_port'], reuse_address=True, start_serving=True)
+        self._server = await asyncio.start_server(
+            self._client_connected(),
+            self.options["tcp_bind"],
+            self.options["tcp_port"],
+            reuse_address=True,
+            start_serving=True,
+        )
         self._initialization_semaphore.release()
         async with self._server:
             await self._server.serve_forever()
@@ -226,22 +242,28 @@ class TCPReplicator(Thread, StatisticsClientThread):
             raise TypeError("Data must be byte-like object")
 
         futures.wait(
-                [
-                    asyncio.run_coroutine_threadsafe(self._transmit_to_client(c, data), self._loop)
-                    for c in self._connected_clients]
+            [
+                asyncio.run_coroutine_threadsafe(
+                    self._transmit_to_client(c, data), self._loop
+                )
+                for c in self._connected_clients
+            ]
         )
 
     def join(self, timeout=None):
         logging.info(
             "Received shutdown request on TCPReplicator thread for "
-            f"{self.options['tcp_bind']}:{self.options['tcp_port']}")
+            f"{self.options['tcp_bind']}:{self.options['tcp_port']}"
+        )
 
         # Unregister _clean_shutdown to prevent double execution and make
         # sure the thread gets cleaned up on stop/join
         atexit.unregister(self.join)
 
         if self._loop and self._loop.is_running():
-            asyncio.run_coroutine_threadsafe(self._clean_shutdown(), self._loop).result()
+            asyncio.run_coroutine_threadsafe(
+                self._clean_shutdown(), self._loop
+            ).result()
 
         # Only call join at the end otherwise Thread will falsely assume
         # all child 'processes' have stopped
@@ -261,9 +283,10 @@ class TCPReplicator(Thread, StatisticsClientThread):
             # there is nothing we can do except wait (stall) longer, which
             # could be indefinitely.
             logger.error(
-                    f"TCP thread for {self.options['tcp_bind']}:{self.options['tcp_port']} did not shutdown after"
-                    f"{self.DISCONNECT_TIMEOUT} seconds, just leaving it dangling."
-                    f"Please attach a debugger to thread ID {self.ident}.")
+                f"TCP thread for {self.options['tcp_bind']}:{self.options['tcp_port']} did not shut down after "
+                f"{self.DISCONNECT_TIMEOUT} seconds, just leaving it dangling. "
+                f"Please attach a debugger to thread ID {self.ident}."
+            )
 
     def put(self, packet):
         self.transmit(packet)
@@ -282,9 +305,12 @@ class TCPReplicator(Thread, StatisticsClientThread):
         await asyncio.gather(*self._tasks, return_exceptions=True)
 
     def clients(self):
-        """ Return the list of connected clients. """
+        """Return the list of connected clients."""
 
-        return [f"{client.transport.get_extra_info('peername')}" for client in self._connected_clients]
+        return [
+            f"{client.transport.get_extra_info('peername')}"
+            for client in self._connected_clients
+        ]
 
     @property
     def client_queue_fill_percentage(self):
@@ -292,7 +318,7 @@ class TCPReplicator(Thread, StatisticsClientThread):
 
     @property
     def nof_tasks_pending(self):
-        """ Return the number of pending tasks in our event loop. """
+        """Return the number of pending tasks in our event loop."""
 
         # asyncio.all_tasks is not thread safe, and can fail on a race condition if another
         # thread adds a task to a different loop while we're in all_tasks().
@@ -301,7 +327,7 @@ class TCPReplicator(Thread, StatisticsClientThread):
         #
         # See https://bugs.python.org/issue36607 and https://support.astron.nl/jira/browse/L2SS-560
         #
-        # This is fixed (in a similar manner as here) in python 3.7.4+, see 
+        # This is fixed (in a similar manner as here) in python 3.7.4+, see
         #   https://github.com/python/cpython/blob/v3.7.3/Lib/asyncio/tasks.py#L34
         # versus
         #   https://github.com/python/cpython/blob/v3.7.4/Lib/asyncio/tasks.py#L34
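For reference, a minimal sketch (standard-library Queue; the replicator's per-client queues follow the same arithmetic) of the queue_fill_percentage property above: the fill level is qsize/maxsize as a percentage, guarded against unbounded queues where maxsize is 0.

    from queue import Queue

    def queue_fill_percentage(queue: Queue) -> float:
        return 100 * queue.qsize() / queue.maxsize if queue.maxsize else 0

    q = Queue(maxsize=8)
    q.put(b"packet")
    q.put(b"packet")
    print(queue_fill_percentage(q))  # 25.0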
diff --git a/tangostationcontrol/tangostationcontrol/clients/udp_receiver.py b/tangostationcontrol/tangostationcontrol/clients/udp_receiver.py
index 05252bfc6ef4504ac46ec9cf17a8434ccaaab9eb..0796061b9cdc51667753759a5065316c59886d64 100644
--- a/tangostationcontrol/tangostationcontrol/clients/udp_receiver.py
+++ b/tangostationcontrol/tangostationcontrol/clients/udp_receiver.py
@@ -1,12 +1,14 @@
-from queue import Full
-from queue import Queue
-from threading import Thread
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import logging
-import numpy
 import socket
 import time
-from typing import List # not needed for python3.9+, where we can use the type "list[Queue]" directly
+from queue import Full
+from queue import Queue
+from threading import Thread
 
+import numpy
 from tangostationcontrol.clients.statistics.client_thread import StatisticsClientThread
 from tangostationcontrol.common.constants import MAX_ETH_FRAME_SIZE
 
@@ -25,34 +27,34 @@ class UDPReceiver(Thread, StatisticsClientThread):
         "poll_timeout": 0.1,
     }
 
-    def __init__(self, queues: List[Queue], options: dict = None):
+    def __init__(self, queues: list[Queue], options: dict = None):
         self.queues = queues
 
         try:
-            options['udp_host']
+            options["udp_host"]
         except KeyError:
             raise
 
         try:
-            options['udp_port']
+            options["udp_port"]
         except KeyError:
             raise
 
         self.options = self._parse_options(options)
 
-        self.host = self.options['udp_host']
-        self.port = self.options['udp_port']
-        self.poll_timeout = self.options['poll_timeout']
+        self.host = self.options["udp_host"]
+        self.port = self.options["udp_port"]
+        self.poll_timeout = self.options["poll_timeout"]
 
         self.parameters = {
             # Number of packets we received
-            "nof_packets_received":  numpy.uint64(0),
+            "nof_packets_received": numpy.uint64(0),
             # Number of bytes we received
-            "nof_bytes_received":    numpy.uint64(0),
+            "nof_bytes_received": numpy.uint64(0),
             # Number of packets we had to drop due to a full queue
-            "nof_packets_dropped":   numpy.uint64(0),
+            "nof_packets_dropped": numpy.uint64(0),
             # Packets are at most 9000 bytes, the largest payload (well, MTU) of an Ethernet Jumbo frame
-            "last_packet":           numpy.zeros((MAX_ETH_FRAME_SIZE,), dtype=numpy.uint8),
+            "last_packet": numpy.zeros((MAX_ETH_FRAME_SIZE,), dtype=numpy.uint8),
             # Timestamp of when the last packet was received
             "last_packet_timestamp": numpy.uint64(0),
         }
@@ -66,12 +68,12 @@ class UDPReceiver(Thread, StatisticsClientThread):
         self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
 
         # Increase buffers to prevent data loss when our class isn't listening.
-        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 16*1024*1024)
+        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 16 * 1024 * 1024)
 
         # specify what host and port to listen on
         self.sock.bind((self.host, self.port))
 
-        # Make sure we can stop receiving packets even if none arrive. 
+        # Make sure we can stop receiving packets even if none arrive.
         # Without this, the recvmsg() call blocks indefinitely if no packet arrives.
         self.sock.settimeout(self.poll_timeout)
 
@@ -92,10 +94,14 @@ class UDPReceiver(Thread, StatisticsClientThread):
             try:
                 packet, _, _, _ = self.sock.recvmsg(MAX_ETH_FRAME_SIZE)
 
-                self.parameters["nof_packets_received"]  += numpy.uint64(1)
-                self.parameters["nof_bytes_received"]    += numpy.uint64(len(packet))
-                self.parameters["last_packet"]           = numpy.frombuffer(packet, dtype=numpy.uint8)
-                self.parameters["last_packet_timestamp"] = numpy.uint64(int(time.time()))
+                self.parameters["nof_packets_received"] += numpy.uint64(1)
+                self.parameters["nof_bytes_received"] += numpy.uint64(len(packet))
+                self.parameters["last_packet"] = numpy.frombuffer(
+                    packet, dtype=numpy.uint8
+                )
+                self.parameters["last_packet_timestamp"] = numpy.uint64(
+                    int(time.time())
+                )
 
                 # Forward packet to processing threads
                 for queue in self.queues:
@@ -111,7 +117,9 @@ class UDPReceiver(Thread, StatisticsClientThread):
 
     def join(self, timeout=0):
         self.stream_on = False
-        logging.info("Sent shutdown to UDP thread for {}:{}".format(self.host, self.port))
+        logging.info(
+            "Sent shutdown to UDP thread for {}:{}".format(self.host, self.port)
+        )
 
         super().join(timeout)
 
@@ -133,4 +141,6 @@ class UDPReceiver(Thread, StatisticsClientThread):
 
         if self.is_alive():
             # there is nothing we can do except wait (stall) longer, which could be indefinitely.
-            logger.error(f"UDP thread for {self.host}:{self.port} did not shut down after {self.DISCONNECT_TIMEOUT} seconds, just leaving it dangling. Please attach a debugger to thread ID {self.ident}.")
+            logger.error(
+                f"UDP thread for {self.host}:{self.port} did not shut down after {self.DISCONNECT_TIMEOUT} seconds, just leaving it dangling. Please attach a debugger to thread ID {self.ident}."
+            )
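For reference, a minimal sketch (standard library only; the bind address, port and 9000-byte read size are placeholders) of the socket setup described in the comments above: reuse the address, enlarge the kernel receive buffer so packets survive short gaps in listening, and set a poll timeout so recvmsg() cannot block a shutdown indefinitely.

    import socket

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 16 * 1024 * 1024)
    sock.bind(("0.0.0.0", 5001))
    sock.settimeout(0.1)  # poll_timeout: raise socket.timeout instead of blocking forever

    try:
        packet, _, _, _ = sock.recvmsg(9000)  # jumbo-frame sized read
    except socket.timeout:
        pass  # no packet within the poll interval; loop again or shut down
    finally:
        sock.close()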
diff --git a/tangostationcontrol/tangostationcontrol/common/__init__.py b/tangostationcontrol/tangostationcontrol/common/__init__.py
index 3708ddd2ada03def2063b8b061246d0c1cdaf9ce..a212ea51d7847a8f4e0340d00932f723c51f2857 100644
--- a/tangostationcontrol/tangostationcontrol/common/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/common/__init__.py
@@ -1,10 +1,8 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR2.0 Station Control project.
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from .observation_controller import ObservationController
 
-__all__ = ['ObservationController', ]
+__all__ = [
+    "ObservationController",
+]
diff --git a/tangostationcontrol/tangostationcontrol/common/baselines.py b/tangostationcontrol/tangostationcontrol/common/baselines.py
index b9b0ca8038c0d881d602df37f99203d733f283fc..7f08a130d0993d1c3fb3c424c500ad2e498027c2 100644
--- a/tangostationcontrol/tangostationcontrol/common/baselines.py
+++ b/tangostationcontrol/tangostationcontrol/common/baselines.py
@@ -1,13 +1,18 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 """
   Baseline calculation functions.
 """
 
 import math
 
+
 def nr_baselines(nr_inputs: int) -> int:
-    """ Return the number of baselines (unique pairs) that exist between a given number of inputs. """
+    """Return the number of baselines (unique pairs) that exist between a given number of inputs."""
     return nr_inputs * (nr_inputs + 1) // 2
 
+
 """
 
  Baselines are ordered like:
@@ -40,20 +45,23 @@ def nr_baselines(nr_inputs: int) -> int:
 
 """
 
+
 def baseline_index(major: int, minor: int) -> int:
-    """ Provide a total ordering of baselines: give the unique array index for the baseline (major,minor),
-        with major >= minor. """
+    """Provide a total ordering of baselines: give the unique array index for the baseline (major,minor),
+    with major >= minor."""
 
     if major < minor:
-        raise ValueError(f"major < minor: {major} < {minor}. Since we do not store the conjugates this will lead to processing errors.")
+        raise ValueError(
+            f"major < minor: {major} < {minor}. Since we do not store the conjugates this will lead to processing errors."
+        )
 
     return major * (major + 1) // 2 + minor
 
+
 def baseline_from_index(index: int) -> tuple:
-    """ Return the (major,minor) input pair given a baseline index. """
+    """Return the (major,minor) input pair given a baseline index."""
 
     major = int((math.sqrt(float(8 * index + 1)) - 0.99999) / 2)
-    minor = index - baseline_index(major,0)
-
-    return (major,minor)
+    minor = index - baseline_index(major, 0)
 
+    return (major, minor)
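For reference, a small self-check (standard library only) of the ordering defined above: baseline_index lays the pairs out as major*(major+1)//2 + minor, and baseline_from_index inverts that through the triangular-root formula, so the round trip is the identity for every valid (major, minor) pair.

    import math

    def baseline_index(major: int, minor: int) -> int:
        return major * (major + 1) // 2 + minor

    def baseline_from_index(index: int) -> tuple:
        major = int((math.sqrt(float(8 * index + 1)) - 0.99999) / 2)
        minor = index - baseline_index(major, 0)
        return (major, minor)

    for major in range(8):
        for minor in range(major + 1):
            assert baseline_from_index(baseline_index(major, minor)) == (major, minor)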
diff --git a/tangostationcontrol/tangostationcontrol/common/cables.py b/tangostationcontrol/tangostationcontrol/common/cables.py
index b48123a26615a1c7d2ca2d97ffdea686968609f2..807bd90e2ac05db2782ee6a5f94b9924a98e9783 100644
--- a/tangostationcontrol/tangostationcontrol/common/cables.py
+++ b/tangostationcontrol/tangostationcontrol/common/cables.py
@@ -1,21 +1,26 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 from dataclasses import dataclass
 
+
 @dataclass(frozen=True)
 class CableType:
-    """ A cable used in LOFAR, with its properties. """
+    """A cable used in LOFAR, with its properties."""
+
     name: str
     length: int
     delay: float
     loss: dict
 
     def speed(self):
-        """ Return the speed of the signal in this cable, in m/s. """
+        """Return the speed of the signal in this cable, in m/s."""
 
         return self.length / self.delay
 
     def get_loss(self, antenna_type: str, rcu_band_select: int) -> float:
-        """ Get the appropiate loss value (in dB), for the given
-            antenna type and RCU band selection. """
+        """Get the appropriate loss value (in dB), for the given
+        antenna type and RCU band selection."""
 
         if antenna_type == "LBA":
             if rcu_band_select == 1:
@@ -23,7 +28,9 @@ class CableType:
             elif rcu_band_select == 2:
                 return self.loss[50]
             else:
-                raise ValueError(f"Unsupported RCU band selection for LBA: {rcu_band_select}")
+                raise ValueError(
+                    f"Unsupported RCU band selection for LBA: {rcu_band_select}"
+                )
         elif antenna_type == "HBA":
             if rcu_band_select == 1:
                 return self.loss[200]
@@ -32,21 +39,58 @@ class CableType:
             elif rcu_band_select == 4:
                 return self.loss[250]
             else:
-                raise ValueError(f"Unsupported RCU band selection for HBA: {rcu_band_select}")
+                raise ValueError(
+                    f"Unsupported RCU band selection for HBA: {rcu_band_select}"
+                )
 
         raise ValueError(f"Unsupported antenna type: {antenna_type}")
 
+
 # Global list of all known cable types.
 #
 # NB: The LOFAR1 equivalents of these tables are:
 #          - MAC/Deployment/data/StaticMetaData/CableDelays/
 #          - MAC/Deployment/data/StaticMetaData/CableAttenuation.conf
 cable_types = {}
-cable_types[  "0m"] = CableType(name=  "0m", length=  0, delay=000.0000e-9, loss={50: 0.00, 150: 0.00, 200:  0.00, 250:  0.00})
-cable_types[ "50m"] = CableType(name= "50m", length= 50, delay=199.2573e-9, loss={50: 2.05, 150: 3.64, 200:  4.24, 250:  4.46})
-cable_types[ "80m"] = CableType(name= "80m", length= 80, delay=326.9640e-9, loss={50: 3.32, 150: 5.87, 200:  6.82, 250:  7.19})
-cable_types[ "85m"] = CableType(name= "85m", length= 85, delay=342.5133e-9, loss={50: 3.53, 150: 6.22, 200:  7.21, 250:  7.58})
-cable_types["115m"] = CableType(name="115m", length=115, delay=465.5254e-9, loss={50: 4.74, 150: 8.35, 200:  9.70, 250: 10.18})
-cable_types["120m"] = CableType(name="120m", length=120, delay=493.8617e-9, loss={50: 4.85, 150: 8.55, 200:  9.92, 250: 10.42}) # used on CS030
-cable_types["130m"] = CableType(name="130m", length=130, delay=530.6981e-9, loss={50: 5.40, 150: 9.52, 200: 11.06, 250: 11.61})
-
+cable_types["0m"] = CableType(
+    name="0m",
+    length=0,
+    delay=000.0000e-9,
+    loss={50: 0.00, 150: 0.00, 200: 0.00, 250: 0.00},
+)
+cable_types["50m"] = CableType(
+    name="50m",
+    length=50,
+    delay=199.2573e-9,
+    loss={50: 2.05, 150: 3.64, 200: 4.24, 250: 4.46},
+)
+cable_types["80m"] = CableType(
+    name="80m",
+    length=80,
+    delay=326.9640e-9,
+    loss={50: 3.32, 150: 5.87, 200: 6.82, 250: 7.19},
+)
+cable_types["85m"] = CableType(
+    name="85m",
+    length=85,
+    delay=342.5133e-9,
+    loss={50: 3.53, 150: 6.22, 200: 7.21, 250: 7.58},
+)
+cable_types["115m"] = CableType(
+    name="115m",
+    length=115,
+    delay=465.5254e-9,
+    loss={50: 4.74, 150: 8.35, 200: 9.70, 250: 10.18},
+)
+cable_types["120m"] = CableType(
+    name="120m",
+    length=120,
+    delay=493.8617e-9,
+    loss={50: 4.85, 150: 8.55, 200: 9.92, 250: 10.42},
+)  # used on CS030
+cable_types["130m"] = CableType(
+    name="130m",
+    length=130,
+    delay=530.6981e-9,
+    loss={50: 5.40, 150: 9.52, 200: 11.06, 250: 11.61},
+)
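For reference, a small worked example (values copied from the 115m entry above; the band-to-frequency mapping follows get_loss) of how these cable properties are used: the signal speed follows from length/delay, and an HBA input on RCU band 2 is attenuated by the 150 MHz loss figure.

    length, delay = 115, 465.5254e-9
    loss = {50: 4.74, 150: 8.35, 200: 9.70, 250: 10.18}

    speed = length / delay        # ~2.47e8 m/s, roughly 0.82c in this coax
    hba_band_2_loss = loss[150]   # 8.35 dB of cable loss to compensate for

    print(f"{speed:.3e} m/s, {hba_band_2_loss} dB")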
diff --git a/tangostationcontrol/tangostationcontrol/common/calibration.py b/tangostationcontrol/tangostationcontrol/common/calibration.py
index e08810bab69072db16f62c3cf9cf6a64b2862f98..31fd0491f67f608257ae7cfe54b23fad9f0453e6 100644
--- a/tangostationcontrol/tangostationcontrol/common/calibration.py
+++ b/tangostationcontrol/tangostationcontrol/common/calibration.py
@@ -1,19 +1,23 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import numpy
 
+
 def delay_compensation(delays_seconds: numpy.ndarray, clock: int):
-    """ Return the delay compensation required to line up
-        signals that are delayed by "delays" seconds. The returned values
-        are the delay to apply, in samples (coarse) and remaining seconds
-        (fine), as a tuple (samples, remainder).
+    """Return the delay compensation required to line up
+    signals that are delayed by "delays" seconds. The returned values
+    are the delay to apply, in samples (coarse) and remaining seconds
+    (fine), as a tuple (samples, remainder).
 
-        The coarse delay is to be applied in sdp.FPGA_signal_input_delays_RW,
-        the fine delay is to be incorporated into sdp.FPGA_subband_weights_RW.
+    The coarse delay is to be applied in sdp.FPGA_signal_input_delays_RW,
+    the fine delay is to be incorporated into sdp.FPGA_subband_weights_RW.
 
-        Note that the remainder is -1/2 * sample <= remainder <= 1/2 * sample.
+    Note that the remainder is -1/2 * sample <= remainder <= 1/2 * sample.
 
-        Applying this correction equalises the signal across all inputs to be delayed
-        max(round(delay_samples)) samples, instead of their value in delay_seconds.
-        So we do _not exactly_ delay all signals to match the longest.
+    Applying this correction equalises the signal across all inputs to be delayed
+    max(round(delay_samples)) samples, instead of their value in delay_seconds.
+    So we do _not exactly_ delay all signals to match the longest.
     """
 
     # NB: signal_* are the amount of delay the signal obtained in our processing
@@ -37,23 +41,25 @@ def delay_compensation(delays_seconds: numpy.ndarray, clock: int):
 
     return (input_delays_samples, input_delays_subsample_seconds)
 
+
 def dB_to_factor(dB: numpy.ndarray) -> numpy.ndarray:
-    """ Convert values in decibel (dB) into their equivalent scaling factors. """
+    """Convert values in decibel (dB) into their equivalent scaling factors."""
     return 10 ** (dB / 10)
 
+
 def loss_compensation(losses_dB: numpy.ndarray):
-    """ Return the attenuation required to line up
-        signals that are dampened by "lossed_dB" decibel.
+    """Return the attenuation required to line up
+    signals that are dampened by "losses_dB" decibels.
 
-        Returned are the signal attenuations in whole dBs (coarse), and
-        the remaining scaling (as a factor), as a tuple (whole dBs, remainder).
+    Returned are the signal attenuations in whole dBs (coarse), and
+    the remaining scaling (as a factor), as a tuple (whole dBs, remainder).
 
-        The coarse attenuation is to be applied in recv.RCU_attenuation_dB_RW,
-        the fine scaling is to be incorporated into sdp.FPGA_subband_weights_RW.
+    The coarse attenuation is to be applied in recv.RCU_attenuation_dB_RW,
+    the fine scaling is to be incorporated into sdp.FPGA_subband_weights_RW.
 
-        Applying this correction equalises the signal across the inputs
-        to be dampened max(round(losses_dB)) instead of their value
-        in losses_dB. So we do _not_ fully dampen towards the weakest signal.
+    Applying this correction equalises the signal across the inputs
+    to be dampened max(round(losses_dB)) instead of their value
+    in losses_dB. So we do _not_ fully dampen towards the weakest signal.
     """
 
     # NB: signal_* are the amount of loss the signal obtained in our processing
@@ -64,7 +70,9 @@ def loss_compensation(losses_dB: numpy.ndarray):
     signal_attenuation_integer_dB = numpy.round(losses_dB).astype(numpy.uint32)
 
     # correct for the coarse loss by dampening the signals to line up.
-    input_attenuation_integer_dB = max(signal_attenuation_integer_dB) - signal_attenuation_integer_dB
+    input_attenuation_integer_dB = (
+        max(signal_attenuation_integer_dB) - signal_attenuation_integer_dB
+    )
 
     # compute the remainder, as a scaling factor
     signal_loss_remainder_dB = losses_dB - signal_attenuation_integer_dB
diff --git a/tangostationcontrol/tangostationcontrol/common/configuration.py b/tangostationcontrol/tangostationcontrol/common/configuration.py
index 0087c2c055095d1c78179e4c08ae803d2aaf1dd8..6daf9c580f8d4da2e5bd34bb58ae3fecb3a56e16 100644
--- a/tangostationcontrol/tangostationcontrol/common/configuration.py
+++ b/tangostationcontrol/tangostationcontrol/common/configuration.py
@@ -1,19 +1,15 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from tango import DeviceProxy, Database, DevFailed, DbDevInfo
-
-from itertools import islice
 import json
 import logging
+from itertools import islice
+
+from tango import DeviceProxy, Database, DevFailed, DbDevInfo
+
 logger = logging.getLogger()
 
+
 class StationConfiguration:
 
     DEVICE_PROPERTIES_QUERY = "SELECT device, property_device.name, property_device.value FROM property_device \
@@ -32,55 +28,69 @@ class StationConfiguration:
     SERVER_QUERY = "SELECT server, class, name FROM device \
                 WHERE class != 'DServer' \
                 ORDER BY server ASC"
-    
+
     # Servers that NEVER must be removed
-    DEFAULT_SKIP_SERVER_NAMES = ['configuration/stat', 'databaseds/2', 'tangorestserver/rest', 'tangotest/test', 'tangoaccesscontrol/1']
+    DEFAULT_SKIP_SERVER_NAMES = [
+        "configuration/stat",
+        "databaseds/2",
+        "tangorestserver/rest",
+        "tangotest/test",
+        "tangoaccesscontrol/1",
+    ]
 
-    def __init__(self, db: Database, tangodb_timeout:int = 10000):
-        self.db = db                                         # TangoDB
-        self.dbproxy = DeviceProxy(db.dev_name())            # TangoDB Proxy
-        self.dbproxy.set_timeout_millis(tangodb_timeout)     # Set a security timeout (default is 3000ms)
+    def __init__(self, db: Database, tangodb_timeout: int = 10000):
+        self.db = db  # TangoDB
+        self.dbproxy = DeviceProxy(db.dev_name())  # TangoDB Proxy
+        self.dbproxy.set_timeout_millis(
+            tangodb_timeout
+        )  # Set a security timeout (default is 3000ms)
 
     #
     #   DUMPING TANGO DATABASE
     #
-    
+
     def get_tangodb_data(self) -> dict:
-        """ Dump a subset of TANGO database into dictionary. 
-        
-        The dictionary contains the info about all the Devices used in the 
+        """Dump a subset of TANGO database into dictionary.
+
+        The dictionary contains the info about all the Devices used in the
         present environment, including their Properties values, their Attribute Properties,
         and the namespace of the DeviceServers which incapsulate each Device.
         """
         # Create empty dictionaries to be populated
         devices_dict = {}
         server_dict = {}
-        
+
         # Populate devices dictionary from query data
-        device_property_result = self._query_tangodb(self.dbproxy, self.DEVICE_PROPERTIES_QUERY, 3)
+        device_property_result = self._query_tangodb(
+            self.dbproxy, self.DEVICE_PROPERTIES_QUERY, 3
+        )
         devices_dict = self.add_to_devices_dict(devices_dict, device_property_result)
-        
+
         # Populate devices dictionary from query data
-        attrs_property_result = self._query_tangodb(self.dbproxy, self.ATTRS_PROPERTIES_QUERY, 4)
+        attrs_property_result = self._query_tangodb(
+            self.dbproxy, self.ATTRS_PROPERTIES_QUERY, 4
+        )
         devices_dict = self.add_to_attrs_dict(devices_dict, attrs_property_result)
-        
+
         # Populate server dictionary from query data and merge it with devices dict
         server_result = self._query_tangodb(self.dbproxy, self.SERVER_QUERY, 3)
         server_dict = self.add_to_server_dict(server_dict, devices_dict, server_result)
-        return {"servers" : server_dict}
+        return {"servers": server_dict}
 
-    def _query_tangodb(self, dbproxy: DeviceProxy, sql_query: str, num_cols: int) -> list:
-        """ Query TangoDb with a built-in function and return data as tuples """
+    def _query_tangodb(
+        self, dbproxy: DeviceProxy, sql_query: str, num_cols: int
+    ) -> list:
+        """Query TangoDb with a built-in function and return data as tuples"""
         _, raw_result = dbproxy.command_inout("DbMySqlSelect", sql_query)
         return self.query_to_tuples(raw_result, num_cols)
 
-    def add_to_devices_dict(self, devices_dict:dict, result:list) -> dict:
-        """ Populate a devices dictionary with the following structure:
+    def add_to_devices_dict(self, devices_dict: dict, result: list) -> dict:
+        """Populate a devices dictionary with the following structure:
         'device_name': { 'properties' : { 'property_name': ['property_value'] } }
         """
         for device, property, value in result:
             # lowercase data
-            device = device.lower() 
+            device = device.lower()
             property = property.lower()
             # model dictionary
             device_data = devices_dict.setdefault(device, {})
@@ -89,8 +99,8 @@ class StationConfiguration:
             value_data.append(value)
         return devices_dict
 
-    def add_to_attrs_dict(self, devices_dict:dict, result:list) -> dict:
-        """ Populate a device dictionary with the following structure : 
+    def add_to_attrs_dict(self, devices_dict: dict, result: list) -> dict:
+        """Populate a device dictionary with the following structure :
         'device_name': { 'attribute_properties' : { 'attribute_name': {'property_name' : ['property_value'] } } }
         """
         for device, attribute, property, value in result:
@@ -106,12 +116,14 @@ class StationConfiguration:
             value_data.append(value)
         return devices_dict
 
-    def add_to_server_dict(self, server_dict:dict, devices_dict:dict, result:list) -> dict:
-        """ Populate the server dictionary and merge it with the devices dictionary.
-        At the end of the process, the dictionary will have the following structure : 
+    def add_to_server_dict(
+        self, server_dict: dict, devices_dict: dict, result: list
+    ) -> dict:
+        """Populate the server dictionary and merge it with the devices dictionary.
+        At the end of the process, the dictionary will have the following structure :
         'server_name' : { 'server_instance' : { 'server_class' :
-            'device_name':  { 'properties' : { 'property_name': ['property_value'] } }, 
-                            { 'attribute_properties' : { 'attribute_name': {'property_name' : ['property_value'] } } } } } 
+            'device_name':  { 'properties' : { 'property_name': ['property_value'] } },
+                            { 'attribute_properties' : { 'attribute_name': {'property_name' : ['property_value'] } } } } }
         """
         for server, sclass, device in result:
             # lowercase data
@@ -119,7 +131,7 @@ class StationConfiguration:
             server = server.lower()
             sclass = sclass.lower()
             # model dictionary
-            sname, instance = server.split('/')
+            sname, instance = server.split("/")
             device_data = devices_dict.get(device, {})
             server_data = server_dict.setdefault(sname, {})
             instance_data = server_data.setdefault(instance, {})
@@ -129,18 +141,18 @@ class StationConfiguration:
         return server_dict
 
     def query_to_tuples(self, result: list, num_cols: int) -> list:
-        """ Given a query result and its number of columns, transforms the raw result in a list of tuples """
+        """Given a query result and its number of columns, transforms the raw result in a list of tuples"""
         return list(zip(*[islice(result, i, None, num_cols) for i in range(num_cols)]))
 
     #
     #   LOADING JSON INTO TANGO DATABASE
     #
 
-    def load_configdb(self, station_configuration:str, update:bool= False):
-        """ Takes a JSON string which represents the station configuration 
-            and loads the whole configuration. 
-            
-            N.B. with flag update=False, it does not update, it loads a full new configuration.
+    def load_configdb(self, station_configuration: str, update: bool = False):
+        """Takes a JSON string which represents the station configuration
+        and loads the whole configuration.
+
+        N.B. with update=False it does not update but loads an entirely new configuration.
         """
         # Convert json string into dictionary
         try:
@@ -154,17 +166,24 @@ class StationConfiguration:
         # Select if update or loading configuration from scratch
         if not update:
             # Select the servers to be removed after having built a proper select query
-            server_select_query = self.build_select_server_query(self.DEFAULT_SKIP_SERVER_NAMES)
-            servers_to_be_removed = [ server for server, _, _ in self._query_tangodb(self.dbproxy, server_select_query, 3)]
+            server_select_query = self.build_select_server_query(
+                self.DEFAULT_SKIP_SERVER_NAMES
+            )
+            servers_to_be_removed = [
+                server
+                for server, _, _ in self._query_tangodb(
+                    self.dbproxy, server_select_query, 3
+                )
+            ]
             for server in servers_to_be_removed:
                 # Remove devices
                 self.delete_server(server)
         # Select new configuration and add to DB
         self.add_server(tangodb_dict)
 
-    def build_select_server_query(self, default_skip_server_names:list):
-        """ Select the servers to be removed except the default ones in TangoDB (also ConfigurationDevice)
-            and build the relative SQL query.
+    def build_select_server_query(self, default_skip_server_names: list):
+        """Select the servers to be removed except the default ones in TangoDB (also ConfigurationDevice)
+        and build the relative SQL query.
         """
         sql_query = f"SELECT server, class, name FROM device WHERE class != 'DServer' "
         for server in default_skip_server_names:
@@ -172,8 +191,8 @@ class StationConfiguration:
         sql_query += f"ORDER BY server ASC"
         return sql_query
 
-    def delete_server(self, server:str):
-        """ Given a list of server names, delete the servers from Tango DB,
+    def delete_server(self, server: str):
+        """Given a list of server names, delete the servers from Tango DB,
             and all their nested data (Devices, Attributes, Properties, Values)
         N.B. This action cannot be undone
         """
@@ -185,30 +204,41 @@ class StationConfiguration:
             logger.warn(f"[Delete Server] Server {server} not found in DB")
 
     def add_server(self, tangodb_dict: dict):
-        """ Given a new TangoDb Configuration as a dictionary,
+        """Given a new TangoDb Configuration as a dictionary,
         create a server object with its own parameters, and add it to the Database.
 
         N.B. structure is:
         'server_name' : { 'server_instance' : { 'server_class' :
-            'device_name':  { 'properties' : { 'property_name': ['property_value'] } }, 
-                            { 'attribute_properties' : { 'attribute_name': {'property_name' : ['property_value'] } } } } } 
+            'device_name':  { 'properties' : { 'property_name': ['property_value'] } },
+                            { 'attribute_properties' : { 'attribute_name': {'property_name' : ['property_value'] } } } } }
         """
-        configuration_db = tangodb_dict['servers']
+        configuration_db = tangodb_dict["servers"]
         for server_name in configuration_db:
             instance_data = configuration_db.get(server_name, {})
             for instance in instance_data:
                 # Excluding from update default_skip_servers as well
-                if f"{server_name}/{instance}".lower() not in self.DEFAULT_SKIP_SERVER_NAMES:
-                    class_data = instance_data.get(instance,{})
+                if (
+                    f"{server_name}/{instance}".lower()
+                    not in self.DEFAULT_SKIP_SERVER_NAMES
+                ):
+                    class_data = instance_data.get(instance, {})
                     for _class in class_data:
-                        device_data = class_data.get(_class,{})
+                        device_data = class_data.get(_class, {})
                         for device_name in device_data:
-                            self._insert_server_into_db(server_name, instance, _class, device_data, device_name)
-    
-    def _insert_server_into_db(self, server_name:str, instance:str, _class:str, device_data:str, 
-                                device_name:str):
-        """ Insert a new server with all its relative info into the Tango DB """
-        device_info = DbDevInfo()   # Built-in Tango object to interact with DB
+                            self._insert_server_into_db(
+                                server_name, instance, _class, device_data, device_name
+                            )
+
+    def _insert_server_into_db(
+        self,
+        server_name: str,
+        instance: str,
+        _class: str,
+        device_data: str,
+        device_name: str,
+    ):
+        """Insert a new server with all its relative info into the Tango DB"""
+        device_info = DbDevInfo()  # Built-in Tango object to interact with DB
         # Set device name
         device_info.name = device_name
         # Set class name
@@ -221,13 +251,13 @@ class StationConfiguration:
         logger.info(f"Server {server_name}/{instance} has been inserted into DB")
         # Add device properties
         device_property_data = device_data.get(device_name, {})
-        if 'properties' in device_property_data:
-            property_data = device_property_data['properties']
+        if "properties" in device_property_data:
+            property_data = device_property_data["properties"]
             # https://pytango.readthedocs.io/en/stable/database.html#tango.Database.put_device_property
             self.db.put_device_property(device_name, property_data)
         # Add attribute properties
-        if 'attribute_properties' in device_property_data:
-            attr_property_data = device_property_data['attribute_properties']
+        if "attribute_properties" in device_property_data:
+            attr_property_data = device_property_data["attribute_properties"]
             # https://pytango.readthedocs.io/en/stable/database.html#tango.Database.put_device_attribute_property
             self.db.put_device_attribute_property(device_name, attr_property_data)
 
@@ -235,10 +265,10 @@ class StationConfiguration:
     #   UPLOAD JSON INTO TANGO DATABASE
     #
 
-    def update_configdb(self, station_configuration:str):
-        """ Takes a JSON string which represents the station configuration 
-            and upload the whole configuration. 
-            
-            N.B. it does not delete existing devices, it updates overlapping parameters.
+    def update_configdb(self, station_configuration: str):
+        """Takes a JSON string which represents the station configuration
+        and uploads the whole configuration.
+
+        N.B. it does not delete existing devices; it updates overlapping parameters.
         """
         self.load_configdb(station_configuration, update=True)
diff --git a/tangostationcontrol/tangostationcontrol/common/constants.py b/tangostationcontrol/tangostationcontrol/common/constants.py
index a88e7151ae4fa767550167e156e956f3b71527db..29175b6cd9351f8bd58a4a69e27c00eb95c49c78 100644
--- a/tangostationcontrol/tangostationcontrol/common/constants.py
+++ b/tangostationcontrol/tangostationcontrol/common/constants.py
@@ -1,5 +1,7 @@
-from tangostationcontrol.common.baselines import nr_baselines
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
+from tangostationcontrol.common.baselines import nr_baselines
 
 # number of FPGA processing nodes
 N_pn = 16
@@ -48,7 +50,6 @@ N_subbands = 512
 # Number of points per subband (the resolution)
 N_subband_res = 1024
 
-
 # main clock frequency's are 200MHz and 160MHz
 CLK_200_MHZ = 200_000_000
 CLK_160_MHZ = 160_000_000
@@ -74,7 +75,6 @@ N_ddr = 2
 # number of QSFP tranceivers per uniboard
 N_qsfp = 24
 
-
 # the three spatial dimensions XYZ used a lot for PQR and ITRF coordinates.
 N_xyz = 3
 # amount of parameters needed for a pointing
@@ -82,7 +82,6 @@ N_point_prop = 3
 # number of values for latitude/longitude coordinates
 N_latlong = 2
 
-
 # default subband we use because of its low RFI
 DEFAULT_SUBBAND = 102
 
diff --git a/tangostationcontrol/tangostationcontrol/common/entrypoint.py b/tangostationcontrol/tangostationcontrol/common/entrypoint.py
index 94e231e52e3b6607b8ae4252d399b6249ab4c117..7d771c716357cb17f4b48d9a72c142ad2795f180 100644
--- a/tangostationcontrol/tangostationcontrol/common/entrypoint.py
+++ b/tangostationcontrol/tangostationcontrol/common/entrypoint.py
@@ -1,12 +1,9 @@
-# -*- coding: utf-8 -*-
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import sys
 
 from tango.server import run
-
 from tangostationcontrol.common.lofar_logging import configure_logger
 
 
diff --git a/tangostationcontrol/tangostationcontrol/common/health.py b/tangostationcontrol/tangostationcontrol/common/health.py
index 688a534d145ba30ca4934731333e46fbde62ad5c..f37de1c982575579e3b6c0ca8b917ef8bebb7b3f 100644
--- a/tangostationcontrol/tangostationcontrol/common/health.py
+++ b/tangostationcontrol/tangostationcontrol/common/health.py
@@ -1,12 +1,11 @@
-# -*- coding: utf-8 -*-
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import sys
 
 from tango import DeviceProxy
 
+
 def main(*args, **kwargs):
     """Main function health check"""
 
diff --git a/tangostationcontrol/tangostationcontrol/common/lofar_logging.py b/tangostationcontrol/tangostationcontrol/common/lofar_logging.py
index 13d20551b0def7a72cd75ff750232b861ebd9b0e..dad5238901720c08a938673fa8e8a4ecea65e756 100644
--- a/tangostationcontrol/tangostationcontrol/common/lofar_logging.py
+++ b/tangostationcontrol/tangostationcontrol/common/lofar_logging.py
@@ -1,17 +1,22 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import logging
-from functools import wraps
-from tango.server import Device
-import traceback
 import socket
 import time
+import traceback
+from functools import wraps
+
+from tango.server import Device
 
 from tangostationcontrol import __version__ as version
 
+
 class TangoLoggingHandler(logging.Handler):
     LEVEL_TO_DEVICE_STREAM = {
         logging.DEBUG: Device.debug_stream,
-        logging.INFO:  Device.info_stream,
-        logging.WARN:  Device.warn_stream,
+        logging.INFO: Device.info_stream,
+        logging.WARN: Device.warn_stream,
         logging.ERROR: Device.error_stream,
         logging.FATAL: Device.fatal_stream,
     }
@@ -34,18 +39,19 @@ class TangoLoggingHandler(logging.Handler):
             stream(record.tango_device, record.msg, *record.args)
         except TypeError:
             # Tango's logger barfs on mal-formed log lines, f.e. if msg % args is not possible
-            record_msg = f"{record.msg} {record.args}".replace("%","%%")
+            record_msg = f"{record.msg} {record.args}".replace("%", "%%")
             stream(record.tango_device, record_msg)
 
         self.flush()
 
+
 class LogSuppressErrorSpam(logging.Formatter):
     """
-       Suppress specific errors from spamming the logs, by only letting them through periodically.
+    Suppress specific errors from spamming the logs, by only letting them through periodically.
     """
 
-    def __init__(self, error_suppress_interval = 3600):
-        """ Suppress subsequent errors for `error_suppress_interval` seconds. """
+    def __init__(self, error_suppress_interval=3600):
+        """Suppress subsequent errors for `error_suppress_interval` seconds."""
 
         super().__init__()
 
@@ -57,7 +63,10 @@ class LogSuppressErrorSpam(logging.Formatter):
 
     def is_error_to_suppress(self, record):
         # Errors occurring when we cannot connect to the log processing container, e.g. because it is down.
-        return record.name == "LogProcessingWorker" and record.msg == "An error occurred while sending events: %s"
+        return (
+            record.name == "LogProcessingWorker"
+            and record.msg == "An error occurred while sending events: %s"
+        )
 
     def filter(self, record):
         if self.is_error_to_suppress(record):
@@ -71,17 +80,18 @@ class LogSuppressErrorSpam(logging.Formatter):
 
         return True
 
+
 class LogAnnotator(logging.Formatter):
-    """ Annotates log records with:
+    """Annotates log records with:
 
-        record.tango_device: the Tango Device that is executing. """
+    record.tango_device: the Tango Device that is executing."""
 
     @staticmethod
     def get_current_tango_device() -> Device:
-        """ Return the tango Device we're currently executing for, or None if it can't be detected.
+        """Return the tango Device we're currently executing for, or None if it can't be detected.
 
-            This is derived by traversing the stack and find a Device as 'self'. In some cases,
-            this fails, for example if a separate Thread is started for a certain Device. """
+        This is derived by traversing the stack and finding a Device as 'self'. In some cases,
+        this fails, for example if a separate Thread is started for a certain Device."""
 
         for frame, _lineno in traceback.walk_stack(f=None):
             if "self" in frame.f_locals and isinstance(frame.f_locals["self"], Device):
@@ -102,12 +112,13 @@ class LogAnnotator(logging.Formatter):
         # we just annotate, we don't filter
         return True
 
-def configure_logger(logger: logging.Logger=None, log_extra=None, debug=False):
+
+def configure_logger(logger: logging.Logger = None, log_extra=None, debug=False):
     """
-       Configure the given logger (or root if None) to:
-         - send logs to Loki through Logstash
-         - send logs to Tango
-         - send logs to stdout
+    Configure the given logger (or root if None) to:
+      - send logs to Loki through Logstash
+      - send logs to Tango
+      - send logs to stdout
     """
 
     # NOTE: We have to attach filters to handlers, instead to this logger,
@@ -140,7 +151,12 @@ def configure_logger(logger: logging.Logger=None, log_extra=None, debug=False):
     # Always also log the hostname because it makes the origin of the log clear.
     hostname = socket.gethostname()
 
-    formatter = logging.Formatter(fmt = '%(asctime)s.%(msecs)d %(levelname)s - %(tango_device)s: %(message)s [%(funcName)s in %(filename)s:%(lineno)d]'.format(hostname), datefmt = '%Y-%m-%dT%H:%M:%S')
+    formatter = logging.Formatter(
+        fmt="%(asctime)s.%(msecs)d %(levelname)s - {} %(tango_device)s: %(message)s [%(funcName)s in %(filename)s:%(lineno)d]".format(
+            hostname
+        ),
+        datefmt="%Y-%m-%dT%H:%M:%S",
+    )
     handler.setFormatter(formatter)
     handler.addFilter(LogSuppressErrorSpam())
     handler.addFilter(LogAnnotator())
@@ -153,10 +169,15 @@ def configure_logger(logger: logging.Logger=None, log_extra=None, debug=False):
 
     # Log to Logstash-Loki
     try:
-        from logstash_async.handler import AsynchronousLogstashHandler, LogstashFormatter
+        from logstash_async.handler import (
+            AsynchronousLogstashHandler,
+            LogstashFormatter,
+        )
 
         # log to the tcp_input of logstash in our logstash-loki container
-        handler = AsynchronousLogstashHandler("logstash", 5959, database_path='/tmp/lofar_pending_log_messages.db')
+        handler = AsynchronousLogstashHandler(
+            "logstash", 5959, database_path="/tmp/lofar_pending_log_messages.db"
+        )
 
         # configure log messages
         formatter = LogstashFormatter(extra=log_extra, tags=["python", "lofar"])
@@ -167,7 +188,9 @@ def configure_logger(logger: logging.Logger=None, log_extra=None, debug=False):
         # install the handler
         logger.addHandler(handler)
     except ImportError:
-        logger.exception("Cannot forward logs to Logstash-Loki: logstash_async module not found.")
+        logger.exception(
+            "Cannot forward logs to Logstash-Loki: logstash_async module not found."
+        )
     except Exception:
         logger.exception("Cannot forward logs to Logstash-Loki.")
 
@@ -185,13 +208,16 @@ def configure_logger(logger: logging.Logger=None, log_extra=None, debug=False):
 
     return logger
 
+
 def device_logging_to_python():
-    """ Decorator. Call this on a Tango Device instance or class to have your Tango Device log to python instead. """
+    """Decorator. Call this on a Tango Device instance or class to have your Tango Device log to python instead."""
 
     def inner(cls):
         # we'll be doing very weird things if this class isn't a Device
         if not issubclass(cls, Device):
-            raise ValueError("device_logging_to_python decorator is to be used on Tango Device classes only.")
+            raise ValueError(
+                "device_logging_to_python decorator is to be used on Tango Device classes only."
+            )
 
         # Monkey patch the python logger to replace the tango logger
         logger = logging.getLogger()
@@ -208,8 +234,9 @@ def device_logging_to_python():
 
     return inner
 
-def log_exceptions(logger: logging.Logger=None):
-    """ Decorator that logs all exceptions that the function raises. """
+
+def log_exceptions(logger: logging.Logger = None):
+    """Decorator that logs all exceptions that the function raises."""
 
     def wrapper(func):
         @wraps(func)
@@ -217,7 +244,9 @@ def log_exceptions(logger: logging.Logger=None):
             try:
                 return func(self, *args, **kwargs)
             except Exception as e:
-                (logger or logging.getLogger()).exception(f"Unhandled exception: {e.__class__.__name__}: {e}")
+                (logger or logging.getLogger()).exception(
+                    f"Unhandled exception: {e.__class__.__name__}: {e}"
+                )
 
                 # we can log but we cannot hide
                 raise
diff --git a/tangostationcontrol/tangostationcontrol/common/measures.py b/tangostationcontrol/tangostationcontrol/common/measures.py
index fd9438caa492940de222043ff0f18c7193be6b54..4eb86fdfe6a9f79464e1c21075710a369db28d2d 100644
--- a/tangostationcontrol/tangostationcontrol/common/measures.py
+++ b/tangostationcontrol/tangostationcontrol/common/measures.py
@@ -1,7 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ Utility functions for managing the casacore 'measures' tables.
 
@@ -31,13 +29,13 @@ needs to be restarted in order to clear the cache.
 
 """
 
-import pathlib
-import urllib.request
-import tarfile
 import datetime
 import os
-import sys
+import pathlib
 import shutil
+import sys
+import tarfile
+import urllib.request
 
 # Where to store the measures table sets
 IERS_ROOTDIR = "/opt/IERS"
@@ -48,18 +46,20 @@ DOWNLOAD_DIR = "/tmp"
 # Where new measures can be downloaded
 MEASURES_URL = "ftp://ftp.astron.nl/outgoing/Measures/WSRT_Measures.ztar"
 
+
 def get_measures_directory():
-    """ Return the directory of the current measures table in use. """
+    """Return the directory of the current measures table in use."""
 
     return str(pathlib.Path(IERS_ROOTDIR, "current").resolve())
 
+
 def use_measures_directory(newdir):
-    """ Select a new set of measures tables to use.
+    """Select a new set of measures tables to use.
 
-        NOTE: Python must be restarted if the 'casacore.measures' module
-              already loaded the measures table before this switch.
+    NOTE: Python must be restarted if the 'casacore.measures' module
+          already loaded the measures table before this switch.
 
-              The 'restart_python()' function can be used for this purpose.
+          The 'restart_python()' function can be used for this purpose.
     """
 
     newdir = pathlib.Path(newdir)
@@ -69,7 +69,7 @@ def use_measures_directory(newdir):
         raise ValueError(f"Target is not an available measures directory: {newdir}")
 
     # make sure newdir points to a directory containing measures
-    for subdir in ['ephemerides', 'geodetic']:
+    for subdir in ["ephemerides", "geodetic"]:
         subdir = pathlib.Path(newdir, subdir)
 
         if not subdir.is_dir():
@@ -81,23 +81,29 @@ def use_measures_directory(newdir):
         current_symlink.unlink()
     current_symlink.symlink_to(newdir)
 
+
 def restart_python():
-    """ Force a restart this python program. 
+    """Force a restart this python program.
 
-        This function does not return. """
+    This function does not return."""
 
     exe_path = pathlib.Path(sys.executable)
 
     # NOTE: Python 3.4+ closes all file descriptors > 2 automatically, see https://www.python.org/dev/peps/pep-0446/
     os.execv(exe_path, [exe_path.name] + sys.argv)
 
+
 def get_available_measures_directories() -> list:
-    """ Returns the set of installed measures tables. """
-    return [str(d) for d in pathlib.Path(IERS_ROOTDIR).glob("IERS-*") if d.is_dir() and not d.is_symlink()]
+    """Returns the set of installed measures tables."""
+    return [
+        str(d)
+        for d in pathlib.Path(IERS_ROOTDIR).glob("IERS-*")
+        if d.is_dir() and not d.is_symlink()
+    ]
+
 
 def download_measures() -> str:
-    """ Download new measures and return the directory in which they were installed.
-    """
+    """Download new measures and return the directory in which they were installed."""
 
     # create target directory for new measures
     now = datetime.datetime.now()
diff --git a/tangostationcontrol/tangostationcontrol/common/observation_controller.py b/tangostationcontrol/tangostationcontrol/common/observation_controller.py
index d77bfc9eae8234aa3b0d6c29c3e9f50bcaa9e6c4..a95fe31d28d1988fac928d1e96aeb784e0a53443 100644
--- a/tangostationcontrol/tangostationcontrol/common/observation_controller.py
+++ b/tangostationcontrol/tangostationcontrol/common/observation_controller.py
@@ -1,16 +1,11 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR2.0 Station Control project.
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import logging
 import time
 from datetime import datetime
 
 from tango import DevFailed, DevState, Except, Util, EventType, DeviceProxy
-
 from tangostationcontrol.common.lofar_logging import log_exceptions
 from tangostationcontrol.configuration import ObservationSettings
 
@@ -25,6 +20,7 @@ class RunningObservation(object):
     @property
     def class_name(self) -> str:
         from tangostationcontrol.devices.observation import Observation
+
         return Observation.__name__
 
     @property
@@ -53,7 +49,10 @@ class RunningObservation(object):
             self._tango_util.create_device(self.class_name, f"{self.device_name}")
         except DevFailed as ex:
             logger.exception(ex)
-            if ex.args[0].desc == f"The device {self.device_name.lower()} is already defined in the database":
+            if (
+                ex.args[0].desc
+                == f"The device {self.device_name.lower()} is already defined in the database"
+            ):
                 # and self.is_observation_running(self.observation_id) is False:
                 self._tango_util.delete_device(self.class_name, self.device_name)
                 error_string = f"Cannot create the Observation device {self.device_name} because it is already present in the Database but it is not running. Try to re-run the start_observation command"
@@ -85,13 +84,15 @@ class RunningObservation(object):
         # Turn on the polling for the attribute.
         # Note that this is not automatically done despite the attribute
         # having the right polling values set in the ctor.
-        self._device_proxy.poll_attribute(self.attribute_name.split('/')[-1], 1000)
+        self._device_proxy.poll_attribute(self.attribute_name.split("/")[-1], 1000)
 
         # Right. Now subscribe to periodic events.
-        self._event_id = self._device_proxy.subscribe_event(self.attribute_name.split('/')[-1],
-                                                            EventType.PERIODIC_EVENT,
-                                                            cb)
-        logger.info(f"Successfully started an observation with ID={self.observation_id}.")
+        self._event_id = self._device_proxy.subscribe_event(
+            self.attribute_name.split("/")[-1], EventType.PERIODIC_EVENT, cb
+        )
+        logger.info(
+            f"Successfully started an observation with ID={self.observation_id}."
+        )
 
     def shutdown(self):
         # Check if the device has not terminated itself in the meanwhile.
@@ -99,7 +100,8 @@ class RunningObservation(object):
             self._device_proxy.ping()
         except DevFailed:
             logger.warning(
-                f"The device for the Observation with ID={self.observation_id} has unexpectedly already disappeared.  It is advised to check the logs up to 10s prior to this message to see what happened.")
+                f"The device for the Observation with ID={self.observation_id} has unexpectedly already disappeared.  It is advised to check the logs up to 10s prior to this message to see what happened."
+            )
         else:
             # Unsubscribe from the subscribed event.
             event_id = self._event_id
@@ -124,17 +126,21 @@ class RunningObservation(object):
                 remaining_wait_time = remaining_wait_time - sleep_time
             # Check if the observation object is really in OFF state.
             if stopped:
-                logger.info(f"Successfully stopped the observation with ID={self.observation_id}")
+                logger.info(
+                    f"Successfully stopped the observation with ID={self.observation_id}"
+                )
             else:
                 logger.warning(
-                    f"Could not shut down the Observation device ( {self.device_name} ) for observation ID={self.observation_id}.  This means that there is a chance for a memory leak.  Will continue anyway and forcefully delete the Observation object.")
+                    f"Could not shut down the Observation device ( {self.device_name} ) for observation ID={self.observation_id}.  This means that there is a chance for a memory leak.  Will continue anyway and forcefully delete the Observation object."
+                )
 
         # Finally remove the device object from the Tango DB.
         try:
             self._tango_util.delete_device(self.class_name, self.device_name)
         except DevFailed:
             logger.warning(
-                f"Something went wrong when the device {self.device_name} was removed from the Tango DB.  There is nothing that can be done about this here at this moment but you should check the Tango DB yourself.")
+                f"Something went wrong when the device {self.device_name} was removed from the Tango DB.  There is nothing that can be done about this here at this moment but you should check the Tango DB yourself."
+            )
 
 
 class ObservationController(object):
@@ -166,7 +172,8 @@ class ObservationController(object):
         if event.err:
             # Something is fishy with this event.
             logger.warning(
-                f"The Observation device {event.device} sent an event but the event signals an error.  It is advised to check the logs for any indication that something went wrong in that device.  Event data={event}")
+                f"The Observation device {event.device} sent an event but the event signals an error.  It is advised to check the logs for any indication that something went wrong in that device.  Event data={event}"
+            )
             return
 
         # Get the Observation ID from the sending device.
@@ -177,7 +184,8 @@ class ObservationController(object):
         if not running_obs:
             # No obs is running???
             logger.warning(
-                f"Received an observation_running event for the observation with ID={obs_id}.  According to the records in ObservationControl, this observation is not supposed to run.  Please check previous logs, especially around the time an observation with this ID was started.  Will continue and ignore this event.")
+                f"Received an observation_running event for the observation with ID={obs_id}.  According to the records in ObservationControl, this observation is not supposed to run.  Please check previous logs, especially around the time an observation with this ID was started.  Will continue and ignore this event."
+            )
             return
 
         if obs_id in running_obs:
@@ -197,13 +205,14 @@ class ObservationController(object):
         else:
             # The observation that we are trying to process is not part of the running_obs dictionary
             logger.warning(
-                f"Received an observation_running event for the observation with ID={obs_id}.  According to the records in ObservationControl, this observation is not supposed to run.  Please check previous logs, especially around the time an observation with this ID was started.  Will continue and ignore this event.")
+                f"Received an observation_running event for the observation with ID={obs_id}.  According to the records in ObservationControl, this observation is not supposed to run.  Please check previous logs, especially around the time an observation with this ID was started.  Will continue and ignore this event."
+            )
             return
 
     def start_observation(self, settings: ObservationSettings):
         # Check further properties that cannot be validated through a JSON schema
         if settings.stop_time <= datetime.now():
-            error = f"Cannot start an observation with ID={settings.observation_id} because the parameter stop_time parameter value=\"{settings.stop_time}\" is invalid. Set a stop_time parameter later in time than the start time."
+            error = f'Cannot start an observation with ID={settings.observation_id} because the stop_time parameter value="{settings.stop_time}" is invalid. Set a stop_time parameter later in time than the start time.'
             Except.throw_exception("IllegalCommand", error, __name__)
 
         obs = RunningObservation(self._tango_domain, settings)
@@ -227,7 +236,9 @@ class ObservationController(object):
         except DevFailed as ex:
             self._tango_util.delete_device(obs.class_name, obs.device_name)
             error_string = "Cannot access the Observation device instance for observation ID=%s with device class name=%s and device instance name=%s.  This means that the observation cannot be controlled and/or forcefully be stopped."
-            logger.exception(error_string, obs.observation_id, obs.class_name, obs.device_name)
+            logger.exception(
+                error_string, obs.observation_id, obs.class_name, obs.device_name
+            )
             Except.re_throw_exception(ex, "DevFailed", error_string, __name__)
 
     def stop_observation(self, obs_id):
diff --git a/tangostationcontrol/tangostationcontrol/common/states.py b/tangostationcontrol/tangostationcontrol/common/states.py
index cc458005621a3116b7a05839e81727d3dc796e70..6c7aa97d04ed84d73e5aabd4971fa31d1415907e 100644
--- a/tangostationcontrol/tangostationcontrol/common/states.py
+++ b/tangostationcontrol/tangostationcontrol/common/states.py
@@ -1,3 +1,6 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 from tango import DevState
 
 # The Device states in which we consider our device operational,
diff --git a/tangostationcontrol/tangostationcontrol/common/type_checking.py b/tangostationcontrol/tangostationcontrol/common/type_checking.py
index ac146170d2274cc950a6bffa326404c9481fd072..d70ee6b6c750aa570b7b45d0eb2d56f59036e3eb 100644
--- a/tangostationcontrol/tangostationcontrol/common/type_checking.py
+++ b/tangostationcontrol/tangostationcontrol/common/type_checking.py
@@ -1,7 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tango.utils import is_seq
 
diff --git a/tangostationcontrol/tangostationcontrol/configuration/__init__.py b/tangostationcontrol/tangostationcontrol/configuration/__init__.py
index 2b1c62bb5f2f9b8cc2a53cab08ba3c1e261ee040..de4c16c013f4e80a96ea95465e47a454c7902bec 100644
--- a/tangostationcontrol/tangostationcontrol/configuration/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/configuration/__init__.py
@@ -1,12 +1,12 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR2.0 Station Control project.
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from .observation_settings import ObservationSettings
 from .pointing import Pointing
 from .sap import Sap
 
-__all__ = ['ObservationSettings', 'Pointing', 'Sap', ]
+__all__ = [
+    "ObservationSettings",
+    "Pointing",
+    "Sap",
+]
diff --git a/tangostationcontrol/tangostationcontrol/configuration/_json_parser.py b/tangostationcontrol/tangostationcontrol/configuration/_json_parser.py
index 92ecaa60932ea55ce7e630099c751973281f8d3a..a922f9e82a409e50df8f9826d455e8de72ab26e7 100644
--- a/tangostationcontrol/tangostationcontrol/configuration/_json_parser.py
+++ b/tangostationcontrol/tangostationcontrol/configuration/_json_parser.py
@@ -1,11 +1,6 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 from typing import Type
 
 from jsonschema.exceptions import ValidationError
@@ -13,6 +8,7 @@ from jsonschema.exceptions import ValidationError
 
 def _from_json_hook_t(primary: Type):
     from tangostationcontrol.configuration import Pointing, Sap, ObservationSettings
+
     def actual_hook(json_dct):
         primary_ex = None
         for t in [Pointing, Sap, ObservationSettings]:
diff --git a/tangostationcontrol/tangostationcontrol/configuration/configuration_base.py b/tangostationcontrol/tangostationcontrol/configuration/configuration_base.py
index 7820eefb7924add428108dab35e645c3b596e04b..d375aa812f35d34ed2c0f290e7a3f2d652599ca5 100644
--- a/tangostationcontrol/tangostationcontrol/configuration/configuration_base.py
+++ b/tangostationcontrol/tangostationcontrol/configuration/configuration_base.py
@@ -1,11 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import json
 import re
@@ -17,10 +11,9 @@ import jsonschema
 import requests
 from jsonschema import Draft7Validator, FormatChecker, ValidationError
 from jsonschema.validators import RefResolver
-
 from tangostationcontrol.configuration._json_parser import _from_json_hook_t
 
-T = TypeVar('T')
+T = TypeVar("T")
 
 
 def _fetch_url(url):
@@ -39,7 +32,6 @@ def _fetch_url(url):
 
 
 class RetryHttpRefResolver(RefResolver):
-
     def resolve_remote(self, uri):
         result = _fetch_url(uri)
 
@@ -52,8 +44,11 @@ def _is_object(_, instance):
     return isinstance(instance, dict) or issubclass(type(instance), _ConfigurationBase)
 
 
-jsonschema.validators.Draft7Validator.TYPE_CHECKER = Draft7Validator.TYPE_CHECKER.redefine(
-        "object", _is_object,
+jsonschema.validators.Draft7Validator.TYPE_CHECKER = (
+    Draft7Validator.TYPE_CHECKER.redefine(
+        "object",
+        _is_object,
+    )
 )
 
 
@@ -62,15 +57,19 @@ class _ConfigurationBase(ABC):
 
     @staticmethod
     def _class_to_url(cls_name):
-        return re.sub(r'(?<!^)(?=[A-Z])', '-', cls_name).lower()
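+        # e.g. "ObservationSettings" -> "observation-settings"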
+        return re.sub(r"(?<!^)(?=[A-Z])", "-", cls_name).lower()
 
     @classmethod
     def get_validator(cls):
         name = cls.__name__
         url = f"{_ConfigurationBase.BASE_URL}{_ConfigurationBase._class_to_url(name)}.json"
-        resolver = RetryHttpRefResolver(base_uri=_ConfigurationBase.BASE_URL, referrer=url)
+        resolver = RetryHttpRefResolver(
+            base_uri=_ConfigurationBase.BASE_URL, referrer=url
+        )
         _, resolved = resolver.resolve(url)
-        return Draft7Validator(resolved, format_checker=FormatChecker(), resolver=resolver)
+        return Draft7Validator(
+            resolved, format_checker=FormatChecker(), resolver=resolver
+        )
 
     @abstractmethod
     def __iter__(self):
@@ -102,5 +101,7 @@ class _ConfigurationBase(ABC):
     def from_json(cls: Type[T], data: str) -> T:
         s = json.loads(data, object_hook=_from_json_hook_t(cls))
         if not isinstance(s, cls):
-            raise ValidationError(f"Unexpected type: expected <{cls.__class__.__name__}>, got <{type(s).__name__}>")
+            raise ValidationError(
+                f"Unexpected type: expected <{cls.__class__.__name__}>, got <{type(s).__name__}>"
+            )
         return s
diff --git a/tangostationcontrol/tangostationcontrol/configuration/observation_settings.py b/tangostationcontrol/tangostationcontrol/configuration/observation_settings.py
index 65c024b05951c0aedc07dab8aac833cc860cd41d..bcffad6971279c66e905b649a8615b28bff176e8 100644
--- a/tangostationcontrol/tangostationcontrol/configuration/observation_settings.py
+++ b/tangostationcontrol/tangostationcontrol/configuration/observation_settings.py
@@ -1,9 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR2.0 Station Control project.
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from datetime import datetime
 from typing import Sequence
@@ -14,9 +10,16 @@ from tangostationcontrol.configuration.sap import Sap
 
 
 class ObservationSettings(_ConfigurationBase):
-    def __init__(self, observation_id: int, stop_time: datetime, antenna_mask: Sequence[int], filter: str,
-                 SAPs: Sequence[Sap],
-                 tile_beam: Pointing = None, first_beamlet: int = 0):
+    def __init__(
+        self,
+        observation_id: int,
+        stop_time: datetime,
+        antenna_mask: Sequence[int],
+        filter: str,
+        SAPs: Sequence[Sap],
+        tile_beam: Pointing = None,
+        first_beamlet: int = 0,
+    ):
         self.observation_id = observation_id
         self.stop_time = stop_time
         self.antenna_mask = antenna_mask
@@ -28,19 +31,23 @@ class ObservationSettings(_ConfigurationBase):
     def __iter__(self):
         yield from {
             "observation_id": self.observation_id,
-            "stop_time"     : self.stop_time.isoformat(),
-            "antenna_mask"  : self.antenna_mask,
-            "filter"        : self.filter,
-            "SAPs"          : [dict(s) for s in self.SAPs]
+            "stop_time": self.stop_time.isoformat(),
+            "antenna_mask": self.antenna_mask,
+            "filter": self.filter,
+            "SAPs": [dict(s) for s in self.SAPs],
         }.items()
         if self.tile_beam:
             yield "tile_beam", dict(self.tile_beam)
         yield "first_beamlet", self.first_beamlet
 
     @staticmethod
-    def to_object(json_dct) -> 'ObservationSettings':
-        return ObservationSettings(json_dct['observation_id'], datetime.fromisoformat(json_dct['stop_time']),
-                                   json_dct['antenna_mask'],
-                                   json_dct['filter'], json_dct['SAPs'],
-                                   json_dct['tile_beam'] if 'tile_beam' in json_dct else None,
-                                   json_dct['first_beamlet'] if 'first_beamlet' in json_dct else 0)
+    def to_object(json_dct) -> "ObservationSettings":
+        return ObservationSettings(
+            json_dct["observation_id"],
+            datetime.fromisoformat(json_dct["stop_time"]),
+            json_dct["antenna_mask"],
+            json_dct["filter"],
+            json_dct["SAPs"],
+            json_dct["tile_beam"] if "tile_beam" in json_dct else None,
+            json_dct["first_beamlet"] if "first_beamlet" in json_dct else 0,
+        )
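+
+
+# Illustrative JSON accepted by ObservationSettings.from_json / to_object
+# (field values below are made up for the example):
+#
+#     {
+#         "observation_id": 12345,
+#         "stop_time": "2023-01-01T00:00:00",
+#         "antenna_mask": [0, 1, 2],
+#         "filter": "HBA_110_190",
+#         "SAPs": [{"subbands": [10, 20, 30], "pointing": {...}}]
+#     }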
diff --git a/tangostationcontrol/tangostationcontrol/configuration/pointing.py b/tangostationcontrol/tangostationcontrol/configuration/pointing.py
index 8e7933f6f6d9a375f6e51e7b502ea5e2eb4f54e8..dcbea62a49a8741013307ca0c8b7c21897ca2a06 100644
--- a/tangostationcontrol/tangostationcontrol/configuration/pointing.py
+++ b/tangostationcontrol/tangostationcontrol/configuration/pointing.py
@@ -1,9 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR2.0 Station Control project.
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tangostationcontrol.configuration.configuration_base import _ConfigurationBase
 
@@ -11,18 +7,25 @@ from tangostationcontrol.configuration.configuration_base import _ConfigurationB
 class Pointing(_ConfigurationBase):
     VALIDATOR = None
 
-    def __init__(self, angle1=0.6624317181687094, angle2=1.5579526427549426, direction_type="J2000"):
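+    # Note (an inference, not stated in the code): the default angles are
+    # roughly RA 37.95 deg, Dec 89.26 deg (J2000), i.e. a pointing close to
+    # Polaris near the celestial north pole.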
+    def __init__(
+        self,
+        angle1=0.6624317181687094,
+        angle2=1.5579526427549426,
+        direction_type="J2000",
+    ):
         self.angle1 = angle1
         self.angle2 = angle2
         self.direction_type = direction_type
 
     def __iter__(self):
         yield from {
-            "angle1"        : self.angle1,
-            "angle2"        : self.angle2,
-            "direction_type": self.direction_type
+            "angle1": self.angle1,
+            "angle2": self.angle2,
+            "direction_type": self.direction_type,
         }.items()
 
     @staticmethod
-    def to_object(json_dct) -> 'Pointing':
-        return Pointing(json_dct['angle1'], json_dct['angle2'], json_dct['direction_type'])
+    def to_object(json_dct) -> "Pointing":
+        return Pointing(
+            json_dct["angle1"], json_dct["angle2"], json_dct["direction_type"]
+        )
diff --git a/tangostationcontrol/tangostationcontrol/configuration/sap.py b/tangostationcontrol/tangostationcontrol/configuration/sap.py
index d74e12a88867b91d31cec4e19d338a444eaa4c4c..f691128017591b8de28fd324229f6d16086b68c1 100644
--- a/tangostationcontrol/tangostationcontrol/configuration/sap.py
+++ b/tangostationcontrol/tangostationcontrol/configuration/sap.py
@@ -1,9 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR2.0 Station Control project.
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tangostationcontrol.configuration.configuration_base import _ConfigurationBase
 from tangostationcontrol.configuration.pointing import Pointing
@@ -15,11 +11,8 @@ class Sap(_ConfigurationBase):
         self.pointing = pointing
 
     def __iter__(self):
-        yield from {
-            "subbands": self.subbands,
-            "pointing": dict(self.pointing)
-        }.items()
+        yield from {"subbands": self.subbands, "pointing": dict(self.pointing)}.items()
 
     @staticmethod
-    def to_object(json_dct) -> 'Sap':
-        return Sap(json_dct['subbands'], json_dct['pointing'])
+    def to_object(json_dct) -> "Sap":
+        return Sap(json_dct["subbands"], json_dct["pointing"])
diff --git a/tangostationcontrol/tangostationcontrol/devices/__init__.py b/tangostationcontrol/tangostationcontrol/devices/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/devices/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/devices/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/devices/antennafield.py b/tangostationcontrol/tangostationcontrol/devices/antennafield.py
index d32ed2a6e7b1965986c50710e3010782434afe59..c5f620429d44ba98488c4ba2e63c3053428f5ca3 100644
--- a/tangostationcontrol/tangostationcontrol/devices/antennafield.py
+++ b/tangostationcontrol/tangostationcontrol/devices/antennafield.py
@@ -10,10 +10,15 @@ from math import pi
 from typing import List
 
 import numpy
+
 # PyTango imports
 from tango import (
-    DeviceProxy, DevSource, AttrWriteType, DevVarFloatArray,
-    DevVarLongArray, DebugIt
+    DeviceProxy,
+    DevSource,
+    AttrWriteType,
+    DevVarFloatArray,
+    DevVarLongArray,
+    DebugIt,
 )
 from tango.server import device_property, attribute, command
 
@@ -26,13 +31,22 @@ from tangostationcontrol.common.cables import cable_types
 from tangostationcontrol.common.calibration import delay_compensation
 from tangostationcontrol.common.calibration import loss_compensation
 from tangostationcontrol.common.constants import (
-    N_elements, MAX_ANTENNA, N_pol, N_xyz,
-    N_latlong, N_rcu, N_rcu_inp, N_pn, S_pn, N_subbands, VALUES_PER_COMPLEX
+    N_elements,
+    MAX_ANTENNA,
+    N_pol,
+    N_xyz,
+    N_latlong,
+    N_rcu,
+    N_rcu_inp,
+    N_pn,
+    S_pn,
+    N_subbands,
+    VALUES_PER_COMPLEX,
 )
 from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.common.lofar_logging import (
     device_logging_to_python,
-    log_exceptions
+    log_exceptions,
 )
 from tangostationcontrol.common.states import DEFAULT_COMMAND_STATES
 from tangostationcontrol.common.type_checking import type_not_sequence
@@ -40,7 +54,7 @@ from tangostationcontrol.devices.device_decorators import fault_on_error, only_i
 from tangostationcontrol.devices.lofar_device import LOFARDevice
 from tangostationcontrol.devices.sdp.common import (
     subband_frequency,
-    real_imag_to_weights
+    real_imag_to_weights,
 )
 from tangostationcontrol.devices.sdp.sdp import SDP
 
@@ -64,18 +78,24 @@ class AntennaQuality(IntEnum):
 
 class MappedAttribute(attribute):
     def __init__(
-            self, mapping_attribute, dtype, max_dim_x, max_dim_y=0,
-            access=AttrWriteType.READ, **kwargs
+        self,
+        mapping_attribute,
+        dtype,
+        max_dim_x,
+        max_dim_y=0,
+        access=AttrWriteType.READ,
+        **kwargs,
     ):
 
         if access == AttrWriteType.READ_WRITE:
+
             @fault_on_error()
             def write_func_wrapper(device, value):
                 cast_type = dtype
                 while not type_not_sequence(cast_type):
                     cast_type = cast_type[0]
                 write_func = device.set_mapped_attribute(
-                        mapping_attribute, value, cast_type
+                    mapping_attribute, value, cast_type
                 )
 
             self.fset = write_func_wrapper
@@ -87,458 +107,510 @@ class MappedAttribute(attribute):
         self.fget = read_func_wrapper
 
         super().__init__(
-                dtype=dtype, max_dim_y=max_dim_y, max_dim_x=max_dim_x, access=access,
-                fisallowed="is_attribute_access_allowed", **kwargs
+            dtype=dtype,
+            max_dim_y=max_dim_y,
+            max_dim_x=max_dim_x,
+            access=access,
+            fisallowed="is_attribute_access_allowed",
+            **kwargs,
         )
 
 
 @device_logging_to_python()
 class AntennaField(LOFARDevice):
-    """ Manages the antennas in a single antenna field, by acting as a
-        a mapping onto one or more RECV devices.
-
-        The antenna field models a number of antennas, each of which
-        carries:
-          * a position in Antenna_Reference_ETRS/ITRF,
-          * a control mapping onto an RCU in Control_to_RECV_mapping,
-          * a power mapping onto an RCU in Power_to_RECV_mapping.
-
-        Furthermore, a central field reference position for the
-        antenna field is maintained in Antenna_Field_Reference_ETRS/ITRF.
-
-        For each position, it is best to provide the ETRS [x,y,z] position,
-        although this can be overruled through supplying an ITRF [x,y,z] position
-        instead. If not, the ITRF position is calculated by extrapolating the
-        tectonic shifts from the ETRS position, into the frame and epoch as
-        configured in ITRF_Reference_Frame and ITRF_Reference_Epoch.
-
-        From the ITRF positions, the geographical [lat,long] positions are
-        calculated, as well as the geohash.
+    """Manages the antennas in a single antenna field, by acting as a
+    a mapping onto one or more RECV devices.
+
+    The antenna field models a number of antennas, each of which
+    carries:
+      * a position in Antenna_Reference_ETRS/ITRF,
+      * a control mapping onto an RCU in Control_to_RECV_mapping,
+      * a power mapping onto an RCU in Power_to_RECV_mapping.
+
+    Furthermore, a central field reference position for the
+    antenna field is maintained in Antenna_Field_Reference_ETRS/ITRF.
+
+    For each position, it is best to provide the ETRS [x,y,z] position,
+    although this can be overridden by supplying an ITRF [x,y,z] position
+    instead. If no ITRF position is given, it is calculated by extrapolating
+    the tectonic shifts from the ETRS position into the frame and epoch
+    configured in ITRF_Reference_Frame and ITRF_Reference_Epoch.
+
+    From the ITRF positions, the geographical [lat,long] positions are
+    calculated, as well as the geohash.
     """
 
     # ----- Antenna names
 
     Antenna_Names = device_property(
-            doc="Name of each antenna",
-            dtype='DevVarStringArray',
-            mandatory=False,
-            default_value=[f'Antenna{n + 1}' for n in range(MAX_ANTENNA)]
+        doc="Name of each antenna",
+        dtype="DevVarStringArray",
+        mandatory=False,
+        default_value=[f"Antenna{n + 1}" for n in range(MAX_ANTENNA)],
     )
 
     # ----- Antenna states
 
     Antenna_Quality = device_property(
-            doc="Operational quality state of each antenna",
-            dtype='DevVarUShortArray',
-            mandatory=False,
-            default_value=numpy.array([AntennaQuality.OK] * MAX_ANTENNA)
+        doc="Operational quality state of each antenna",
+        dtype="DevVarUShortArray",
+        mandatory=False,
+        default_value=numpy.array([AntennaQuality.OK] * MAX_ANTENNA),
     )
 
     Antenna_Use = device_property(
-            doc="Operational State of each antenna",
-            dtype='DevVarUShortArray',
-            mandatory=False,
-            default_value=numpy.array([AntennaUse.AUTO] * MAX_ANTENNA)
+        doc="Operational State of each antenna",
+        dtype="DevVarUShortArray",
+        mandatory=False,
+        default_value=numpy.array([AntennaUse.AUTO] * MAX_ANTENNA),
     )
 
     # ----- Antenna properties
 
     Antenna_Type = device_property(
-            doc="Type of antenna in this field (LBA or HBA)",
-            dtype='DevString',
-            mandatory=False,
-            default_value="LBA"
+        doc="Type of antenna in this field (LBA or HBA)",
+        dtype="DevString",
+        mandatory=False,
+        default_value="LBA",
     )
 
     Antenna_Needs_Power = device_property(
-            doc="Whether to provide power to each antenna (False for noise sources)",
-            dtype='DevVarBooleanArray',
-            mandatory=False,
-            default_value=numpy.array([False] * MAX_ANTENNA)
+        doc="Whether to provide power to each antenna (False for noise sources)",
+        dtype="DevVarBooleanArray",
+        mandatory=False,
+        default_value=numpy.array([False] * MAX_ANTENNA),
     )
 
     Antenna_Cables = device_property(
-            doc=f"Which cables connect each antenna to the RCU. Both polarisations are "
-                f"assumed to be connected using the same type of cable. Needs to be "
-                f"any of ({', '.join(cable_types.keys())}).",
-            dtype='DevVarStringArray',
-            mandatory=False,
-            default_value=numpy.array(["0m"] * MAX_ANTENNA)
+        doc=f"Which cables connect each antenna to the RCU. Both polarisations are "
+        f"assumed to be connected using the same type of cable. Needs to be "
+        f"any of ({', '.join(cable_types.keys())}).",
+        dtype="DevVarStringArray",
+        mandatory=False,
+        default_value=numpy.array(["0m"] * MAX_ANTENNA),
     )
 
     Field_Attenuation = device_property(
-            doc=f"Attenuation value to apply on all inputs.",
-            dtype='DevFloat',
-            mandatory=False,
-            default_value=0.0
+        doc=f"Attenuation value to apply on all inputs.",
+        dtype="DevFloat",
+        mandatory=False,
+        default_value=0.0,
     )
 
     Calibration_SDP_Subband_Weights_50MHz = device_property(
-            doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW "
-                f"columns of each polarisation of each antenna, at 50 MHz. Each "
-                f"polarisation is represented by a (real, imag) pair for every "
-                f"subband.",
-            dtype='DevVarFloatArray',
-            mandatory=False
+        doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW "
+        f"columns of each polarisation of each antenna, at 50 MHz. Each "
+        f"polarisation is represented by a (real, imag) pair for every "
+        f"subband.",
+        dtype="DevVarFloatArray",
+        mandatory=False,
     )
 
     Calibration_SDP_Subband_Weights_150MHz = device_property(
-            doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW "
-                f"columns of each polarisation of each antenna, at 150 MHz. Each "
-                f"polarisation is represented by a (real, imag) pair for every "
-                f"subband.",
-            dtype='DevVarFloatArray',
-            mandatory=False
+        doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW "
+        f"columns of each polarisation of each antenna, at 150 MHz. Each "
+        f"polarisation is represented by a (real, imag) pair for every "
+        f"subband.",
+        dtype="DevVarFloatArray",
+        mandatory=False,
     )
 
     Calibration_SDP_Subband_Weights_200MHz = device_property(
-            doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW "
-                f"columns of each polarisation of each antenna, at 200 MHz. Each "
-                f"polarisation is represented by a (real, imag) pair for every "
-                f"subband.",
-            dtype='DevVarFloatArray',
-            mandatory=False
+        doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW "
+        f"columns of each polarisation of each antenna, at 200 MHz. Each "
+        f"polarisation is represented by a (real, imag) pair for every "
+        f"subband.",
+        dtype="DevVarFloatArray",
+        mandatory=False,
     )
 
     Calibration_SDP_Subband_Weights_250MHz = device_property(
-            doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW "
-                f"columns of each polarisation of each antenna, at 250 MHz. Each "
-                f"polarisation is represented by a (real, imag) pair for every "
-                f"subband.",
-            dtype='DevVarFloatArray',
-            mandatory=False
+        doc=f"Measured calibration values for the sdp.FPGA_subband_weights_RW "
+        f"columns of each polarisation of each antenna, at 250 MHz. Each "
+        f"polarisation is represented by a (real, imag) pair for every "
+        f"subband.",
+        dtype="DevVarFloatArray",
+        mandatory=False,
     )
 
     # ----- Position information
 
     Antenna_Field_Reference_ITRF = device_property(
-            doc="ITRF position (XYZ) of each antenna field (leave empty to auto-derive "
-                "from ETRS)",
-            dtype='DevVarFloatArray',
-            mandatory=False
+        doc="ITRF position (XYZ) of each antenna field (leave empty to auto-derive "
+        "from ETRS)",
+        dtype="DevVarFloatArray",
+        mandatory=False,
     )
 
     Antenna_Field_Reference_ETRS = device_property(
-            doc="ETRS position (XYZ) of each antenna field",
-            dtype='DevVarFloatArray',
-            mandatory=False
+        doc="ETRS position (XYZ) of each antenna field",
+        dtype="DevVarFloatArray",
+        mandatory=False,
     )
 
     Antenna_Reference_ITRF = device_property(
-            doc="ITRF position (XYZ) of each Antenna (leave empty to auto-derive from "
-                "ETRS)",
-            dtype='DevVarFloatArray',
-            mandatory=False
+        doc="ITRF position (XYZ) of each Antenna (leave empty to auto-derive from "
+        "ETRS)",
+        dtype="DevVarFloatArray",
+        mandatory=False,
     )
 
     Antenna_Reference_ETRS = device_property(
-            doc="ETRS position (XYZ) of each Antenna",
-            dtype='DevVarFloatArray',
-            mandatory=False
+        doc="ETRS position (XYZ) of each Antenna",
+        dtype="DevVarFloatArray",
+        mandatory=False,
     )
 
     ITRF_Reference_Frame = device_property(
-            doc="Reference frame in which the ITRF coordinates are provided, or are to "
-                "be computed from ETRS89",
-            dtype='DevString',
-            mandatory=False,
-            default_value="ITRF2005"
+        doc="Reference frame in which the ITRF coordinates are provided, or are to "
+        "be computed from ETRS89",
+        dtype="DevString",
+        mandatory=False,
+        default_value="ITRF2005",
     )
 
     ITRF_Reference_Epoch = device_property(
-            doc="Reference epoch in which the ITRF coordinates are provided, or are to "
-                "be extrapolated from ETRS89",
-            dtype='DevFloat',
-            mandatory=False,
-            default_value=2015.5
+        doc="Reference epoch in which the ITRF coordinates are provided, or are to "
+        "be extrapolated from ETRS89",
+        dtype="DevFloat",
+        mandatory=False,
+        default_value=2015.5,
     )
 
     HBAT_PQR_rotation_angles_deg = device_property(
-            doc='Rotation of each tile in the PQ plane ("horizontal") in degrees.',
-            dtype='DevVarFloatArray',
-            mandatory=False,
-            default_value=[0.0] * MAX_ANTENNA
+        doc='Rotation of each tile in the PQ plane ("horizontal") in degrees.',
+        dtype="DevVarFloatArray",
+        mandatory=False,
+        default_value=[0.0] * MAX_ANTENNA,
     )
 
     PQR_to_ETRS_rotation_matrix = device_property(
-            doc="Field-specific rotation matrix to convert PQR offsets to ETRS/ITRF "
-                "offsets.",
-            dtype='DevVarFloatArray',
-            mandatory=False,
-            default_value=numpy.array(
-                    [  # PQR->ETRS rotation matrix for the core stations
-                        [-0.1195951054, -0.7919544517, 0.5987530018],
-                        [0.9928227484, -0.0954186800, 0.0720990002],
-                        [0.0000330969, 0.6030782884, 0.7976820024]]
-            ).flatten()
+        doc="Field-specific rotation matrix to convert PQR offsets to ETRS/ITRF "
+        "offsets.",
+        dtype="DevVarFloatArray",
+        mandatory=False,
+        default_value=numpy.array(
+            [  # PQR->ETRS rotation matrix for the core stations
+                [-0.1195951054, -0.7919544517, 0.5987530018],
+                [0.9928227484, -0.0954186800, 0.0720990002],
+                [0.0000330969, 0.6030782884, 0.7976820024],
+            ]
+        ).flatten(),
     )
 
     HBAT_base_antenna_offsets = device_property(
-            doc="Offsets of the antennas in a HBAT, with respect to its reference "
-                "center (16x3).",
-            dtype='DevVarFloatArray',
-            mandatory=False,
-            default_value=HBATAntennaOffsets.HBAT1_BASE_ANTENNA_OFFSETS.flatten()
+        doc="Offsets of the antennas in a HBAT, with respect to its reference "
+        "center (16x3).",
+        dtype="DevVarFloatArray",
+        mandatory=False,
+        default_value=HBATAntennaOffsets.HBAT1_BASE_ANTENNA_OFFSETS.flatten(),
     )
 
     # ----- SDP mapping
 
     Antenna_to_SDP_Mapping = device_property(
-            dtype=(numpy.int32,),
-            doc='The mapping of Antennas to FPGA input pairs. Each FPGA can handle 6 '
-                'inputs, and SDP has 16 FPGAs. Each antenna is represented with a '
-                '(fpga, input) value pair. The array is flattened, so must be reshaped '
-                'upon use. An input=-1 means the antenna is unconnected.',
-            default_value=numpy.array([-1] * MAX_ANTENNA * 2, dtype=numpy.int32)
+        dtype=(numpy.int32,),
+        doc="The mapping of Antennas to FPGA input pairs. Each FPGA can handle 6 "
+        "inputs, and SDP has 16 FPGAs. Each antenna is represented with a "
+        "(fpga, input) value pair. The array is flattened, so must be reshaped "
+        "upon use. An input=-1 means the antenna is unconnected.",
+        default_value=numpy.array([-1] * MAX_ANTENNA * 2, dtype=numpy.int32),
     )
 
     SDP_device = device_property(
-            dtype=str,
-            doc='Which SDP device is processing this AntennaField.',
-            mandatory=False,
-            default_value="STAT/SDP/1"
+        dtype=str,
+        doc="Which SDP device is processing this AntennaField.",
+        mandatory=False,
+        default_value="STAT/SDP/1",
     )
 
     # ----- RECV mapping
 
     Power_to_RECV_mapping = device_property(
-            dtype=(numpy.int32,),
-            doc='The mapping of Antenna power lines to RECV mapping. Each RECV can '
-                'handle 96 inputs. The Antenna number is the index and the value shows '
-                'to which receiver device it is connected and on which input. The '
-                'first integer is the input. The second integer is the RECV id. '
-                'Example: [0, 3] = first receiver of property RECV_devices with '
-                'input 3. -1 means that the Antenna is not connected. The property is '
-                'stored in a one dimensional structure. It needs to be reshaped to a '
-                'list of lists of two items.',
-            mandatory=False,
-            default_value=[-1] * MAX_ANTENNA * 2
+        dtype=(numpy.int32,),
+        doc="The mapping of Antenna power lines to RECV mapping. Each RECV can "
+        "handle 96 inputs. The Antenna number is the index and the value shows "
+        "to which receiver device it is connected and on which input. The "
+        "first integer is the input. The second integer is the RECV id. "
+        "Example: [0, 3] = first receiver of property RECV_devices with "
+        "input 3. -1 means that the Antenna is not connected. The property is "
+        "stored in a one dimensional structure. It needs to be reshaped to a "
+        "list of lists of two items.",
+        mandatory=False,
+        default_value=[-1] * MAX_ANTENNA * 2,
     )
 
     Control_to_RECV_mapping = device_property(
-            dtype=(numpy.int32,),
-            doc='The mapping of Antenna control lines to RECV mapping. Each RECV can '
-                'handle 96 inputs. The Antenna number is the index and the value shows '
-                'to which receiver device it is connected and on which input. The '
-                'first integer is the input. The second interger is the RECV id. '
-                'Example: [1, 3] = STAT/RECV/1 with input 3. -1 means that the Antenna '
-                'is not connected. The property is stored in a one dimensional '
-                'structure. It needs to be reshaped to a list of lists of two items.',
-            mandatory=False,
-            default_value=[-1] * MAX_ANTENNA * 2
+        dtype=(numpy.int32,),
+        doc="The mapping of Antenna control lines to RECV mapping. Each RECV can "
+        "handle 96 inputs. The Antenna number is the index and the value shows "
+        "to which receiver device it is connected and on which input. The "
+        "first integer is the input. The second interger is the RECV id. "
+        "Example: [1, 3] = STAT/RECV/1 with input 3. -1 means that the Antenna "
+        "is not connected. The property is stored in a one dimensional "
+        "structure. It needs to be reshaped to a list of lists of two items.",
+        mandatory=False,
+        default_value=[-1] * MAX_ANTENNA * 2,
     )
 
     RECV_devices = device_property(
-            dtype=(str,),
-            doc='Which Recv devices are in use by the AntennaField. The order is '
-                'important and it should match up with the order of the mapping.',
-            mandatory=False,
-            default_value=[]
+        dtype=(str,),
+        doc="Which Recv devices are in use by the AntennaField. The order is "
+        "important and it should match up with the order of the mapping.",
+        mandatory=False,
+        default_value=[],
     )
 
     # ----- Generic information
 
     Antenna_Type_R = attribute(
-            doc='The type of antenna in this field (LBA or HBA).',
-            dtype=str
+        doc="The type of antenna in this field (LBA or HBA).", dtype=str
     )
     Antenna_Names_R = attribute(
-            access=AttrWriteType.READ,
-            dtype=(str,), max_dim_x=MAX_ANTENNA
+        access=AttrWriteType.READ, dtype=(str,), max_dim_x=MAX_ANTENNA
     )
     Antenna_to_SDP_Mapping_R = attribute(
-            doc='To which (fpga, input) pair each antenna is connected. '
-                '-1=unconnected.',
-            dtype=((numpy.int32,),), max_dim_x=N_pol, max_dim_y=MAX_ANTENNA
+        doc="To which (fpga, input) pair each antenna is connected. " "-1=unconnected.",
+        dtype=((numpy.int32,),),
+        max_dim_x=N_pol,
+        max_dim_y=MAX_ANTENNA,
     )
 
     # ----- Cable information (between antenna and RCU)
 
     Antenna_Cables_R = attribute(
-            doc=f"Which cables connect each antenna to the RCU. Both polarisations are "
-                f"assumed to be connected using the same type of cable. Needs to be "
-                f"any of ({', '.join(cable_types.keys())}).",
-            dtype=(str,), max_dim_x=MAX_ANTENNA
+        doc=f"Which cables connect each antenna to the RCU. Both polarisations are "
+        f"assumed to be connected using the same type of cable. Needs to be "
+        f"any of ({', '.join(cable_types.keys())}).",
+        dtype=(str,),
+        max_dim_x=MAX_ANTENNA,
     )
     Antenna_Cables_Delay_R = attribute(
-            doc=f"Delay caused by the cable between antenna and RCU, in seconds.",
-            dtype=(numpy.float64,), max_dim_x=MAX_ANTENNA, unit="s"
+        doc=f"Delay caused by the cable between antenna and RCU, in seconds.",
+        dtype=(numpy.float64,),
+        max_dim_x=MAX_ANTENNA,
+        unit="s",
     )
     Antenna_Cables_Loss_R = attribute(
-            doc=f"Loss caused by the cable between antenna and RCU, in dB.",
-            dtype=(numpy.float64,), max_dim_x=MAX_ANTENNA, unit="dB"
+        doc=f"Loss caused by the cable between antenna and RCU, in dB.",
+        dtype=(numpy.float64,),
+        max_dim_x=MAX_ANTENNA,
+        unit="dB",
     )
 
     # ----- Calibration information
 
     Calibration_SDP_Signal_Input_Samples_Delay_R = attribute(
-            doc=f"Number of samples that each antenna signal should be delayed to line "
-                f"up. To be applied on sdp.FPGA_signal_input_samples_delay_RW.",
-            dtype=(numpy.uint32,), max_dim_x=MAX_ANTENNA, unit="samples"
+        doc=f"Number of samples that each antenna signal should be delayed to line "
+        f"up. To be applied on sdp.FPGA_signal_input_samples_delay_RW.",
+        dtype=(numpy.uint32,),
+        max_dim_x=MAX_ANTENNA,
+        unit="samples",
     )
     Calibration_RCU_Attenuation_dB_R = attribute(
-            doc=f"Amount of dB with which each antenna signal must be adjusted to line "
-                f"up. To be applied on recv.RCU_attenuator_dB_RW.",
-            dtype=(numpy.uint32,), max_dim_x=MAX_ANTENNA, unit="dB"
+        doc=f"Amount of dB with which each antenna signal must be adjusted to line "
+        f"up. To be applied on recv.RCU_attenuator_dB_RW.",
+        dtype=(numpy.uint32,),
+        max_dim_x=MAX_ANTENNA,
+        unit="dB",
     )
     Calibration_SDP_Fine_Calibration_Default_R = attribute(
-            doc=f"Computed calibration values for the fine calibration of each "
-                f"antenna. Each antenna is represented by a (delay, phase_offset, "
-                f"amplitude_scaling) triplet.",
-            dtype=((numpy.float64,),), max_dim_y=MAX_ANTENNA * N_pol, max_dim_x=3
+        doc=f"Computed calibration values for the fine calibration of each "
+        f"antenna. Each antenna is represented by a (delay, phase_offset, "
+        f"amplitude_scaling) triplet.",
+        dtype=((numpy.float64,),),
+        max_dim_y=MAX_ANTENNA * N_pol,
+        max_dim_x=3,
     )
     Calibration_SDP_Subband_Weights_Default_R = attribute(
-            doc=f"Calibration values for the rows in sdp.FPGA_subband_weights_RW "
-                f"relevant for our antennas, as computed. Each subband of each "
-                f"polarisation of each antenna is represented by a real_imag number "
-                f"(real, imag).",
-            dtype=((numpy.float64,),), max_dim_y=MAX_ANTENNA * N_pol,
-            max_dim_x=N_subbands * VALUES_PER_COMPLEX
+        doc=f"Calibration values for the rows in sdp.FPGA_subband_weights_RW "
+        f"relevant for our antennas, as computed. Each subband of each "
+        f"polarisation of each antenna is represented by a real_imag number "
+        f"(real, imag).",
+        dtype=((numpy.float64,),),
+        max_dim_y=MAX_ANTENNA * N_pol,
+        max_dim_x=N_subbands * VALUES_PER_COMPLEX,
     )
     Calibration_SDP_Subband_Weights_R = attribute(
-            doc=f"Calibration values for the rows in sdp.FPGA_subband_weights_RW "
-                f"relevant for our antennas. Each subband of each polarisation of "
-                f"each antenna is represented by a real_imag number (real, imag). "
-                f"Returns the measured values from "
-                f"Calibration_SDP_Subband_Weights_XXXMHz if available, and values "
-                f"computed from Calibration_SDP_Fine_Calibration_Default_R otherwise.",
-            dtype=((numpy.float64,),), max_dim_y=MAX_ANTENNA * N_pol,
-            max_dim_x=N_subbands * VALUES_PER_COMPLEX
+        doc=f"Calibration values for the rows in sdp.FPGA_subband_weights_RW "
+        f"relevant for our antennas. Each subband of each polarisation of "
+        f"each antenna is represented by a real_imag number (real, imag). "
+        f"Returns the measured values from "
+        f"Calibration_SDP_Subband_Weights_XXXMHz if available, and values "
+        f"computed from Calibration_SDP_Fine_Calibration_Default_R otherwise.",
+        dtype=((numpy.float64,),),
+        max_dim_y=MAX_ANTENNA * N_pol,
+        max_dim_x=N_subbands * VALUES_PER_COMPLEX,
     )
 
     # ----- Quality and usage information
 
     Antenna_Quality_R = attribute(
-            doc='The quality of each antenna. '
-                '0=OK, 1=SUSPICIOUS, 2=BROKEN, 3=BEYOND_REPAIR.',
-            dtype=(numpy.uint32,), max_dim_x=MAX_ANTENNA
+        doc="The quality of each antenna. "
+        "0=OK, 1=SUSPICIOUS, 2=BROKEN, 3=BEYOND_REPAIR.",
+        dtype=(numpy.uint32,),
+        max_dim_x=MAX_ANTENNA,
     )
     Antenna_Use_R = attribute(
-            doc='Whether each antenna should be used. 0=AUTO, 1=ON, 2=OFF. In AUTO '
-                'mode, the antenna is used if it is not BROKEN or BEYOND_REPAIR.',
-            dtype=(numpy.uint32,), max_dim_x=MAX_ANTENNA
+        doc="Whether each antenna should be used. 0=AUTO, 1=ON, 2=OFF. In AUTO "
+        "mode, the antenna is used if it is not BROKEN or BEYOND_REPAIR.",
+        dtype=(numpy.uint32,),
+        max_dim_x=MAX_ANTENNA,
     )
     Antenna_Quality_str_R = attribute(
-            doc='The quality of each antenna, as a string.',
-            dtype=(str,), max_dim_x=MAX_ANTENNA
+        doc="The quality of each antenna, as a string.",
+        dtype=(str,),
+        max_dim_x=MAX_ANTENNA,
     )
     Antenna_Use_str_R = attribute(
-            doc='Whether each antenna should be used, as a string.',
-            dtype=(str,), max_dim_x=MAX_ANTENNA
+        doc="Whether each antenna should be used, as a string.",
+        dtype=(str,),
+        max_dim_x=MAX_ANTENNA,
     )
     Antenna_Usage_Mask_R = attribute(
-            doc='Whether each antenna will be used.',
-            dtype=(bool,), max_dim_x=MAX_ANTENNA
+        doc="Whether each antenna will be used.", dtype=(bool,), max_dim_x=MAX_ANTENNA
     )
 
     # ----- Attributes mapped on RECV
 
     ANT_mask_RW = MappedAttribute(
-            "ANT_mask_RW", dtype=(bool,), max_dim_x=MAX_ANTENNA,
-            access=AttrWriteType.READ_WRITE
+        "ANT_mask_RW",
+        dtype=(bool,),
+        max_dim_x=MAX_ANTENNA,
+        access=AttrWriteType.READ_WRITE,
     )
     RCU_PWR_ANT_on_R = MappedAttribute(
-            "RCU_PWR_ANT_on_R", dtype=(bool,), max_dim_x=MAX_ANTENNA
+        "RCU_PWR_ANT_on_R", dtype=(bool,), max_dim_x=MAX_ANTENNA
     )
     RCU_PWR_ANT_on_RW = MappedAttribute(
-            "RCU_PWR_ANT_on_RW", dtype=(bool,), max_dim_x=MAX_ANTENNA,
-            access=AttrWriteType.READ_WRITE
+        "RCU_PWR_ANT_on_RW",
+        dtype=(bool,),
+        max_dim_x=MAX_ANTENNA,
+        access=AttrWriteType.READ_WRITE,
     )
     HBAT_BF_delay_steps_R = MappedAttribute(
-            "HBAT_BF_delay_steps_R", dtype=((numpy.int64,),),
-            max_dim_x=N_elements * N_pol, max_dim_y=MAX_ANTENNA
+        "HBAT_BF_delay_steps_R",
+        dtype=((numpy.int64,),),
+        max_dim_x=N_elements * N_pol,
+        max_dim_y=MAX_ANTENNA,
     )
     HBAT_BF_delay_steps_RW = MappedAttribute(
-            "HBAT_BF_delay_steps_RW", dtype=((numpy.int64,),),
-            max_dim_x=N_elements * N_pol, max_dim_y=MAX_ANTENNA,
-            access=AttrWriteType.READ_WRITE
+        "HBAT_BF_delay_steps_RW",
+        dtype=((numpy.int64,),),
+        max_dim_x=N_elements * N_pol,
+        max_dim_y=MAX_ANTENNA,
+        access=AttrWriteType.READ_WRITE,
     )
     HBAT_LED_on_R = MappedAttribute(
-            "HBAT_LED_on_R", dtype=((bool,),), max_dim_x=N_elements * N_pol,
-            max_dim_y=MAX_ANTENNA
+        "HBAT_LED_on_R",
+        dtype=((bool,),),
+        max_dim_x=N_elements * N_pol,
+        max_dim_y=MAX_ANTENNA,
     )
     HBAT_LED_on_RW = MappedAttribute(
-            "HBAT_LED_on_RW", dtype=((bool,),), max_dim_x=N_elements * N_pol,
-            max_dim_y=MAX_ANTENNA, access=AttrWriteType.READ_WRITE
+        "HBAT_LED_on_RW",
+        dtype=((bool,),),
+        max_dim_x=N_elements * N_pol,
+        max_dim_y=MAX_ANTENNA,
+        access=AttrWriteType.READ_WRITE,
     )
     HBAT_PWR_LNA_on_R = MappedAttribute(
-            "HBAT_PWR_LNA_on_R", dtype=((bool,),),
-            max_dim_x=N_elements * N_pol, max_dim_y=MAX_ANTENNA
+        "HBAT_PWR_LNA_on_R",
+        dtype=((bool,),),
+        max_dim_x=N_elements * N_pol,
+        max_dim_y=MAX_ANTENNA,
     )
     HBAT_PWR_LNA_on_RW = MappedAttribute(
-            "HBAT_PWR_LNA_on_RW", dtype=((bool,),),
-            max_dim_x=N_elements * N_pol, max_dim_y=MAX_ANTENNA,
-            access=AttrWriteType.READ_WRITE
+        "HBAT_PWR_LNA_on_RW",
+        dtype=((bool,),),
+        max_dim_x=N_elements * N_pol,
+        max_dim_y=MAX_ANTENNA,
+        access=AttrWriteType.READ_WRITE,
     )
     HBAT_PWR_on_R = MappedAttribute(
-            "HBAT_PWR_on_R", dtype=((bool,),), max_dim_x=N_elements * N_pol,
-            max_dim_y=MAX_ANTENNA
+        "HBAT_PWR_on_R",
+        dtype=((bool,),),
+        max_dim_x=N_elements * N_pol,
+        max_dim_y=MAX_ANTENNA,
     )
     HBAT_PWR_on_RW = MappedAttribute(
-            "HBAT_PWR_on_RW", dtype=((bool,),), max_dim_x=N_elements * N_pol,
-            max_dim_y=MAX_ANTENNA, access=AttrWriteType.READ_WRITE
+        "HBAT_PWR_on_RW",
+        dtype=((bool,),),
+        max_dim_x=N_elements * N_pol,
+        max_dim_y=MAX_ANTENNA,
+        access=AttrWriteType.READ_WRITE,
     )
     RCU_band_select_RW = MappedAttribute(
-            "RCU_band_select_RW", dtype=(numpy.int64,), max_dim_x=MAX_ANTENNA,
-            access=AttrWriteType.READ_WRITE
+        "RCU_band_select_RW",
+        dtype=(numpy.int64,),
+        max_dim_x=MAX_ANTENNA,
+        access=AttrWriteType.READ_WRITE,
     )
     RCU_attenuator_dB_RW = MappedAttribute(
-            "RCU_attenuator_dB_RW", dtype=(numpy.int64,), max_dim_x=MAX_ANTENNA,
-            access=AttrWriteType.READ_WRITE
+        "RCU_attenuator_dB_RW",
+        dtype=(numpy.int64,),
+        max_dim_x=MAX_ANTENNA,
+        access=AttrWriteType.READ_WRITE,
     )
 
     # ----- Position information
 
     Antenna_Field_Reference_ITRF_R = attribute(
-            access=AttrWriteType.READ,
-            doc='Absolute reference position of antenna field, in ITRF (XYZ)',
-            dtype=(numpy.float64,), max_dim_x=N_xyz
+        access=AttrWriteType.READ,
+        doc="Absolute reference position of antenna field, in ITRF (XYZ)",
+        dtype=(numpy.float64,),
+        max_dim_x=N_xyz,
     )
 
     Antenna_Field_Reference_GEO_R = attribute(
-            access=AttrWriteType.READ,
-            doc='Absolute reference position of antenna field, '
-                'in latitude/longitude (degrees)',
-            dtype=(numpy.float64,), max_dim_x=N_latlong
+        access=AttrWriteType.READ,
+        doc="Absolute reference position of antenna field, "
+        "in latitude/longitude (degrees)",
+        dtype=(numpy.float64,),
+        max_dim_x=N_latlong,
     )
 
     Antenna_Field_Reference_GEOHASH_R = attribute(
-            access=AttrWriteType.READ,
-            doc='Absolute reference position of antenna field, as a geohash string',
-            dtype=str
+        access=AttrWriteType.READ,
+        doc="Absolute reference position of antenna field, as a geohash string",
+        dtype=str,
     )
 
     HBAT_antenna_ITRF_offsets_R = attribute(
-            access=AttrWriteType.READ,
-            doc='For each tile, the offsets of the antennas within that, '
-                'in ITRF ("iHBADeltas"). True shape: nrtiles x 16 x 3.',
-            dtype=((numpy.float64,),), max_dim_x=MAX_ANTENNA * N_xyz,
-            max_dim_y=MAX_ANTENNA
+        access=AttrWriteType.READ,
+        doc="For each tile, the offsets of the antennas within that, "
+        'in ITRF ("iHBADeltas"). True shape: nrtiles x 16 x 3.',
+        dtype=((numpy.float64,),),
+        max_dim_x=MAX_ANTENNA * N_xyz,
+        max_dim_y=MAX_ANTENNA,
     )
 
     Antenna_Reference_ITRF_R = attribute(
-            access=AttrWriteType.READ,
-            doc='Absolute reference position of each tile, in ITRF (XYZ)',
-            dtype=((numpy.float64,),), max_dim_x=N_xyz, max_dim_y=MAX_ANTENNA
+        access=AttrWriteType.READ,
+        doc="Absolute reference position of each tile, in ITRF (XYZ)",
+        dtype=((numpy.float64,),),
+        max_dim_x=N_xyz,
+        max_dim_y=MAX_ANTENNA,
     )
 
     Antenna_Reference_GEO_R = attribute(
-            access=AttrWriteType.READ,
-            doc='Absolute reference position of each tile, '
-                'in latitude/longitude (degrees)',
-            dtype=((numpy.float64,),), max_dim_x=N_latlong, max_dim_y=MAX_ANTENNA
+        access=AttrWriteType.READ,
+        doc="Absolute reference position of each tile, "
+        "in latitude/longitude (degrees)",
+        dtype=((numpy.float64,),),
+        max_dim_x=N_latlong,
+        max_dim_y=MAX_ANTENNA,
     )
 
     Antenna_Reference_GEOHASH_R = attribute(
-            access=AttrWriteType.READ,
-            doc='Absolute reference position of each tile, as geohash strings',
-            dtype=(str,), max_dim_x=MAX_ANTENNA, )
-
-    nr_antennas_R = attribute(
-            doc='Number of Antennas in this field',
-            dtype=numpy.int32
+        access=AttrWriteType.READ,
+        doc="Absolute reference position of each tile, as geohash strings",
+        dtype=(str,),
+        max_dim_x=MAX_ANTENNA,
     )
 
+    nr_antennas_R = attribute(doc="Number of Antennas in this field", dtype=numpy.int32)
+
     def read_Antenna_Type_R(self):
         return self.Antenna_Type
 
@@ -550,24 +622,26 @@ class AntennaField(LOFARDevice):
 
     def read_Antenna_Cables_Delay_R(self):
         return numpy.array(
-                [cable_types[antenna].delay for antenna in self.Antenna_Cables]
+            [cable_types[antenna].delay for antenna in self.Antenna_Cables]
         )
 
     def read_Antenna_Cables_Loss_R(self):
         rcu_bands = self.read_attribute("RCU_band_select_RW")
 
         control_to_recv_mapping = numpy.array(self.Control_to_RECV_mapping).reshape(
-                -1, 2
+            -1, 2
         )
         recvs = control_to_recv_mapping[:, 0]  # first column is RECV device number
 
         # Unconnected antennas return RCU band 0, which does not exist.
         # Return 0 loss for them instead.
         return numpy.array(
-                [cable_types[cable].get_loss(self.Antenna_Type, rcu_band)
-                 if recv > 0 else 0
-                 for recv, cable, rcu_band in
-                 zip(recvs, self.Antenna_Cables, rcu_bands)]
+            [
+                cable_types[cable].get_loss(self.Antenna_Type, rcu_band)
+                if recv > 0
+                else 0
+                for recv, cable, rcu_band in zip(recvs, self.Antenna_Cables, rcu_bands)
+            ]
         )
 
     def read_Calibration_SDP_Signal_Input_Samples_Delay_R(self):
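# Illustrative sketch (hypothetical, not taken from the patch): how the flattened
# Control_to_RECV_mapping / Power_to_RECV_mapping properties are read, following
# the reshape(-1, 2) and the "first column is RECV device number" convention used
# in the hunk above. MAX_ANTENNA = 96 is assumed here for illustration.
import numpy

MAX_ANTENNA = 96

control_to_recv_mapping = numpy.array([-1] * MAX_ANTENNA * 2).reshape(-1, 2)
control_to_recv_mapping[0] = [1, 3]  # antenna 0: first RECV device, input 3
recvs = control_to_recv_mapping[:, 0]  # RECV device number per antenna
inputs = control_to_recv_mapping[:, 1]  # RCU input per antenna; -1 = unconnected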
@@ -586,36 +660,34 @@ class AntennaField(LOFARDevice):
             # repeat values twice, and restore the shape (with the inner dimension
             # being twice the size now)
             return numpy.dstack((arr, arr)).reshape(
-                    arr.shape[0] * N_pol, *arr.shape[1:]
+                arr.shape[0] * N_pol, *arr.shape[1:]
             )
 
         # ----- Delay
 
         # correct for signal delays in the cables (equal for both polarisations)
         signal_delay_seconds = repeat_per_pol(
-                self.read_attribute("Antenna_Cables_Delay_R")
+            self.read_attribute("Antenna_Cables_Delay_R")
         )
 
         # compute the required compensation
         clock = self.sdp_proxy.clock_RW
         _, input_delay_subsample_seconds = delay_compensation(
-                signal_delay_seconds, clock
+            signal_delay_seconds, clock
         )
 
         # ----- Phase offsets
 
         # we don't have any
         phase_offsets = repeat_per_pol(
-                numpy.zeros(
-                        (self.read_attribute("nr_antennas_R"),), dtype=numpy.float64
-                )
+            numpy.zeros((self.read_attribute("nr_antennas_R"),), dtype=numpy.float64)
         )
 
         # ----- Amplitude
 
         # correct for signal loss in the cables
         signal_delay_loss = repeat_per_pol(
-                self.read_attribute("Antenna_Cables_Loss_R") - self.Field_Attenuation
+            self.read_attribute("Antenna_Cables_Loss_R") - self.Field_Attenuation
         )
 
         # return fine scaling to apply
@@ -623,13 +695,17 @@ class AntennaField(LOFARDevice):
 
         # Return as (delay, phase_offset, amplitude) triplet per polarisation
         return numpy.stack(
-                (input_delay_subsample_seconds, phase_offsets,
-                 input_attenuation_remaining_factor), axis=1
+            (
+                input_delay_subsample_seconds,
+                phase_offsets,
+                input_attenuation_remaining_factor,
+            ),
+            axis=1,
         )
 
     def read_Calibration_SDP_Subband_Weights_Default_R(self):
         delay_phase_amplitude = self.read_attribute(
-                "Calibration_SDP_Fine_Calibration_Default_R"
+            "Calibration_SDP_Fine_Calibration_Default_R"
         )
 
         clock = self.sdp_proxy.clock_RW
@@ -639,8 +715,7 @@ class AntennaField(LOFARDevice):
         antenna_to_sdp_mapping = self.read_attribute("Antenna_to_SDP_Mapping_R")
 
         subband_weights = numpy.zeros(
-                (nr_antennas, N_pol, N_subbands, VALUES_PER_COMPLEX),
-                dtype=numpy.float64
+            (nr_antennas, N_pol, N_subbands, VALUES_PER_COMPLEX), dtype=numpy.float64
         )
 
         # compute real_imag weight for each subband
@@ -651,11 +726,12 @@ class AntennaField(LOFARDevice):
 
             for pol_nr in range(N_pol):
                 delay, phase_offset, amplitude = delay_phase_amplitude[
-                                                 antenna_nr * N_pol + pol_nr, :]
+                    antenna_nr * N_pol + pol_nr, :
+                ]
 
                 for subband_nr in range(N_subbands):
                     frequency = subband_frequency(
-                            subband_nr, clock, nyquist_zone[fpga_nr, input_nr]
+                        subband_nr, clock, nyquist_zone[fpga_nr, input_nr]
                     )
 
                     # turn signal backwards to compensate for the provided delay
@@ -668,12 +744,12 @@ class AntennaField(LOFARDevice):
                     subband_weights[antenna_nr, pol_nr, subband_nr, :] = (real, imag)
 
         return subband_weights.reshape(
-                nr_antennas * N_pol, N_subbands * VALUES_PER_COMPLEX
+            nr_antennas * N_pol, N_subbands * VALUES_PER_COMPLEX
         )
 
     def _rcu_band_to_calibration_table(self) -> dict:
         """
-            Returns the SDP subband weights to apply per RCU band.
+        Returns the SDP subband weights to apply per RCU band.
         """
         nr_antennas = self.read_attribute("nr_antennas_R")
 
@@ -696,7 +772,7 @@ class AntennaField(LOFARDevice):
         # reshape them into their actual form
         for band, caltable in rcu_band_to_caltable.items():
             rcu_band_to_caltable[band] = numpy.array(caltable).reshape(
-                    nr_antennas, N_pol, N_subbands, 2
+                nr_antennas, N_pol, N_subbands, 2
             )
 
         return rcu_band_to_caltable
@@ -708,7 +784,7 @@ class AntennaField(LOFARDevice):
 
         # antenna mapping onto RECV
         control_to_recv_mapping = numpy.array(self.Control_to_RECV_mapping).reshape(
-                -1, 2
+            -1, 2
         )
         recvs = control_to_recv_mapping[:, 0]  # first column is RECV device number
 
@@ -719,8 +795,7 @@ class AntennaField(LOFARDevice):
         # combining the relevant tables.
         nr_antennas = self.read_attribute("nr_antennas_R")
         subband_weights = numpy.zeros(
-                (nr_antennas, N_pol, N_subbands, VALUES_PER_COMPLEX),
-                dtype=numpy.float64
+            (nr_antennas, N_pol, N_subbands, VALUES_PER_COMPLEX), dtype=numpy.float64
         )
         for antenna_nr, rcu_band in enumerate(rcu_bands):
             # Skip antennas not connected to RECV. These do not have a valid RCU band
@@ -733,17 +808,18 @@ class AntennaField(LOFARDevice):
                 continue
 
             subband_weights[antenna_nr, :, :, :] = rcu_band_to_caltable[rcu_band][
-                                                   antenna_nr, :, :, :]
+                antenna_nr, :, :, :
+            ]
 
         return subband_weights.reshape(
-                nr_antennas * N_pol, N_subbands * VALUES_PER_COMPLEX
+            nr_antennas * N_pol, N_subbands * VALUES_PER_COMPLEX
         )
 
     def read_Calibration_RCU_Attenuation_dB_R(self):
         # Correct for signal loss in the cables
-        signal_delay_loss = self.read_attribute(
-                "Antenna_Cables_Loss_R"
-        ) - self.Field_Attenuation
+        signal_delay_loss = (
+            self.read_attribute("Antenna_Cables_Loss_R") - self.Field_Attenuation
+        )
 
         # return coarse attenuation to apply
         input_attenuation_integer_db, _ = loss_compensation(signal_delay_loss)
@@ -767,7 +843,7 @@ class AntennaField(LOFARDevice):
 
         antennas_forced_on = use == AntennaUse.ON
         antennas_auto_on = numpy.logical_and(
-                use == AntennaUse.AUTO, quality <= AntennaQuality.SUSPICIOUS
+            use == AntennaUse.AUTO, quality <= AntennaQuality.SUSPICIOUS
         )
 
         return numpy.logical_or(antennas_forced_on, antennas_auto_on)
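# Illustrative sketch (hypothetical, not taken from the patch): the usage rule in
# the hunk above, worked out on four antennas. Enum values follow the
# Antenna_Use_R and Antenna_Quality_R docs: use 0=AUTO, 1=ON, 2=OFF;
# quality 0=OK, 1=SUSPICIOUS, 2=BROKEN, 3=BEYOND_REPAIR.
import numpy

use = numpy.array([0, 0, 1, 2])      # AUTO, AUTO, ON, OFF
quality = numpy.array([1, 2, 3, 0])  # SUSPICIOUS, BROKEN, BEYOND_REPAIR, OK
forced_on = use == 1
auto_on = numpy.logical_and(use == 0, quality <= 1)
mask = numpy.logical_or(forced_on, auto_on)
# mask == [True, False, True, False]: AUTO antennas are used while OK/SUSPICIOUS,
# ON forces usage regardless of quality, OFF always excludes the antenna.
assert list(mask) == [True, False, True, False]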
@@ -796,7 +872,7 @@ class AntennaField(LOFARDevice):
         # reference
         etrs_coordinates = numpy.array(self.Antenna_Field_Reference_ETRS).reshape(N_xyz)
         return ETRS_to_ITRF(
-                etrs_coordinates, self.ITRF_Reference_Frame, self.ITRF_Reference_Epoch
+            etrs_coordinates, self.ITRF_Reference_Frame, self.ITRF_Reference_Epoch
         )
 
     def read_Antenna_Field_Reference_GEO_R(self):
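# Illustrative sketch (hypothetical, not taken from the patch): the position
# fallback described in the class docstring and applied in the hunk above. The
# converter argument stands in for the ETRS_to_ITRF helper this module already
# imports elsewhere; 3 is N_xyz (x, y, z).
import numpy

def field_reference_itrf(itrf_xyz, etrs_xyz, frame, epoch, etrs_to_itrf):
    """Return the ITRF reference position, deriving it from ETRS if not given."""
    if itrf_xyz is not None and len(itrf_xyz) > 0:
        return numpy.array(itrf_xyz).reshape(3)
    return etrs_to_itrf(numpy.array(etrs_xyz).reshape(3), frame, epoch)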
@@ -806,39 +882,41 @@ class AntennaField(LOFARDevice):
         return GEO_to_GEOHASH(self.read_Antenna_Field_Reference_GEO_R())
 
     def read_HBAT_antenna_ITRF_offsets_R(self):
-        """ Returns the ITRF differences between the center of the tile and its
-            individual elements, which is a (nrtiles)x16x3 matrix (16 elements with 3
-            ITRF coordinates each), returned as a (nrtiles)x48 matrix.
+        """Returns the ITRF differences between the center of the tile and its
+        individual elements, which is a (nrtiles)x16x3 matrix (16 elements with 3
+        ITRF coordinates each), returned as a (nrtiles)x48 matrix.
 
-            This takes the relative offsets between the elements in the tiles as
-            described in HBAT_base_antenna_offsets. These offsets are in PQR space,
-            which is the plane of the station. The tiles are rotated locally in this
-            space according to the HBAT_PQR_rotation_angles_deg, and finally translated
-            into global ETRS coordinates using the PQR_to_ETRS_rotation_matrix.
+        This takes the relative offsets between the elements in the tiles as
+        described in HBAT_base_antenna_offsets. These offsets are in PQR space,
+        which is the plane of the station. The tiles are rotated locally in this
+        space according to the HBAT_PQR_rotation_angles_deg, and finally translated
+        into global ETRS coordinates using the PQR_to_ETRS_rotation_matrix.
 
-            The relative ITRF offsets are the same as relative ETRS offsets.
+        The relative ITRF offsets are the same as relative ETRS offsets.
 
-            NB: In all of this, the absolute position of each tile is actually
-            irrelevant, as all the tiles lie on the same plane in ITRF. """
+        NB: In all of this, the absolute position of each tile is actually
+        irrelevant, as all the tiles lie on the same plane in ITRF."""
 
         # the relative offsets between the elements is fixed in
         # HBAT_base_antenna_offsets
         base_antenna_offsets = numpy.array(self.HBAT_base_antenna_offsets).reshape(
-                N_elements, N_xyz
+            N_elements, N_xyz
         )
 
         pqr_to_etrs_rotation_matrix = numpy.array(
-                self.PQR_to_ETRS_rotation_matrix
+            self.PQR_to_ETRS_rotation_matrix
         ).reshape(N_xyz, N_xyz)
 
         # each tile has its own rotation angle, resulting in different offsets per tile
         all_offsets = numpy.array(
-                [HBATAntennaOffsets.ITRF_offsets(
-                        base_antenna_offsets,
-                        angle_deg * pi / 180,
-                        pqr_to_etrs_rotation_matrix
+            [
+                HBATAntennaOffsets.ITRF_offsets(
+                    base_antenna_offsets,
+                    angle_deg * pi / 180,
+                    pqr_to_etrs_rotation_matrix,
                 )
-                    for angle_deg in self.HBAT_PQR_rotation_angles_deg]
+                for angle_deg in self.HBAT_PQR_rotation_angles_deg
+            ]
         )
 
         return all_offsets.reshape(-1, N_elements * N_xyz)
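# Illustrative sketch (hypothetical, not taken from the patch): the two-step
# transform the docstring above describes, written out explicitly. It only
# approximates what HBATAntennaOffsets.ITRF_offsets presumably does: rotate the
# 16x3 PQR offsets in the PQ plane by the tile's angle, then map them to
# ETRS/ITRF with the station's 3x3 rotation matrix. Relative offsets need no
# translation term.
import numpy

def pqr_offsets_to_itrf(offsets_pqr, rotation_deg, pqr_to_etrs):
    angle = numpy.deg2rad(rotation_deg)
    rotate_pq = numpy.array(
        [
            [numpy.cos(angle), -numpy.sin(angle), 0.0],
            [numpy.sin(angle), numpy.cos(angle), 0.0],
            [0.0, 0.0, 1.0],
        ]
    )
    # row-vector form of M @ (R @ v) for every offset v
    return offsets_pqr @ rotate_pq.T @ pqr_to_etrs.T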
@@ -853,7 +931,7 @@ class AntennaField(LOFARDevice):
         etrs_coordinates = numpy.array(self.Antenna_Reference_ETRS).reshape(-1, N_xyz)
 
         return ETRS_to_ITRF(
-                etrs_coordinates, self.ITRF_Reference_Frame, self.ITRF_Reference_Epoch
+            etrs_coordinates, self.ITRF_Reference_Frame, self.ITRF_Reference_Epoch
         )
 
     def read_Antenna_Reference_GEO_R(self):
@@ -880,7 +958,7 @@ class AntennaField(LOFARDevice):
         control_mapping = numpy.reshape(self.Control_to_RECV_mapping, (-1, 2))
         power_mapping = numpy.reshape(self.Power_to_RECV_mapping, (-1, 2))
         self.__mapper = AntennaToRecvMapper(
-                control_mapping, power_mapping, number_of_receivers
+            control_mapping, power_mapping, number_of_receivers
         )
 
     def get_mapped_attribute(self, mapped_point: str):
@@ -910,7 +988,7 @@ class AntennaField(LOFARDevice):
             new_values = mapped_value[idx]
 
             self.atomic_read_modify_write_attribute(
-                    new_values, recv_proxy, mapped_point, cast_type=cast_type
+                new_values, recv_proxy, mapped_point, cast_type=cast_type
             )
 
     # --------
@@ -937,25 +1015,25 @@ class AntennaField(LOFARDevice):
     @DebugIt()
     @log_exceptions()
     def configure_recv(self):
-        """ Configure RECV to process our antennas. """
+        """Configure RECV to process our antennas."""
 
         # Disable controlling the tiles that fall outside the mask
         # WARN: Needed in configure_for_initialise but Tango does not allow to write
         # attributes in INIT state
         self.proxy.write_attribute(
-                'ANT_mask_RW', self.read_attribute('Antenna_Usage_Mask_R')
+            "ANT_mask_RW", self.read_attribute("Antenna_Usage_Mask_R")
         )
 
         # Turn on power to antennas that need it (and due to the ANT_mask, that we're
         # using)
-        self.proxy.write_attribute('RCU_PWR_ANT_on_RW', self.Antenna_Needs_Power)
+        self.proxy.write_attribute("RCU_PWR_ANT_on_RW", self.Antenna_Needs_Power)
 
     @command()
     @only_in_states(DEFAULT_COMMAND_STATES)
     @DebugIt()
     @log_exceptions()
     def configure_sdp(self):
-        """ Configure SDP to process our antennas. """
+        """Configure SDP to process our antennas."""
 
         # Upload which antenna type we're using
 
@@ -971,11 +1049,11 @@ class AntennaField(LOFARDevice):
 
     @command()
     def calibrate_recv(self):
-        """ Calibrate RECV for our antennas.
+        """Calibrate RECV for our antennas.
 
-            Run whenever the following changes:
-                sdp.clock_RW
-                antennafield.RCU_band_select_RW
+        Run whenever the following changes:
+            sdp.clock_RW
+            antennafield.RCU_band_select_RW
         """
 
         # -----------------------------------------------------------
@@ -988,11 +1066,11 @@ class AntennaField(LOFARDevice):
 
     @command()
     def calibrate_sdp(self):
-        """ Calibrate SDP for our antennas.
+        """Calibrate SDP for our antennas.
 
-            Run whenever the following changes:
-                sdp.clock_RW
-                antennafield.RCU_band_select_RW
+        Run whenever the following changes:
+            sdp.clock_RW
+            antennafield.RCU_band_select_RW
         """
 
         # Mapping [antenna] -> [fpga][input]
@@ -1004,25 +1082,29 @@ class AntennaField(LOFARDevice):
 
         # The delay to apply, in samples [antenna]
         input_samples_delay = self.read_attribute(
-                "Calibration_SDP_Signal_Input_Samples_Delay_R"
+            "Calibration_SDP_Signal_Input_Samples_Delay_R"
         )
 
         # read-modify-write on [fpga][(input, polarisation)]
-        fpga_signal_input_samples_delay = \
+        fpga_signal_input_samples_delay = (
             self.sdp_proxy.FPGA_signal_input_samples_delay_RW
+        )
         for antenna_nr, (fpga_nr, input_nr) in enumerate(antenna_to_sdp_mapping):
             if input_nr == -1:
                 # skip unconnected antennas
                 continue
 
             # set for X polarisation
-            fpga_signal_input_samples_delay[fpga_nr, input_nr * N_pol + 0] = \
-                input_samples_delay[antenna_nr]
+            fpga_signal_input_samples_delay[
+                fpga_nr, input_nr * N_pol + 0
+            ] = input_samples_delay[antenna_nr]
             # set for Y polarisation
-            fpga_signal_input_samples_delay[fpga_nr, input_nr * N_pol + 1] = \
-                input_samples_delay[antenna_nr]
-        self.sdp_proxy.FPGA_signal_input_samples_delay_RW = \
+            fpga_signal_input_samples_delay[
+                fpga_nr, input_nr * N_pol + 1
+            ] = input_samples_delay[antenna_nr]
+        self.sdp_proxy.FPGA_signal_input_samples_delay_RW = (
             fpga_signal_input_samples_delay
+        )
 
         # -----------------------------------------------------------
         #   Compute calibration of subband weights for the remaining
@@ -1034,7 +1116,7 @@ class AntennaField(LOFARDevice):
 
         # read-modify-write on [fpga][(input, polarisation)]
         fpga_subband_weights = self.sdp_proxy.FPGA_subband_weights_RW.reshape(
-                N_pn, S_pn, N_subbands
+            N_pn, S_pn, N_subbands
         )
         for antenna_nr, (fpga_nr, input_nr) in enumerate(antenna_to_sdp_mapping):
             if input_nr == -1:
@@ -1042,16 +1124,18 @@ class AntennaField(LOFARDevice):
                 continue
 
             # set weights
-            fpga_subband_weights[fpga_nr, input_nr * N_pol + 0,
-            :] = real_imag_to_weights(
-                    caltable[antenna_nr * N_pol + 0, :], SDP.SUBBAND_UNIT_WEIGHT
+            fpga_subband_weights[
+                fpga_nr, input_nr * N_pol + 0, :
+            ] = real_imag_to_weights(
+                caltable[antenna_nr * N_pol + 0, :], SDP.SUBBAND_UNIT_WEIGHT
             )
-            fpga_subband_weights[fpga_nr, input_nr * N_pol + 1,
-            :] = real_imag_to_weights(
-                    caltable[antenna_nr * N_pol + 1, :], SDP.SUBBAND_UNIT_WEIGHT
+            fpga_subband_weights[
+                fpga_nr, input_nr * N_pol + 1, :
+            ] = real_imag_to_weights(
+                caltable[antenna_nr * N_pol + 1, :], SDP.SUBBAND_UNIT_WEIGHT
             )
         self.sdp_proxy.FPGA_subband_weights_RW = fpga_subband_weights.reshape(
-                N_pn, S_pn * N_subbands
+            N_pn, S_pn * N_subbands
         )
 
     @command(dtype_in=DevVarFloatArray, dtype_out=DevVarLongArray)
@@ -1074,8 +1158,8 @@ class AntennaField(LOFARDevice):
 
             # convert them into delay steps
             flatten_delay_steps = numpy.array(
-                    recv_proxy.calculate_HBAT_bf_delay_steps(recv_delays.flatten()),
-                    dtype=numpy.int64
+                recv_proxy.calculate_HBAT_bf_delay_steps(recv_delays.flatten()),
+                dtype=numpy.int64,
             )
             delay_steps = numpy.reshape(flatten_delay_steps, (-1, N_elements * N_pol))
 
@@ -1091,17 +1175,17 @@ class AntennaToRecvMapper(object):
     _VALUE_MAP_NONE_96_32 = numpy.full((MAX_ANTENNA, N_elements * N_pol), None)
 
     def __init__(
-            self, control_to_recv_mapping, power_to_recv_mapping, number_of_receivers
+        self, control_to_recv_mapping, power_to_recv_mapping, number_of_receivers
     ):
         number_of_antennas = len(control_to_recv_mapping)
 
         # Reduce memory footprint of default values by creating single instance of
         # common fields
         value_map_ant_32_int = numpy.zeros(
-                [number_of_antennas, N_elements * N_pol], dtype=numpy.int64
+            [number_of_antennas, N_elements * N_pol], dtype=numpy.int64
         )
         value_map_ant_32_bool = numpy.full(
-                (number_of_antennas, N_elements * N_pol), False
+            (number_of_antennas, N_elements * N_pol), False
         )
         value_map_ant_bool = numpy.full(number_of_antennas, False)
 
@@ -1109,64 +1193,60 @@ class AntennaToRecvMapper(object):
         self._power_mapping = power_to_recv_mapping
         self._number_of_receivers = number_of_receivers
         self._value_mapper = {
-            "ANT_mask_RW"           : self._control_mapping,
-            "HBAT_BF_delay_steps_R" : self._control_mapping,
+            "ANT_mask_RW": self._control_mapping,
+            "HBAT_BF_delay_steps_R": self._control_mapping,
             "HBAT_BF_delay_steps_RW": self._control_mapping,
-            "HBAT_LED_on_R"         : self._control_mapping,
-            "HBAT_LED_on_RW"        : self._control_mapping,
-            "HBAT_PWR_LNA_on_R"     : self._control_mapping,
-            "HBAT_PWR_LNA_on_RW"    : self._control_mapping,
-            "HBAT_PWR_on_R"         : self._control_mapping,
-            "HBAT_PWR_on_RW"        : self._control_mapping,
-            "RCU_PWR_ANT_on_R"      : self._power_mapping,
-            "RCU_PWR_ANT_on_RW"     : self._power_mapping,
-            "RCU_band_select_RW"    : self._control_mapping,
-            "RCU_attenuator_dB_RW"  : self._control_mapping,
+            "HBAT_LED_on_R": self._control_mapping,
+            "HBAT_LED_on_RW": self._control_mapping,
+            "HBAT_PWR_LNA_on_R": self._control_mapping,
+            "HBAT_PWR_LNA_on_RW": self._control_mapping,
+            "HBAT_PWR_on_R": self._control_mapping,
+            "HBAT_PWR_on_RW": self._control_mapping,
+            "RCU_PWR_ANT_on_R": self._power_mapping,
+            "RCU_PWR_ANT_on_RW": self._power_mapping,
+            "RCU_band_select_RW": self._control_mapping,
+            "RCU_attenuator_dB_RW": self._control_mapping,
         }
         self._default_value_mapping_read = {
-            "ANT_mask_RW"           : value_map_ant_bool,
-            "HBAT_BF_delay_steps_R" : value_map_ant_32_int,
+            "ANT_mask_RW": value_map_ant_bool,
+            "HBAT_BF_delay_steps_R": value_map_ant_32_int,
             "HBAT_BF_delay_steps_RW": value_map_ant_32_int,
-            "HBAT_LED_on_R"         : value_map_ant_32_bool,
-            "HBAT_LED_on_RW"        : value_map_ant_32_bool,
-            "HBAT_PWR_LNA_on_R"     : value_map_ant_32_bool,
-            "HBAT_PWR_LNA_on_RW"    : value_map_ant_32_bool,
-            "HBAT_PWR_on_R"         : value_map_ant_32_bool,
-            "HBAT_PWR_on_RW"        : value_map_ant_32_bool,
-            "RCU_PWR_ANT_on_R"      : value_map_ant_bool,
-            "RCU_PWR_ANT_on_RW"     : value_map_ant_bool,
-            "RCU_band_select_RW"    : numpy.zeros(
-                    number_of_antennas, dtype=numpy.int64
-            ),
-            "RCU_attenuator_dB_RW"  : numpy.zeros(
-                    number_of_antennas, dtype=numpy.int64
-            ),
+            "HBAT_LED_on_R": value_map_ant_32_bool,
+            "HBAT_LED_on_RW": value_map_ant_32_bool,
+            "HBAT_PWR_LNA_on_R": value_map_ant_32_bool,
+            "HBAT_PWR_LNA_on_RW": value_map_ant_32_bool,
+            "HBAT_PWR_on_R": value_map_ant_32_bool,
+            "HBAT_PWR_on_RW": value_map_ant_32_bool,
+            "RCU_PWR_ANT_on_R": value_map_ant_bool,
+            "RCU_PWR_ANT_on_RW": value_map_ant_bool,
+            "RCU_band_select_RW": numpy.zeros(number_of_antennas, dtype=numpy.int64),
+            "RCU_attenuator_dB_RW": numpy.zeros(number_of_antennas, dtype=numpy.int64),
         }
         self._masked_value_mapping_write = {
-            "ANT_mask_RW"           : AntennaToRecvMapper._VALUE_MAP_NONE_96,
+            "ANT_mask_RW": AntennaToRecvMapper._VALUE_MAP_NONE_96,
             "HBAT_BF_delay_steps_RW": AntennaToRecvMapper._VALUE_MAP_NONE_96_32,
-            "HBAT_LED_on_RW"        : AntennaToRecvMapper._VALUE_MAP_NONE_96_32,
-            "HBAT_PWR_LNA_on_RW"    : AntennaToRecvMapper._VALUE_MAP_NONE_96_32,
-            "HBAT_PWR_on_RW"        : AntennaToRecvMapper._VALUE_MAP_NONE_96_32,
-            "RCU_PWR_ANT_on_RW"     : AntennaToRecvMapper._VALUE_MAP_NONE_96,
-            "RCU_band_select_RW"    : AntennaToRecvMapper._VALUE_MAP_NONE_96,
-            "RCU_attenuator_dB_RW"  : AntennaToRecvMapper._VALUE_MAP_NONE_96,
+            "HBAT_LED_on_RW": AntennaToRecvMapper._VALUE_MAP_NONE_96_32,
+            "HBAT_PWR_LNA_on_RW": AntennaToRecvMapper._VALUE_MAP_NONE_96_32,
+            "HBAT_PWR_on_RW": AntennaToRecvMapper._VALUE_MAP_NONE_96_32,
+            "RCU_PWR_ANT_on_RW": AntennaToRecvMapper._VALUE_MAP_NONE_96,
+            "RCU_band_select_RW": AntennaToRecvMapper._VALUE_MAP_NONE_96,
+            "RCU_attenuator_dB_RW": AntennaToRecvMapper._VALUE_MAP_NONE_96,
         }
         self._reshape_attributes_in = {
-            "ANT_mask_RW"           : (MAX_ANTENNA,),
+            "ANT_mask_RW": (MAX_ANTENNA,),
             "HBAT_BF_delay_steps_RW": (MAX_ANTENNA, N_elements * N_pol),
-            "RCU_PWR_ANT_on_R"      : (MAX_ANTENNA,),
-            "RCU_PWR_ANT_on_RW"     : (MAX_ANTENNA,),
-            "RCU_band_select_RW"    : (MAX_ANTENNA,),
-            "RCU_attenuator_dB_RW"  : (MAX_ANTENNA,),
+            "RCU_PWR_ANT_on_R": (MAX_ANTENNA,),
+            "RCU_PWR_ANT_on_RW": (MAX_ANTENNA,),
+            "RCU_band_select_RW": (MAX_ANTENNA,),
+            "RCU_attenuator_dB_RW": (MAX_ANTENNA,),
         }
         self._reshape_attributes_out = {
-            "ANT_mask_RW"           : (N_rcu, N_rcu_inp),
+            "ANT_mask_RW": (N_rcu, N_rcu_inp),
             "HBAT_BF_delay_steps_RW": (MAX_ANTENNA, N_elements * N_pol),
-            "RCU_PWR_ANT_on_R"      : (N_rcu, N_rcu_inp),
-            "RCU_PWR_ANT_on_RW"     : (N_rcu, N_rcu_inp),
-            "RCU_band_select_RW"    : (N_rcu, N_rcu_inp),
-            "RCU_attenuator_dB_RW"  : (N_rcu, N_rcu_inp),
+            "RCU_PWR_ANT_on_R": (N_rcu, N_rcu_inp),
+            "RCU_PWR_ANT_on_RW": (N_rcu, N_rcu_inp),
+            "RCU_band_select_RW": (N_rcu, N_rcu_inp),
+            "RCU_attenuator_dB_RW": (N_rcu, N_rcu_inp),
         }
 
     def map_read(self, mapped_attribute: str, recv_results: List[any]) -> List[any]:
@@ -1183,13 +1263,13 @@ class AntennaToRecvMapper(object):
 
         if mapped_attribute in self._reshape_attributes_in:
             recv_results = numpy.reshape(
-                    recv_results,
-                    (self._number_of_receivers,) + self._reshape_attributes_in[
-                        mapped_attribute]
+                recv_results,
+                (self._number_of_receivers,)
+                + self._reshape_attributes_in[mapped_attribute],
             )
 
         return self._mapped_r_values(
-                recv_results, default_values, self._value_mapper[mapped_attribute]
+            recv_results, default_values, self._value_mapper[mapped_attribute]
         )
 
     def map_write(self, mapped_attribute: str, set_values: List[any]) -> List[any]:
@@ -1204,21 +1284,20 @@ class AntennaToRecvMapper(object):
         default_values = self._masked_value_mapping_write[mapped_attribute]
 
         mapped_values = self._mapped_rw_values(
-                set_values, default_values, self._value_mapper[mapped_attribute]
+            set_values, default_values, self._value_mapper[mapped_attribute]
         )
 
         if mapped_attribute in self._reshape_attributes_out:
             mapped_values = numpy.reshape(
-                    mapped_values,
-                    (self._number_of_receivers,) + self._reshape_attributes_out[
-                        mapped_attribute]
+                mapped_values,
+                (self._number_of_receivers,)
+                + self._reshape_attributes_out[mapped_attribute],
             )
 
         return mapped_values
 
     def _mapped_r_values(
-            self, recv_results: List[any], default_values: List[any],
-            value_mapping
+        self, recv_results: List[any], default_values: List[any], value_mapping
     ):
         """Mapping for read using :py:attribute:`~_control_mapping` and shallow copy"""
 
@@ -1233,8 +1312,7 @@ class AntennaToRecvMapper(object):
         return mapped_values
 
     def _mapped_rw_values(
-            self, set_values: List[any], default_values: List[any],
-            value_mapping
+        self, set_values: List[any], default_values: List[any], value_mapping
     ):
         """Mapping for write using :py:attribute:`~_control_mapping` and shallow copy"""
 
diff --git a/tangostationcontrol/tangostationcontrol/devices/apsct.py b/tangostationcontrol/tangostationcontrol/devices/apsct.py
index 8cdd98c58bf77366d77841ca90a12d01c411e8b3..b172674746ffa8915147f45e3dc506cb09ff92d5 100644
--- a/tangostationcontrol/tangostationcontrol/devices/apsct.py
+++ b/tangostationcontrol/tangostationcontrol/devices/apsct.py
@@ -1,32 +1,28 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the RECV project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ APSCT Device Server for LOFAR2.0
 
 """
 
+import logging
+
+import numpy
+from tango import AttrWriteType
+
 # PyTango imports
 from tango import DebugIt
 from tango.server import command, attribute, device_property
-from tango import AttrWriteType
-import numpy
-# Additional import
-
 from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
+from tangostationcontrol.common.constants import DEFAULT_POLLING_PERIOD
 from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.common.lofar_logging import device_logging_to_python
 from tangostationcontrol.common.states import DEFAULT_COMMAND_STATES
-from tangostationcontrol.common.constants import DEFAULT_POLLING_PERIOD
 from tangostationcontrol.devices.device_decorators import only_in_states
 from tangostationcontrol.devices.opcua_device import OPCUADevice
 
-import logging
+# Additional import
+
 logger = logging.getLogger()
 
 __all__ = ["APSCT", "main"]
@@ -39,111 +35,188 @@ class APSCT(OPCUADevice):
     # -----------------
 
     APSCTTR_monitor_rate_RW_default = device_property(
-        dtype='DevLong64',
-        mandatory=False,
-        default_value=1
+        dtype="DevLong64", mandatory=False, default_value=1
     )
 
     # ----- Timing values
 
     APSCT_On_Off_timeout = device_property(
-        doc='Maximum amount of time to wait after turning APSCT on or off',
-        dtype='DevFloat',
+        doc="Maximum amount of time to wait after turning APSCT on or off",
+        dtype="DevFloat",
         mandatory=False,
-        default_value=10.0
+        default_value=10.0,
     )
 
-    TRANSLATOR_DEFAULT_SETTINGS = [
-        'APSCTTR_monitor_rate_RW'
-    ]
+    TRANSLATOR_DEFAULT_SETTINGS = ["APSCTTR_monitor_rate_RW"]
 
     # ----------
     # Attributes
     # ----------
 
-    APSCTTR_I2C_error_R          = AttributeWrapper(comms_annotation=["APSCTTR_I2C_error_R"       ],datatype=numpy.int64  )
-    APSCTTR_monitor_rate_RW      = AttributeWrapper(comms_annotation=["APSCTTR_monitor_rate_RW"   ],datatype=numpy.int64  , access=AttrWriteType.READ_WRITE)
-    APSCTTR_translator_busy_R    = AttributeWrapper(comms_annotation=["APSCTTR_translator_busy_R" ],datatype=bool)
-    APSCT_INPUT_10MHz_good_R     = AttributeWrapper(comms_annotation=["APSCT_INPUT_10MHz_good_R"  ],datatype=bool)
-    APSCT_INPUT_PPS_good_R       = AttributeWrapper(comms_annotation=["APSCT_INPUT_PPS_good_R"    ],datatype=bool)
-    APSCT_PCB_ID_R               = AttributeWrapper(comms_annotation=["APSCT_PCB_ID_R"            ],datatype=numpy.int64  )
-    APSCT_PCB_number_R           = AttributeWrapper(comms_annotation=["APSCT_PCB_number_R"        ],datatype=str          )
-    APSCT_PCB_version_R          = AttributeWrapper(comms_annotation=["APSCT_PCB_version_R"       ],datatype=str          )
-    APSCT_PLL_160MHz_error_R     = AttributeWrapper(comms_annotation=["APSCT_PLL_160MHz_error_R"  ],datatype=bool)
-    APSCT_PLL_160MHz_locked_R    = AttributeWrapper(comms_annotation=["APSCT_PLL_160MHz_locked_R" ],datatype=bool)
-    APSCT_PLL_200MHz_error_R     = AttributeWrapper(comms_annotation=["APSCT_PLL_200MHz_error_R"  ],datatype=bool)
-    APSCT_PLL_200MHz_locked_R    = AttributeWrapper(comms_annotation=["APSCT_PLL_200MHz_locked_R" ],datatype=bool)
-    APSCT_PPS_ignore_R           = AttributeWrapper(comms_annotation=["APSCT_PPS_ignore_R"        ],datatype=bool)
-    APSCT_PPS_ignore_RW          = AttributeWrapper(comms_annotation=["APSCT_PPS_ignore_RW"       ],datatype=bool, access=AttrWriteType.READ_WRITE)
-    APSCT_PWR_CLKDIST1_3V3_R     = AttributeWrapper(comms_annotation=["APSCT_PWR_CLKDIST1_3V3_R"  ],datatype=numpy.float64)
-    APSCT_PWR_CLKDIST2_3V3_R     = AttributeWrapper(comms_annotation=["APSCT_PWR_CLKDIST2_3V3_R"  ],datatype=numpy.float64)
-    APSCT_PWR_CTRL_3V3_R         = AttributeWrapper(comms_annotation=["APSCT_PWR_CTRL_3V3_R"      ],datatype=numpy.float64)
-    APSCT_PWR_INPUT_3V3_R        = AttributeWrapper(comms_annotation=["APSCT_PWR_INPUT_3V3_R"     ],datatype=numpy.float64)
-    APSCT_PWR_on_R               = AttributeWrapper(comms_annotation=["APSCT_PWR_on_R"            ],datatype=bool)
-    APSCT_PWR_PLL_160MHz_3V3_R   = AttributeWrapper(comms_annotation=["APSCT_PWR_PLL_160MHz_3V3_R"],datatype=numpy.float64)
-    APSCT_PWR_PLL_160MHz_on_R    = AttributeWrapper(comms_annotation=["APSCT_PWR_PLL_160MHz_on_R" ],datatype=bool)
-    APSCT_PWR_PLL_200MHz_3V3_R   = AttributeWrapper(comms_annotation=["APSCT_PWR_PLL_200MHz_3V3_R"],datatype=numpy.float64)
-    APSCT_PWR_PLL_200MHz_on_R    = AttributeWrapper(comms_annotation=["APSCT_PWR_PLL_200MHz_on_R" ],datatype=bool)
-    APSCT_PWR_PPSDIST_3V3_R      = AttributeWrapper(comms_annotation=["APSCT_PWR_PPSDIST_3V3_R"   ],datatype=numpy.float64)
-    APSCT_TEMP_R                 = AttributeWrapper(comms_annotation=["APSCT_TEMP_R"              ],datatype=numpy.float64)
+    APSCTTR_I2C_error_R = AttributeWrapper(
+        comms_annotation=["APSCTTR_I2C_error_R"], datatype=numpy.int64
+    )
+    APSCTTR_monitor_rate_RW = AttributeWrapper(
+        comms_annotation=["APSCTTR_monitor_rate_RW"],
+        datatype=numpy.int64,
+        access=AttrWriteType.READ_WRITE,
+    )
+    APSCTTR_translator_busy_R = AttributeWrapper(
+        comms_annotation=["APSCTTR_translator_busy_R"], datatype=bool
+    )
+    APSCT_INPUT_10MHz_good_R = AttributeWrapper(
+        comms_annotation=["APSCT_INPUT_10MHz_good_R"], datatype=bool
+    )
+    APSCT_INPUT_PPS_good_R = AttributeWrapper(
+        comms_annotation=["APSCT_INPUT_PPS_good_R"], datatype=bool
+    )
+    APSCT_PCB_ID_R = AttributeWrapper(
+        comms_annotation=["APSCT_PCB_ID_R"], datatype=numpy.int64
+    )
+    APSCT_PCB_number_R = AttributeWrapper(
+        comms_annotation=["APSCT_PCB_number_R"], datatype=str
+    )
+    APSCT_PCB_version_R = AttributeWrapper(
+        comms_annotation=["APSCT_PCB_version_R"], datatype=str
+    )
+    APSCT_PLL_160MHz_error_R = AttributeWrapper(
+        comms_annotation=["APSCT_PLL_160MHz_error_R"], datatype=bool
+    )
+    APSCT_PLL_160MHz_locked_R = AttributeWrapper(
+        comms_annotation=["APSCT_PLL_160MHz_locked_R"], datatype=bool
+    )
+    APSCT_PLL_200MHz_error_R = AttributeWrapper(
+        comms_annotation=["APSCT_PLL_200MHz_error_R"], datatype=bool
+    )
+    APSCT_PLL_200MHz_locked_R = AttributeWrapper(
+        comms_annotation=["APSCT_PLL_200MHz_locked_R"], datatype=bool
+    )
+    APSCT_PPS_ignore_R = AttributeWrapper(
+        comms_annotation=["APSCT_PPS_ignore_R"], datatype=bool
+    )
+    APSCT_PPS_ignore_RW = AttributeWrapper(
+        comms_annotation=["APSCT_PPS_ignore_RW"],
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    APSCT_PWR_CLKDIST1_3V3_R = AttributeWrapper(
+        comms_annotation=["APSCT_PWR_CLKDIST1_3V3_R"], datatype=numpy.float64
+    )
+    APSCT_PWR_CLKDIST2_3V3_R = AttributeWrapper(
+        comms_annotation=["APSCT_PWR_CLKDIST2_3V3_R"], datatype=numpy.float64
+    )
+    APSCT_PWR_CTRL_3V3_R = AttributeWrapper(
+        comms_annotation=["APSCT_PWR_CTRL_3V3_R"], datatype=numpy.float64
+    )
+    APSCT_PWR_INPUT_3V3_R = AttributeWrapper(
+        comms_annotation=["APSCT_PWR_INPUT_3V3_R"], datatype=numpy.float64
+    )
+    APSCT_PWR_on_R = AttributeWrapper(
+        comms_annotation=["APSCT_PWR_on_R"], datatype=bool
+    )
+    APSCT_PWR_PLL_160MHz_3V3_R = AttributeWrapper(
+        comms_annotation=["APSCT_PWR_PLL_160MHz_3V3_R"], datatype=numpy.float64
+    )
+    APSCT_PWR_PLL_160MHz_on_R = AttributeWrapper(
+        comms_annotation=["APSCT_PWR_PLL_160MHz_on_R"], datatype=bool
+    )
+    APSCT_PWR_PLL_200MHz_3V3_R = AttributeWrapper(
+        comms_annotation=["APSCT_PWR_PLL_200MHz_3V3_R"], datatype=numpy.float64
+    )
+    APSCT_PWR_PLL_200MHz_on_R = AttributeWrapper(
+        comms_annotation=["APSCT_PWR_PLL_200MHz_on_R"], datatype=bool
+    )
+    APSCT_PWR_PPSDIST_3V3_R = AttributeWrapper(
+        comms_annotation=["APSCT_PWR_PPSDIST_3V3_R"], datatype=numpy.float64
+    )
+    APSCT_TEMP_R = AttributeWrapper(
+        comms_annotation=["APSCT_TEMP_R"], datatype=numpy.float64
+    )
 
     # ----------
     # Summarising Attributes
     # ----------
-    APSCT_error_R                 = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
+    APSCT_error_R = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
 
     def read_APSCT_error_R(self):
-        errors = [self.read_attribute("APSCTTR_I2C_error_R") > 0, 
-                self.alarm_val("APSCT_PCB_ID_R"), 
-                not self.read_attribute("APSCT_INPUT_10MHz_good_R"), 
-                not self.read_attribute("APSCT_INPUT_PPS_good_R") and not self.read_attribute("APSCT_PPS_ignore_R"),
-                not self.read_attribute("APSCT_PLL_160MHz_locked_R") and not self.read_attribute("APSCT_PLL_200MHz_locked_R"),
-                self.read_attribute("APSCT_PLL_200MHz_locked_R") and self.read_attribute("APSCT_PLL_200MHz_error_R"),
-                self.read_attribute("APSCT_PLL_160MHz_locked_R") and self.read_attribute("APSCT_PLL_160MHz_error_R")]
+        errors = [
+            self.read_attribute("APSCTTR_I2C_error_R") > 0,
+            self.alarm_val("APSCT_PCB_ID_R"),
+            not self.read_attribute("APSCT_INPUT_10MHz_good_R"),
+            not self.read_attribute("APSCT_INPUT_PPS_good_R")
+            and not self.read_attribute("APSCT_PPS_ignore_R"),
+            not self.read_attribute("APSCT_PLL_160MHz_locked_R")
+            and not self.read_attribute("APSCT_PLL_200MHz_locked_R"),
+            self.read_attribute("APSCT_PLL_200MHz_locked_R")
+            and self.read_attribute("APSCT_PLL_200MHz_error_R"),
+            self.read_attribute("APSCT_PLL_160MHz_locked_R")
+            and self.read_attribute("APSCT_PLL_160MHz_error_R"),
+        ]
         return any(errors)
 
-    APSCT_TEMP_error_R            = attribute(dtype=bool, fisallowed="is_attribute_access_allowed", polling_period=DEFAULT_POLLING_PERIOD)
-    APSCT_VOUT_error_R            = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
+    APSCT_TEMP_error_R = attribute(
+        dtype=bool,
+        fisallowed="is_attribute_access_allowed",
+        polling_period=DEFAULT_POLLING_PERIOD,
+    )
+    APSCT_VOUT_error_R = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
 
     def read_APSCT_TEMP_error_R(self):
-        return (self.alarm_val("APSCT_TEMP_R"))
+        return self.alarm_val("APSCT_TEMP_R")
 
     def read_APSCT_VOUT_error_R(self):
-        return ( self.alarm_val("APSCT_PWR_PPSDIST_3V3_R")
-               or self.alarm_val("APSCT_PWR_CLKDIST1_3V3_R")
-               or self.alarm_val("APSCT_PWR_CLKDIST2_3V3_R")
-               or self.alarm_val("APSCT_PWR_CTRL_3V3_R")
-               or self.alarm_val("APSCT_PWR_INPUT_3V3_R")
-               or (self.read_attribute("APSCT_PWR_PLL_160MHz_on_R") and self.alarm_val("APSCT_PWR_PLL_160MHz_3V3_R"))
-               or (self.read_attribute("APSCT_PWR_PLL_200MHz_on_R") and self.alarm_val("APSCT_PWR_PLL_200MHz_3V3_R"))
-               or (not self.read_attribute("APSCT_PWR_on_R"))
-               )
+        return (
+            self.alarm_val("APSCT_PWR_PPSDIST_3V3_R")
+            or self.alarm_val("APSCT_PWR_CLKDIST1_3V3_R")
+            or self.alarm_val("APSCT_PWR_CLKDIST2_3V3_R")
+            or self.alarm_val("APSCT_PWR_CTRL_3V3_R")
+            or self.alarm_val("APSCT_PWR_INPUT_3V3_R")
+            or (
+                self.read_attribute("APSCT_PWR_PLL_160MHz_on_R")
+                and self.alarm_val("APSCT_PWR_PLL_160MHz_3V3_R")
+            )
+            or (
+                self.read_attribute("APSCT_PWR_PLL_200MHz_on_R")
+                and self.alarm_val("APSCT_PWR_PLL_200MHz_3V3_R")
+            )
+            or (not self.read_attribute("APSCT_PWR_on_R"))
+        )
 
     # --------
     # overloaded functions
     # --------
 
     def _prepare_hardware(self):
-        """ Initialise the APSCT hardware. """
+        """Initialise the APSCT hardware."""
 
         # Cycle clock
         self.APSCT_off()
-        self.wait_attribute("APSCTTR_translator_busy_R", False, self.APSCT_On_Off_timeout)
+        self.wait_attribute(
+            "APSCTTR_translator_busy_R", False, self.APSCT_On_Off_timeout
+        )
         self.APSCT_200MHz_on()
-        self.wait_attribute("APSCTTR_translator_busy_R", False, self.APSCT_On_Off_timeout)
+        self.wait_attribute(
+            "APSCTTR_translator_busy_R", False, self.APSCT_On_Off_timeout
+        )
 
         if not self.read_attribute("APSCT_PLL_200MHz_locked_R"):
             if self.read_attribute("APSCTTR_I2C_error_R"):
-                raise Exception("I2C is not working. Maybe power cycle subrack to restart CLK board and translator?")
+                raise Exception(
+                    "I2C is not working. Maybe power cycle the subrack to restart the CLK board and translator?"
+                )
             else:
-                raise Exception("200MHz signal is not locked. The subrack probably do not receive clock input or the CLK PCB is broken?")
+                raise Exception(
+                    "200MHz signal is not locked. The subrack probably does not receive clock input, or the CLK PCB is broken."
+                )
 
     def _disable_hardware(self):
-        """ Disable the APSCT hardware. """
+        """Disable the APSCT hardware."""
 
         # Turn off the APSCT
         self.APSCT_off()
-        self.wait_attribute("APSCTTR_translator_busy_R", False, self.APSCT_On_Off_timeout)
+        self.wait_attribute(
+            "APSCTTR_translator_busy_R", False, self.APSCT_On_Off_timeout
+        )
 
     # --------
     # Commands
diff --git a/tangostationcontrol/tangostationcontrol/devices/apspu.py b/tangostationcontrol/tangostationcontrol/devices/apspu.py
index 3b057c0b7408071cc68af9f82c94a9e8d143067c..24d83f5b9629534b9a390a48e25ca7345c83f7c6 100644
--- a/tangostationcontrol/tangostationcontrol/devices/apspu.py
+++ b/tangostationcontrol/tangostationcontrol/devices/apspu.py
@@ -1,27 +1,22 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the RECV project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ APSPU Device Server for LOFAR2.0
 
 """
 
+import numpy
+
 # PyTango imports
 from tango import AttrWriteType
 from tango.server import attribute, device_property
-import numpy
-# Additional import
-
-from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
-from tangostationcontrol.devices.opcua_device import OPCUADevice
-from tangostationcontrol.common.lofar_logging import device_logging_to_python
 from tangostationcontrol.common.constants import DEFAULT_POLLING_PERIOD
+from tangostationcontrol.common.entrypoint import entry
+from tangostationcontrol.common.lofar_logging import device_logging_to_python
+from tangostationcontrol.devices.opcua_device import OPCUADevice
+
+# Additional import
 
 __all__ = ["APSPU", "main"]
 
@@ -33,78 +28,121 @@ class APSPU(OPCUADevice):
     # -----------------
 
     APSPUTR_monitor_rate_RW_default = device_property(
-        dtype='DevLong64',
-        mandatory=False,
-        default_value=1
+        dtype="DevLong64", mandatory=False, default_value=1
     )
 
-    TRANSLATOR_DEFAULT_SETTINGS = [
-        'APSPUTR_monitor_rate_RW'
-    ]
+    TRANSLATOR_DEFAULT_SETTINGS = ["APSPUTR_monitor_rate_RW"]
 
     # ----------
     # Attributes
     # ----------
 
-    APSPUTR_I2C_error_R          = AttributeWrapper(comms_annotation=["APSPUTR_I2C_error_R"       ],datatype=numpy.int64  )
-    APSPUTR_monitor_rate_RW      = AttributeWrapper(comms_annotation=["APSPUTR_monitor_rate_RW"   ],datatype=numpy.int64  , access=AttrWriteType.READ_WRITE)
-    APSPUTR_translator_busy_R    = AttributeWrapper(comms_annotation=["APSPUTR_translator_busy_R" ],datatype=bool         )
-    APSPU_FAN1_RPM_R             = AttributeWrapper(comms_annotation=["APSPU_FAN1_RPM_R"          ],datatype=numpy.float64)
-    APSPU_FAN2_RPM_R             = AttributeWrapper(comms_annotation=["APSPU_FAN2_RPM_R"          ],datatype=numpy.float64)
-    APSPU_FAN3_RPM_R             = AttributeWrapper(comms_annotation=["APSPU_FAN3_RPM_R"          ],datatype=numpy.float64)
-    APSPU_LBA_IOUT_R             = AttributeWrapper(comms_annotation=["APSPU_LBA_IOUT_R"          ],datatype=numpy.float64)
-    APSPU_LBA_TEMP_R             = AttributeWrapper(comms_annotation=["APSPU_LBA_TEMP_R"          ],datatype=numpy.float64)
-    APSPU_LBA_VOUT_R             = AttributeWrapper(comms_annotation=["APSPU_LBA_VOUT_R"          ],datatype=numpy.float64)
-    APSPU_PCB_ID_R               = AttributeWrapper(comms_annotation=["APSPU_PCB_ID_R"            ],datatype=numpy.int64  )
-    APSPU_PCB_number_R           = AttributeWrapper(comms_annotation=["APSPU_PCB_number_R"        ],datatype=str          )
-    APSPU_PCB_version_R          = AttributeWrapper(comms_annotation=["APSPU_PCB_version_R"       ],datatype=str          )
-    APSPU_RCU2A_IOUT_R           = AttributeWrapper(comms_annotation=["APSPU_RCU2A_IOUT_R"        ],datatype=numpy.float64)
-    APSPU_RCU2A_TEMP_R           = AttributeWrapper(comms_annotation=["APSPU_RCU2A_TEMP_R"        ],datatype=numpy.float64)
-    APSPU_RCU2A_VOUT_R           = AttributeWrapper(comms_annotation=["APSPU_RCU2A_VOUT_R"        ],datatype=numpy.float64)
-    APSPU_RCU2D_IOUT_R           = AttributeWrapper(comms_annotation=["APSPU_RCU2D_IOUT_R"        ],datatype=numpy.float64)
-    APSPU_RCU2D_TEMP_R           = AttributeWrapper(comms_annotation=["APSPU_RCU2D_TEMP_R"        ],datatype=numpy.float64)
-    APSPU_RCU2D_VOUT_R           = AttributeWrapper(comms_annotation=["APSPU_RCU2D_VOUT_R"        ],datatype=numpy.float64)
+    APSPUTR_I2C_error_R = AttributeWrapper(
+        comms_annotation=["APSPUTR_I2C_error_R"], datatype=numpy.int64
+    )
+    APSPUTR_monitor_rate_RW = AttributeWrapper(
+        comms_annotation=["APSPUTR_monitor_rate_RW"],
+        datatype=numpy.int64,
+        access=AttrWriteType.READ_WRITE,
+    )
+    APSPUTR_translator_busy_R = AttributeWrapper(
+        comms_annotation=["APSPUTR_translator_busy_R"], datatype=bool
+    )
+    APSPU_FAN1_RPM_R = AttributeWrapper(
+        comms_annotation=["APSPU_FAN1_RPM_R"], datatype=numpy.float64
+    )
+    APSPU_FAN2_RPM_R = AttributeWrapper(
+        comms_annotation=["APSPU_FAN2_RPM_R"], datatype=numpy.float64
+    )
+    APSPU_FAN3_RPM_R = AttributeWrapper(
+        comms_annotation=["APSPU_FAN3_RPM_R"], datatype=numpy.float64
+    )
+    APSPU_LBA_IOUT_R = AttributeWrapper(
+        comms_annotation=["APSPU_LBA_IOUT_R"], datatype=numpy.float64
+    )
+    APSPU_LBA_TEMP_R = AttributeWrapper(
+        comms_annotation=["APSPU_LBA_TEMP_R"], datatype=numpy.float64
+    )
+    APSPU_LBA_VOUT_R = AttributeWrapper(
+        comms_annotation=["APSPU_LBA_VOUT_R"], datatype=numpy.float64
+    )
+    APSPU_PCB_ID_R = AttributeWrapper(
+        comms_annotation=["APSPU_PCB_ID_R"], datatype=numpy.int64
+    )
+    APSPU_PCB_number_R = AttributeWrapper(
+        comms_annotation=["APSPU_PCB_number_R"], datatype=str
+    )
+    APSPU_PCB_version_R = AttributeWrapper(
+        comms_annotation=["APSPU_PCB_version_R"], datatype=str
+    )
+    APSPU_RCU2A_IOUT_R = AttributeWrapper(
+        comms_annotation=["APSPU_RCU2A_IOUT_R"], datatype=numpy.float64
+    )
+    APSPU_RCU2A_TEMP_R = AttributeWrapper(
+        comms_annotation=["APSPU_RCU2A_TEMP_R"], datatype=numpy.float64
+    )
+    APSPU_RCU2A_VOUT_R = AttributeWrapper(
+        comms_annotation=["APSPU_RCU2A_VOUT_R"], datatype=numpy.float64
+    )
+    APSPU_RCU2D_IOUT_R = AttributeWrapper(
+        comms_annotation=["APSPU_RCU2D_IOUT_R"], datatype=numpy.float64
+    )
+    APSPU_RCU2D_TEMP_R = AttributeWrapper(
+        comms_annotation=["APSPU_RCU2D_TEMP_R"], datatype=numpy.float64
+    )
+    APSPU_RCU2D_VOUT_R = AttributeWrapper(
+        comms_annotation=["APSPU_RCU2D_VOUT_R"], datatype=numpy.float64
+    )
 
     # ----------
     # Summarising Attributes
     # ----------
-    APSPU_error_R                 = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
+    APSPU_error_R = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
 
     def read_APSPU_error_R(self):
-        return ((self.read_attribute("APSPUTR_I2C_error_R") > 0)
-               or self.alarm_val("APSPU_PCB_ID_R")
-               or self.alarm_val("APSPU_FAN1_RPM_R")
-               or self.alarm_val("APSPU_FAN2_RPM_R") 
-               or self.alarm_val("APSPU_FAN3_RPM_R"))
-
-    APSPU_IOUT_error_R          = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
-    APSPU_TEMP_error_R          = attribute(dtype=bool, fisallowed="is_attribute_access_allowed", polling_period=DEFAULT_POLLING_PERIOD)
-    APSPU_VOUT_error_R          = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
+        return (
+            (self.read_attribute("APSPUTR_I2C_error_R") > 0)
+            or self.alarm_val("APSPU_PCB_ID_R")
+            or self.alarm_val("APSPU_FAN1_RPM_R")
+            or self.alarm_val("APSPU_FAN2_RPM_R")
+            or self.alarm_val("APSPU_FAN3_RPM_R")
+        )
+
+    APSPU_IOUT_error_R = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
+    APSPU_TEMP_error_R = attribute(
+        dtype=bool,
+        fisallowed="is_attribute_access_allowed",
+        polling_period=DEFAULT_POLLING_PERIOD,
+    )
+    APSPU_VOUT_error_R = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
 
     def read_APSPU_IOUT_error_R(self):
-        return ( self.alarm_val("APSPU_LBA_IOUT_R")
-               or self.alarm_val("APSPU_RCU2A_IOUT_R")
-               or self.alarm_val("APSPU_RCU2D_IOUT_R")
-               )
+        return (
+            self.alarm_val("APSPU_LBA_IOUT_R")
+            or self.alarm_val("APSPU_RCU2A_IOUT_R")
+            or self.alarm_val("APSPU_RCU2D_IOUT_R")
+        )
 
     def read_APSPU_TEMP_error_R(self):
-        return ( self.alarm_val("APSPU_LBA_TEMP_R")
-               or self.alarm_val("APSPU_RCU2A_TEMP_R")
-               or self.alarm_val("APSPU_RCU2D_TEMP_R")
-               )
+        return (
+            self.alarm_val("APSPU_LBA_TEMP_R")
+            or self.alarm_val("APSPU_RCU2A_TEMP_R")
+            or self.alarm_val("APSPU_RCU2D_TEMP_R")
+        )
 
     def read_APSPU_VOUT_error_R(self):
-        return ( self.alarm_val("APSPU_LBA_VOUT_R")
-               or self.alarm_val("APSPU_RCU2A_VOUT_R")
-               or self.alarm_val("APSPU_RCU2D_VOUT_R")
-               )
+        return (
+            self.alarm_val("APSPU_LBA_VOUT_R")
+            or self.alarm_val("APSPU_RCU2A_VOUT_R")
+            or self.alarm_val("APSPU_RCU2D_VOUT_R")
+        )
 
     # --------
     # overloaded functions
     # --------
 
     def _disable_hardware(self):
-        """ Disable the APSPU hardware. """
+        """Disable the APSPU hardware."""
         super()._disable_hardware()
 
     # --------
diff --git a/tangostationcontrol/tangostationcontrol/devices/beam_device.py b/tangostationcontrol/tangostationcontrol/devices/beam_device.py
index 106647fbc340062f0cd0ed531fd952de70795821..7539b3f29b5d6714a4180773d069aa28cd27fca0 100644
--- a/tangostationcontrol/tangostationcontrol/devices/beam_device.py
+++ b/tangostationcontrol/tangostationcontrol/devices/beam_device.py
@@ -1,36 +1,52 @@
-# -*- coding: utf-8 -*-
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """Beam Abstract Device Server for LOFAR2.0
 
 """
 
 import datetime
-import numpy
 import time
 from json import loads
-from threading import Thread, Lock, Condition
 from statistics import median
+from threading import Thread, Lock, Condition
+
+import numpy
+from tango import (
+    AttrWriteType,
+    DebugIt,
+    DevVarStringArray,
+    DevVarDoubleArray,
+    DevString,
+)
 
 # PyTango imports
 from tango.server import attribute, command, device_property
-from tango import AttrWriteType, DebugIt, DevVarStringArray, DevVarDoubleArray, DevString
+from tangostationcontrol.beam.delays import Delays
+from tangostationcontrol.common.constants import MAX_POINTINGS, N_point_prop
 
 # Additional import
 from tangostationcontrol.common.entrypoint import entry
-from tangostationcontrol.common.measures import get_measures_directory, get_available_measures_directories, download_measures, use_measures_directory, restart_python
-from tangostationcontrol.common.constants import MAX_POINTINGS, N_point_prop
 from tangostationcontrol.common.lofar_logging import log_exceptions
+from tangostationcontrol.common.measures import (
+    get_measures_directory,
+    get_available_measures_directories,
+    download_measures,
+    use_measures_directory,
+    restart_python,
+)
 from tangostationcontrol.common.states import DEFAULT_COMMAND_STATES
-from tangostationcontrol.devices.device_decorators import TimeIt, only_in_states, fault_on_error
-from tangostationcontrol.beam.delays import Delays
+from tangostationcontrol.devices.device_decorators import (
+    TimeIt,
+    only_in_states,
+    fault_on_error,
+)
 from tangostationcontrol.devices.lofar_device import LOFARDevice
 
 __all__ = ["BeamDevice", "main", "BeamTracker"]
 
 import logging
+
 logger = logging.getLogger()
 
 
@@ -40,30 +56,28 @@ class BeamDevice(LOFARDevice):
     # -----------------
 
     Beam_tracking_interval = device_property(
-        dtype='DevFloat',
-        doc='Beam weights updating interval time [seconds]',
+        dtype="DevFloat",
+        doc="Beam weights updating interval time [seconds]",
         mandatory=False,
-        default_value = 10.0
+        default_value=10.0,
     )
 
     Beam_tracking_application_offset = device_property(
-        dtype='DevFloat',
-        doc='Amount of time to send the weights earlier than the interval, to allow the hardware to get apply them in time [seconds]',
+        dtype="DevFloat",
+        doc="Amount of time to send the weights earlier than the interval, to allow the hardware to apply them in time [seconds]",
         mandatory=False,
-        default_value = 0.05
+        default_value=0.05,
     )
 
     Beam_tracking_preparation_period = device_property(
-        dtype='DevFloat',
-        doc='Preparation time [seconds] needed before starting update operation',
+        dtype="DevFloat",
+        doc="Preparation time [seconds] needed before starting update operation",
         mandatory=False,
-        default_value = 0.4
+        default_value=0.4,
     )
 
     Tracking_enabled_RW_default = device_property(
-        dtype='DevBoolean',
-        mandatory=False,
-        default_value=True
+        dtype="DevBoolean", mandatory=False, default_value=True
     )
 
     # ----------
@@ -73,53 +87,84 @@ class BeamDevice(LOFARDevice):
     # The actual number of pointings for this device
     # will be stored as self._num_pointings.
 
-    Pointing_direction_R = attribute(access=AttrWriteType.READ,
-        dtype=((str,),), max_dim_x=N_point_prop, max_dim_y=MAX_POINTINGS,
-        fget=lambda self: self._pointing_direction_r)
+    Pointing_direction_R = attribute(
+        access=AttrWriteType.READ,
+        dtype=((str,),),
+        max_dim_x=N_point_prop,
+        max_dim_y=MAX_POINTINGS,
+        fget=lambda self: self._pointing_direction_r,
+    )
 
-    Pointing_direction_RW = attribute(access=AttrWriteType.READ_WRITE,
-        dtype=((str,),), max_dim_x=N_point_prop, max_dim_y=MAX_POINTINGS,
-        fget=lambda self: self._pointing_direction_rw)
+    Pointing_direction_RW = attribute(
+        access=AttrWriteType.READ_WRITE,
+        dtype=((str,),),
+        max_dim_x=N_point_prop,
+        max_dim_y=MAX_POINTINGS,
+        fget=lambda self: self._pointing_direction_rw,
+    )
 
-    Pointing_direction_str_R = attribute(access=AttrWriteType.READ,
-        doc='Pointing direction as a formatted string',
-        dtype=(str,), max_dim_x=MAX_POINTINGS,
-        fget=lambda self:  ["{0} ({1}, {2})".format(*x) for x in self._pointing_direction_r])
+    Pointing_direction_str_R = attribute(
+        access=AttrWriteType.READ,
+        doc="Pointing direction as a formatted string",
+        dtype=(str,),
+        max_dim_x=MAX_POINTINGS,
+        fget=lambda self: [
+            "{0} ({1}, {2})".format(*x) for x in self._pointing_direction_r
+        ],
+    )
 
-    Pointing_timestamp_R = attribute(access=AttrWriteType.READ,
-        dtype=(numpy.double,), max_dim_x=MAX_POINTINGS,
-        fget=lambda self: self._pointing_timestamp_r)
+    Pointing_timestamp_R = attribute(
+        access=AttrWriteType.READ,
+        dtype=(numpy.double,),
+        max_dim_x=MAX_POINTINGS,
+        fget=lambda self: self._pointing_timestamp_r,
+    )
 
-    Tracking_enabled_R = attribute(access=AttrWriteType.READ,
+    Tracking_enabled_R = attribute(
+        access=AttrWriteType.READ,
         doc="Whether the tile beam is updated periodically",
         dtype=bool,
-        fget=lambda self: bool(self.Beam_tracker and self.Beam_tracker.is_alive()))
+        fget=lambda self: bool(self.Beam_tracker and self.Beam_tracker.is_alive()),
+    )
 
-    Tracking_enabled_RW = attribute(access=AttrWriteType.READ_WRITE,
+    Tracking_enabled_RW = attribute(
+        access=AttrWriteType.READ_WRITE,
         doc="Whether the tile beam should be updated periodically",
         dtype=bool,
-        fget=lambda self: self._tracking_enabled_rw)
+        fget=lambda self: self._tracking_enabled_rw,
+    )
 
-    Duration_compute_weights_R = attribute(access=AttrWriteType.READ,
+    Duration_compute_weights_R = attribute(
+        access=AttrWriteType.READ,
         doc="Time it took to compute weights",
         unit="s",
-        dtype=numpy.float64, fget=lambda self: self._compute_weights.statistics["last"] or 0)
+        dtype=numpy.float64,
+        fget=lambda self: self._compute_weights.statistics["last"] or 0,
+    )
 
-    Duration_preparation_period_slack_R = attribute(access=AttrWriteType.READ,
+    Duration_preparation_period_slack_R = attribute(
+        access=AttrWriteType.READ,
         doc="Slack between computing and applying weights",
         unit="s",
-        dtype=numpy.float64, fget=lambda self: self._wait_to_apply_weights.statistics["last"] or 0)
+        dtype=numpy.float64,
+        fget=lambda self: self._wait_to_apply_weights.statistics["last"] or 0,
+    )
 
-    Duration_apply_weights_R = attribute(access=AttrWriteType.READ,
+    Duration_apply_weights_R = attribute(
+        access=AttrWriteType.READ,
         doc="Time it took to upload weights",
         unit="s",
-        dtype=numpy.float64, fget=lambda self: self._apply_weights.statistics["last"] or 0)
+        dtype=numpy.float64,
+        fget=lambda self: self._apply_weights.statistics["last"] or 0,
+    )
 
     def write_Pointing_direction_RW(self, value):
-        """ Setter method for attribute Pointing_direction_RW """
+        """Setter method for attribute Pointing_direction_RW"""
         # verify whether values are valid
         if len(value) != self._num_pointings:
-            raise ValueError(f"Expected {self._num_pointings} directions, got {len(value)}")
+            raise ValueError(
+                f"Expected {self._num_pointings} directions, got {len(value)}"
+            )
 
         for pointing in value:
             if not self.generic_delay_calculator.is_valid_direction(pointing):
@@ -140,16 +185,17 @@ class BeamDevice(LOFARDevice):
         else:
             self.Beam_tracker.stop()
 
-
     def update_pointing(self, timestamp: datetime.datetime):
-        """ Update the weights for the configured pointings, for the given timestamp, at the given timestamp. """
+        """Update the weights for the configured pointings, at the given timestamp."""
         self._set_pointing(self._pointing_direction_rw, timestamp)
 
     # --------
     # abstract interface
     # --------
 
-    def _delays(self, pointing_direction: numpy.array, timestamp: datetime.datetime) -> numpy.array:
+    def _delays(
+        self, pointing_direction: numpy.array, timestamp: datetime.datetime
+    ) -> numpy.array:
         """
         Calculate the delay values based on the 2D pointing list (num_pointings x 3) and the timestamp
         """
@@ -157,7 +203,9 @@ class BeamDevice(LOFARDevice):
         raise NotImplementedError
 
     @TimeIt()
-    def _compute_weights(self, pointing_direction: numpy.array, timestamp: datetime.datetime) -> numpy.array:
+    def _compute_weights(
+        self, pointing_direction: numpy.array, timestamp: datetime.datetime
+    ) -> numpy.array:
         """
         Calculate and he hardware-specific delay weights based on the 2D pointing list (num_pointings x 3) and the timestamp
         """
@@ -165,7 +213,12 @@ class BeamDevice(LOFARDevice):
         raise NotImplementedError
 
     @TimeIt()
-    def _apply_weights(self, pointing_direction: numpy.array, timestamp: datetime.datetime, weights: numpy.array) -> numpy.array:
+    def _apply_weights(
+        self,
+        pointing_direction: numpy.array,
+        timestamp: datetime.datetime,
+        weights: numpy.array,
+    ) -> numpy.array:
         """
         Upload the hardware-specific delay weights based on the 2D pointing list (num_pointings x 3) and the timestamp
 
@@ -177,16 +230,26 @@ class BeamDevice(LOFARDevice):
     @TimeIt()
     def _wait_to_apply_weights(self, timestamp: datetime.datetime):
         # expected time required to upload weights to hardware (use last 10 measured durations)
-        expected_application_time = median(self._apply_weights.statistics["history"][-10:] or [0.1])
+        expected_application_time = median(
+            self._apply_weights.statistics["history"][-10:] or [0.1]
+        )
 
         # wait until provided time occurs, but don't start sleeping long here
-        sleep_time = (timestamp - datetime.datetime.now()).total_seconds() - expected_application_time - self.Beam_tracking_application_offset
+        sleep_time = (
+            (timestamp - datetime.datetime.now()).total_seconds()
+            - expected_application_time
+            - self.Beam_tracking_application_offset
+        )
         if sleep_time > 1:
-            raise ValueError(f"Provided timestamp is too far into the future to apply at real time: {sleep_time} seconds from now.")
+            raise ValueError(
+                f"Provided timestamp is too far into the future to apply in real time: {sleep_time} seconds from now."
+            )
 
         time.sleep(max(0, sleep_time))
 
-    def _set_pointing(self, pointing_direction: numpy.array, timestamp: datetime.datetime):
+    def _set_pointing(
+        self, pointing_direction: numpy.array, timestamp: datetime.datetime
+    ):
         """
         Calculate and Upload the hardware-specific delay weights based on the 2D pointing list (num_pointings x 3) and the timestamp
 
@@ -210,7 +273,7 @@ class BeamDevice(LOFARDevice):
         super().init_device()
 
         # Initialise pointing array data and attribute
-        self._tracking_enabled_rw      = False
+        self._tracking_enabled_rw = False
 
         # thread to perform beam tracking
         self.Beam_tracker = None
@@ -225,17 +288,28 @@ class BeamDevice(LOFARDevice):
         super().configure_for_initialise()
 
         if not (0 < num_pointings <= MAX_POINTINGS):
-            raise ValueError(f"beam_device is configured to support 0 - {MAX_POINTINGS} pointings, but {num_pointings} were requested")
+            raise ValueError(
+                f"beam_device is configured to support 0 - {MAX_POINTINGS} pointings, but {num_pointings} were requested"
+            )
 
         # Initialise tracking control
-        self._num_pointings            = num_pointings
-        self._pointing_timestamp_r     = numpy.zeros(num_pointings, dtype=numpy.double)
-        self._pointing_direction_r     = numpy.zeros((num_pointings, N_point_prop), dtype="<U32")
-        self._pointing_direction_rw    = numpy.array([["AZELGEO","0deg","90deg"]] * num_pointings, dtype="<U32")
-        self._tracking_enabled_rw      = self.Tracking_enabled_RW_default
+        self._num_pointings = num_pointings
+        self._pointing_timestamp_r = numpy.zeros(num_pointings, dtype=numpy.double)
+        self._pointing_direction_r = numpy.zeros(
+            (num_pointings, N_point_prop), dtype="<U32"
+        )
+        self._pointing_direction_rw = numpy.array(
+            [["AZELGEO", "0deg", "90deg"]] * num_pointings, dtype="<U32"
+        )
+        self._tracking_enabled_rw = self.Tracking_enabled_RW_default
 
         # Create a thread object to update beam weights
-        self.Beam_tracker = BeamTracker(self.Beam_tracking_interval, self.Beam_tracking_preparation_period, self.update_pointing, self.Fault)
+        self.Beam_tracker = BeamTracker(
+            self.Beam_tracking_interval,
+            self.Beam_tracking_preparation_period,
+            self.update_pointing,
+            self.Fault,
+        )
 
     @log_exceptions()
     def configure_for_on(self):
@@ -253,7 +327,6 @@ class BeamDevice(LOFARDevice):
 
         super().configure_for_off()
 
-
     # --------
     # Commands
     # --------
@@ -269,11 +342,13 @@ class BeamDevice(LOFARDevice):
         """
 
         # Reshape the flatten input array
-        pointing_direction = numpy.array(pointing_direction).reshape(self._num_pointings, 3)
+        pointing_direction = numpy.array(pointing_direction).reshape(
+            self._num_pointings, 3
+        )
 
         self._set_pointing(pointing_direction, datetime.datetime.now())
 
-    @command(dtype_in = DevString)
+    @command(dtype_in=DevString)
     @DebugIt()
     @only_in_states(DEFAULT_COMMAND_STATES)
     def set_pointing_for_specific_time(self, parameters: DevString = None):
@@ -292,11 +367,12 @@ class BeamDevice(LOFARDevice):
         timestamp = datetime.datetime.fromtimestamp(timestamp_param)
 
         # Reshape the flatten pointing array
-        pointing_direction = numpy.array(pointing_direction).reshape(self._num_pointings, 3)
+        pointing_direction = numpy.array(pointing_direction).reshape(
+            self._num_pointings, 3
+        )
 
         self._set_pointing(pointing_direction, timestamp)
 
-
     @command(dtype_in=DevVarStringArray, dtype_out=DevVarDoubleArray)
     @DebugIt()
     @log_exceptions()
@@ -306,7 +382,9 @@ class BeamDevice(LOFARDevice):
         Calculate the delays based on the pointing list and the timestamp
         """
 
-        pointing_direction = numpy.array(pointing_direction).reshape(self._num_pointings, 3)
+        pointing_direction = numpy.array(pointing_direction).reshape(
+            self._num_pointings, 3
+        )
 
         delays = self._delays(pointing_direction, datetime.datetime.now())
 
@@ -318,19 +396,26 @@ class BeamDevice(LOFARDevice):
 
     # Directory where the casacore measures that we use, reside. We configure ~/.casarc to
     # use the symlink /opt/IERS/current, which we switch to the actual set of files to use.
-    measures_directory_R = attribute(dtype=str, access=AttrWriteType.READ, fget = lambda self: get_measures_directory())
+    measures_directory_R = attribute(
+        dtype=str, access=AttrWriteType.READ, fget=lambda self: get_measures_directory()
+    )
 
     # List of dowloaded measures (the latest 64, anyway)
-    measures_directories_available_R = attribute(dtype=(str,), max_dim_x=64, access=AttrWriteType.READ, fget = lambda self: sorted(get_available_measures_directories())[-64:])
+    measures_directories_available_R = attribute(
+        dtype=(str,),
+        max_dim_x=64,
+        access=AttrWriteType.READ,
+        fget=lambda self: sorted(get_available_measures_directories())[-64:],
+    )
 
     @command(dtype_out=str, doc_out="Name of newly installed measures directory")
     @DebugIt()
     @log_exceptions()
     def download_measures(self):
-        """ Download new measures tables into /opt/IERS, but do not activate them.
+        """Download new measures tables into /opt/IERS, but do not activate them.
 
-            NOTE: This may take a while to complete. You are advised to increase
-                  the timeout of the proxy using `my_device.set_timeout_millis(10000)`. """
+        NOTE: This may take a while to complete. You are advised to increase
+              the timeout of the proxy using `my_device.set_timeout_millis(10000)`."""
 
         return download_measures()
 
@@ -338,9 +423,9 @@ class BeamDevice(LOFARDevice):
     @DebugIt()
     @log_exceptions()
     def use_measures(self, newdir):
-        """ Activate a downloaded set of measures tables.
+        """Activate a downloaded set of measures tables.
 
-            NOTE: This will turn off and restart this device!! """
+        NOTE: This will turn off and restart this device!!"""
 
         # switch to requested measures
         use_measures_directory(newdir)
@@ -354,6 +439,7 @@ class BeamDevice(LOFARDevice):
         logger.warning("Restarting device to activate new measures tables")
         restart_python()
 
+
 # ----------
 # Run server
 # ----------
@@ -361,16 +447,21 @@ def main(**kwargs):
     """Main function of the Docker module."""
     return entry(BeamDevice, **kwargs)
 
+
 # ----------
 # Beam Tracker
-# ---------- 
+# ----------
+
 
-class BeamTracker():
+class BeamTracker:
 
     DISCONNECT_TIMEOUT = 3.0
 
     """ Object that encapsulates a Thread, resposible for beam tracking operations """
-    def __init__(self, interval, preparation_period, update_pointing_callback, fault_callback):
+
+    def __init__(
+        self, interval, preparation_period, update_pointing_callback, fault_callback
+    ):
         self.thread = None
         self.interval = interval
         self.preparation_period = preparation_period
@@ -385,7 +476,7 @@ class BeamTracker():
         self.stale_pointing = True
 
     def start(self):
-        """ Starts the Beam Tracking thread """
+        """Starts the Beam Tracking thread"""
         if self.thread:
             # already started
             return
@@ -397,11 +488,11 @@ class BeamTracker():
         logger.info("BeamTracking thread started")
 
     def is_alive(self):
-        """ Returns True just before the Thread run() method starts until just after the Thread run() method terminates. """
+        """Returns True just before the Thread run() method starts until just after the Thread run() method terminates."""
         return self.thread and self.thread.is_alive()
 
     def force_update(self):
-        """ Force the pointing to be updated. """
+        """Force the pointing to be updated."""
 
         self.stale_pointing = True
         self.notify_thread()
@@ -412,7 +503,7 @@ class BeamTracker():
             self.update_condition.notify()
 
     def stop(self):
-        """ Stops the Beam Tracking loop """
+        """Stops the Beam Tracking loop"""
 
         if not self.thread:
             return
@@ -433,10 +524,10 @@ class BeamTracker():
         logger.info("BeamTracking thread stopped")
 
     def _get_sleep_time(self):
-        """ Computes the sleep time (in seconds) that needs to be waited for the next beam tracking update """  
+        """Computes the sleep time (in seconds) to wait until the next beam tracking update"""
         now = datetime.datetime.now().timestamp()
 
-        # Computes the left seconds before the next update 
+        # Computes the seconds left before the next update
         next_update_in = self.interval - (now % self.interval)
 
         # Computes the needed sleep time before the next update
@@ -454,7 +545,7 @@ class BeamTracker():
     @log_exceptions()
     @fault_on_error()
     def _update_pointing_direction(self):
-        """ Updates the beam weights using a fixed interval of time """
+        """Updates the beam weights using a fixed interval of time"""
 
         # Check if flag beamtracking is true
         with self.update_lock:
@@ -466,11 +557,15 @@ class BeamTracker():
                 if self.stale_pointing:
                     next_update_in = now
                 else:
-                    next_update_in = now + datetime.timedelta(seconds=now.timestamp() % self.interval)
+                    next_update_in = now + datetime.timedelta(
+                        seconds=now.timestamp() % self.interval
+                    )
 
                     # sleep until the next update, or when interrupted (this releases the lock, allowing for notification)
                     # note that we need wait_for as conditions can be triggered multiple times in succession
-                    self.update_condition.wait_for(lambda: self.done or self.stale_pointing, self._get_sleep_time())
+                    self.update_condition.wait_for(
+                        lambda: self.done or self.stale_pointing, self._get_sleep_time()
+                    )
 
                 # update pointing at requested time
                 self.stale_pointing = False
diff --git a/tangostationcontrol/tangostationcontrol/devices/boot.py b/tangostationcontrol/tangostationcontrol/devices/boot.py
index b6dcf838d2a2c0faf23d15d03b49b4d76767fe78..ed3b5f1c2da84a1cde26093e28bf0956c0a98360 100644
--- a/tangostationcontrol/tangostationcontrol/devices/boot.py
+++ b/tangostationcontrol/tangostationcontrol/devices/boot.py
@@ -1,11 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the RECV project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ Boot Device Server for LOFAR2.0
 
@@ -13,23 +7,25 @@ Boots the rest of the station software.
 
 """
 
-# PyTango imports
-from tango import DebugIt
-from tango.server import command
-from tango.server import device_property, attribute
-from tango import AttrWriteType, DeviceProxy, DevState, DevSource
+import logging
 
 # Additional import
 import numpy
+from tango import AttrWriteType, DeviceProxy, DevState, DevSource
 
-from tangostationcontrol.devices.device_decorators import only_in_states
-
+# PyTango imports
+from tango import DebugIt
+from tango.server import command
+from tango.server import device_property, attribute
 from tangostationcontrol.common.entrypoint import entry
-from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
+from tangostationcontrol.common.lofar_logging import (
+    device_logging_to_python,
+    log_exceptions,
+)
 from tangostationcontrol.common.states import OPERATIONAL_STATES
+from tangostationcontrol.devices.device_decorators import only_in_states
 from tangostationcontrol.devices.lofar_device import LOFARDevice
 
-import logging
 logger = logging.getLogger()
 
 from threading import Thread
@@ -40,20 +36,23 @@ __all__ = ["Boot", "main"]
 class InitialisationException(Exception):
     pass
 
+
 class DevicesInitialiser(object):
     """
-        Initialise devices on this station which are not already on (reboot=False),
-        or all of them (reboot=True).
+    Initialise devices on this station which are not already on (reboot=False),
+    or all of them (reboot=True).
 
-        Devices which are unreachable are assumed to be brought down explicitly,
-        and are ignored.
+    Devices which are unreachable are assumed to be brought down explicitly,
+    and are ignored.
 
-        Initialisation happens in a separate thread. It is started by calling
-        the start() method, and progress can be followed by inspecting the
-        members progress (0-100), status (string), and is_running() (bool).
+    Initialisation happens in a separate thread. It is started by calling
+    the start() method, and progress can be followed by inspecting the
+    members progress (0-100), status (string), and is_running() (bool).
     """
 
-    def __init__(self, device_names, reboot=False, initialise_hardware=True, proxy_timeout=60.0):
+    def __init__(
+        self, device_names, reboot=False, initialise_hardware=True, proxy_timeout=60.0
+    ):
         self.reboot = reboot
         self.initialise_hardware = initialise_hardware
 
@@ -68,7 +67,7 @@ class DevicesInitialiser(object):
         self.set_status("Initialisation not started yet")
 
     def _get_device_proxies(self):
-        """ Obtain the Device Proxies to all the devices we are to initialise. """
+        """Obtain the Device Proxies to all the devices we are to initialise."""
 
         # Since Python3.7+, the insertion order equals the iteration order, which is what we depend on
         # to process the devices in the same order as in device_names.
@@ -156,7 +155,10 @@ class DevicesInitialiser(object):
                 continue
 
             if self.is_available(device):
-                if self.reboot or self.devices[device].state() not in OPERATIONAL_STATES:
+                if (
+                    self.reboot
+                    or self.devices[device].state() not in OPERATIONAL_STATES
+                ):
                     self.stop_device(device)
                     self.boot_device(device)
 
@@ -169,7 +171,7 @@ class DevicesInitialiser(object):
         self.progress = 100
 
     def is_available(self, device_name: str):
-        """ Return whether the device 'device_name' is actually available on this server. """
+        """Return whether the device 'device_name' is actually available on this server."""
 
         proxy = self.devices[device_name]
         try:
@@ -180,7 +182,7 @@ class DevicesInitialiser(object):
         return True
 
     def stop_device(self, device_name: str):
-        """ Stop device 'device_name'. """
+        """Stop device 'device_name'."""
 
         proxy = self.devices[device_name]
 
@@ -192,12 +194,14 @@ class DevicesInitialiser(object):
 
         proxy.Off()
         if proxy.state() != DevState.OFF:
-            raise InitialisationException(f"Could not turn off device {device_name}. It reports status: {proxy.status()}")
+            raise InitialisationException(
+                f"Could not turn off device {device_name}. It reports status: {proxy.status()}"
+            )
 
         self.set_status(f"[stopping {device_name}] Stopped device.")
 
     def boot_device(self, device_name: str):
-        """ Run the startup sequence for device 'device_name'. """
+        """Run the startup sequence for device 'device_name'."""
 
         proxy = self.devices[device_name]
 
@@ -208,10 +212,13 @@ class DevicesInitialiser(object):
             proxy.warm_boot()
 
         if proxy.state() not in OPERATIONAL_STATES:
-            raise InitialisationException(f"Could not boot device {device_name}. It reports status: {proxy.status()}")
+            raise InitialisationException(
+                f"Could not boot device {device_name}. It reports status: {proxy.status()}"
+            )
 
         self.set_status(f"[restarting {device_name}] Succesfully booted.")
 
+
 @device_logging_to_python()
 class Boot(LOFARDevice):
     # maximum number of devices boot.py supports
@@ -222,51 +229,95 @@ class Boot(LOFARDevice):
     # -----------------
 
     DeviceProxy_Time_Out = device_property(
-        dtype='DevDouble',
+        dtype="DevDouble",
         mandatory=False,
         default_value=60.0,
     )
 
     # Initialise the hardware when initialising a station. Can end badly when using simulators.
     Initialise_Hardware = device_property(
-        dtype='DevBoolean',
+        dtype="DevBoolean",
         mandatory=False,
         default_value=True,
     )
 
     # Which devices to initialise, and in which order
     Device_Names = device_property(
-        dtype='DevVarStringArray',
+        dtype="DevVarStringArray",
         mandatory=False,
-        default_value=["STAT/Docker/1", # Docker controls the device containers, so it goes before anything else
-                       "STAT/Configuration/1",  # Configuration device loads and update station configuration
-                       "STAT/PSOC/1",  # PSOC boot early to detect power delivery failure as fast as possible
-                       "STAT/PCON/1",  # PCON boot early because it is responsible for power delivery.
-                       "STAT/APSPU/1",  # APS Power Units control other hardware we want to initialise
-                       "STAT/APSCT/1",
-                       "STAT/CCD/1",
-                       "STAT/RECV/1",   # RCUs are input for SDP, so initialise them first
-                       "STAT/UNB2/1",   # Uniboards host SDP, so initialise them first
-                       "STAT/SDP/1",    # SDP controls the mask for SST/XST/BST/Beamlet, so initialise it first
-                       "STAT/BST/1",
-                       "STAT/SST/1",
-                       "STAT/XST/1",
-                       "STAT/Beamlet/1",
-                       "STAT/AntennaField/1", # Accesses RECV
-                       "STAT/TileBeam/1",     # Accesses AntennaField
-                       "STAT/DigitalBeam/1",  # Accessed SDP and Beamlet
-                       "STAT/TemperatureManager/1",
-                      ],
+        default_value=[
+            "STAT/Docker/1",
+            # Docker controls the device containers, so it goes before anything else
+            "STAT/Configuration/1",
+            # Configuration device loads and updates the station configuration
+            "STAT/PSOC/1",
+            # PSOC boots early to detect power delivery failure as fast as possible
+            "STAT/PCON/1",
+            # PCON boots early because it is responsible for power delivery.
+            "STAT/APSPU/1",
+            # APS Power Units control other hardware we want to initialise
+            "STAT/APSCT/1",
+            "STAT/CCD/1",
+            "STAT/RECV/1",
+            # RCUs are input for SDP, so initialise them first
+            "STAT/UNB2/1",
+            # Uniboards host SDP, so initialise them first
+            "STAT/SDP/1",
+            # SDP controls the mask for SST/XST/BST/Beamlet, so initialise it first
+            "STAT/BST/1",
+            "STAT/SST/1",
+            "STAT/XST/1",
+            "STAT/Beamlet/1",
+            "STAT/AntennaField/1",  # Accesses RECV
+            "STAT/TileBeam/1",  # Accesses AntennaField
+            "STAT/DigitalBeam/1",  # Accessed SDP and Beamlet
+            "STAT/TemperatureManager/1",
+        ],
     )
 
     # ----------
     # Attributes
     # ----------
-    booting_R = attribute(dtype=bool, access=AttrWriteType.READ, fget=lambda self: self.initialiser.is_running(), doc="Whether booting is in progress.")
-    progress_R = attribute(dtype=numpy.int32, access=AttrWriteType.READ, fget=lambda self: numpy.int32(self.initialiser.progress), doc="Percentage of devices that was initialised")
-    status_R = attribute(dtype=str, access=AttrWriteType.READ, fget=lambda self: self.initialiser.status, doc="Description of current boot activity")
-    initialised_devices_R = attribute(dtype=(str,), max_dim_x=MAX_BOOT_DEVICES, access=AttrWriteType.READ, fget=lambda self: [name for name, initialised in self.initialiser.device_initialised.items() if initialised], doc="Which devices were initialised succesfully")
-    uninitialised_devices_R = attribute(dtype=(str,), max_dim_x=MAX_BOOT_DEVICES, access=AttrWriteType.READ, fget=lambda self: [name for name, initialised in self.initialiser.device_initialised.items() if not initialised], doc="Which devices have not been initialised or failed to initialise")
+    booting_R = attribute(
+        dtype=bool,
+        access=AttrWriteType.READ,
+        fget=lambda self: self.initialiser.is_running(),
+        doc="Whether booting is in progress.",
+    )
+    progress_R = attribute(
+        dtype=numpy.int32,
+        access=AttrWriteType.READ,
+        fget=lambda self: numpy.int32(self.initialiser.progress),
+        doc="Percentage of devices that was initialised",
+    )
+    status_R = attribute(
+        dtype=str,
+        access=AttrWriteType.READ,
+        fget=lambda self: self.initialiser.status,
+        doc="Description of current boot activity",
+    )
+    initialised_devices_R = attribute(
+        dtype=(str,),
+        max_dim_x=MAX_BOOT_DEVICES,
+        access=AttrWriteType.READ,
+        fget=lambda self: [
+            name
+            for name, initialised in self.initialiser.device_initialised.items()
+            if initialised
+        ],
+        doc="Which devices were initialised succesfully",
+    )
+    uninitialised_devices_R = attribute(
+        dtype=(str,),
+        max_dim_x=MAX_BOOT_DEVICES,
+        access=AttrWriteType.READ,
+        fget=lambda self: [
+            name
+            for name, initialised in self.initialiser.device_initialised.items()
+            if not initialised
+        ],
+        doc="Which devices have not been initialised or failed to initialise",
+    )
 
     # --------
     # overloaded functions
@@ -287,16 +338,25 @@ class Boot(LOFARDevice):
 
     @log_exceptions()
     def configure_for_off(self):
-        """ user code here. is called when the state is set to OFF """
+        """user code here. is called when the state is set to OFF"""
         try:
             self.initialiser.stop()
         except Exception as e:
-            logger.warning("Exception while stopping OPC ua connection in configure_for_off function: {}. Exception ignored".format(e))
+            logger.warning(
+                "Exception while stopping OPC ua connection in configure_for_off function: {}. Exception ignored".format(
+                    e
+                )
+            )
 
     @log_exceptions()
     def configure_for_initialise(self):
         # create an initialiser object so we can query it even before starting the (first) initialisation
-        self.initialiser = DevicesInitialiser(self.Device_Names, False, self.Initialise_Hardware, self.DeviceProxy_Time_Out)
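+        # reboot=False: devices that are already on are left untouched here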
+        self.initialiser = DevicesInitialiser(
+            self.Device_Names,
+            False,
+            self.Initialise_Hardware,
+            self.DeviceProxy_Time_Out,
+        )
 
     def _boot(self, reboot=False, initialise_hardware=True):
         """
@@ -321,7 +381,9 @@ class Boot(LOFARDevice):
             pass
 
         # start new initialisation attempt
-        self.initialiser = DevicesInitialiser(self.Device_Names, reboot, initialise_hardware, self.DeviceProxy_Time_Out)
+        self.initialiser = DevicesInitialiser(
+            self.Device_Names, reboot, initialise_hardware, self.DeviceProxy_Time_Out
+        )
         self.initialiser.start()
 
     @command()
@@ -345,6 +407,7 @@ class Boot(LOFARDevice):
     def warm_boot(self):
         self._boot(reboot=False, initialise_hardware=False)
 
+
 # ----------
 # Run server
 # ----------
diff --git a/tangostationcontrol/tangostationcontrol/devices/ccd.py b/tangostationcontrol/tangostationcontrol/devices/ccd.py
index 20d1fd8042c06e8dd0b434c08648d9f13d94cded..2d5993c96e0aa0e637baf1b2031156b89ad188cb 100644
--- a/tangostationcontrol/tangostationcontrol/devices/ccd.py
+++ b/tangostationcontrol/tangostationcontrol/devices/ccd.py
@@ -1,32 +1,28 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the RECV project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ CCD Device Server for LOFAR2.0
 
 """
 
+import logging
+
+import numpy
+from tango import AttrWriteType
+
 # PyTango imports
 from tango import DebugIt
 from tango.server import command, attribute, device_property
-from tango import AttrWriteType
-import numpy
-# Additional import
-
 from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
+from tangostationcontrol.common.constants import DEFAULT_POLLING_PERIOD
 from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.common.lofar_logging import device_logging_to_python
 from tangostationcontrol.common.states import DEFAULT_COMMAND_STATES
-from tangostationcontrol.common.constants import DEFAULT_POLLING_PERIOD
 from tangostationcontrol.devices.device_decorators import only_in_states
 from tangostationcontrol.devices.opcua_device import OPCUADevice
 
-import logging
+# Additional import
+
 logger = logging.getLogger()
 
 __all__ = ["CCD", "main"]
@@ -39,75 +35,127 @@ class CCD(OPCUADevice):
     # -----------------
 
     CCDTR_monitor_rate_RW_default = device_property(
-        dtype='DevLong64',
-        mandatory=False,
-        default_value=1
+        dtype="DevLong64", mandatory=False, default_value=1
     )
 
     # ----- Timing values
 
     CCD_On_Off_timeout = device_property(
-        doc='Maximum amount of time to wait after turning CCD on or off',
-        dtype='DevFloat',
+        doc="Maximum amount of time to wait after turning CCD on or off",
+        dtype="DevFloat",
         mandatory=False,
-        default_value=10.0
+        default_value=10.0,
     )
 
     # ----------
     # Attributes
     # ----------
-    CCDTR_I2C_error_R =         AttributeWrapper(comms_annotation=["CCDTR_I2C_error_R"         ], datatype=numpy.int64)
-    CCDTR_monitor_rate_RW =     AttributeWrapper(comms_annotation=["CCDTR_monitor_rate_RW"     ], datatype=numpy.int64, access=AttrWriteType.READ_WRITE)
-    CCDTR_translator_busy_R =   AttributeWrapper(comms_annotation=["CCDTR_translator_busy_R"   ], datatype=bool)
-    CCD_clear_lock_R =          AttributeWrapper(comms_annotation=["CCD_clear_lock_R"          ], datatype=bool)
-    CCD_clear_lock_RW =         AttributeWrapper(comms_annotation=["CCD_clear_lock_RW"         ], datatype=bool, access=AttrWriteType.READ_WRITE)
-    CCD_FAN_RPM_R =             AttributeWrapper(comms_annotation=["CCD_FAN_RPM_R"             ], datatype=numpy.float64)
-    CCD_INPUT_10MHz_good_R =    AttributeWrapper(comms_annotation=["CCD_INPUT_10MHz_good_R"    ], datatype=bool)
-    CCD_INPUT_PPS_good_R =      AttributeWrapper(comms_annotation=["CCD_INPUT_PPS_good_R"      ], datatype=bool)
-    CCD_loss_lock_R =           AttributeWrapper(comms_annotation=["CCD_loss_lock_R"           ], datatype=bool)
-    CCD_PCB_ID_R =              AttributeWrapper(comms_annotation=["CCD_PCB_ID_R"              ], datatype=numpy.int64)
-    CCD_PCB_number_R =          AttributeWrapper(comms_annotation=["CCD_PCB_number_R"          ], datatype=str)
-    CCD_PCB_version_R =         AttributeWrapper(comms_annotation=["CCD_PCB_version_R"         ], datatype=str)
-    CCD_PLL_locked_R =          AttributeWrapper(comms_annotation=["CCD_PLL_locked_R"          ], datatype=bool)
-    CCD_PWR_CLK_DIST_3V3_R =    AttributeWrapper(comms_annotation=["CCD_PWR_CLK_DIST_3V3_R"    ], datatype=numpy.float64)
-    CCD_PWR_CLK_INPUT_3V3_R =   AttributeWrapper(comms_annotation=["CCD_PWR_CLK_INPUT_3V3_R"   ], datatype=numpy.float64)
-    CCD_PWR_CTRL_3V3_R =        AttributeWrapper(comms_annotation=["CCD_PWR_CTRL_3V3_R"        ], datatype=numpy.float64)
-    CCD_PWR_OCXO_INPUT_3V3_R =  AttributeWrapper(comms_annotation=["CCD_PWR_OCXO_INPUT_3V3_R"  ], datatype=numpy.float64)
-    CCD_PWR_on_R =              AttributeWrapper(comms_annotation=["CCD_PWR_on_R"              ], datatype=bool)
-    CCD_PWR_PLL_INPUT_3V3_R =   AttributeWrapper(comms_annotation=["CCD_PWR_PLL_INPUT_3V3_R"   ], datatype=numpy.float64)
-    CCD_PWR_PPS_INPUT_3V3_R =   AttributeWrapper(comms_annotation=["CCD_PWR_PPS_INPUT_3V3_R"   ], datatype=numpy.float64)
-    CCD_PWR_PPS_OUTPUT_3V3_R =  AttributeWrapper(comms_annotation=["CCD_PWR_PPS_OUTPUT_3V3_R"  ], datatype=numpy.float64)
-    CCD_TEMP_R =                AttributeWrapper(comms_annotation=["CCD_TEMP_R"                ], datatype=numpy.float64)
+    CCDTR_I2C_error_R = AttributeWrapper(
+        comms_annotation=["CCDTR_I2C_error_R"], datatype=numpy.int64
+    )
+    CCDTR_monitor_rate_RW = AttributeWrapper(
+        comms_annotation=["CCDTR_monitor_rate_RW"],
+        datatype=numpy.int64,
+        access=AttrWriteType.READ_WRITE,
+    )
+    CCDTR_translator_busy_R = AttributeWrapper(
+        comms_annotation=["CCDTR_translator_busy_R"], datatype=bool
+    )
+    CCD_clear_lock_R = AttributeWrapper(
+        comms_annotation=["CCD_clear_lock_R"], datatype=bool
+    )
+    CCD_clear_lock_RW = AttributeWrapper(
+        comms_annotation=["CCD_clear_lock_RW"],
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    CCD_FAN_RPM_R = AttributeWrapper(
+        comms_annotation=["CCD_FAN_RPM_R"], datatype=numpy.float64
+    )
+    CCD_INPUT_10MHz_good_R = AttributeWrapper(
+        comms_annotation=["CCD_INPUT_10MHz_good_R"], datatype=bool
+    )
+    CCD_INPUT_PPS_good_R = AttributeWrapper(
+        comms_annotation=["CCD_INPUT_PPS_good_R"], datatype=bool
+    )
+    CCD_loss_lock_R = AttributeWrapper(
+        comms_annotation=["CCD_loss_lock_R"], datatype=bool
+    )
+    CCD_PCB_ID_R = AttributeWrapper(
+        comms_annotation=["CCD_PCB_ID_R"], datatype=numpy.int64
+    )
+    CCD_PCB_number_R = AttributeWrapper(
+        comms_annotation=["CCD_PCB_number_R"], datatype=str
+    )
+    CCD_PCB_version_R = AttributeWrapper(
+        comms_annotation=["CCD_PCB_version_R"], datatype=str
+    )
+    CCD_PLL_locked_R = AttributeWrapper(
+        comms_annotation=["CCD_PLL_locked_R"], datatype=bool
+    )
+    CCD_PWR_CLK_DIST_3V3_R = AttributeWrapper(
+        comms_annotation=["CCD_PWR_CLK_DIST_3V3_R"], datatype=numpy.float64
+    )
+    CCD_PWR_CLK_INPUT_3V3_R = AttributeWrapper(
+        comms_annotation=["CCD_PWR_CLK_INPUT_3V3_R"], datatype=numpy.float64
+    )
+    CCD_PWR_CTRL_3V3_R = AttributeWrapper(
+        comms_annotation=["CCD_PWR_CTRL_3V3_R"], datatype=numpy.float64
+    )
+    CCD_PWR_OCXO_INPUT_3V3_R = AttributeWrapper(
+        comms_annotation=["CCD_PWR_OCXO_INPUT_3V3_R"], datatype=numpy.float64
+    )
+    CCD_PWR_on_R = AttributeWrapper(comms_annotation=["CCD_PWR_on_R"], datatype=bool)
+    CCD_PWR_PLL_INPUT_3V3_R = AttributeWrapper(
+        comms_annotation=["CCD_PWR_PLL_INPUT_3V3_R"], datatype=numpy.float64
+    )
+    CCD_PWR_PPS_INPUT_3V3_R = AttributeWrapper(
+        comms_annotation=["CCD_PWR_PPS_INPUT_3V3_R"], datatype=numpy.float64
+    )
+    CCD_PWR_PPS_OUTPUT_3V3_R = AttributeWrapper(
+        comms_annotation=["CCD_PWR_PPS_OUTPUT_3V3_R"], datatype=numpy.float64
+    )
+    CCD_TEMP_R = AttributeWrapper(
+        comms_annotation=["CCD_TEMP_R"], datatype=numpy.float64
+    )
     # ----------
     # Summarising Attributes
     # ----------
-    CCD_error_R                 = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
+    CCD_error_R = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
 
     def read_CCD_error_R(self):
-        errors = [self.read_attribute("CCDTR_I2C_error_R") > 0,
-                self.alarm_val("CCD_loss_lock_R"),
-                self.read_attribute("CCD_INPUT_10MHz_good_R"),
-                not self.read_attribute("CCD_INPUT_10MHz_good_R"),
-                not self.read_attribute("CCD_INPUT_PPS_good_R") and not self.read_attribute("CCD_clear_lock_R"),
-                not self.read_attribute("CCD_PLL_locked_R")]
+        errors = [
+            self.read_attribute("CCDTR_I2C_error_R") > 0,
+            self.alarm_val("CCD_loss_lock_R"),
+            self.read_attribute("CCD_INPUT_10MHz_good_R"),
+            not self.read_attribute("CCD_INPUT_10MHz_good_R"),
+            not self.read_attribute("CCD_INPUT_PPS_good_R")
+            and not self.read_attribute("CCD_clear_lock_R"),
+            not self.read_attribute("CCD_PLL_locked_R"),
+        ]
         return any(errors)
 
-    CCD_TEMP_error_R            = attribute(dtype=bool, fisallowed="is_attribute_access_allowed", polling_period=DEFAULT_POLLING_PERIOD)
-    CCD_VOUT_error_R            = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
+    CCD_TEMP_error_R = attribute(
+        dtype=bool,
+        fisallowed="is_attribute_access_allowed",
+        polling_period=DEFAULT_POLLING_PERIOD,
+    )
+    CCD_VOUT_error_R = attribute(dtype=bool, fisallowed="is_attribute_access_allowed")
 
     def read_CCD_TEMP_error_R(self):
-        return (self.alarm_val("CCD_TEMP_R"))
+        return self.alarm_val("CCD_TEMP_R")
 
     def read_CCD_VOUT_error_R(self):
-        return ( self.alarm_val("CCD_PWR_CLK_DIST_3V3_R")
-               or self.alarm_val("CCD_PWR_CLK_INPUT_3V3_R")
-               or self.alarm_val("CCD_PWR_CTRL_3V3_R")
-               or self.alarm_val("CCD_PWR_OCXO_INPUT_3V3_R")
-               or self.alarm_val("CCD_PWR_PLL_INPUT_3V3_R")
-               or self.alarm_val("CCD_PWR_PPS_INPUT_3V3_R")
-               or self.alarm_val("CCD_PWR_PPS_OUTPUT_3V3_R")
-               or (not self.read_attribute("CCD_PWR_on_R"))
-               )
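+        # report a voltage error when any rail triggers its alarm or the CCD power is off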
+        return (
+            self.alarm_val("CCD_PWR_CLK_DIST_3V3_R")
+            or self.alarm_val("CCD_PWR_CLK_INPUT_3V3_R")
+            or self.alarm_val("CCD_PWR_CTRL_3V3_R")
+            or self.alarm_val("CCD_PWR_OCXO_INPUT_3V3_R")
+            or self.alarm_val("CCD_PWR_PLL_INPUT_3V3_R")
+            or self.alarm_val("CCD_PWR_PPS_INPUT_3V3_R")
+            or self.alarm_val("CCD_PWR_PPS_OUTPUT_3V3_R")
+            or (not self.read_attribute("CCD_PWR_on_R"))
+        )
 
     # --------
     # overloaded functions
@@ -117,7 +165,7 @@ class CCD(OPCUADevice):
     @DebugIt()
     @only_in_states(DEFAULT_COMMAND_STATES)
     def reset_hardware(self):
-        """ Initialise the CCD hardware. """
+        """Initialise the CCD hardware."""
 
         # Cycle clock. Quickly toggling the heater should not cool the heater down too much.
         self.CCD_off()
@@ -129,10 +177,12 @@ class CCD(OPCUADevice):
             if self.read_attribute("CCDTR_I2C_error_R"):
                 raise Exception("I2C is not working.")
             else:
-                logger.warning("CCD not locked, this may indicate the clock has not yet warmed up")
+                logger.warning(
+                    "CCD not locked, this may indicate the clock has not yet warmed up"
+                )
 
     def _disable_hardware(self):
-        """ Disable the CCD hardware.
+        """Disable the CCD hardware.
         WARNING: The CCD contains a heater that takes about 15 minutes to fully heat up from a cold start.
         This
         """
@@ -167,7 +217,6 @@ class CCD(OPCUADevice):
         self.opcua_connection.call_method(["CCD_on"])
 
 
-
 # ----------
 # Run server
 # ----------
diff --git a/tangostationcontrol/tangostationcontrol/devices/configuration_device.py b/tangostationcontrol/tangostationcontrol/devices/configuration_device.py
index d006ce0e8ca82f9126d2f4b0d1ec3fa0b3e984c0..e11aa35a9314fc03b764a347d2707463377ee4c6 100644
--- a/tangostationcontrol/tangostationcontrol/devices/configuration_device.py
+++ b/tangostationcontrol/tangostationcontrol/devices/configuration_device.py
@@ -1,17 +1,14 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the RECV project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ Configuration Device Server for LOFAR2.0
 
 Handles and exposes the station configuration
 
 """
+import json
+import logging
+
 # PyTango imports
 from tango import AttrWriteType, Database, DebugIt, DevString
 from tango.server import attribute, command
@@ -19,68 +16,75 @@ from tango.server import attribute, command
 # Additional import
 from tangostationcontrol.common.configuration import StationConfiguration
 from tangostationcontrol.common.entrypoint import entry
-from tangostationcontrol.devices.device_decorators import only_in_states
+from tangostationcontrol.common.lofar_logging import (
+    device_logging_to_python,
+    log_exceptions,
+)
 from tangostationcontrol.common.states import DEFAULT_COMMAND_STATES
+from tangostationcontrol.devices.device_decorators import only_in_states
 from tangostationcontrol.devices.lofar_device import LOFARDevice
-from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
 
-import json
-import logging
 logger = logging.getLogger()
 
 __all__ = ["Configuration", "main"]
 
+
 @device_logging_to_python()
 class Configuration(LOFARDevice):
     # -----------------
     # Device Properties
     # -----------------
-    
+
     # ----------
     # Attributes
     # ----------
-    station_configuration_RW = attribute(dtype=str, access=AttrWriteType.READ_WRITE, doc='The Tango properties of all the devices in this station, as a JSON string.')
+    station_configuration_RW = attribute(
+        dtype=str,
+        access=AttrWriteType.READ_WRITE,
+        doc="The Tango properties of all the devices in this station, as a JSON string.",
+    )
 
     def read_station_configuration_RW(self):
         return self._dump_configdb()
-    
+
     def write_station_configuration_RW(self, station_configuration):
         self._load_configdb(station_configuration)
-    
+
     def _dump_configdb(self):
-        """ Returns the TangoDB station configuration as a JSON string """
+        """Returns the TangoDB station configuration as a JSON string"""
         dbdata = self.station_configuration.get_tangodb_data()
         return json.dumps(dbdata, ensure_ascii=False, indent=4, sort_keys=True)
-    
+
     def _load_configdb(self, station_configuration):
-        """ Takes a JSON string which represents the station configuration 
-        and loads the whole configuration from scratch. 
-        
-        N.B. it does not update, it loads a full new configuration. 
+        """Takes a JSON string which represents the station configuration
+        and loads the whole configuration from scratch.
+
+        N.B. it does not update; it loads a full new configuration.
         """
         self.station_configuration.load_configdb(station_configuration, update=False)
-    
+
     # --------
     # Commands
     # --------
     @command(dtype_in=DevString)
     @DebugIt()
-    @only_in_states(DEFAULT_COMMAND_STATES)   
-    def update_station_configuration(self, station_configuration:str):
-        """ Takes a JSON string which represents the station configuration 
-        and upload the whole configuration. 
-        
+    @only_in_states(DEFAULT_COMMAND_STATES)
+    def update_station_configuration(self, station_configuration: str):
+        """Takes a JSON string which represents the station configuration
+        and uploads the whole configuration.
+
         N.B. it does not delete existing devices, it updates overlapping parameters.
         """
         self.station_configuration.update_configdb(station_configuration)
+
     # --------
     # overloaded functions
     # --------
     @log_exceptions()
     def configure_for_initialise(self):
         super().configure_for_initialise()
-        self.station_configuration = StationConfiguration(db = Database())
-         
+        self.station_configuration = StationConfiguration(db=Database())
+
 
 # ----------
 # Run server
diff --git a/tangostationcontrol/tangostationcontrol/devices/device_decorators.py b/tangostationcontrol/tangostationcontrol/devices/device_decorators.py
index 88486177a56240b7c40033c3bb96fe0da9475f46..12a0247a414d98e9c2eb84db079ba35f675e7fe7 100644
--- a/tangostationcontrol/tangostationcontrol/devices/device_decorators.py
+++ b/tangostationcontrol/tangostationcontrol/devices/device_decorators.py
@@ -1,17 +1,23 @@
-from tango import DevState
-from functools import wraps
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import logging
 import time
+from functools import wraps
+
+from tango import DevState
+
 logger = logging.getLogger()
 
 __all__ = ["only_in_states", "only_when_on", "fault_on_error", "TimeIt"]
 
+
 def only_in_states(allowed_states, log=True):
     """
-      Wrapper to call and return the wrapped function if the device is
-      in one of the given states. Otherwise a PyTango exception is thrown.
+    Wrapper to call and return the wrapped function if the device is
+    in one of the given states. Otherwise a PyTango exception is thrown.
     """
+
     def wrapper(func):
         @wraps(func)
         def state_check_wrapper(self, *args, **kwargs):
@@ -19,20 +25,26 @@ def only_in_states(allowed_states, log=True):
                 return func(self, *args, **kwargs)
 
             if log:
-                logger.warning(f"Illegal command: Function {func.__name__} can only be called in states {allowed_states}. Current state: {self.get_state()}")
+                logger.warning(
+                    f"Illegal command: Function {func.__name__} can only be called in states {allowed_states}. Current state: {self.get_state()}"
+                )
 
-            raise Exception(f"IllegalCommand: Function {func.__name__} can only be called in states {allowed_states}. Current state: {self.get_state()}")
+            raise Exception(
+                f"IllegalCommand: Function {func.__name__} can only be called in states {allowed_states}. Current state: {self.get_state()}"
+            )
 
         return state_check_wrapper
 
     return wrapper
 
+
 def only_when_on():
     """
-      Wrapper to call and return the wrapped function if the device is
-      in the ON state. Otherwise None is returned and nothing
-      will be called.
+    Wrapper to call and return the wrapped function if the device is
+    in the ON state. Otherwise None is returned and nothing
+    will be called.
     """
+
     def inner(func):
         @wraps(func)
         def when_on_wrapper(self, *args, **kwargs):
@@ -45,10 +57,12 @@ def only_when_on():
 
     return inner
 
+
 def fault_on_error():
     """
-      Wrapper to catch exceptions. Sets the device in a FAULT state if any occurs.
+    Wrapper to catch exceptions. Sets the device in a FAULT state if any occurs.
     """
+
     def inner(func):
         @wraps(func)
         def error_wrapper(self, *args, **kwargs):
@@ -66,17 +80,17 @@ def fault_on_error():
 
 def TimeIt(log_function=None):
     """
-      Wrapper to time calls. Stores the timing log in a
-      <function>.statistics property as a dict with the following
-      information:
+    Wrapper to time calls. Stores the timing log in a
+    <function>.statistics property as a dict with the following
+    information:
 
-        "count":        number of times the function was called
-        "last":         duration of the last invocation
-        "history":      last 10 durations
+      "count":        number of times the function was called
+      "last":         duration of the last invocation
+      "history":      last 10 durations
 
-      NOTE: If the function called throws an exception, timing information
-            is not logged or recorded. Those calls are expected to be
-            uncharacteristically cheap, after all.
+    NOTE: If the function called throws an exception, timing information
+          is not logged or recorded. Those calls are expected to be
+          uncharacteristically cheap, after all.
     """
 
     def inner(func):
@@ -99,7 +113,9 @@ def TimeIt(log_function=None):
             statistics["history"] = statistics["history"][-9:] + [statistics["last"]]
 
             if log_function:
-                log_function(f"Call to {func.__name__} took {(after - before)/1e6} ms")
+                log_function(
+                    f"Call to {func.__name__} took {(after - before) / 1e6} ms"
+                )
 
             # return function result (if any)
             return result
@@ -109,4 +125,3 @@ def TimeIt(log_function=None):
         return timer_wrapper
 
     return inner
-
diff --git a/tangostationcontrol/tangostationcontrol/devices/docker_device.py b/tangostationcontrol/tangostationcontrol/devices/docker_device.py
index b37072c915961e10632a7c65dc02ffc863b76fe7..e21500202518a2fe559d8cc93e99f8f61efbc807 100644
--- a/tangostationcontrol/tangostationcontrol/devices/docker_device.py
+++ b/tangostationcontrol/tangostationcontrol/devices/docker_device.py
@@ -1,29 +1,28 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the Docker project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ Docker Device Server for LOFAR2.0
 
 """
 
+import asyncio
+import logging
+
+from tango import AttrWriteType
+
 # PyTango imports
 from tango.server import device_property
-from tango import AttrWriteType
-import asyncio
+from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
+from tangostationcontrol.clients.docker_client import DockerClient
 
 # Additional import
 from tangostationcontrol.common.entrypoint import entry
-from tangostationcontrol.clients.docker_client import DockerClient
-from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
+from tangostationcontrol.common.lofar_logging import (
+    device_logging_to_python,
+    log_exceptions,
+)
 from tangostationcontrol.devices.lofar_device import LOFARDevice
-from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
 
-import logging
 logger = logging.getLogger()
 
 __all__ = ["Docker", "main"]
@@ -36,9 +35,7 @@ class Docker(LOFARDevice):
     # -----------------
 
     Docker_Base_URL = device_property(
-        dtype='DevString',
-        mandatory=False,
-        default_value="unix:///var/run/docker.sock"
+        dtype="DevString", mandatory=False, default_value="unix:///var/run/docker.sock"
     )
 
     # ----------
@@ -46,108 +43,322 @@ class Docker(LOFARDevice):
     # ----------
 
     # Software devices
-    device_boot_R = AttributeWrapper(comms_annotation={"container": "device-boot"}, datatype=bool)
-    device_boot_RW = AttributeWrapper(comms_annotation={"container": "device-boot"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    device_docker_R = AttributeWrapper(comms_annotation={"container": "device-docker"}, datatype=bool)
-    device_configuration_R = AttributeWrapper(comms_annotation={"container": "device-configuration"}, datatype=bool)
-    device_configuration_RW = AttributeWrapper(comms_annotation={"container": "device-configuration"}, datatype=bool, access=AttrWriteType.READ_WRITE)
+    device_boot_R = AttributeWrapper(
+        comms_annotation={"container": "device-boot"}, datatype=bool
+    )
+    device_boot_RW = AttributeWrapper(
+        comms_annotation={"container": "device-boot"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    device_docker_R = AttributeWrapper(
+        comms_annotation={"container": "device-docker"}, datatype=bool
+    )
+    device_configuration_R = AttributeWrapper(
+        comms_annotation={"container": "device-configuration"}, datatype=bool
+    )
+    device_configuration_RW = AttributeWrapper(
+        comms_annotation={"container": "device-configuration"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
     # device_docker_RW is not available, as we cannot start our own container`
-    device_temperature_manager_R = AttributeWrapper(comms_annotation={"container": "device-temperature-manager"}, datatype=bool)
-    device_temperature_manager_RW = AttributeWrapper(comms_annotation={"container": "device-temperature-manager"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    device_observation_R = AttributeWrapper(comms_annotation={"container": "device-observation"}, datatype=bool)
-    device_observation_RW = AttributeWrapper(comms_annotation={"container": "device-observation"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    device_observation_control_R = AttributeWrapper(comms_annotation={"container": "device-observation-control"}, datatype=bool)
-    device_observation_control_RW = AttributeWrapper(comms_annotation={"container": "device-observation-control"}, datatype=bool, access=AttrWriteType.READ_WRITE)
+    device_temperature_manager_R = AttributeWrapper(
+        comms_annotation={"container": "device-temperature-manager"}, datatype=bool
+    )
+    device_temperature_manager_RW = AttributeWrapper(
+        comms_annotation={"container": "device-temperature-manager"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    device_observation_R = AttributeWrapper(
+        comms_annotation={"container": "device-observation"}, datatype=bool
+    )
+    device_observation_RW = AttributeWrapper(
+        comms_annotation={"container": "device-observation"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    device_observation_control_R = AttributeWrapper(
+        comms_annotation={"container": "device-observation-control"}, datatype=bool
+    )
+    device_observation_control_RW = AttributeWrapper(
+        comms_annotation={"container": "device-observation-control"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
 
     # Hardware devices
-    device_apsct_R = AttributeWrapper(comms_annotation={"container": "device-apsct"}, datatype=bool)
-    device_apsct_RW = AttributeWrapper(comms_annotation={"container": "device-apsct"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    device_apspu_R = AttributeWrapper(comms_annotation={"container": "device-apspu"}, datatype=bool)
-    device_apspu_RW = AttributeWrapper(comms_annotation={"container": "device-apspu"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    device_recv_R = AttributeWrapper(comms_annotation={"container": "device-recv"}, datatype=bool)
-    device_recv_RW = AttributeWrapper(comms_annotation={"container": "device-recv"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    device_sdp_R = AttributeWrapper(comms_annotation={"container": "device-sdp"}, datatype=bool)
-    device_sdp_RW = AttributeWrapper(comms_annotation={"container": "device-sdp"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    device_unb2_R = AttributeWrapper(comms_annotation={"container": "device-unb2"}, datatype=bool)
-    device_unb2_RW = AttributeWrapper(comms_annotation={"container": "device-unb2"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    device_pcon_R = AttributeWrapper(comms_annotation={"container": "device-pcon"}, datatype=bool)
-    device_pcon_RW = AttributeWrapper(comms_annotation={"container": "device-pcon"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    device_psoc_R = AttributeWrapper(comms_annotation={"container": "device-psoc"}, datatype=bool)
-    device_psoc_RW = AttributeWrapper(comms_annotation={"container": "device-psoc"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    device_ccd_R = AttributeWrapper(comms_annotation={"container": "device-ccd"}, datatype=bool)
-    device_ccd_RW = AttributeWrapper(comms_annotation={"container": "device-ccd"}, datatype=bool, access=AttrWriteType.READ_WRITE)
+    device_apsct_R = AttributeWrapper(
+        comms_annotation={"container": "device-apsct"}, datatype=bool
+    )
+    device_apsct_RW = AttributeWrapper(
+        comms_annotation={"container": "device-apsct"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    device_apspu_R = AttributeWrapper(
+        comms_annotation={"container": "device-apspu"}, datatype=bool
+    )
+    device_apspu_RW = AttributeWrapper(
+        comms_annotation={"container": "device-apspu"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    device_recv_R = AttributeWrapper(
+        comms_annotation={"container": "device-recv"}, datatype=bool
+    )
+    device_recv_RW = AttributeWrapper(
+        comms_annotation={"container": "device-recv"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    device_sdp_R = AttributeWrapper(
+        comms_annotation={"container": "device-sdp"}, datatype=bool
+    )
+    device_sdp_RW = AttributeWrapper(
+        comms_annotation={"container": "device-sdp"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    device_unb2_R = AttributeWrapper(
+        comms_annotation={"container": "device-unb2"}, datatype=bool
+    )
+    device_unb2_RW = AttributeWrapper(
+        comms_annotation={"container": "device-unb2"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    device_pcon_R = AttributeWrapper(
+        comms_annotation={"container": "device-pcon"}, datatype=bool
+    )
+    device_pcon_RW = AttributeWrapper(
+        comms_annotation={"container": "device-pcon"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    device_psoc_R = AttributeWrapper(
+        comms_annotation={"container": "device-psoc"}, datatype=bool
+    )
+    device_psoc_RW = AttributeWrapper(
+        comms_annotation={"container": "device-psoc"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    device_ccd_R = AttributeWrapper(
+        comms_annotation={"container": "device-ccd"}, datatype=bool
+    )
+    device_ccd_RW = AttributeWrapper(
+        comms_annotation={"container": "device-ccd"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
 
     # Statistics devices
-    device_sst_R = AttributeWrapper(comms_annotation={"container": "device-sst"}, datatype=bool)
-    device_sst_RW = AttributeWrapper(comms_annotation={"container": "device-sst"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    device_xst_R = AttributeWrapper(comms_annotation={"container": "device-xst"}, datatype=bool)
-    device_xst_RW = AttributeWrapper(comms_annotation={"container": "device-xst"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    device_bst_R = AttributeWrapper(comms_annotation={"container": "device-bst"}, datatype=bool)
-    device_bst_RW = AttributeWrapper(comms_annotation={"container": "device-bst"}, datatype=bool, access=AttrWriteType.READ_WRITE)
+    device_sst_R = AttributeWrapper(
+        comms_annotation={"container": "device-sst"}, datatype=bool
+    )
+    device_sst_RW = AttributeWrapper(
+        comms_annotation={"container": "device-sst"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    device_xst_R = AttributeWrapper(
+        comms_annotation={"container": "device-xst"}, datatype=bool
+    )
+    device_xst_RW = AttributeWrapper(
+        comms_annotation={"container": "device-xst"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    device_bst_R = AttributeWrapper(
+        comms_annotation={"container": "device-bst"}, datatype=bool
+    )
+    device_bst_RW = AttributeWrapper(
+        comms_annotation={"container": "device-bst"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
 
     # Beam devices
-    device_antennafield_R = AttributeWrapper(comms_annotation={"container": "device-antennafield"}, datatype=bool)
-    device_antennafield_RW = AttributeWrapper(comms_annotation={"container": "device-antennafield"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    device_beamlet_R = AttributeWrapper(comms_annotation={"container": "device-beamlet"}, datatype=bool)
-    device_beamlet_RW = AttributeWrapper(comms_annotation={"container": "device-beamlet"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    device_digitalbeam_R = AttributeWrapper(comms_annotation={"container": "device-digitalbeam"}, datatype=bool)
-    device_digitalbeam_RW = AttributeWrapper(comms_annotation={"container": "device-digitalbeam"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    device_tilebeam_R = AttributeWrapper(comms_annotation={"container": "device-tilebeam"}, datatype=bool)
-    device_tilebeam_RW = AttributeWrapper(comms_annotation={"container": "device-tilebeam"}, datatype=bool, access=AttrWriteType.READ_WRITE)
+    device_antennafield_R = AttributeWrapper(
+        comms_annotation={"container": "device-antennafield"}, datatype=bool
+    )
+    device_antennafield_RW = AttributeWrapper(
+        comms_annotation={"container": "device-antennafield"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    device_beamlet_R = AttributeWrapper(
+        comms_annotation={"container": "device-beamlet"}, datatype=bool
+    )
+    device_beamlet_RW = AttributeWrapper(
+        comms_annotation={"container": "device-beamlet"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    device_digitalbeam_R = AttributeWrapper(
+        comms_annotation={"container": "device-digitalbeam"}, datatype=bool
+    )
+    device_digitalbeam_RW = AttributeWrapper(
+        comms_annotation={"container": "device-digitalbeam"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    device_tilebeam_R = AttributeWrapper(
+        comms_annotation={"container": "device-tilebeam"}, datatype=bool
+    )
+    device_tilebeam_RW = AttributeWrapper(
+        comms_annotation={"container": "device-tilebeam"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
 
     # Other containers
-    archiver_timescale_R = AttributeWrapper(comms_annotation={"container": "archiver-timescale"}, datatype=bool)
-    archiver_timescale_RW = AttributeWrapper(comms_annotation={"container": "archiver-timescale"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    databaseds_R = AttributeWrapper(comms_annotation={"container": "databaseds"}, datatype=bool)
-    databaseds_RW = AttributeWrapper(comms_annotation={"container": "databaseds"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    dsconfig_R = AttributeWrapper(comms_annotation={"container": "dsconfig"}, datatype=bool)
-    dsconfig_RW = AttributeWrapper(comms_annotation={"container": "dsconfig"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    grafana_R = AttributeWrapper(comms_annotation={"container": "grafana"}, datatype=bool)
-    grafana_RW = AttributeWrapper(comms_annotation={"container": "grafana"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    logstash_R = AttributeWrapper(comms_annotation={"container": "logstash"}, datatype=bool)
-    logstash_RW = AttributeWrapper(comms_annotation={"container": "logstash"}, datatype=bool, access=AttrWriteType.READ_WRITE)
+    archiver_timescale_R = AttributeWrapper(
+        comms_annotation={"container": "archiver-timescale"}, datatype=bool
+    )
+    archiver_timescale_RW = AttributeWrapper(
+        comms_annotation={"container": "archiver-timescale"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    databaseds_R = AttributeWrapper(
+        comms_annotation={"container": "databaseds"}, datatype=bool
+    )
+    databaseds_RW = AttributeWrapper(
+        comms_annotation={"container": "databaseds"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    dsconfig_R = AttributeWrapper(
+        comms_annotation={"container": "dsconfig"}, datatype=bool
+    )
+    dsconfig_RW = AttributeWrapper(
+        comms_annotation={"container": "dsconfig"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    grafana_R = AttributeWrapper(
+        comms_annotation={"container": "grafana"}, datatype=bool
+    )
+    grafana_RW = AttributeWrapper(
+        comms_annotation={"container": "grafana"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    logstash_R = AttributeWrapper(
+        comms_annotation={"container": "logstash"}, datatype=bool
+    )
+    logstash_RW = AttributeWrapper(
+        comms_annotation={"container": "logstash"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
     loki_R = AttributeWrapper(comms_annotation={"container": "loki"}, datatype=bool)
-    loki_RW = AttributeWrapper(comms_annotation={"container": "loki"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    hdbppts_cm_R = AttributeWrapper(comms_annotation={"container": "hdbppts-cm"}, datatype=bool)
-    hdbppts_cm_RW = AttributeWrapper(comms_annotation={"container": "hdbppts-cm"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    hdbppts_es_R = AttributeWrapper(comms_annotation={"container": "hdbppts-es"}, datatype=bool)
-    hdbppts_es_RW = AttributeWrapper(comms_annotation={"container": "hdbppts-es"}, datatype=bool, access=AttrWriteType.READ_WRITE)
+    loki_RW = AttributeWrapper(
+        comms_annotation={"container": "loki"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    hdbppts_cm_R = AttributeWrapper(
+        comms_annotation={"container": "hdbppts-cm"}, datatype=bool
+    )
+    hdbppts_cm_RW = AttributeWrapper(
+        comms_annotation={"container": "hdbppts-cm"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    hdbppts_es_R = AttributeWrapper(
+        comms_annotation={"container": "hdbppts-es"}, datatype=bool
+    )
+    hdbppts_es_RW = AttributeWrapper(
+        comms_annotation={"container": "hdbppts-es"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
     itango_R = AttributeWrapper(comms_annotation={"container": "itango"}, datatype=bool)
-    itango_RW = AttributeWrapper(comms_annotation={"container": "itango"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    jupyter_lab_R = AttributeWrapper(comms_annotation={"container": "jupyter-lab"}, datatype=bool)
-    jupyter_lab_RW = AttributeWrapper(comms_annotation={"container": "jupyter-lab"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    tangodb_R = AttributeWrapper(comms_annotation={"container": "tangodb"}, datatype=bool)
-    tangodb_RW = AttributeWrapper(comms_annotation={"container": "tangodb"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    prometheus_R = AttributeWrapper(comms_annotation={"container": "prometheus"}, datatype=bool)
-    prometheus_RW = AttributeWrapper(comms_annotation={"container": "prometheus"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    tango_prometheus_exporter_R = AttributeWrapper(comms_annotation={"container": "tango-prometheus-exporter"}, datatype=bool)
-    tango_prometheus_exporter_RW = AttributeWrapper(comms_annotation={"container": "tango-prometheus-exporter"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    prometheus_node_exporter_R = AttributeWrapper(comms_annotation={"container": "prometheus-node-exporter"}, datatype=bool)
-    prometheus_node_exporter_RW = AttributeWrapper(comms_annotation={"container": "prometheus-node-exporter"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    tango_rest_R = AttributeWrapper(comms_annotation={"container": "tango-rest"}, datatype=bool)
-    tango_rest_RW = AttributeWrapper(comms_annotation={"container": "tango-rest"}, datatype=bool, access=AttrWriteType.READ_WRITE)
+    itango_RW = AttributeWrapper(
+        comms_annotation={"container": "itango"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    jupyter_lab_R = AttributeWrapper(
+        comms_annotation={"container": "jupyter-lab"}, datatype=bool
+    )
+    jupyter_lab_RW = AttributeWrapper(
+        comms_annotation={"container": "jupyter-lab"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    tangodb_R = AttributeWrapper(
+        comms_annotation={"container": "tangodb"}, datatype=bool
+    )
+    tangodb_RW = AttributeWrapper(
+        comms_annotation={"container": "tangodb"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    prometheus_R = AttributeWrapper(
+        comms_annotation={"container": "prometheus"}, datatype=bool
+    )
+    prometheus_RW = AttributeWrapper(
+        comms_annotation={"container": "prometheus"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    tango_prometheus_exporter_R = AttributeWrapper(
+        comms_annotation={"container": "tango-prometheus-exporter"}, datatype=bool
+    )
+    tango_prometheus_exporter_RW = AttributeWrapper(
+        comms_annotation={"container": "tango-prometheus-exporter"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    prometheus_node_exporter_R = AttributeWrapper(
+        comms_annotation={"container": "prometheus-node-exporter"}, datatype=bool
+    )
+    prometheus_node_exporter_RW = AttributeWrapper(
+        comms_annotation={"container": "prometheus-node-exporter"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    tango_rest_R = AttributeWrapper(
+        comms_annotation={"container": "tango-rest"}, datatype=bool
+    )
+    tango_rest_RW = AttributeWrapper(
+        comms_annotation={"container": "tango-rest"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
 
     # --------
     # overloaded functions
     # --------
     @log_exceptions()
     def configure_for_off(self):
-        """ user code here. is called when the state is set to OFF """
+        """user code here. is called when the state is set to OFF"""
         # Stop keep-alive
         try:
             self.docker_client.sync_stop()
         except Exception as e:
-            logger.warning("Exception while stopping docker client in configure_for_off function: {}. Exception ignored".format(e))
+            logger.warning(
+                "Exception while stopping docker client in configure_for_off function: {}. Exception ignored".format(
+                    e
+                )
+            )
 
     @log_exceptions()
     def configure_for_initialise(self):
-        """ user code here. is called when the state is set to INIT """
+        """user code here. is called when the state is set to INIT"""
 
         # set up the Docker client
         self.docker_client = DockerClient(self.Docker_Base_URL, self.Fault)
 
         # schedule the docker initialisation, and wait for it to finish
-        future = asyncio.run_coroutine_threadsafe(self._connect_docker(), self.docker_client.event_loop)
+        future = asyncio.run_coroutine_threadsafe(
+            self._connect_docker(), self.docker_client.event_loop
+        )
         _ = future.result()
 
     async def _connect_docker(self):
diff --git a/tangostationcontrol/tangostationcontrol/devices/lofar_device.py b/tangostationcontrol/tangostationcontrol/devices/lofar_device.py
index def3d167b1016680b4e36051377cf04977df0b53..e88c8659b7dd18ee2785fdaef6fd1827cf611bf2 100644
--- a/tangostationcontrol/tangostationcontrol/devices/lofar_device.py
+++ b/tangostationcontrol/tangostationcontrol/devices/lofar_device.py
@@ -1,28 +1,28 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the XXX project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """Hardware Device Server for LOFAR2.0
 
 """
-import time
 import math
+import textwrap
+import time
 from typing import List
 
 import numpy
-import textwrap
+from tango import (
+    AttrWriteType,
+    DevState,
+    DebugIt,
+    Attribute,
+    DeviceProxy,
+    AttrDataFormat,
+    DevSource,
+    DevDouble,
+)
 
 # PyTango imports
 from tango.server import attribute, command, Device, DeviceMeta
-from tango import AttrWriteType, DevState, DebugIt, Attribute, DeviceProxy, AttrDataFormat, DevSource, DevDouble
-
-# Additional import
-from tangostationcontrol import __version__ as version
 from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
 from tangostationcontrol.common.lofar_logging import log_exceptions
 from tangostationcontrol.common.states import DEFAULT_COMMAND_STATES, INITIALISED_STATES
@@ -30,10 +30,13 @@ from tangostationcontrol.common.type_checking import sequence_not_str
 from tangostationcontrol.devices.device_decorators import only_in_states, fault_on_error
 from tangostationcontrol.toolkit.archiver import Archiver
 
+# Additional import
+from tangostationcontrol import __version__ as version
 
 __all__ = ["LOFARDevice"]
 
 import logging
+
 logger = logging.getLogger()
 
 
@@ -72,7 +75,9 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
     # Attributes
     # ----------
 
-    version_R = attribute(dtype=str, access=AttrWriteType.READ, fget=lambda self: version)
+    version_R = attribute(
+        dtype=str, access=AttrWriteType.READ, fget=lambda self: version
+    )
 
     # list of translator property names to be set by set_translator_defaults
     TRANSLATOR_DEFAULT_SETTINGS = []
@@ -82,23 +87,27 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
 
     @classmethod
     def attr_list(cls):
-        """ Return a list of all the AttributeWrapper members of this class. """
+        """Return a list of all the AttributeWrapper members of this class."""
         return [v for k, v in cls.__dict__.items() if type(v) == AttributeWrapper]
 
     def setup_attribute_wrapper(self):
-        """ prepare the caches for attribute wrapper objects"""
+        """prepare the caches for attribute wrapper objects"""
 
         self._attribute_wrapper_io = {}
 
     def is_attribute_access_allowed(self, req_type):
-        """ Returns whether an attribute wrapped by the AttributeWrapper be accessed. """
+        """Returns whether an attribute wrapped by the AttributeWrapper be accessed."""
 
         return self.get_state() in INITIALISED_STATES
 
     # TODO(Corne): Actually implement locking in L2SS-940
     def atomic_read_modify_write_attribute(
-        self, values: numpy.ndarray, proxy: DeviceProxy, attribute: str,
-        mask_or_sparse=None, cast_type=None
+        self,
+        values: numpy.ndarray,
+        proxy: DeviceProxy,
+        attribute: str,
+        mask_or_sparse=None,
+        cast_type=None,
     ):
         """Automatically read-modify-write the attribute on the given proxy
 
@@ -120,8 +129,11 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
         # proxy.unlock()
 
     def merge_write(
-        self, merge_values: numpy.ndarray, current_values: List[any],
-        mask_or_sparse=None, cast_type=None
+        self,
+        merge_values: numpy.ndarray,
+        current_values: List[any],
+        mask_or_sparse=None,
+        cast_type=None,
     ) -> numpy.ndarray:
         """Merge values with current_values retrieved from attribute by mask / sparse
 
@@ -141,23 +153,19 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
         merge_values = merge_values.copy()
 
         if mask_or_sparse is not None and sequence_not_str(mask_or_sparse):
-            self._merge_write_mask(
-                merge_values, current_values, mask_or_sparse
-            )
+            self._merge_write_mask(merge_values, current_values, mask_or_sparse)
             return merge_values
         else:
             if cast_type is None:
-                raise AttributeError(
-                    "dtype can not be None for sparse merge_write"
-                )
+                raise AttributeError("dtype can not be None for sparse merge_write")
 
-            self._merge_write_delimiter(
-                merge_values, current_values, mask_or_sparse
-            )
+            self._merge_write_delimiter(merge_values, current_values, mask_or_sparse)
             return merge_values.astype(dtype=cast_type)
 
     def _merge_write_delimiter(
-        self, merge_values: numpy.ndarray, current_values: List[any],
+        self,
+        merge_values: numpy.ndarray,
+        current_values: List[any],
         sparse=None,
     ):
         """Merge merge_values and current_values by replacing elements by sparse
@@ -178,8 +186,7 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
                 merge_values[idx] = current_values[idx]
 
     def _merge_write_mask(
-        self, merge_values: List[any], current_values: List[any],
-        mask: List[any]
+        self, merge_values: List[any], current_values: List[any], mask: List[any]
     ):
         """Merge merge_values and current_values by replacing elements by mask
 
@@ -200,7 +207,7 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
 
     @log_exceptions()
     def init_device(self):
-        """ Instantiates the device in the OFF state. """
+        """Instantiates the device in the OFF state."""
 
         # NOTE: Will delete_device first, if necessary
         Device.init_device(self)
@@ -249,7 +256,6 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
         :return:None
         """
 
-
         self.set_state(DevState.INIT)
         self.set_status("Device is in the INIT state.")
 
@@ -317,7 +323,6 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
         self.set_state(DevState.OFF)
         self.set_status("Device is in the OFF state.")
 
-
     @command()
     @DebugIt()
     @log_exceptions()
@@ -332,12 +337,16 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
         """
         if self.get_state() == DevState.OFF:
             # Spurious FAULT
-            logger.warning("Requested to go to FAULT state, but am already in OFF state.")
+            logger.warning(
+                "Requested to go to FAULT state, but am already in OFF state."
+            )
             return
 
         if self.get_state() == DevState.FAULT:
             # Already faulting. Don't complain.
-            logger.warning("Requested to go to FAULT state, but am already in FAULT state.")
+            logger.warning(
+                "Requested to go to FAULT state, but am already in FAULT state."
+            )
             return
 
         self.set_state(DevState.FAULT)
@@ -367,12 +376,12 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
         pass
 
     def _set_defaults(self, attribute_names: list):
-        """ Set hardware points to their default value.
+        """Set hardware points to their default value.
 
-            attribute_names: The names of the attributes to set to their default value.
+        attribute_names: The names of the attributes to set to their default value.
 
-            A hardware point XXX is set to the value of the object member named XXX_default, if it exists.
-            XXX_default can be f.e. a constant, or a device_property.
+        A hardware point XXX is set to the value of the object member named XXX_default, if it exists.
+        XXX_default can be e.g. a constant, or a device_property.
         """
 
         # set them all
@@ -387,10 +396,16 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
 
                 if max_dim_y > 1:
                     # 2D array -> reshape 1D default
-                    default_value = numpy.array(default_value).reshape(max_dim_y, max_dim_x)
+                    default_value = numpy.array(default_value).reshape(
+                        max_dim_y, max_dim_x
+                    )
 
                 # set the attribute to the configured default. Shorten after 150 characters
-                logger.debug(textwrap.shorten(f"Setting attribute {name} to {default_value}", 150))
+                logger.debug(
+                    textwrap.shorten(
+                        f"Setting attribute {name} to {default_value}", 150
+                    )
+                )
                 self.proxy.write_attribute(name, default_value)
             except Exception as e:
                 # log which attribute we're addressing
@@ -404,37 +419,43 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
     @DebugIt()
     @log_exceptions()
     def set_defaults(self):
-        """ Set hardware points to their default value.
+        """Set hardware points to their default value.
 
-            A hardware point XXX is set to the value of the object member named XXX_default, if it exists.
-            XXX_default can be f.e. a constant, or a device_property.
+        A hardware point XXX is set to the value of the object member named XXX_default, if it exists.
+        XXX_default can be e.g. a constant, or a device_property.
 
-            The points are set in the following order:
-                1) The python class property 'FIRST_DEFAULT_SETTINGS' is read, as an array of strings denoting property names. Each property
-                   is set in that order.
-                2) Any remaining default properties are set, except the translators (those in 'TRANSLATOR_DEFAULT_SETTINGS').
+        The points are set in the following order:
+            1) The python class property 'FIRST_DEFAULT_SETTINGS' is read, as an array of strings denoting property names. Each property
+               is set in that order.
+            2) Any remaining default properties are set, except the translators (those in 'TRANSLATOR_DEFAULT_SETTINGS').
         """
 
         # collect all attributes for which defaults are provided
-        attributes_with_defaults = [name for name in dir(self)
-                                    # collect all attribute members
-                                    if isinstance(getattr(self, name), Attribute)
-                                    # with a default set
-                                    and hasattr(self, f"{name}_default")
-                                    and name not in self.TRANSLATOR_DEFAULT_SETTINGS]
+        attributes_with_defaults = [
+            name
+            for name in dir(self)
+            # collect all attribute members
+            if isinstance(getattr(self, name), Attribute)
+            # with a default set
+            and hasattr(self, f"{name}_default")
+            and name not in self.TRANSLATOR_DEFAULT_SETTINGS
+        ]
 
         # determine the order: first do the ones mentioned in default_settings_order
-        attributes_to_set = self.FIRST_DEFAULT_SETTINGS + [name for name in attributes_with_defaults if name not in self.FIRST_DEFAULT_SETTINGS]
+        attributes_to_set = self.FIRST_DEFAULT_SETTINGS + [
+            name
+            for name in attributes_with_defaults
+            if name not in self.FIRST_DEFAULT_SETTINGS
+        ]
 
         # set them
         self._set_defaults(attributes_to_set)
 
-
     @only_in_states(DEFAULT_COMMAND_STATES)
     @fault_on_error()
     @command()
     def set_translator_defaults(self):
-        """ Initialise the translator translators to their configured settings. """
+        """Initialise the translator translators to their configured settings."""
 
         # This is just the command version of _set_translator_defaults().
         self._set_translator_defaults()
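
A minimal sketch of the ordering logic that set_defaults implements above, using made-up attribute names: entries from FIRST_DEFAULT_SETTINGS go first, the remaining attributes with defaults follow, and translator settings are excluded.

# illustrative names only; the real lists are class members of the devices
FIRST_DEFAULT_SETTINGS = ["clock_RW"]
TRANSLATOR_DEFAULT_SETTINGS = ["ANT_mask_RW"]
attributes_with_defaults = ["subband_select_RW", "clock_RW", "ANT_mask_RW"]

# drop the translators, mirroring the list comprehension above
candidates = [
    name
    for name in attributes_with_defaults
    if name not in TRANSLATOR_DEFAULT_SETTINGS
]

# FIRST_DEFAULT_SETTINGS first, then whatever remains
attributes_to_set = FIRST_DEFAULT_SETTINGS + [
    name for name in candidates if name not in FIRST_DEFAULT_SETTINGS
]
print(attributes_to_set)  # ['clock_RW', 'subband_select_RW']
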
@@ -444,7 +465,7 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
     @command()
     @DebugIt()
     def prepare_hardware(self):
-        """ Load firmware required before configuring anything. """
+        """Load firmware required before configuring anything."""
 
         # This is just the command version of _prepare_hardware().
         self._prepare_hardware()
@@ -454,11 +475,13 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
     @command()
     @DebugIt()
     def disable_hardware(self):
-        """ Disable the hardware related to the device. """
+        """Disable the hardware related to the device."""
 
         if self.get_state() == DevState.DISABLE:
             # Already disabled.
-            logger.warning("Requested to go to DISABLE state, but am already in DISABLE state.")
+            logger.warning(
+                "Requested to go to DISABLE state, but am already in DISABLE state."
+            )
             return
 
         self._disable_hardware()
@@ -468,9 +491,9 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
         self.set_status("Device is in the DISABLE state.")
 
     @only_in_states(DEFAULT_COMMAND_STATES)
-    @command(dtype_out = DevDouble)
+    @command(dtype_out=DevDouble)
     def max_archiving_load(self):
-        """ Return the maximum archiving load for the device attributes """
+        """Return the maximum archiving load for the device attributes"""
         archiver = Archiver()
         return archiver.get_maximum_device_load(self.get_name())
 
@@ -501,20 +524,20 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
         self._boot(initialise_hardware=False)
 
     def _set_translator_defaults(self):
-        """ Initialise any translators to their default settings. """
+        """Initialise any translators to their default settings."""
 
         self._set_defaults(self.TRANSLATOR_DEFAULT_SETTINGS)
 
     def _prepare_hardware(self):
-        """ Override this method to load any firmware or power cycle components before configuring the hardware. """
+        """Override this method to load any firmware or power cycle components before configuring the hardware."""
         pass
 
     def _disable_hardware(self):
-        """ Override this method to disable any hardware related to the device. """
+        """Override this method to disable any hardware related to the device."""
         pass
 
     def read_attribute(self, attr_name):
-        """ Read the value of a certain attribute (directly from the hardware). """
+        """Read the value of a certain attribute (directly from the hardware)."""
 
         # obtain the class information of this attribute, effectively equal
         # to getattr(self, attr_name), but this also makes sure we actually
@@ -528,14 +551,14 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
         return read_wrapper(class_attribute)
 
     def wait_attribute(self, attr_name, value, timeout=10, pollperiod=0.2):
-        """ Wait until the given attribute obtains the given value.
+        """Wait until the given attribute obtains the given value.
 
-            Raises an Exception if it has not after the timeout.
+        Raises an Exception if it has not done so within the timeout.
 
-            value:       The value that needs to be matched, or a function
-                         that needs to evaluate to True given the attribute.
-            timeout:     time until an Exception is raised, in seconds.
-            pollperiod:  how often to check the attribute, in seconds.
+        value:       The value that needs to be matched, or a function
+                     that needs to evaluate to True given the attribute.
+        timeout:     time until an Exception is raised, in seconds.
+        pollperiod:  how often to check the attribute, in seconds.
         """
         if type(value) == type(lambda x: True):
             # evaluate function
@@ -545,7 +568,7 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
             is_correct = lambda x: x == value
 
         # Poll every half a second
-        for _ in range(math.ceil(timeout/pollperiod)):
+        for _ in range(math.ceil(timeout / pollperiod)):
             if is_correct(self.read_attribute(attr_name)):
                 return
 
@@ -554,9 +577,9 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
         raise Exception(f"{attr_name} != {value} after {timeout} seconds still.")
 
     def alarm_val(self, attr_name):
-        """ Returns whether min_alarm < attr_value < max_alarm for the given attribute,
-            if these values are set. For arrays, an array of booleans of the same shape
-            is returned. """
+        """Returns whether min_alarm < attr_value < max_alarm for the given attribute,
+        if these values are set. For arrays, an array of booleans of the same shape
+        is returned."""
 
         # fetch attribute configuration
         attr_config = self.proxy.get_attribute_config(attr_name)
@@ -566,15 +589,15 @@ class LOFARDevice(Device, metaclass=DeviceMeta):
         # fetch attribute value as an array
         value = self.read_attribute(attr_name)
         if is_scalar:
-            value = numpy.array(value) # this stays a scalar in numpy
+            value = numpy.array(value)  # this stays a scalar in numpy
 
         # construct alarm state, in the same shape as the attribute
         alarm_state = numpy.zeros(value.shape, dtype=bool)
 
-        if alarms.max_alarm != 'Not specified':
+        if alarms.max_alarm != "Not specified":
             alarm_state |= value >= value.dtype.type(alarms.max_alarm)
 
-        if alarms.min_alarm != 'Not specified':
+        if alarms.min_alarm != "Not specified":
             alarm_state |= value <= value.dtype.type(alarms.min_alarm)
 
         # return alarm state, with the same dimensions as the attribute
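
For reference, a numpy-only sketch of the merge-by-mask behaviour that merge_write offers above. The body of _merge_write_mask is not part of this diff, so this shows the idea (the mask selects which elements take the new value) rather than the exact implementation:

import numpy

current_values = numpy.array([1, 2, 3, 4])
merge_values = numpy.array([10, 20, 30, 40])
mask = numpy.array([True, False, True, False])

# keep the current value wherever the mask is False
merged = numpy.where(mask, merge_values, current_values)
print(merged)  # [10  2 30  4]
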
diff --git a/tangostationcontrol/tangostationcontrol/devices/mibs/__init__.py b/tangostationcontrol/tangostationcontrol/devices/mibs/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/devices/mibs/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/devices/mibs/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/devices/observation.py b/tangostationcontrol/tangostationcontrol/devices/observation.py
index ab2145aba132146832baf4fd609f4251db839c23..0a8b005fb7f0899ae402647d566c470e047e474b 100644
--- a/tangostationcontrol/tangostationcontrol/devices/observation.py
+++ b/tangostationcontrol/tangostationcontrol/devices/observation.py
@@ -1,9 +1,6 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR2.0 Station Control project.
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import logging
 from time import time
 from typing import Optional
@@ -14,8 +11,12 @@ from jsonschema.exceptions import ValidationError
 # PyTango imports
 from tango import AttrWriteType, DeviceProxy, DevState, DevSource, Util, Except
 from tango.server import attribute
-
-from tangostationcontrol.common.constants import DEFAULT_POLLING_PERIOD, MAX_ANTENNA, N_beamlets_ctrl, N_point_prop
+from tangostationcontrol.common.constants import (
+    DEFAULT_POLLING_PERIOD,
+    MAX_ANTENNA,
+    N_beamlets_ctrl,
+    N_point_prop,
+)
 from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.common.lofar_logging import device_logging_to_python
 from tangostationcontrol.common.lofar_logging import log_exceptions
@@ -32,7 +33,7 @@ __all__ = ["Observation", "main"]
 
 @device_logging_to_python()
 class Observation(LOFARDevice):
-    """ Observation Device for LOFAR2.0
+    """Observation Device for LOFAR2.0
     This Tango device is responsible for the set-up of hardware for a
     specific observation.  It will, if necessary keep tabs on HW MPs to signal
     issues that are not caught by MPs being outside their nominal range.
@@ -41,18 +42,34 @@ class Observation(LOFARDevice):
     """
 
     # Attributes
-    observation_running_R = attribute(dtype=numpy.float64, access=AttrWriteType.READ,
-                                      polling_period=DEFAULT_POLLING_PERIOD, period=DEFAULT_POLLING_PERIOD,
-                                      rel_change="1.0")
+    observation_running_R = attribute(
+        dtype=numpy.float64,
+        access=AttrWriteType.READ,
+        polling_period=DEFAULT_POLLING_PERIOD,
+        period=DEFAULT_POLLING_PERIOD,
+        rel_change="1.0",
+    )
     observation_id_R = attribute(dtype=numpy.int64, access=AttrWriteType.READ)
     stop_time_R = attribute(dtype=numpy.float64, access=AttrWriteType.READ)
-    antenna_mask_R = attribute(dtype=(numpy.int64,), max_dim_x=MAX_ANTENNA, access=AttrWriteType.READ)
+    antenna_mask_R = attribute(
+        dtype=(numpy.int64,), max_dim_x=MAX_ANTENNA, access=AttrWriteType.READ
+    )
     filter_R = attribute(dtype=numpy.str, access=AttrWriteType.READ)
-    saps_subband_R = attribute(dtype=((numpy.uint32,),), max_dim_x=N_beamlets_ctrl, max_dim_y=N_beamlets_ctrl,
-                               access=AttrWriteType.READ)
-    saps_pointing_R = attribute(dtype=((numpy.str,),), max_dim_x=N_point_prop, max_dim_y=N_beamlets_ctrl,
-                                access=AttrWriteType.READ)
-    tile_beam_R = attribute(dtype=(numpy.str,), max_dim_x=N_point_prop, access=AttrWriteType.READ)
+    saps_subband_R = attribute(
+        dtype=((numpy.uint32,),),
+        max_dim_x=N_beamlets_ctrl,
+        max_dim_y=N_beamlets_ctrl,
+        access=AttrWriteType.READ,
+    )
+    saps_pointing_R = attribute(
+        dtype=((numpy.str,),),
+        max_dim_x=N_point_prop,
+        max_dim_y=N_beamlets_ctrl,
+        access=AttrWriteType.READ,
+    )
+    tile_beam_R = attribute(
+        dtype=(numpy.str,), max_dim_x=N_point_prop, access=AttrWriteType.READ
+    )
     first_beamlet_R = attribute(dtype=numpy.int64, access=AttrWriteType.READ)
 
     observation_settings_RW = attribute(dtype=str, access=AttrWriteType.READ_WRITE)
@@ -77,7 +94,11 @@ class Observation(LOFARDevice):
         super().configure_for_initialise()
 
         if self._observation_settings is None:
-            Except.throw_exception("IllegalCommand", "Device can not be initialized without configuration", __name__)
+            Except.throw_exception(
+                "IllegalCommand",
+                "Device can not be initialized without configuration",
+                __name__,
+            )
 
         # ObservationControl takes already good care of checking that the
         # parameters are in order and sufficient.  It is therefore unnecessary
@@ -91,7 +112,9 @@ class Observation(LOFARDevice):
         util = Util.instance()
         # TODO(Stefano): set a proper policy for the devices instance number
         # It cannot be inherited from the Observation instance number (i.e. Observation_id)
-        self.antennafield_proxy = DeviceProxy(f"{util.get_ds_inst_name()}/AntennaField/1")
+        self.antennafield_proxy = DeviceProxy(
+            f"{util.get_ds_inst_name()}/AntennaField/1"
+        )
         self.antennafield_proxy.set_source(DevSource.DEV)
 
         # Set a reference of RECV device that is correlated to this device
@@ -116,15 +139,18 @@ class Observation(LOFARDevice):
         logger.info(
             f"The observation with ID={self._observation_settings.observation_id} is "
             "configured. It will begin as soon as On() is called and it is"
-            f"supposed to stop at {self._observation_settings.stop_time}")
+            f"supposed to stop at {self._observation_settings.stop_time}"
+        )
 
     def configure_for_off(self):
         """Indicate the observation has stopped"""
 
         super().configure_for_off()
 
-        logger.info(f"Stopped the observation with ID="
-                    f"{self._observation_settings.observation_id if self._observation_settings else None}.")
+        logger.info(
+            f"Stopped the observation with ID="
+            f"{self._observation_settings.observation_id if self._observation_settings else None}."
+        )
 
     def configure_for_on(self):
         """Indicate the observation has started"""
@@ -132,21 +158,33 @@ class Observation(LOFARDevice):
         super().configure_for_on()
 
         # Apply Antenna Mask and Filter
-        ant_mask, rcu_band_select = self._apply_antennafield_settings(self.read_antenna_mask_R(), self.read_filter_R())
+        ant_mask, rcu_band_select = self._apply_antennafield_settings(
+            self.read_antenna_mask_R(), self.read_filter_R()
+        )
         self.antennafield_proxy.ANT_mask_RW = ant_mask
         self.antennafield_proxy.RCU_band_select_RW = rcu_band_select
 
         # Apply Beamlet configuration
-        self.beamlet_proxy.subband_select_RW = self._apply_saps_subbands(self.read_saps_subband_R())
-        self.digitalbeam_proxy.Pointing_direction_RW = self._apply_saps_pointing(self.read_saps_pointing_R())
-        self.digitalbeam_proxy.antenna_select_RW = self._apply_saps_antenna_select(self.read_antenna_mask_R())
+        self.beamlet_proxy.subband_select_RW = self._apply_saps_subbands(
+            self.read_saps_subband_R()
+        )
+        self.digitalbeam_proxy.Pointing_direction_RW = self._apply_saps_pointing(
+            self.read_saps_pointing_R()
+        )
+        self.digitalbeam_proxy.antenna_select_RW = self._apply_saps_antenna_select(
+            self.read_antenna_mask_R()
+        )
 
         # Apply Tile Beam pointing direction
         tile_beam = self.read_tile_beam_R()
         if tile_beam is not None:
-            self.tilebeam_proxy.Pointing_direction_RW = [tuple(tile_beam)] * self.antennafield_proxy.nr_antennas_R
+            self.tilebeam_proxy.Pointing_direction_RW = [
+                tuple(tile_beam)
+            ] * self.antennafield_proxy.nr_antennas_R
 
-        logger.info(f"Started the observation with ID={self._observation_settings.observation_id}.")
+        logger.info(
+            f"Started the observation with ID={self._observation_settings.observation_id}."
+        )
 
     @only_when_on()
     @fault_on_error()
@@ -198,8 +236,11 @@ class Observation(LOFARDevice):
         if self._observation_settings.tile_beam is None:
             return None
         pointing_direction = self._observation_settings.tile_beam
-        return [str(pointing_direction.direction_type), f"{pointing_direction.angle1}deg",
-                f"{pointing_direction.angle2}deg"]
+        return [
+            str(pointing_direction.direction_type),
+            f"{pointing_direction.angle1}deg",
+            f"{pointing_direction.angle2}deg",
+        ]
 
     @only_in_states([DevState.STANDBY, DevState.ON])
     @fault_on_error()
@@ -212,7 +253,11 @@ class Observation(LOFARDevice):
     @log_exceptions()
     def read_observation_settings_RW(self):
         """Return current observation_parameters string"""
-        return None if self._observation_settings is None else self._observation_settings.to_json()
+        return (
+            None
+            if self._observation_settings is None
+            else self._observation_settings.to_json()
+        )
 
     @only_in_states([DevState.OFF])
     @fault_on_error()
@@ -223,7 +268,7 @@ class Observation(LOFARDevice):
             self._observation_settings = ObservationSettings.from_json(parameters)
         except ValidationError as e:
             self._observation_settings = None
-            #Except.throw_exception("IllegalCommand", e.message, __name__)
+            # Except.throw_exception("IllegalCommand", e.message, __name__)
 
     @only_when_on()
     @fault_on_error()
@@ -235,19 +280,24 @@ class Observation(LOFARDevice):
         return time()
 
     def _build_saps_pointing(self, parameters: ObservationSettings):
-        """ Build the sap pointing array preserving the correct order from JSON """
+        """Build the sap pointing array preserving the correct order from JSON"""
         saps_pointing = []
         for i in range(0, self._num_saps):
             pointing_direction = parameters.SAPs[i].pointing
-            saps_pointing.insert(i, (
-                pointing_direction.direction_type, f"{pointing_direction.angle1}deg",
-                f"{pointing_direction.angle2}deg"))
+            saps_pointing.insert(
+                i,
+                (
+                    pointing_direction.direction_type,
+                    f"{pointing_direction.angle1}deg",
+                    f"{pointing_direction.angle2}deg",
+                ),
+            )
         return saps_pointing
 
     def _apply_antennafield_settings(self, antenna_mask: list, filter_name: str):
-        """ Convert an array of antenna indexes into a boolean mask array and
-            retrieve the RCU band from filter name, returning the correct format for 
-            AntennaField device
+        """Convert an array of antenna indexes into a boolean mask array and
+        retrieve the RCU band from filter name, returning the correct format for
+        the AntennaField device
         """
         ANT_mask_RW = [False] * MAX_ANTENNA
         RCU_band_select_RW = [0] * MAX_ANTENNA
@@ -258,25 +308,26 @@ class Observation(LOFARDevice):
         return numpy.array(ANT_mask_RW), numpy.array(RCU_band_select_RW)
 
     def _apply_saps_subbands(self, sap_subbands: list):
-        """ Convert an array of subbands into the correct format for Beamlet device"""
+        """Convert an array of subbands into the correct format for Beamlet device"""
         subband_select = self.beamlet_proxy.subband_select_RW
         first_beamlet = numpy.array(self.read_first_beamlet_R(), dtype=numpy.int64)
         # Insert subband values starting from the first beamlet
         sap_subbands = numpy.array(sap_subbands).flatten()
-        subband_select[first_beamlet:len(sap_subbands)] = sap_subbands
+        subband_select[first_beamlet : len(sap_subbands)] = sap_subbands
         return subband_select
 
     def _apply_saps_pointing(self, sap_pointing: list):
-        """ Convert an array of string directions into the correct format for DigitalBeam device"""
+        """Convert an array of string directions into the correct format for DigitalBeam device"""
         pointing_direction = list(
-            self.digitalbeam_proxy.Pointing_direction_RW)  # convert to list to allows item assignment
+            self.digitalbeam_proxy.Pointing_direction_RW
+        )  # convert to list to allow item assignment
         first_beamlet = numpy.array(self.read_first_beamlet_R(), dtype=numpy.int64)
         # Insert pointing values starting from the first beamlet
-        pointing_direction[first_beamlet:len(sap_pointing)] = sap_pointing
+        pointing_direction[first_beamlet : len(sap_pointing)] = sap_pointing
         return tuple(pointing_direction)
 
     def _apply_saps_antenna_select(self, antenna_mask: list):
-        """ Convert an array of antenna indexes into a boolean select array"""
+        """Convert an array of antenna indexes into a boolean select array"""
         antenna_select = numpy.array([[False] * N_beamlets_ctrl] * self._num_inputs)
         first_beamlet = numpy.array(self.read_first_beamlet_R(), dtype=numpy.int64)
         for a in antenna_mask:
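
A standalone sketch of the conversion done by _apply_antennafield_settings above: a list of antenna indices becomes a boolean mask, and every selected antenna gets the RCU band belonging to the filter. The loop body is not visible in this hunk, so the implementation below is inferred from the surrounding code; MAX_ANTENNA is set to an illustrative value and only a subset of the filter-to-band mapping (as defined on the RECV device) is used.

import numpy

MAX_ANTENNA = 96  # illustrative; the real value comes from common.constants
BAND_PER_FILTER = {"HBA_110_190": 2}  # subset of the RECV band mapping


def apply_antennafield_settings(antenna_mask, filter_name):
    ant_mask_rw = [False] * MAX_ANTENNA
    rcu_band_select_rw = [0] * MAX_ANTENNA
    band = BAND_PER_FILTER[filter_name]
    for antenna in antenna_mask:
        ant_mask_rw[antenna] = True
        rcu_band_select_rw[antenna] = band
    return numpy.array(ant_mask_rw), numpy.array(rcu_band_select_rw)


mask, bands = apply_antennafield_settings([0, 3, 5], "HBA_110_190")
print(mask[:6], bands[:6])  # antennas 0, 3 and 5 selected, band 2
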
diff --git a/tangostationcontrol/tangostationcontrol/devices/observation_control.py b/tangostationcontrol/tangostationcontrol/devices/observation_control.py
index d241dcbe634c233995ea8d6cf7613ac1bb406479..97f26d1c2aa4c6afdaee882d39789efdbb570e06 100644
--- a/tangostationcontrol/tangostationcontrol/devices/observation_control.py
+++ b/tangostationcontrol/tangostationcontrol/devices/observation_control.py
@@ -1,19 +1,17 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR2.0 Station Control project.
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import logging
 
 import numpy
 from tango import Except, DevState, AttrWriteType, DebugIt, Util, DevBoolean, DevString
 from tango.server import Device, command, attribute
-
-from tangostationcontrol.common.entrypoint import entry
-from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
 from tangostationcontrol.common import ObservationController
+from tangostationcontrol.common.entrypoint import entry
+from tangostationcontrol.common.lofar_logging import (
+    device_logging_to_python,
+    log_exceptions,
+)
 from tangostationcontrol.configuration import ObservationSettings
 from tangostationcontrol.devices.device_decorators import only_when_on, fault_on_error
 from tangostationcontrol.devices.lofar_device import LOFARDevice
@@ -26,7 +24,7 @@ __all__ = ["ObservationControl", "main"]
 
 @device_logging_to_python()
 class ObservationControl(LOFARDevice):
-    """ Observation Control Device Server for LOFAR2.0
+    """Observation Control Device Server for LOFAR2.0
     The ObservationControl Tango device controls the instantiation of a Tango Dynamic Device from the Observation class.
     ObservationControl then keeps a record of the Observation devices and if they are still alive.
 
@@ -81,9 +79,11 @@ class ObservationControl(LOFARDevice):
 
         # The top level tango domain is the left-most part of a
         # device's name.
-        self.myTangoDomain: str = self.get_name().split('/')[0]
+        self.myTangoDomain: str = self.get_name().split("/")[0]
 
-        self._observation_controller: ObservationController = ObservationController(self.myTangoDomain)
+        self._observation_controller: ObservationController = ObservationController(
+            self.myTangoDomain
+        )
 
     # Core functions
     @log_exceptions()
@@ -113,7 +113,9 @@ class ObservationControl(LOFARDevice):
     @only_when_on()
     @log_exceptions()
     def start_observation(self, parameters: DevString = None):
-        self._observation_controller.start_observation(ObservationSettings.from_json(parameters))
+        self._observation_controller.start_observation(
+            ObservationSettings.from_json(parameters)
+        )
 
     @command(dtype_in=numpy.int64)
     @only_when_on()
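
A usage sketch for the start_observation command defined above, assuming a running Tango facility. The device name and the settings file are placeholders; the JSON payload must satisfy the ObservationSettings schema.

from tango import DeviceProxy

# assumed device name for illustration; use the name registered in your Tango database
observation_control = DeviceProxy("STAT/ObservationControl/1")

# hypothetical file holding an ObservationSettings JSON document
with open("observation.json") as f:
    parameters = f.read()

# PyTango exposes Tango commands as plain methods on the proxy
observation_control.start_observation(parameters)
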
diff --git a/tangostationcontrol/tangostationcontrol/devices/opcua_device.py b/tangostationcontrol/tangostationcontrol/devices/opcua_device.py
index 23624f9078b85f1a0f464251751abb6d7044f7bb..11123502d477acd1c0a6a5182195addbd8ba4b04 100644
--- a/tangostationcontrol/tangostationcontrol/devices/opcua_device.py
+++ b/tangostationcontrol/tangostationcontrol/devices/opcua_device.py
@@ -1,27 +1,23 @@
-# -*- coding: utf-8 -*-
-#
-# This file represents a top-level device
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ Generic OPC-UA Device Server for LOFAR2.0
 
 """
 
-# PyTango imports
-from tango.server import device_property, attribute
-import numpy
 import asyncio
-# Additional import
+import logging
 
-from tangostationcontrol.common.lofar_logging import log_exceptions
+import numpy
+
+# PyTango imports
+from tango.server import device_property, attribute
 from tangostationcontrol.clients.opcua_client import OPCUAConnection
+from tangostationcontrol.common.lofar_logging import log_exceptions
 from tangostationcontrol.devices.lofar_device import LOFARDevice
 
-import logging
+# Additional import
+
 logger = logging.getLogger()
 
 __all__ = ["OPCUADevice"]
@@ -45,39 +41,31 @@ class OPCUADevice(LOFARDevice):
     # Device Properties
     # -----------------
 
-    OPC_Server_Name = device_property(
-        dtype='DevString',
-        mandatory=True
-    )
+    OPC_Server_Name = device_property(dtype="DevString", mandatory=True)
 
-    OPC_Server_Port = device_property(
-        dtype='DevULong',
-        mandatory=True
-    )
+    OPC_Server_Port = device_property(dtype="DevULong", mandatory=True)
 
-    OPC_Time_Out = device_property(
-        dtype='DevDouble',
-        mandatory=True
-    )
+    OPC_Time_Out = device_property(dtype="DevDouble", mandatory=True)
 
     OPC_namespace = device_property(
-        dtype='DevString',
-        mandatory=False,
-        default_value="http://lofar.eu"
+        dtype="DevString", mandatory=False, default_value="http://lofar.eu"
     )
 
     # Add these elements to the OPC-UA node path.
     OPC_Node_Path_Prefix = device_property(
-        dtype='DevVarStringArray',
-        mandatory=False,
-        default_value=[]
+        dtype="DevVarStringArray", mandatory=False, default_value=[]
     )
 
     # ----------
     # Attributes
     # ----------
 
-    opcua_missing_attributes_R = attribute(max_dim_x=128, dtype=(str,), fget=lambda self: numpy.array(self.opcua_missing_attributes, dtype=str), doc="OPC-UA attributes that this device requested, but which are not exposed on the server. These attributes are replaced with a no-op and thus do not function as expected.")
+    opcua_missing_attributes_R = attribute(
+        max_dim_x=128,
+        dtype=(str,),
+        fget=lambda self: numpy.array(self.opcua_missing_attributes, dtype=str),
+        doc="OPC-UA attributes that this device requested, but which are not exposed on the server. These attributes are replaced with a no-op and thus do not function as expected.",
+    )
 
     # --------
     # overloaded functions
@@ -85,16 +73,23 @@ class OPCUADevice(LOFARDevice):
 
     @log_exceptions()
     def configure_for_initialise(self):
-        """ user code here. is called when the state is set to INIT """
+        """user code here. is called when the state is set to INIT"""
 
         # set up the OPC ua client
-        self.opcua_connection = OPCUAConnection("opc.tcp://{}:{}/".format(self.OPC_Server_Name, self.OPC_Server_Port), self.OPC_namespace, self.OPC_Time_Out, self.Fault)
+        self.opcua_connection = OPCUAConnection(
+            "opc.tcp://{}:{}/".format(self.OPC_Server_Name, self.OPC_Server_Port),
+            self.OPC_namespace,
+            self.OPC_Time_Out,
+            self.Fault,
+        )
         self.opcua_connection.node_path_prefix = self.OPC_Node_Path_Prefix
 
         self.opcua_missing_attributes = []
 
         # schedule the opc-ua initialisation, and wait for it to finish
-        future = asyncio.run_coroutine_threadsafe(self._connect_opcua(), self.opcua_connection.event_loop)
+        future = asyncio.run_coroutine_threadsafe(
+            self._connect_opcua(), self.opcua_connection.event_loop
+        )
         _ = future.result()
 
     async def _connect_opcua(self):
@@ -109,15 +104,24 @@ class OPCUADevice(LOFARDevice):
             except Exception as e:
                 # use the pass function instead of setting read/write fails
                 i.set_pass_func(self)
-                self.opcua_missing_attributes.append(",".join(self.opcua_connection.get_node_path(i.comms_annotation)))
+                self.opcua_missing_attributes.append(
+                    ",".join(self.opcua_connection.get_node_path(i.comms_annotation))
+                )
 
-                logger.warning(f"Error while setting the attribute {i.comms_annotation} read/write function.", exc_info=True)
+                logger.warning(
+                    f"Error while setting the attribute {i.comms_annotation} read/write function.",
+                    exc_info=True,
+                )
 
     @log_exceptions()
     def configure_for_off(self):
-        """ user code here. is called when the state is set to OFF """
+        """user code here. is called when the state is set to OFF"""
         try:
             # disconnect
             self.opcua_connection.sync_stop()
         except Exception as e:
-            logger.warning("Exception while stopping OPC ua connection in configure_for_off function: {}. Exception ignored".format(e))
+            logger.warning(
+                "Exception while stopping OPC ua connection in configure_for_off function: {}. Exception ignored".format(
+                    e
+                )
+                )
+            )
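
A simplified, self-contained sketch of the fallback pattern _connect_opcua uses above: attributes that cannot be bound to an OPC-UA node are replaced with a no-op and recorded, so operators can inspect opcua_missing_attributes_R later. StubAttribute and the attribute names below are illustrative stand-ins, not the real AttributeWrapper API.

class StubAttribute:
    """Illustrative stand-in for an AttributeWrapper."""

    def __init__(self, annotation, exposed=True):
        self.comms_annotation = annotation
        self.exposed = exposed

    def bind(self):
        if not self.exposed:
            raise KeyError(f"node {self.comms_annotation} not found on server")

    def set_pass_func(self):
        """Replace the read/write functions with no-ops (does nothing here)."""


missing_attributes = []
for attr in [StubAttribute(["FPGA_temp_R"]), StubAttribute(["ghost_R"], exposed=False)]:
    try:
        attr.bind()
    except Exception:
        # fall back to a no-op and remember which node was missing
        attr.set_pass_func()
        missing_attributes.append(",".join(attr.comms_annotation))

print(missing_attributes)  # ['ghost_R']
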
diff --git a/tangostationcontrol/tangostationcontrol/devices/pcon.py b/tangostationcontrol/tangostationcontrol/devices/pcon.py
index fe7ceb280816db1d996e3e0c1acbeb2b4865e729..01ccbb5832a92e777fb942750628b5bb660ba51d 100644
--- a/tangostationcontrol/tangostationcontrol/devices/pcon.py
+++ b/tangostationcontrol/tangostationcontrol/devices/pcon.py
@@ -1,25 +1,22 @@
-# -*- coding: utf-8 -*-
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ PCON Device Server for LOFAR2.0
 
 """
 
-# Additional import
-from tangostationcontrol.common.entrypoint import entry
-from tangostationcontrol.common.lofar_logging import device_logging_to_python
-
 import logging
-from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
-from tangostationcontrol.devices.snmp_device import SNMPDevice
 
 import numpy
-
 from pysmi import debug
+from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
 
-debug.setLogger(debug.Debug('searcher', "compiler", "borrower", "reader"))
+# Additional import
+from tangostationcontrol.common.entrypoint import entry
+from tangostationcontrol.common.lofar_logging import device_logging_to_python
+from tangostationcontrol.devices.snmp_device import SNMPDevice
+
+debug.setLogger(debug.Debug("searcher", "compiler", "borrower", "reader"))
 
 logger = logging.getLogger()
 
@@ -32,13 +29,44 @@ class PCON(SNMPDevice):
     # ----------
     # Attributes
     # ----------
-    systemVoltage_R = AttributeWrapper(comms_annotation={"mib": "ACC-MIB", "name": "systemVoltage", "scaling_factor": 0.01}, datatype=numpy.double)
-
-    rectifierCurrent_R = AttributeWrapper(comms_annotation={"mib": "ACC-MIB", "name": "rectifierCurrent", "scaling_factor": 0.1}, datatype=numpy.double)
-    loadCurrent_R = AttributeWrapper(comms_annotation={"mib": "ACC-MIB", "name": "loadCurrent", "scaling_factor": 0.1}, datatype=numpy.double)
-    batteryCurrent_R = AttributeWrapper(comms_annotation={"mib": "ACC-MIB", "name": "batteryCurrent", "scaling_factor": 0.1}, datatype=numpy.double)
-
-    battTemperature_R = AttributeWrapper(comms_annotation={"mib": "ACC-MIB", "name": "battTemperature"}, datatype=numpy.double)
+    systemVoltage_R = AttributeWrapper(
+        comms_annotation={
+            "mib": "ACC-MIB",
+            "name": "systemVoltage",
+            "scaling_factor": 0.01,
+        },
+        datatype=numpy.double,
+    )
+
+    rectifierCurrent_R = AttributeWrapper(
+        comms_annotation={
+            "mib": "ACC-MIB",
+            "name": "rectifierCurrent",
+            "scaling_factor": 0.1,
+        },
+        datatype=numpy.double,
+    )
+    loadCurrent_R = AttributeWrapper(
+        comms_annotation={
+            "mib": "ACC-MIB",
+            "name": "loadCurrent",
+            "scaling_factor": 0.1,
+        },
+        datatype=numpy.double,
+    )
+    batteryCurrent_R = AttributeWrapper(
+        comms_annotation={
+            "mib": "ACC-MIB",
+            "name": "batteryCurrent",
+            "scaling_factor": 0.1,
+        },
+        datatype=numpy.double,
+    )
+
+    battTemperature_R = AttributeWrapper(
+        comms_annotation={"mib": "ACC-MIB", "name": "battTemperature"},
+        datatype=numpy.double,
+    )
 
 
 # ----------
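
The scaling_factor entries in the comms_annotation dicts above presumably convert the fixed-point integers reported over SNMP into engineering units; this is an assumption based on the factors used, and the raw readings below are made up for illustration.

# raw values as an SNMP agent might report them (invented for this example)
raw_system_voltage = 4815  # hundredths of a volt -> scaling_factor 0.01
raw_load_current = 123     # tenths of an ampere  -> scaling_factor 0.1

print(raw_system_voltage * 0.01)  # about 48.15 (V)
print(raw_load_current * 0.1)     # about 12.3 (A)
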
diff --git a/tangostationcontrol/tangostationcontrol/devices/psoc.py b/tangostationcontrol/tangostationcontrol/devices/psoc.py
index 425faa23a87b7ebbc209e392e77b1b10a3844ef7..1fcbe70f1e9dae7577b44a0d47e235c143e3c706 100644
--- a/tangostationcontrol/tangostationcontrol/devices/psoc.py
+++ b/tangostationcontrol/tangostationcontrol/devices/psoc.py
@@ -1,29 +1,28 @@
-# -*- coding: utf-8 -*-
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ PSOC Device Server for LOFAR2.0
 
 """
 
-# Additional import
-from tangostationcontrol.common.entrypoint import entry
-from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
+import logging
+from datetime import timedelta
 
+import numpy
+from pysmi import debug
 from tango.server import device_property, command
-
-import logging
 from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
 from tangostationcontrol.clients.snmp_client import SNMPAttribute
-from tangostationcontrol.devices.snmp_device import SNMPDevice
-
-import numpy
-from datetime import timedelta
 
-from pysmi import debug
+# Additional import
+from tangostationcontrol.common.entrypoint import entry
+from tangostationcontrol.common.lofar_logging import (
+    device_logging_to_python,
+    log_exceptions,
+)
+from tangostationcontrol.devices.snmp_device import SNMPDevice
 
-debug.setLogger(debug.Debug('searcher', "compiler", "borrower", "reader"))
+debug.setLogger(debug.Debug("searcher", "compiler", "borrower", "reader"))
 
 logger = logging.getLogger()
 
@@ -38,18 +37,32 @@ class PSOC(SNMPDevice):
     # -----------------
     # Device Properties
     # -----------------
-    PSOC_sockets = device_property(
-        dtype=[str],
-        mandatory=True
-    )
+    PSOC_sockets = device_property(dtype=[str], mandatory=True)
 
     # ----------
     # Attributes
     # ----------
-    sockets_state_R = AttributeWrapper(comms_annotation={"mib": "PowerNet-MIB", "name": "sPDUOutletCtl", "index": 1}, dims=(8,), datatype=str)
-    master_state_R = AttributeWrapper(comms_annotation={"mib": "PowerNet-MIB", "name": "sPDUMasterState"}, datatype=str)
-    current_load_R = AttributeWrapper(comms_annotation={"mib": "PowerNet-MIB", "name": "rPDULoadStatusLoad", "index": 1}, datatype=numpy.int64)
-    uptime_R = AttributeWrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "sysUpTime"}, datatype=numpy.int64)
+    sockets_state_R = AttributeWrapper(
+        comms_annotation={"mib": "PowerNet-MIB", "name": "sPDUOutletCtl", "index": 1},
+        dims=(8,),
+        datatype=str,
+    )
+    master_state_R = AttributeWrapper(
+        comms_annotation={"mib": "PowerNet-MIB", "name": "sPDUMasterState"},
+        datatype=str,
+    )
+    current_load_R = AttributeWrapper(
+        comms_annotation={
+            "mib": "PowerNet-MIB",
+            "name": "rPDULoadStatusLoad",
+            "index": 1,
+        },
+        datatype=numpy.int64,
+    )
+    uptime_R = AttributeWrapper(
+        comms_annotation={"mib": "SNMPv2-MIB", "name": "sysUpTime"},
+        datatype=numpy.int64,
+    )
 
     # --------
     # overloaded functions
@@ -57,21 +70,34 @@ class PSOC(SNMPDevice):
 
     @log_exceptions()
     def configure_for_initialise(self):
-        """ user code here. is called when the state is set to STANDBY """
+        """user code here. is called when the state is set to STANDBY"""
 
         # make sure all sockets are named
         if len(self.PSOC_sockets) != self.PSOC_SOCKETS:
             raise Exception(
-                f"At least {self.PSOC_NOF_SOCKETS} names are required to be given. You can simply leave any unused sockets as empty strings")
+                f"At least {self.PSOC_NOF_SOCKETS} names are required to be given. You can simply leave any unused sockets as empty strings"
+            )
         else:
             # create a dict with the name of the sockets being keys for the socket number (e.g: "socket_nr_1": 1)
-            self.socket_dict = {self.PSOC_sockets[f]: f + 1 for f in range(len(self.PSOC_sockets))}
-            logger.debug(f"Configured PSOC with the following socket names: {self.PSOC_sockets}")
+            self.socket_dict = {
+                self.PSOC_sockets[f]: f + 1 for f in range(len(self.PSOC_sockets))
+            }
+            logger.debug(
+                f"Configured PSOC with the following socket names: {self.PSOC_sockets}"
+            )
 
         super().configure_for_initialise()
 
         # prepares this object for the readable_uptime command
-        self.uptime_attr = SNMPAttribute(self.snmp_manager.SNMP_comm, "SNMPv2-MIB", name="sysUpTime", idx=0, dtype=numpy.int64, dim_x=1, dim_y=0)
+        self.uptime_attr = SNMPAttribute(
+            self.snmp_manager.SNMP_comm,
+            "SNMPv2-MIB",
+            name="sysUpTime",
+            idx=0,
+            dtype=numpy.int64,
+            dim_x=1,
+            dim_y=0,
+        )
 
     def _toggle_socket(self, socket_name, on: bool):
         """
@@ -81,7 +107,9 @@ class PSOC(SNMPDevice):
         try:
             socket_nr = self.socket_dict[socket_name]
         except Exception:
-            raise Exception(f"This is not a valid socket name, please make sure it is one of the following: {self.socket_dict.keys()}")
+            raise Exception(
+                f"This is not a valid socket name, please make sure it is one of the following: {self.socket_dict.keys()}"
+            )
 
         # get the correct value to set
         if on:
@@ -90,7 +118,15 @@ class PSOC(SNMPDevice):
             socket_set = "outletOff"
 
         # create the SNMPAttribute for the correct socket
-        attr = SNMPAttribute(self.snmp_manager.SNMP_comm, "PowerNet-MIB", name="sPDUOutletCtl", idx=socket_nr, dtype=str, dim_x=1, dim_y=0)
+        attr = SNMPAttribute(
+            self.snmp_manager.SNMP_comm,
+            "PowerNet-MIB",
+            name="sPDUOutletCtl",
+            idx=socket_nr,
+            dtype=str,
+            dim_x=1,
+            dim_y=0,
+        )
 
         # write the correct value
         attr.write_function([socket_set])
@@ -112,7 +148,8 @@ class PSOC(SNMPDevice):
         """
 
         # for whatever reason, the uptime is given in hundredths of a second
-        return str(timedelta(seconds=self.uptime_attr.read_function()/100))
+        return str(timedelta(seconds=self.uptime_attr.read_function() / 100))
+
 
 # ----------
 # Run server
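
Two small conversions from the PSOC device above, shown standalone: the socket-name-to-socket-number dict built in configure_for_initialise, and the uptime conversion from SNMP sysUpTime (reported in hundredths of a second) to a human-readable string. The socket names are illustrative.

from datetime import timedelta

# illustrative socket names; empty strings mark unused sockets
PSOC_sockets = ["LCU", "router", "", "", "", "", "", ""]

# e.g. {"LCU": 1, "router": 2, ...}, mirroring the dict comprehension above
socket_dict = {PSOC_sockets[f]: f + 1 for f in range(len(PSOC_sockets))}

raw_uptime = 8640000  # sysUpTime in hundredths of a second, i.e. one day
print(str(timedelta(seconds=raw_uptime / 100)))  # 1 day, 0:00:00
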
diff --git a/tangostationcontrol/tangostationcontrol/devices/recv.py b/tangostationcontrol/tangostationcontrol/devices/recv.py
index 70543fbff6d54c36e3f240e6492604079c4a4006..bd03821b39bbad605a2d51e9a8b9a1a991923e2b 100644
--- a/tangostationcontrol/tangostationcontrol/devices/recv.py
+++ b/tangostationcontrol/tangostationcontrol/devices/recv.py
@@ -1,35 +1,35 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the RECV project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ RECV Device Server for LOFAR2.0
 
 """
 
+import logging
+
+import numpy
+from tango import AttrWriteType, DevVarFloatArray, DevString, DevLong
+
 # PyTango imports
 from tango import DebugIt
 from tango.server import command
 from tango.server import device_property, attribute
-from tango import AttrWriteType, DevVarFloatArray, DevString, DevLong
-
-import numpy
+from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
+from tangostationcontrol.common.constants import (
+    N_rcu,
+    N_elements,
+    N_pol,
+    N_rcu_inp,
+    DEFAULT_POLLING_PERIOD,
+)
 
 # Additional import
 from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.common.lofar_logging import device_logging_to_python
 from tangostationcontrol.common.states import DEFAULT_COMMAND_STATES
-from tangostationcontrol.common.constants import N_rcu, N_elements, N_pol, N_rcu_inp, DEFAULT_POLLING_PERIOD
-from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
 from tangostationcontrol.devices.device_decorators import only_in_states
 from tangostationcontrol.devices.opcua_device import OPCUADevice
 
-import logging
-
 logger = logging.getLogger()
 
 __all__ = ["RECV", "main"]
@@ -45,7 +45,7 @@ class RECV(OPCUADevice):
         "LBA_30_70": 2,
         "HBA_170_230": 1,
         "HBA_110_190": 2,
-        "HBA_210_250": 4
+        "HBA_210_250": 4,
     }
 
     # -----------------
@@ -55,62 +55,58 @@ class RECV(OPCUADevice):
     # ----- Default settings
 
     ANT_mask_RW_default = device_property(
-        dtype='DevVarBooleanArray',
+        dtype="DevVarBooleanArray",
         mandatory=False,
-        default_value=[True] * N_rcu * N_rcu_inp
+        default_value=[True] * N_rcu * N_rcu_inp,
     )
 
     RCU_mask_RW_default = device_property(
-        dtype='DevVarBooleanArray',
-        mandatory=False,
-        default_value=[True] * N_rcu
+        dtype="DevVarBooleanArray", mandatory=False, default_value=[True] * N_rcu
     )
 
     RCU_attenuator_dB_RW_default = device_property(
-        dtype='DevVarLong64Array',
+        dtype="DevVarLong64Array",
         mandatory=False,
-        default_value=[0] * N_rcu * N_rcu_inp
+        default_value=[0] * N_rcu * N_rcu_inp,
     )
 
     RCU_band_select_RW_default = device_property(
-        dtype='DevVarLong64Array',
+        dtype="DevVarLong64Array",
         mandatory=False,
-        default_value=[1] * N_rcu * N_rcu_inp
+        default_value=[1] * N_rcu * N_rcu_inp,
     )
 
     RCU_PWR_ANT_on_RW_default = device_property(
-        dtype='DevVarBooleanArray',
+        dtype="DevVarBooleanArray",
         mandatory=False,
         default_value=[False] * N_rcu * N_rcu_inp
         # turn power off by default in test setups, f.e. to prevent blowing up the noise sources
     )
 
     RECVTR_monitor_rate_RW_default = device_property(
-        dtype='DevLong64',
-        mandatory=False,
-        default_value=1
+        dtype="DevLong64", mandatory=False, default_value=1
     )
 
     TRANSLATOR_DEFAULT_SETTINGS = [
-        'ANT_mask_RW',
-        'RCU_mask_RW',
-        'RECVTR_monitor_rate_RW'
+        "ANT_mask_RW",
+        "RCU_mask_RW",
+        "RECVTR_monitor_rate_RW",
     ]
 
     # ----- Timing values
 
     RCU_On_Off_timeout = device_property(
-        doc='Maximum amount of time to wait after turning RCU(s) on or off',
-        dtype='DevFloat',
+        doc="Maximum amount of time to wait after turning RCU(s) on or off",
+        dtype="DevFloat",
         mandatory=False,
-        default_value=30.0
+        default_value=30.0,
     )
 
     RCU_DTH_On_Off_timeout = device_property(
-        doc='Maximum amount of time to wait after turning dithering on or off',
-        dtype='DevFloat',
+        doc="Maximum amount of time to wait after turning dithering on or off",
+        dtype="DevFloat",
         mandatory=False,
-        default_value=30.0
+        default_value=30.0,
     )
 
     # ----- Calibration values
@@ -118,159 +114,265 @@ class RECV(OPCUADevice):
     HBAT_bf_delay_step_delays = device_property(
         dtype="DevVarFloatArray",
         mandatory=False,
-        default_value=numpy.array([
-            0.0,        0.5228E-9,  0.9797E-9,  1.4277E-9,  1.9055E-9,
-            2.4616E-9,  2.9539E-9,  3.4016E-9,  3.8076E-9,  4.3461E-9,
-            4.9876E-9,  5.4894E-9,  5.7973E-9,  6.2707E-9,  6.8628E-9,
-            7.3989E-9,  8.0673E-9,  8.6188E-9,  9.1039E-9,  9.5686E-9,
-            10.0463E-9, 10.5774E-9, 11.0509E-9, 11.5289E-9, 11.9374E-9,
-            12.4524E-9, 13.0842E-9, 13.5936E-9, 13.9198E-9, 14.4087E-9,
-            14.9781E-9, 15.5063E-9
-        ], dtype=numpy.float64)
+        default_value=numpy.array(
+            [
+                0.0,
+                0.5228e-9,
+                0.9797e-9,
+                1.4277e-9,
+                1.9055e-9,
+                2.4616e-9,
+                2.9539e-9,
+                3.4016e-9,
+                3.8076e-9,
+                4.3461e-9,
+                4.9876e-9,
+                5.4894e-9,
+                5.7973e-9,
+                6.2707e-9,
+                6.8628e-9,
+                7.3989e-9,
+                8.0673e-9,
+                8.6188e-9,
+                9.1039e-9,
+                9.5686e-9,
+                10.0463e-9,
+                10.5774e-9,
+                11.0509e-9,
+                11.5289e-9,
+                11.9374e-9,
+                12.4524e-9,
+                13.0842e-9,
+                13.5936e-9,
+                13.9198e-9,
+                14.4087e-9,
+                14.9781e-9,
+                15.5063e-9,
+            ],
+            dtype=numpy.float64,
+        ),
     )
 
     HBAT_signal_input_delays = device_property(
-        doc='Signal input delay calibration values for the elements within a tile.',
-        dtype='DevVarFloatArray',
+        doc="Signal input delay calibration values for the elements within a tile.",
+        dtype="DevVarFloatArray",
         mandatory=False,
-        default_value=numpy.zeros((N_rcu,), dtype=numpy.float64)
+        default_value=numpy.zeros((N_rcu,), dtype=numpy.float64),
     )
 
     # ----------
     # Attributes
     # ----------
     ANT_mask_RW = AttributeWrapper(
-        comms_annotation=["ANT_mask_RW"], datatype=bool, dims=(N_rcu, N_rcu_inp),
-        access=AttrWriteType.READ_WRITE
+        comms_annotation=["ANT_mask_RW"],
+        datatype=bool,
+        dims=(N_rcu, N_rcu_inp),
+        access=AttrWriteType.READ_WRITE,
     )
 
     # The HBAT beamformer delays represent 32 delays for each of the 96 inputs.
     # The 32 delays deconstruct as delays[polarisation][dipole],
     # and each delay is the number of 'delay steps' to apply (0.5ns for HBAT1).
     HBAT_BF_delay_steps_R = AttributeWrapper(
-        comms_annotation=["HBAT_BF_delay_steps_R"], datatype=numpy.int64,
-        dims=(N_rcu * N_rcu_inp, N_elements, N_pol)
+        comms_annotation=["HBAT_BF_delay_steps_R"],
+        datatype=numpy.int64,
+        dims=(N_rcu * N_rcu_inp, N_elements, N_pol),
     )
     HBAT_BF_delay_steps_RW = AttributeWrapper(
-        comms_annotation=["HBAT_BF_delay_steps_RW"], datatype=numpy.int64,
+        comms_annotation=["HBAT_BF_delay_steps_RW"],
+        datatype=numpy.int64,
         dims=(N_rcu * N_rcu_inp, N_elements, N_pol),
-        access=AttrWriteType.READ_WRITE
+        access=AttrWriteType.READ_WRITE,
     )
     HBAT_LED_on_R = AttributeWrapper(
-        comms_annotation=["HBAT_LED_on_R"], datatype=bool,
-        dims=(N_rcu * N_rcu_inp, N_elements, N_pol)
+        comms_annotation=["HBAT_LED_on_R"],
+        datatype=bool,
+        dims=(N_rcu * N_rcu_inp, N_elements, N_pol),
     )
     HBAT_LED_on_RW = AttributeWrapper(
-        comms_annotation=["HBAT_LED_on_RW"], datatype=bool,
+        comms_annotation=["HBAT_LED_on_RW"],
+        datatype=bool,
         dims=(N_rcu * N_rcu_inp, N_elements, N_pol),
-        access=AttrWriteType.READ_WRITE
+        access=AttrWriteType.READ_WRITE,
     )
     HBAT_PWR_LNA_on_R = AttributeWrapper(
-        comms_annotation=["HBAT_PWR_LNA_on_R"], datatype=bool,
-        dims=(N_rcu * N_rcu_inp, N_elements, N_pol)
+        comms_annotation=["HBAT_PWR_LNA_on_R"],
+        datatype=bool,
+        dims=(N_rcu * N_rcu_inp, N_elements, N_pol),
     )
     HBAT_PWR_LNA_on_RW = AttributeWrapper(
-        comms_annotation=["HBAT_PWR_LNA_on_RW"], datatype=bool,
+        comms_annotation=["HBAT_PWR_LNA_on_RW"],
+        datatype=bool,
         dims=(N_rcu * N_rcu_inp, N_elements, N_pol),
-        access=AttrWriteType.READ_WRITE
+        access=AttrWriteType.READ_WRITE,
     )
     HBAT_PWR_on_R = AttributeWrapper(
-        comms_annotation=["HBAT_PWR_on_R"], datatype=bool,
-        dims=(N_rcu * N_rcu_inp, N_elements, N_pol)
+        comms_annotation=["HBAT_PWR_on_R"],
+        datatype=bool,
+        dims=(N_rcu * N_rcu_inp, N_elements, N_pol),
     )
     HBAT_PWR_on_RW = AttributeWrapper(
-        comms_annotation=["HBAT_PWR_on_RW"], datatype=bool,
+        comms_annotation=["HBAT_PWR_on_RW"],
+        datatype=bool,
         dims=(N_rcu * N_rcu_inp, N_elements, N_pol),
-        access=AttrWriteType.READ_WRITE
+        access=AttrWriteType.READ_WRITE,
     )
     RCU_ADC_locked_R = AttributeWrapper(
-        comms_annotation=["RCU_ADC_locked_R"], datatype=bool,
-        dims=(N_rcu, N_rcu_inp)
+        comms_annotation=["RCU_ADC_locked_R"], datatype=bool, dims=(N_rcu, N_rcu_inp)
     )
     RCU_attenuator_dB_R = AttributeWrapper(
-        comms_annotation=["RCU_attenuator_dB_R"], datatype=numpy.int64,
-        dims=(N_rcu, N_rcu_inp)
+        comms_annotation=["RCU_attenuator_dB_R"],
+        datatype=numpy.int64,
+        dims=(N_rcu, N_rcu_inp),
     )
     RCU_attenuator_dB_RW = AttributeWrapper(
-        comms_annotation=["RCU_attenuator_dB_RW"], datatype=numpy.int64,
-        dims=(N_rcu, N_rcu_inp), access=AttrWriteType.READ_WRITE
+        comms_annotation=["RCU_attenuator_dB_RW"],
+        datatype=numpy.int64,
+        dims=(N_rcu, N_rcu_inp),
+        access=AttrWriteType.READ_WRITE,
     )
     RCU_band_select_R = AttributeWrapper(
-        comms_annotation=["RCU_band_select_R"], datatype=numpy.int64,
-        dims=(N_rcu, N_rcu_inp)
+        comms_annotation=["RCU_band_select_R"],
+        datatype=numpy.int64,
+        dims=(N_rcu, N_rcu_inp),
     )
     RCU_band_select_RW = AttributeWrapper(
-        comms_annotation=["RCU_band_select_RW"], datatype=numpy.int64,
-        dims=(N_rcu, N_rcu_inp), access=AttrWriteType.READ_WRITE
+        comms_annotation=["RCU_band_select_RW"],
+        datatype=numpy.int64,
+        dims=(N_rcu, N_rcu_inp),
+        access=AttrWriteType.READ_WRITE,
     )
     RCU_DTH_freq_R = AttributeWrapper(
-        comms_annotation=["RCU_DTH_freq_R"], datatype=numpy.int64,
-        dims=(N_rcu, N_rcu_inp)
+        comms_annotation=["RCU_DTH_freq_R"],
+        datatype=numpy.int64,
+        dims=(N_rcu, N_rcu_inp),
     )
     RCU_DTH_freq_RW = AttributeWrapper(
-        comms_annotation=["RCU_DTH_freq_RW"], datatype=numpy.int64,
+        comms_annotation=["RCU_DTH_freq_RW"],
+        datatype=numpy.int64,
         dims=(N_rcu, N_rcu_inp),
-        access=AttrWriteType.READ_WRITE
+        access=AttrWriteType.READ_WRITE,
     )
     RCU_DTH_on_R = AttributeWrapper(
-        comms_annotation=["RCU_DTH_on_R"], datatype=bool,
-        dims=(N_rcu, N_rcu_inp)
+        comms_annotation=["RCU_DTH_on_R"], datatype=bool, dims=(N_rcu, N_rcu_inp)
     )
     RCU_LED_green_on_R = AttributeWrapper(
-        comms_annotation=["RCU_LED_green_on_R"], datatype=bool,
-        dims=(N_rcu,)
+        comms_annotation=["RCU_LED_green_on_R"], datatype=bool, dims=(N_rcu,)
     )
     RCU_LED_green_on_RW = AttributeWrapper(
-        comms_annotation=["RCU_LED_green_on_RW"], datatype=bool,
-        dims=(N_rcu,), access=AttrWriteType.READ_WRITE
+        comms_annotation=["RCU_LED_green_on_RW"],
+        datatype=bool,
+        dims=(N_rcu,),
+        access=AttrWriteType.READ_WRITE,
     )
     RCU_LED_red_on_R = AttributeWrapper(
-        comms_annotation=["RCU_LED_red_on_R"], datatype=bool,
-        dims=(N_rcu,)
+        comms_annotation=["RCU_LED_red_on_R"], datatype=bool, dims=(N_rcu,)
     )
     RCU_LED_red_on_RW = AttributeWrapper(
-        comms_annotation=["RCU_LED_red_on_RW"], datatype=bool,
-        dims=(N_rcu,), access=AttrWriteType.READ_WRITE
+        comms_annotation=["RCU_LED_red_on_RW"],
+        datatype=bool,
+        dims=(N_rcu,),
+        access=AttrWriteType.READ_WRITE,
     )
     RCU_mask_RW = AttributeWrapper(
-        comms_annotation=["RCU_mask_RW"], datatype=bool, dims=(N_rcu,),
-        access=AttrWriteType.READ_WRITE
-    )
-    RCU_PCB_ID_R = AttributeWrapper(comms_annotation=["RCU_PCB_ID_R"], datatype=numpy.int64, dims=(N_rcu,))
-    RCU_PCB_number_R = AttributeWrapper(comms_annotation=["RCU_PCB_number_R"], datatype=str, dims=(N_rcu,))
-    RCU_PCB_version_R = AttributeWrapper(comms_annotation=["RCU_PCB_version_R"], datatype=str, dims=(N_rcu,))
-    RCU_PWR_1V8_R = AttributeWrapper(comms_annotation=["RCU_PWR_1V8_R"], datatype=numpy.float64, dims=(N_rcu,))
-    RCU_PWR_2V5_R = AttributeWrapper(comms_annotation=["RCU_PWR_2V5_R"], datatype=numpy.float64, dims=(N_rcu,))
-    RCU_PWR_3V3_R = AttributeWrapper(comms_annotation=["RCU_PWR_3V3_R"], datatype=numpy.float64, dims=(N_rcu,))
-    RCU_PWR_ANALOG_on_R = AttributeWrapper(comms_annotation=["RCU_PWR_ANALOG_on_R"], datatype=bool, dims=(N_rcu,))
-    RCU_PWR_ANT_IOUT_R = AttributeWrapper(comms_annotation=["RCU_PWR_ANT_IOUT_R"], datatype=numpy.float64, dims=(N_rcu, N_rcu_inp))
-    RCU_PWR_ANT_on_R = AttributeWrapper(comms_annotation=["RCU_PWR_ANT_on_R"], datatype=bool, dims=(N_rcu, N_rcu_inp))
-    RCU_PWR_ANT_on_RW = AttributeWrapper(comms_annotation=["RCU_PWR_ANT_on_RW"], datatype=bool, dims=(N_rcu, N_rcu_inp), access=AttrWriteType.READ_WRITE)
-    RCU_PWR_ANT_VIN_R = AttributeWrapper(comms_annotation=["RCU_PWR_ANT_VIN_R"], datatype=numpy.float64, dims=(N_rcu, N_rcu_inp))
-    RCU_PWR_ANT_VOUT_R = AttributeWrapper(comms_annotation=["RCU_PWR_ANT_VOUT_R"], datatype=numpy.float64, dims=(N_rcu, N_rcu_inp))
-    RCU_PWR_DIGITAL_on_R = AttributeWrapper(comms_annotation=["RCU_PWR_DIGITAL_on_R"], datatype=bool, dims=(N_rcu,))
-    RCU_PWR_good_R = AttributeWrapper(comms_annotation=["RCU_PWR_good_R"], datatype=bool, dims=(N_rcu,))
-    RCU_TEMP_R = AttributeWrapper(comms_annotation=["RCU_TEMP_R"], datatype=numpy.float64, dims=(N_rcu,))
-    RECVTR_I2C_error_R = AttributeWrapper(comms_annotation=["RECVTR_I2C_error_R"], datatype=numpy.int64, dims=(N_rcu,))
-    RECVTR_monitor_rate_RW = AttributeWrapper(comms_annotation=["RECVTR_monitor_rate_RW"], datatype=numpy.int64, access=AttrWriteType.READ_WRITE)
-    RECVTR_translator_busy_R = AttributeWrapper(comms_annotation=["RECVTR_translator_busy_R"], datatype=bool)
+        comms_annotation=["RCU_mask_RW"],
+        datatype=bool,
+        dims=(N_rcu,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    RCU_PCB_ID_R = AttributeWrapper(
+        comms_annotation=["RCU_PCB_ID_R"], datatype=numpy.int64, dims=(N_rcu,)
+    )
+    RCU_PCB_number_R = AttributeWrapper(
+        comms_annotation=["RCU_PCB_number_R"], datatype=str, dims=(N_rcu,)
+    )
+    RCU_PCB_version_R = AttributeWrapper(
+        comms_annotation=["RCU_PCB_version_R"], datatype=str, dims=(N_rcu,)
+    )
+    RCU_PWR_1V8_R = AttributeWrapper(
+        comms_annotation=["RCU_PWR_1V8_R"], datatype=numpy.float64, dims=(N_rcu,)
+    )
+    RCU_PWR_2V5_R = AttributeWrapper(
+        comms_annotation=["RCU_PWR_2V5_R"], datatype=numpy.float64, dims=(N_rcu,)
+    )
+    RCU_PWR_3V3_R = AttributeWrapper(
+        comms_annotation=["RCU_PWR_3V3_R"], datatype=numpy.float64, dims=(N_rcu,)
+    )
+    RCU_PWR_ANALOG_on_R = AttributeWrapper(
+        comms_annotation=["RCU_PWR_ANALOG_on_R"], datatype=bool, dims=(N_rcu,)
+    )
+    RCU_PWR_ANT_IOUT_R = AttributeWrapper(
+        comms_annotation=["RCU_PWR_ANT_IOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_rcu, N_rcu_inp),
+    )
+    RCU_PWR_ANT_on_R = AttributeWrapper(
+        comms_annotation=["RCU_PWR_ANT_on_R"], datatype=bool, dims=(N_rcu, N_rcu_inp)
+    )
+    RCU_PWR_ANT_on_RW = AttributeWrapper(
+        comms_annotation=["RCU_PWR_ANT_on_RW"],
+        datatype=bool,
+        dims=(N_rcu, N_rcu_inp),
+        access=AttrWriteType.READ_WRITE,
+    )
+    RCU_PWR_ANT_VIN_R = AttributeWrapper(
+        comms_annotation=["RCU_PWR_ANT_VIN_R"],
+        datatype=numpy.float64,
+        dims=(N_rcu, N_rcu_inp),
+    )
+    RCU_PWR_ANT_VOUT_R = AttributeWrapper(
+        comms_annotation=["RCU_PWR_ANT_VOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_rcu, N_rcu_inp),
+    )
+    RCU_PWR_DIGITAL_on_R = AttributeWrapper(
+        comms_annotation=["RCU_PWR_DIGITAL_on_R"], datatype=bool, dims=(N_rcu,)
+    )
+    RCU_PWR_good_R = AttributeWrapper(
+        comms_annotation=["RCU_PWR_good_R"], datatype=bool, dims=(N_rcu,)
+    )
+    RCU_TEMP_R = AttributeWrapper(
+        comms_annotation=["RCU_TEMP_R"], datatype=numpy.float64, dims=(N_rcu,)
+    )
+    RECVTR_I2C_error_R = AttributeWrapper(
+        comms_annotation=["RECVTR_I2C_error_R"], datatype=numpy.int64, dims=(N_rcu,)
+    )
+    RECVTR_monitor_rate_RW = AttributeWrapper(
+        comms_annotation=["RECVTR_monitor_rate_RW"],
+        datatype=numpy.int64,
+        access=AttrWriteType.READ_WRITE,
+    )
+    RECVTR_translator_busy_R = AttributeWrapper(
+        comms_annotation=["RECVTR_translator_busy_R"], datatype=bool
+    )
 
     # ----------
     # Summarising Attributes
     # ----------
-    RCU_LED_colour_R = attribute(dtype=(numpy.uint32,), max_dim_x=N_rcu, fisallowed="is_attribute_access_allowed")
+    RCU_LED_colour_R = attribute(
+        dtype=(numpy.uint32,), max_dim_x=N_rcu, fisallowed="is_attribute_access_allowed"
+    )
 
     def read_RCU_LED_colour_R(self):
-        return (2 * self.read_attribute("RCU_LED_green_on_R") + 4 * self.read_attribute("RCU_LED_red_on_R")).astype(
-            numpy.uint32)
+        return (
+            2 * self.read_attribute("RCU_LED_green_on_R")
+            + 4 * self.read_attribute("RCU_LED_red_on_R")
+        ).astype(numpy.uint32)
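For reference, a minimal sketch of what the colour computation above produces, assuming the encoding is simply bit 1 for green and bit 2 for red (0 = off, 2 = green, 4 = red, 6 = both); the input values below are illustrative only.

    import numpy

    green_on = numpy.array([True, False, True, False])
    red_on = numpy.array([False, True, True, False])

    colour = (2 * green_on + 4 * red_on).astype(numpy.uint32)
    # colour == [2, 4, 6, 0]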
 
-    RCU_error_R = attribute(dtype=(bool,), max_dim_x=N_rcu, fisallowed="is_attribute_access_allowed")
-    ANT_error_R = attribute(dtype=((bool,),), max_dim_y=N_rcu, max_dim_x=N_rcu_inp, fisallowed="is_attribute_access_allowed")
+    RCU_error_R = attribute(
+        dtype=(bool,), max_dim_x=N_rcu, fisallowed="is_attribute_access_allowed"
+    )
+    ANT_error_R = attribute(
+        dtype=((bool,),),
+        max_dim_y=N_rcu,
+        max_dim_x=N_rcu_inp,
+        fisallowed="is_attribute_access_allowed",
+    )
 
     def read_RCU_error_R(self):
         return self.read_attribute("RCU_mask_RW") & (
-                (self.read_attribute("RECVTR_I2C_error_R") > 0)
-                | self.alarm_val("RCU_PCB_ID_R")
+            (self.read_attribute("RECVTR_I2C_error_R") > 0)
+            | self.alarm_val("RCU_PCB_ID_R")
         )
 
     def read_ANT_error_R(self):
@@ -278,10 +380,21 @@ class RECV(OPCUADevice):
             ~self.read_attribute("RCU_ADC_locked_R")
         )
 
-    RECV_IOUT_error_R = attribute(dtype=((bool,),), max_dim_y=N_rcu, max_dim_x=N_rcu_inp, fisallowed="is_attribute_access_allowed")
-    RECV_TEMP_error_R = attribute(dtype=(bool,), max_dim_x=N_rcu, fisallowed="is_attribute_access_allowed",
-                                  polling_period=DEFAULT_POLLING_PERIOD)
-    RECV_VOUT_error_R = attribute(dtype=(bool,), max_dim_x=N_rcu, fisallowed="is_attribute_access_allowed")
+    RECV_IOUT_error_R = attribute(
+        dtype=((bool,),),
+        max_dim_y=N_rcu,
+        max_dim_x=N_rcu_inp,
+        fisallowed="is_attribute_access_allowed",
+    )
+    RECV_TEMP_error_R = attribute(
+        dtype=(bool,),
+        max_dim_x=N_rcu,
+        fisallowed="is_attribute_access_allowed",
+        polling_period=DEFAULT_POLLING_PERIOD,
+    )
+    RECV_VOUT_error_R = attribute(
+        dtype=(bool,), max_dim_x=N_rcu, fisallowed="is_attribute_access_allowed"
+    )
 
     def read_RECV_IOUT_error_R(self):
         return self.read_attribute("ANT_mask_RW") & (
@@ -290,21 +403,25 @@ class RECV(OPCUADevice):
 
     def read_RECV_TEMP_error_R(self):
         # Don't apply the mask here --- we always want to know if things get too hot!
-        return (
-            self.alarm_val("RCU_TEMP_R")
-        )
+        return self.alarm_val("RCU_TEMP_R")
 
     def read_RECV_VOUT_error_R(self):
-        return (self.read_attribute("ANT_mask_RW") & (
+        return (
+            self.read_attribute("ANT_mask_RW")
+            & (
                 self.alarm_val("RCU_PWR_ANT_VIN_R")
                 | self.alarm_val("RCU_PWR_ANT_VOUT_R")
-        )).any(axis=1) | (self.read_attribute("RCU_mask_RW") & (
+            )
+        ).any(axis=1) | (
+            self.read_attribute("RCU_mask_RW")
+            & (
                 self.alarm_val("RCU_PWR_1V8_R")
                 | self.alarm_val("RCU_PWR_2V5_R")
                 | self.alarm_val("RCU_PWR_3V3_R")
                 | ~self.read_attribute("RCU_PWR_DIGITAL_on_R")
                 | ~self.read_attribute("RCU_PWR_good_R")
-        ))
+            )
+        )
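A shape sketch of the aggregation in read_RECV_VOUT_error_R above, using toy dimensions (3 RCUs x 2 inputs) in place of N_rcu x N_rcu_inp; the alarm values are made up for illustration.

    import numpy

    ant_mask = numpy.ones((3, 2), dtype=bool)      # per-input mask, like ANT_mask_RW
    ant_alarm = numpy.array([[False, True],
                             [False, False],
                             [True, True]])        # per-input voltage alarms
    rcu_mask = numpy.ones((3,), dtype=bool)        # per-RCU mask, like RCU_mask_RW
    rcu_alarm = numpy.array([False, True, False])  # per-RCU supply alarms

    # per-input alarms collapse to one flag per RCU, then OR with the RCU-level alarms
    error = (ant_mask & ant_alarm).any(axis=1) | (rcu_mask & rcu_alarm)
    # error == [True, True, True]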
 
     # --------
     # overloaded functions
@@ -322,7 +439,7 @@ class RECV(OPCUADevice):
         self.HBAT_bf_delay_offset = numpy.mean(self.HBAT_bf_delay_step_delays)
 
     def _prepare_hardware(self):
-        """ Initialise the RCU hardware. """
+        """Initialise the RCU hardware."""
 
         # Cycle RCUs
         self.RCU_off()
@@ -331,7 +448,7 @@ class RECV(OPCUADevice):
         self.wait_attribute("RECVTR_translator_busy_R", False, self.RCU_On_Off_timeout)
 
     def _disable_hardware(self):
-        """ Disable the RECV hardware. """
+        """Disable the RECV hardware."""
 
         # Save actual mask values
         RCU_mask = self.proxy.RCU_mask_RW
@@ -361,7 +478,11 @@ class RECV(OPCUADevice):
         def nearest_delay_step(delay):
             # We want the index in the HBAT_bf_delay_step_delays array that is closest to the given delay,
             # shifted by HBAT_bf_delay_offset to obtain strictly positive delays.
-            return (numpy.abs(self.HBAT_bf_delay_step_delays - (delay + self.HBAT_bf_delay_offset))).argmin()
+            return (
+                numpy.abs(
+                    self.HBAT_bf_delay_step_delays - (delay + self.HBAT_bf_delay_offset)
+                )
+            ).argmin()
 
         # Apply to all elements to convert each delay into the number of delay steps
         return numpy.vectorize(nearest_delay_step)(calibrated_delays)
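A minimal sketch of the nearest-step lookup above, using a toy table of three step delays instead of the 32-entry HBAT_bf_delay_step_delays; the numbers are illustrative only.

    import numpy

    step_delays = numpy.array([0.0, 0.5e-9, 1.0e-9])  # delay produced by each step index
    offset = numpy.mean(step_delays)                   # shift requested delays to mid-range

    def nearest_delay_step(delay):
        return numpy.abs(step_delays - (delay + offset)).argmin()

    print(nearest_delay_step(-0.4e-9))  # 0
    print(nearest_delay_step(0.0))      # 1 (the middle step)
    print(nearest_delay_step(0.6e-9))   # 2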
@@ -371,7 +492,7 @@ class RECV(OPCUADevice):
     # --------
     @command(dtype_in=DevVarFloatArray, dtype_out=DevVarFloatArray)
     def calculate_HBAT_bf_delay_steps(self, delays: numpy.ndarray):
-        """ converts a signal path delay (in seconds) to an analog beam weight """
+        """converts a signal path delay (in seconds) to an analog beam weight"""
 
         # Reshape the flattened input array into however many tiles we were given
         delays = numpy.array(delays).reshape(-1, N_elements)
@@ -383,7 +504,7 @@ class RECV(OPCUADevice):
 
     @command(dtype_in=DevString, dtype_out=DevLong)
     def get_rcu_band_from_filter(self, filter_name: str):
-        """ return the rcu band given the filter name"""
+        """return the rcu band given the filter name"""
         return self.FILTER_RCU_DICT.get(filter_name, -1)
 
     @command()
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/__init__.py b/tangostationcontrol/tangostationcontrol/devices/sdp/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/beamlet.py b/tangostationcontrol/tangostationcontrol/devices/sdp/beamlet.py
index bc7bc84819f2344b625c30072691394a8830facd..69af8168054d461aaa77d133159ab341155f3752 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/beamlet.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/beamlet.py
@@ -1,30 +1,48 @@
-# -*- coding: utf-8 -*-
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ Beamlet Device Server for LOFAR2.0
 
 """
 
+from functools import lru_cache
+
+import numpy
+from tango import (
+    AttrWriteType,
+    DevVarFloatArray,
+    DevVarULongArray,
+    DeviceProxy,
+    DevSource,
+    EventType,
+)
+
 # PyTango imports
 from tango.server import device_property, command, attribute
-from tango import AttrWriteType, DevVarFloatArray, DevVarULongArray, DeviceProxy, DevSource, EventType
+from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
+from tangostationcontrol.common.constants import (
+    N_pn,
+    A_pn,
+    N_pol,
+    N_beamlets_ctrl,
+    N_beamsets_ctrl,
+    P_sum,
+    DEFAULT_SUBBAND,
+)
 
 # Additional import
 from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.common.lofar_logging import log_exceptions
-from tangostationcontrol.common.constants import N_pn, A_pn, N_pol, N_beamlets_ctrl, N_beamsets_ctrl, P_sum, DEFAULT_SUBBAND
-from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
-from tangostationcontrol.devices.sdp.common import phases_to_weights, subband_frequencies
 from tangostationcontrol.devices.opcua_device import OPCUADevice
-
-import numpy
-from functools import lru_cache
+from tangostationcontrol.devices.sdp.common import (
+    phases_to_weights,
+    subband_frequencies,
+)
 
 __all__ = ["Beamlet", "main"]
 
 import logging
+
 logger = logging.getLogger()
 
 
@@ -35,165 +53,360 @@ class Beamlet(OPCUADevice):
     # -----------------
 
     FPGA_beamlet_output_hdr_eth_source_mac_RW_default = device_property(
-        dtype='DevVarStringArray',
-        mandatory=True
+        dtype="DevVarStringArray", mandatory=True
     )
 
     FPGA_beamlet_output_hdr_ip_source_address_RW_default = device_property(
-        dtype='DevVarStringArray',
-        mandatory=True
+        dtype="DevVarStringArray", mandatory=True
     )
 
     FPGA_beamlet_output_hdr_udp_source_port_RW_default = device_property(
-        dtype='DevVarUShortArray',
-        mandatory=True
+        dtype="DevVarUShortArray", mandatory=True
     )
 
     FPGA_beamlet_output_hdr_eth_destination_mac_RW_default = device_property(
-        dtype='DevVarStringArray',
-        mandatory=True
+        dtype="DevVarStringArray", mandatory=True
     )
 
     FPGA_beamlet_output_hdr_ip_destination_address_RW_default = device_property(
-        dtype='DevVarStringArray',
-        mandatory=True
+        dtype="DevVarStringArray", mandatory=True
     )
 
     FPGA_beamlet_output_hdr_udp_destination_port_RW_default = device_property(
-        dtype='DevVarUShortArray',
-        mandatory=True
+        dtype="DevVarUShortArray", mandatory=True
     )
 
     FPGA_beamlet_output_enable_RW_default = device_property(
-        dtype='DevVarBooleanArray',
-        mandatory=False,
-        default_value=[False] * N_pn
+        dtype="DevVarBooleanArray", mandatory=False, default_value=[False] * N_pn
     )
 
     FPGA_beamlet_output_scale_RW_default = device_property(
-        dtype='DevVarDoubleArray',
-        mandatory=False,
-        default_value=[1.0] * N_pn
+        dtype="DevVarDoubleArray", mandatory=False, default_value=[1.0] * N_pn
     )
 
     FPGA_bf_weights_xy_yx_RW_default = device_property(
-        dtype='DevVarULongArray',
+        dtype="DevVarULongArray",
         mandatory=False,
-        default_value =[0] * N_pn * A_pn * N_pol * N_beamlets_ctrl
+        default_value=[0] * N_pn * A_pn * N_pol * N_beamlets_ctrl,
     )
 
     subband_select_RW_default = device_property(
-        dtype='DevVarULongArray',
+        dtype="DevVarULongArray",
         mandatory=False,
-        default_value =[DEFAULT_SUBBAND] * N_beamlets_ctrl
+        default_value=[DEFAULT_SUBBAND] * N_beamlets_ctrl,
     )
 
     FIRST_DEFAULT_SETTINGS = [
-        'FPGA_beamlet_output_hdr_eth_source_mac_RW',
-        'FPGA_beamlet_output_hdr_ip_source_address_RW',
-        'FPGA_beamlet_output_hdr_udp_source_port_RW',
-        'FPGA_beamlet_output_hdr_eth_destination_mac_RW',
-        'FPGA_beamlet_output_hdr_ip_destination_address_RW',
-        'FPGA_beamlet_output_hdr_udp_destination_port_RW',
-        'FPGA_beamlet_output_enable_RW',
-        'FPGA_bf_weights_xy_yx_RW'
+        "FPGA_beamlet_output_hdr_eth_source_mac_RW",
+        "FPGA_beamlet_output_hdr_ip_source_address_RW",
+        "FPGA_beamlet_output_hdr_udp_source_port_RW",
+        "FPGA_beamlet_output_hdr_eth_destination_mac_RW",
+        "FPGA_beamlet_output_hdr_ip_destination_address_RW",
+        "FPGA_beamlet_output_hdr_udp_destination_port_RW",
+        "FPGA_beamlet_output_enable_RW",
+        "FPGA_bf_weights_xy_yx_RW",
     ]
 
     # ----------
     # Attributes
     # ----------
 
-    FPGA_beamlet_output_enable_R = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_enable_R"], datatype=bool, dims=(N_pn,))
-    FPGA_beamlet_output_enable_RW = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_enable_RW"], datatype=bool, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    
-    FPGA_beamlet_output_hdr_eth_source_mac_R = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_hdr_eth_source_mac_R"], datatype=str, dims=(N_pn,))
-    FPGA_beamlet_output_hdr_eth_source_mac_RW = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_hdr_eth_source_mac_RW"], datatype=str, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_beamlet_output_hdr_ip_source_address_R = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_hdr_ip_source_address_R"], datatype=str, dims=(N_pn,))
-    FPGA_beamlet_output_hdr_ip_source_address_RW = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_hdr_ip_source_address_RW"], datatype=str, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_beamlet_output_hdr_udp_source_port_R = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_hdr_udp_source_port_R"], datatype=numpy.uint16, dims=(N_pn,))
-    FPGA_beamlet_output_hdr_udp_source_port_RW = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_hdr_udp_source_port_RW"], datatype=numpy.uint16, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    
-    FPGA_beamlet_output_hdr_eth_destination_mac_R = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_hdr_eth_destination_mac_R"], datatype=str, dims=(N_pn,))
-    FPGA_beamlet_output_hdr_eth_destination_mac_RW = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_hdr_eth_destination_mac_RW"], datatype=str, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_beamlet_output_hdr_ip_destination_address_R = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_hdr_ip_destination_address_R"], datatype=str, dims=(N_pn,))
-    FPGA_beamlet_output_hdr_ip_destination_address_RW = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_hdr_ip_destination_address_RW"], datatype=str, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_beamlet_output_hdr_udp_destination_port_R = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_hdr_udp_destination_port_R"], datatype=numpy.uint16, dims=(N_pn,))
-    FPGA_beamlet_output_hdr_udp_destination_port_RW = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_hdr_udp_destination_port_RW"], datatype=numpy.uint16, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    
-    FPGA_beamlet_output_scale_R = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_scale_R"], datatype=numpy.double, dims=(N_pn,))
-    FPGA_beamlet_output_scale_RW = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_scale_RW"], datatype=numpy.double, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_beamlet_output_bsn_R = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_bsn_R"], datatype=numpy.int64, dims=(N_pn, N_beamsets_ctrl))
-
-    FPGA_beamlet_output_nof_packets_R = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_nof_packets_R"], datatype=numpy.int32, dims=(N_pn, N_beamsets_ctrl))
-    FPGA_beamlet_output_nof_valid_R = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_nof_valid_R"], datatype=numpy.int32, dims=(N_pn, N_beamsets_ctrl))
+    FPGA_beamlet_output_enable_R = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_enable_R"], datatype=bool, dims=(N_pn,)
+    )
+    FPGA_beamlet_output_enable_RW = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_enable_RW"],
+        datatype=bool,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+
+    FPGA_beamlet_output_hdr_eth_source_mac_R = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_hdr_eth_source_mac_R"],
+        datatype=str,
+        dims=(N_pn,),
+    )
+    FPGA_beamlet_output_hdr_eth_source_mac_RW = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_hdr_eth_source_mac_RW"],
+        datatype=str,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_beamlet_output_hdr_ip_source_address_R = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_hdr_ip_source_address_R"],
+        datatype=str,
+        dims=(N_pn,),
+    )
+    FPGA_beamlet_output_hdr_ip_source_address_RW = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_hdr_ip_source_address_RW"],
+        datatype=str,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_beamlet_output_hdr_udp_source_port_R = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_hdr_udp_source_port_R"],
+        datatype=numpy.uint16,
+        dims=(N_pn,),
+    )
+    FPGA_beamlet_output_hdr_udp_source_port_RW = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_hdr_udp_source_port_RW"],
+        datatype=numpy.uint16,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+
+    FPGA_beamlet_output_hdr_eth_destination_mac_R = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_hdr_eth_destination_mac_R"],
+        datatype=str,
+        dims=(N_pn,),
+    )
+    FPGA_beamlet_output_hdr_eth_destination_mac_RW = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_hdr_eth_destination_mac_RW"],
+        datatype=str,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_beamlet_output_hdr_ip_destination_address_R = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_hdr_ip_destination_address_R"],
+        datatype=str,
+        dims=(N_pn,),
+    )
+    FPGA_beamlet_output_hdr_ip_destination_address_RW = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_hdr_ip_destination_address_RW"],
+        datatype=str,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_beamlet_output_hdr_udp_destination_port_R = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_hdr_udp_destination_port_R"],
+        datatype=numpy.uint16,
+        dims=(N_pn,),
+    )
+    FPGA_beamlet_output_hdr_udp_destination_port_RW = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_hdr_udp_destination_port_RW"],
+        datatype=numpy.uint16,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+
+    FPGA_beamlet_output_scale_R = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_scale_R"],
+        datatype=numpy.double,
+        dims=(N_pn,),
+    )
+    FPGA_beamlet_output_scale_RW = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_scale_RW"],
+        datatype=numpy.double,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_beamlet_output_bsn_R = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_bsn_R"],
+        datatype=numpy.int64,
+        dims=(N_pn, N_beamsets_ctrl),
+    )
+
+    FPGA_beamlet_output_nof_packets_R = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_nof_packets_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, N_beamsets_ctrl),
+    )
+    FPGA_beamlet_output_nof_valid_R = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_nof_valid_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, N_beamsets_ctrl),
+    )
 
     # boolean[N_pn][N_beamsets_ctrl]
-    FPGA_beamlet_output_ready_R = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_ready_R"], datatype=bool, dims=(N_pn, N_beamsets_ctrl))
+    FPGA_beamlet_output_ready_R = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_ready_R"],
+        datatype=bool,
+        dims=(N_pn, N_beamsets_ctrl),
+    )
     # boolean[N_pn][N_beamsets_ctrl]
-    FPGA_beamlet_output_xon_R = AttributeWrapper(comms_annotation=["FPGA_beamlet_output_xon_R"], datatype=bool, dims=(N_pn, N_beamsets_ctrl))
+    FPGA_beamlet_output_xon_R = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_output_xon_R"],
+        datatype=bool,
+        dims=(N_pn, N_beamsets_ctrl),
+    )
 
     # uint16[N_pn][A_PN][N_POL][N_beamsets_ctrl]
     # Select subband per dual-polarisation beamlet.
     # 0 for antenna polarization X in beamlet polarization X,
     # 1 for antenna polarization Y in beamlet polarization Y.
-    FPGA_beamlet_subband_select_R = AttributeWrapper(comms_annotation=["FPGA_beamlet_subband_select_R"], datatype=numpy.uint32, dims=(N_pn, A_pn, N_pol, N_beamlets_ctrl))
-    FPGA_beamlet_subband_select_RW = AttributeWrapper(comms_annotation=["FPGA_beamlet_subband_select_RW"], datatype=numpy.uint32, dims=(N_pn, A_pn, N_pol, N_beamlets_ctrl), access=AttrWriteType.READ_WRITE)
+    FPGA_beamlet_subband_select_R = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_subband_select_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn, A_pn, N_pol, N_beamlets_ctrl),
+    )
+    FPGA_beamlet_subband_select_RW = AttributeWrapper(
+        comms_annotation=["FPGA_beamlet_subband_select_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn, A_pn, N_pol, N_beamlets_ctrl),
+        access=AttrWriteType.READ_WRITE,
+    )
 
     # uint32[N_pn][N_beamset_ctrl]
-    FPGA_bf_ring_nof_transport_hops_R = AttributeWrapper(comms_annotation=["FPGA_bf_ring_nof_transport_hops_R"], datatype=numpy.uint32, dims=(N_pn, N_beamsets_ctrl))
-    FPGA_bf_ring_nof_transport_hops_RW = AttributeWrapper(comms_annotation=["FPGA_bf_ring_nof_transport_hops_RW"], datatype=numpy.uint32, dims=(N_pn, N_beamsets_ctrl), access=AttrWriteType.READ_WRITE)
+    FPGA_bf_ring_nof_transport_hops_R = AttributeWrapper(
+        comms_annotation=["FPGA_bf_ring_nof_transport_hops_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn, N_beamsets_ctrl),
+    )
+    FPGA_bf_ring_nof_transport_hops_RW = AttributeWrapper(
+        comms_annotation=["FPGA_bf_ring_nof_transport_hops_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn, N_beamsets_ctrl),
+        access=AttrWriteType.READ_WRITE,
+    )
 
     # cint16[N_pn][A_pn][N_pol][N_beamlets_ctrl]
     # Co-polarization BF weights. The N_pol = 2 parameter index is:
     # 0 for antenna polarization X in beamlet polarization X,
     # 1 for antenna polarization Y in beamlet polarization Y.
-    FPGA_bf_weights_xx_yy_R = AttributeWrapper(comms_annotation=["FPGA_bf_weights_xx_yy_R"], datatype=numpy.uint32, dims=(N_pn, A_pn, N_pol, N_beamlets_ctrl))
-    FPGA_bf_weights_xx_yy_RW = AttributeWrapper(comms_annotation=["FPGA_bf_weights_xx_yy_RW"], datatype=numpy.uint32, dims=(N_pn, A_pn, N_pol, N_beamlets_ctrl), access=AttrWriteType.READ_WRITE)
+    FPGA_bf_weights_xx_yy_R = AttributeWrapper(
+        comms_annotation=["FPGA_bf_weights_xx_yy_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn, A_pn, N_pol, N_beamlets_ctrl),
+    )
+    FPGA_bf_weights_xx_yy_RW = AttributeWrapper(
+        comms_annotation=["FPGA_bf_weights_xx_yy_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn, A_pn, N_pol, N_beamlets_ctrl),
+        access=AttrWriteType.READ_WRITE,
+    )
 
     # cint16[N_pn][A_pn][N_pol][N_beamlets_ctrl]
     # Cross-polarization BF weights. The N_pol = 2 parameter index is (note that index pol in range 0:N_pol-1 is the antenna polarization, so index !pol is the beamlet polarization):
     # 0 for antenna polarization X in beamlet polarization Y,
     # 1 for antenna polarization Y in beamlet polarization X.
-    FPGA_bf_weights_xy_yx_R = AttributeWrapper(comms_annotation=["FPGA_bf_weights_xy_yx_R"], datatype=numpy.uint32, dims=(A_pn * N_pol * N_beamlets_ctrl, N_pn))
-    FPGA_bf_weights_xy_yx_RW = AttributeWrapper(comms_annotation=["FPGA_bf_weights_xy_yx_RW"], datatype=numpy.uint32, dims=(A_pn * N_pol * N_beamlets_ctrl, N_pn), access=AttrWriteType.READ_WRITE)
+    FPGA_bf_weights_xy_yx_R = AttributeWrapper(
+        comms_annotation=["FPGA_bf_weights_xy_yx_R"],
+        datatype=numpy.uint32,
+        dims=(A_pn * N_pol * N_beamlets_ctrl, N_pn),
+    )
+    FPGA_bf_weights_xy_yx_RW = AttributeWrapper(
+        comms_annotation=["FPGA_bf_weights_xy_yx_RW"],
+        datatype=numpy.uint32,
+        dims=(A_pn * N_pol * N_beamlets_ctrl, N_pn),
+        access=AttrWriteType.READ_WRITE,
+    )
 
     # cint16[N_pn][N_pol][A_pn][N_pol][N_beamlets_ctrl]
     # Full Jones matrix of BF weights.
-    FPGA_bf_weights_xx_xy_yx_yy_R = AttributeWrapper(comms_annotation=["FPGA_bf_weights_xx_xy_yx_yy_R"], datatype=numpy.uint32, dims=(N_pn, N_pol, A_pn, N_pol, N_beamlets_ctrl))
-    FPGA_bf_weights_xx_xy_yx_yy_RW = AttributeWrapper(comms_annotation=["FPGA_bf_weights_xx_xy_yx_yy_RW"], datatype=numpy.uint32, dims=(N_pn, N_pol, A_pn, N_pol, N_beamlets_ctrl), access=AttrWriteType.READ_WRITE)
+    FPGA_bf_weights_xx_xy_yx_yy_R = AttributeWrapper(
+        comms_annotation=["FPGA_bf_weights_xx_xy_yx_yy_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn, N_pol, A_pn, N_pol, N_beamlets_ctrl),
+    )
+    FPGA_bf_weights_xx_xy_yx_yy_RW = AttributeWrapper(
+        comms_annotation=["FPGA_bf_weights_xx_xy_yx_yy_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn, N_pol, A_pn, N_pol, N_beamlets_ctrl),
+        access=AttrWriteType.READ_WRITE,
+    )
 
     # cint16[N_pn][A_pn][N_beamlets_ctrl]
     # BF weights for separate access to respectively w_xx, w_xy, w_yx, and w_yy.
-    FPGA_bf_weights_xx_R = AttributeWrapper(comms_annotation=["FPGA_bf_weights_xx_R"], datatype=numpy.uint32, dims=(N_pn, A_pn, N_beamlets_ctrl))
-    FPGA_bf_weights_xx_RW = AttributeWrapper(comms_annotation=["FPGA_bf_weights_xx_RW"], datatype=numpy.uint32, dims=(N_pn, A_pn * N_beamlets_ctrl), access=AttrWriteType.READ_WRITE)
-    FPGA_bf_weights_xy_R = AttributeWrapper(comms_annotation=["FPGA_bf_weights_xy_R"], datatype=numpy.uint32, dims=(N_pn, A_pn, N_beamlets_ctrl))
-    FPGA_bf_weights_xy_RW = AttributeWrapper(comms_annotation=["FPGA_bf_weights_xy_RW"], datatype=numpy.uint32, dims=(N_pn, A_pn, N_beamlets_ctrl), access=AttrWriteType.READ_WRITE)
-    FPGA_bf_weights_yx_R = AttributeWrapper(comms_annotation=["FPGA_bf_weights_yx_R"], datatype=numpy.uint32, dims=(N_pn, A_pn, N_beamlets_ctrl))
-    FPGA_bf_weights_yx_RW = AttributeWrapper(comms_annotation=["FPGA_bf_weights_yx_RW"], datatype=numpy.uint32, dims=(N_pn, A_pn, N_beamlets_ctrl), access=AttrWriteType.READ_WRITE)
-    FPGA_bf_weights_yy_R = AttributeWrapper(comms_annotation=["FPGA_bf_weights_yy_R"], datatype=numpy.uint32, dims=(N_pn, A_pn, N_beamlets_ctrl))
-    FPGA_bf_weights_yy_RW = AttributeWrapper(comms_annotation=["FPGA_bf_weights_yy_RW"], datatype=numpy.uint32, dims=(N_pn, A_pn, N_beamlets_ctrl), access=AttrWriteType.READ_WRITE)
+    FPGA_bf_weights_xx_R = AttributeWrapper(
+        comms_annotation=["FPGA_bf_weights_xx_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn, A_pn, N_beamlets_ctrl),
+    )
+    FPGA_bf_weights_xx_RW = AttributeWrapper(
+        comms_annotation=["FPGA_bf_weights_xx_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn, A_pn * N_beamlets_ctrl),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_bf_weights_xy_R = AttributeWrapper(
+        comms_annotation=["FPGA_bf_weights_xy_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn, A_pn, N_beamlets_ctrl),
+    )
+    FPGA_bf_weights_xy_RW = AttributeWrapper(
+        comms_annotation=["FPGA_bf_weights_xy_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn, A_pn, N_beamlets_ctrl),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_bf_weights_yx_R = AttributeWrapper(
+        comms_annotation=["FPGA_bf_weights_yx_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn, A_pn, N_beamlets_ctrl),
+    )
+    FPGA_bf_weights_yx_RW = AttributeWrapper(
+        comms_annotation=["FPGA_bf_weights_yx_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn, A_pn, N_beamlets_ctrl),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_bf_weights_yy_R = AttributeWrapper(
+        comms_annotation=["FPGA_bf_weights_yy_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn, A_pn, N_beamlets_ctrl),
+    )
+    FPGA_bf_weights_yy_RW = AttributeWrapper(
+        comms_annotation=["FPGA_bf_weights_yy_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn, A_pn, N_beamlets_ctrl),
+        access=AttrWriteType.READ_WRITE,
+    )
 
     # boolean[N_pn][N_beamsets_ctrl][P_sum]
-    FPGA_bf_rx_align_stream_enable_R = AttributeWrapper(comms_annotation=["FPGA_bf_rx_align_stream_enable_R"], datatype=bool, dims=(N_pn, N_beamsets_ctrl, P_sum))
-    FPGA_bf_rx_align_stream_enable_RW = AttributeWrapper(comms_annotation=["FPGA_bf_rx_align_stream_enable_RW"], datatype=bool, dims=(N_pn, N_beamsets_ctrl, P_sum), access=AttrWriteType.READ_WRITE)
+    FPGA_bf_rx_align_stream_enable_R = AttributeWrapper(
+        comms_annotation=["FPGA_bf_rx_align_stream_enable_R"],
+        datatype=bool,
+        dims=(N_pn, N_beamsets_ctrl, P_sum),
+    )
+    FPGA_bf_rx_align_stream_enable_RW = AttributeWrapper(
+        comms_annotation=["FPGA_bf_rx_align_stream_enable_RW"],
+        datatype=bool,
+        dims=(N_pn, N_beamsets_ctrl, P_sum),
+        access=AttrWriteType.READ_WRITE,
+    )
 
     # int64[N_pn][N_beamsets_ctrl][P_sum]
-    FPGA_bf_rx_align_bsn_R = AttributeWrapper(comms_annotation=["FPGA_bf_rx_align_bsn_R"], datatype=numpy.int64, dims=(N_pn, N_beamsets_ctrl, P_sum))
+    FPGA_bf_rx_align_bsn_R = AttributeWrapper(
+        comms_annotation=["FPGA_bf_rx_align_bsn_R"],
+        datatype=numpy.int64,
+        dims=(N_pn, N_beamsets_ctrl, P_sum),
+    )
 
-    #int32[N_pn][N_beamsets_ctrl][P_sum]
-    FPGA_bf_rx_align_nof_packets_R = AttributeWrapper(comms_annotation=["FPGA_bf_rx_align_nof_packets_R"], datatype=numpy.int32, dims=(N_pn, N_beamsets_ctrl, P_sum))
-    FPGA_bf_rx_align_nof_valid_R = AttributeWrapper(comms_annotation=["FPGA_bf_rx_align_nof_valid_R"], datatype=numpy.int32, dims=(N_pn, N_beamsets_ctrl, P_sum))
-    FPGA_bf_rx_align_latency_R = AttributeWrapper(comms_annotation=["FPGA_bf_rx_align_latency_R"], datatype=numpy.int32, dims=(N_pn, N_beamsets_ctrl, P_sum))
-    FPGA_bf_rx_align_nof_replaced_packets_R = AttributeWrapper(comms_annotation=["FPGA_bf_rx_align_nof_replaced_packets_R"], datatype=numpy.int32, dims=(N_pn, N_beamsets_ctrl, P_sum))
+    # int32[N_pn][N_beamsets_ctrl][P_sum]
+    FPGA_bf_rx_align_nof_packets_R = AttributeWrapper(
+        comms_annotation=["FPGA_bf_rx_align_nof_packets_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, N_beamsets_ctrl, P_sum),
+    )
+    FPGA_bf_rx_align_nof_valid_R = AttributeWrapper(
+        comms_annotation=["FPGA_bf_rx_align_nof_valid_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, N_beamsets_ctrl, P_sum),
+    )
+    FPGA_bf_rx_align_latency_R = AttributeWrapper(
+        comms_annotation=["FPGA_bf_rx_align_latency_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, N_beamsets_ctrl, P_sum),
+    )
+    FPGA_bf_rx_align_nof_replaced_packets_R = AttributeWrapper(
+        comms_annotation=["FPGA_bf_rx_align_nof_replaced_packets_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, N_beamsets_ctrl, P_sum),
+    )
 
-    subband_select_RW = attribute(dtype=(numpy.uint32,), max_dim_x=N_beamlets_ctrl, access=AttrWriteType.READ_WRITE, fisallowed="is_attribute_access_allowed")
+    subband_select_RW = attribute(
+        dtype=(numpy.uint32,),
+        max_dim_x=N_beamlets_ctrl,
+        access=AttrWriteType.READ_WRITE,
+        fisallowed="is_attribute_access_allowed",
+    )
 
     def read_subband_select_RW(self):
         # We can only return a single value, so we assume the FPGA is configured coherently,
         # which is something an independent monitoring system has to check anyway.
-        mask     = self.sdp_proxy.TR_fpga_mask_RW
+        mask = self.sdp_proxy.TR_fpga_mask_RW
         subbands = self.read_attribute("FPGA_beamlet_subband_select_RW")
-        subbands_in_mask = [s for idx,s in enumerate(subbands) if mask[idx]]
+        subbands_in_mask = [s for idx, s in enumerate(subbands) if mask[idx]]
 
         # If there are no FPGAs selected at all, just return a sane default.
         if not subbands_in_mask:
@@ -203,11 +416,13 @@ class Beamlet(OPCUADevice):
         mask_for_all_inputs = subbands_in_mask[0].reshape(A_pn, N_pol, N_beamlets_ctrl)
 
         # Return the first setting (antenna, pol) within this FPGA
-        return mask_for_all_inputs[0,0]
+        return mask_for_all_inputs[0, 0]
 
     def write_subband_select_RW(self, subbands):
         # Use the same subband for all inputs and polarisations of a beamlet
-        self.proxy.FPGA_beamlet_subband_select_RW = numpy.tile(subbands, (N_pn, A_pn * N_pol))
+        self.proxy.FPGA_beamlet_subband_select_RW = numpy.tile(
+            subbands, (N_pn, A_pn * N_pol)
+        )
 
         self.cache_clear()
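A shape sketch of the tiling in write_subband_select_RW above, with toy sizes (2 FPGAs, 2 antennas, 2 polarisations, 3 beamlets) standing in for N_pn, A_pn, N_pol and N_beamlets_ctrl.

    import numpy

    subbands = numpy.array([100, 200, 300])    # one subband per beamlet
    tiled = numpy.tile(subbands, (2, 2 * 2))   # shape (2, 12): same list for every FPGA

    # every (antenna, polarisation) of every FPGA now carries the same selection
    per_fpga = tiled[0].reshape(2, 2, 3)       # (A_pn, N_pol, N_beamlets_ctrl)
    assert (per_fpga == subbands).all()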
 
@@ -227,14 +442,24 @@ class Beamlet(OPCUADevice):
 
         # subscribe to events to notice setting changes in SDP that determine the input frequency
         self.event_subscriptions = {}
-        self.event_subscriptions["clock_rw"]       = self.sdp_proxy.subscribe_event("clock_RW", EventType.CHANGE_EVENT, self._frequency_change_event, stateless=True)
-        self.event_subscriptions["nyquist_zone_r"] = self.sdp_proxy.subscribe_event("nyquist_zone_R", EventType.CHANGE_EVENT, self._frequency_change_event, stateless=True)
+        self.event_subscriptions["clock_rw"] = self.sdp_proxy.subscribe_event(
+            "clock_RW",
+            EventType.CHANGE_EVENT,
+            self._frequency_change_event,
+            stateless=True,
+        )
+        self.event_subscriptions["nyquist_zone_r"] = self.sdp_proxy.subscribe_event(
+            "nyquist_zone_R",
+            EventType.CHANGE_EVENT,
+            self._frequency_change_event,
+            stateless=True,
+        )
 
     def configure_for_off(self):
         super().configure_for_off()
 
         # unsubscribe from all events
-        for k,v in list(self.event_subscriptions.items()):
+        for k, v in list(self.event_subscriptions.items()):
             self.sdp_proxy.unsubscribe_event(v)
             del self.event_subscriptions[k]
 
@@ -244,20 +469,22 @@ class Beamlet(OPCUADevice):
 
     @log_exceptions()
     def _frequency_change_event(self, event):
-        """ Trigger on external changes in frequency settings. """
+        """Trigger on external changes in frequency settings."""
 
         if event.err:
             # There is little we can do here. Note that errors are also thrown if the device we subscribed to is offline.
             return
 
-        logger.info(f"Received attribute change event from {event.device}: {event.attr_value.name} := {event.attr_value.value}")
+        logger.info(
+            f"Received attribute change event from {event.device}: {event.attr_value.name} := {event.attr_value.value}"
+        )
 
         # invalidate caches for frequency-dependent values
         self.cache_clear()
 
     @command()
     def cache_clear(self):
-        """ Explicitly clear any caches. """
+        """Explicitly clear any caches."""
 
         self._beamlet_frequencies.cache_clear()
 
@@ -274,7 +501,7 @@ class Beamlet(OPCUADevice):
 
        The beamformer combines a set of antennas for each beamlet, and each beamlet can have a different pointing
        and subband selected.
-       
+
        This results in an array of phase[antenna][beamlet] adjustments to be sent to the FPGA. The FPGA accepts
        weights as a 16-bit (imag,real) complex pair packed into a uint32.
 
@@ -283,28 +510,34 @@ class Beamlet(OPCUADevice):
 
     BF_UNIT_WEIGHT = 2**14
 
-    @lru_cache() # this function requires large hardware reads for values that don't change often
+    @lru_cache()  # this function requires large hardware reads for values that don't change often
     def _beamlet_frequencies(self):
-        """ Obtain the frequencies (in Hz) of each subband that is selected for each input and beamlet.
+        """Obtain the frequencies (in Hz) of each subband that is selected for each input and beamlet.
 
-            Returns shape (fpga_nr, [input_nr][pol][beamlet_nr]).
+        Returns shape (fpga_nr, [input_nr][pol][beamlet_nr]).
         """
 
         # obtain which subband is selected for each input and beamlet
-        beamlet_subbands = self.read_attribute("FPGA_beamlet_subband_select_RW") # (fpga_nr, [input_nr][pol][beamlet_nr])
-        nyquist_zones    = self.sdp_proxy.nyquist_zone_R # (fpga_nr, [input_nr][pol])
+        beamlet_subbands = self.read_attribute(
+            "FPGA_beamlet_subband_select_RW"
+        )  # (fpga_nr, [input_nr][pol][beamlet_nr])
+        nyquist_zones = self.sdp_proxy.nyquist_zone_R  # (fpga_nr, [input_nr][pol])
 
         # repeat nyquist zone for all beamlets, to match the shape of beamlet_subbands
-        nyquist_zones    = numpy.repeat(nyquist_zones, N_beamlets_ctrl, axis=1)
+        nyquist_zones = numpy.repeat(nyquist_zones, N_beamlets_ctrl, axis=1)
 
         # compute the frequency of each beamlet for each input
-        return subband_frequencies(beamlet_subbands, self.sdp_proxy.clock_RW, nyquist_zones)
+        return subband_frequencies(
+            beamlet_subbands, self.sdp_proxy.clock_RW, nyquist_zones
+        )
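A shape sketch of the repeat above, with toy sizes (2 FPGAs, 2 inputs x polarisations, 3 beamlets); illustrative only.

    import numpy

    zones = numpy.array([[1, 2],
                         [1, 1]])               # nyquist zone per (fpga, input*pol)
    expanded = numpy.repeat(zones, 3, axis=1)   # shape (2, 6), matching the beamlet axis
    # expanded[0] == [1, 1, 1, 2, 2, 2]: each zone now covers all beamlets of its input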
 
     @staticmethod
-    def _calculate_bf_weights(delays: numpy.ndarray, beamlet_frequencies: numpy.ndarray):
-        """ Helper function that converts a series of delays into FPGA_bf_weights_xx_yy. 
+    def _calculate_bf_weights(
+        delays: numpy.ndarray, beamlet_frequencies: numpy.ndarray
+    ):
+        """Helper function that converts a series of delays into FPGA_bf_weights_xx_yy.
 
-            All input and output arrays have the same dimensionality.
+        All input and output arrays have the same dimensionality.
         """
 
         # compute the phases
@@ -323,7 +556,7 @@ class Beamlet(OPCUADevice):
 
     @command(dtype_in=DevVarFloatArray, dtype_out=DevVarULongArray)
     def calculate_bf_weights(self, delays: numpy.ndarray):
-        """ converts a difference in delays (in seconds) to a FPGA weight (in complex number) """
+        """converts a difference in delays (in seconds) to a FPGA weight (in complex number)"""
 
         # Calculate the FPGA weight array
         delays = delays.reshape(N_pn, A_pn * N_pol * N_beamlets_ctrl)
@@ -332,6 +565,7 @@ class Beamlet(OPCUADevice):
 
         return bf_weights.flatten()
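A minimal sketch of packing one phase into such a weight, assuming the (imag, real) pair is stored as two signed 16-bit halves with imag in the upper half word; the exact bit layout is defined by the FPGA firmware, so treat this only as an illustration of the scaling by BF_UNIT_WEIGHT. The names UNIT_WEIGHT and phase_to_weight are local to this sketch.

    import numpy
    from ctypes import c_short

    UNIT_WEIGHT = 2**14  # mirrors BF_UNIT_WEIGHT above

    def phase_to_weight(phase: float) -> numpy.uint32:
        real = c_short(int(round(UNIT_WEIGHT * numpy.cos(phase)))).value
        imag = c_short(int(round(UNIT_WEIGHT * numpy.sin(phase)))).value
        # reinterpret the signed halves as unsigned and pack them into one uint32
        return numpy.uint32(((imag & 0xFFFF) << 16) | (real & 0xFFFF))

    print(hex(phase_to_weight(0.0)))           # 0x4000: weight 1+0j at unit scale
    print(hex(phase_to_weight(numpy.pi / 2)))  # 0x40000000: weight 0+1j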
 
+
 # ----------
 # Run server
 # ----------
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/bst.py b/tangostationcontrol/tangostationcontrol/devices/sdp/bst.py
index a5b374d642f824ba7ed0f9406506d704974a50e4..e6d99e932f98b11faf3ee1a6e6fe504facafa240 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/bst.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/bst.py
@@ -1,33 +1,27 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the TANGO project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ BST Device Server for LOFAR2.0
 
 """
 
 import numpy
-
-from tango.server import device_property, attribute
-from tango import AttrWriteType
-
 from lofar_station_client.statistics.collector import BSTCollector
+from tango import AttrWriteType
+from tango.server import device_property, attribute
 
-# Own imports
-from tangostationcontrol.common.entrypoint import entry
-from tangostationcontrol.common.constants import N_pn, BST_MAX_BLOCKS, N_beamlets_max
 from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
 from tangostationcontrol.clients.opcua_client import OPCUAConnection
 from tangostationcontrol.clients.statistics.client import StatisticsClient
+from tangostationcontrol.common.constants import N_pn, BST_MAX_BLOCKS, N_beamlets_max
+
+# Own imports
+from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.devices.sdp.statistics import Statistics
 
 __all__ = ["BST", "main"]
 
+
 class BST(Statistics):
     STATISTICS_COLLECTOR_CLASS = BSTCollector
 
@@ -36,33 +30,27 @@ class BST(Statistics):
     # -----------------
 
     FPGA_bst_offload_hdr_eth_destination_mac_RW_default = device_property(
-        dtype='DevVarStringArray',
-        mandatory=True
+        dtype="DevVarStringArray", mandatory=True
     )
 
     FPGA_bst_offload_hdr_ip_destination_address_RW_default = device_property(
-        dtype='DevVarStringArray',
-        mandatory=True
+        dtype="DevVarStringArray", mandatory=True
     )
 
     FPGA_bst_offload_hdr_udp_destination_port_RW_default = device_property(
-        dtype='DevVarUShortArray',
-        mandatory=True
+        dtype="DevVarUShortArray", mandatory=True
     )
 
     FPGA_bst_offload_enable_RW_default = device_property(
-        dtype='DevVarBooleanArray',
-        mandatory=False,
-        default_value=[True] * N_pn
+        dtype="DevVarBooleanArray", mandatory=False, default_value=[True] * N_pn
     )
 
     FIRST_DEFAULT_SETTINGS = [
-        'FPGA_bst_offload_hdr_eth_destination_mac_RW',
-        'FPGA_bst_offload_hdr_ip_destination_address_RW',
-        'FPGA_bst_offload_hdr_udp_destination_port_RW',
-
+        "FPGA_bst_offload_hdr_eth_destination_mac_RW",
+        "FPGA_bst_offload_hdr_ip_destination_address_RW",
+        "FPGA_bst_offload_hdr_udp_destination_port_RW",
         # enable only after the offloading is configured correctly
-        'FPGA_bst_offload_enable_RW'
+        "FPGA_bst_offload_enable_RW",
     ]
 
     # ----------
@@ -70,38 +58,115 @@ class BST(Statistics):
     # ----------
 
     # FPGA control points for BSTs
-    FPGA_bst_offload_enable_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_bst_offload_enable_RW"], datatype=bool, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_bst_offload_enable_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_bst_offload_enable_R"], datatype=bool, dims=(N_pn,))
-    FPGA_bst_offload_hdr_eth_destination_mac_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_bst_offload_hdr_eth_destination_mac_RW"], datatype=str, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_bst_offload_hdr_eth_destination_mac_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_bst_offload_hdr_eth_destination_mac_R"], datatype=str, dims=(N_pn,))
-    FPGA_bst_offload_hdr_ip_destination_address_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_bst_offload_hdr_ip_destination_address_RW"], datatype=str, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_bst_offload_hdr_ip_destination_address_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_bst_offload_hdr_ip_destination_address_R"], datatype=str, dims=(N_pn,))
-    FPGA_bst_offload_hdr_udp_destination_port_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_bst_offload_hdr_udp_destination_port_RW"], datatype=numpy.uint16, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_bst_offload_hdr_udp_destination_port_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_bst_offload_hdr_udp_destination_port_R"], datatype=numpy.uint16, dims=(N_pn,))
-    FPGA_bst_offload_bsn_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_bst_offload_bsn_R"], datatype=numpy.int64, dims=(N_pn,))
-
-    FPGA_bst_offload_nof_packets_R = AttributeWrapper(comms_annotation=["FPGA_bst_offload_nof_packets_R"], datatype=numpy.int32, dims=(N_pn,))
-    FPGA_bst_offload_nof_valid_R = AttributeWrapper(comms_annotation=["FPGA_bst_offload_nof_valid_R"], datatype=numpy.int32, dims=(N_pn,))
+    FPGA_bst_offload_enable_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_bst_offload_enable_RW"],
+        datatype=bool,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_bst_offload_enable_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_bst_offload_enable_R"],
+        datatype=bool,
+        dims=(N_pn,),
+    )
+    FPGA_bst_offload_hdr_eth_destination_mac_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_bst_offload_hdr_eth_destination_mac_RW"],
+        datatype=str,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_bst_offload_hdr_eth_destination_mac_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_bst_offload_hdr_eth_destination_mac_R"],
+        datatype=str,
+        dims=(N_pn,),
+    )
+    FPGA_bst_offload_hdr_ip_destination_address_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_bst_offload_hdr_ip_destination_address_RW"],
+        datatype=str,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_bst_offload_hdr_ip_destination_address_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_bst_offload_hdr_ip_destination_address_R"],
+        datatype=str,
+        dims=(N_pn,),
+    )
+    FPGA_bst_offload_hdr_udp_destination_port_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_bst_offload_hdr_udp_destination_port_RW"],
+        datatype=numpy.uint16,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_bst_offload_hdr_udp_destination_port_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_bst_offload_hdr_udp_destination_port_R"],
+        datatype=numpy.uint16,
+        dims=(N_pn,),
+    )
+    FPGA_bst_offload_bsn_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_bst_offload_bsn_R"],
+        datatype=numpy.int64,
+        dims=(N_pn,),
+    )
+
+    FPGA_bst_offload_nof_packets_R = AttributeWrapper(
+        comms_annotation=["FPGA_bst_offload_nof_packets_R"],
+        datatype=numpy.int32,
+        dims=(N_pn,),
+    )
+    FPGA_bst_offload_nof_valid_R = AttributeWrapper(
+        comms_annotation=["FPGA_bst_offload_nof_valid_R"],
+        datatype=numpy.int32,
+        dims=(N_pn,),
+    )
 
     # number of packets with valid payloads
-    nof_valid_payloads_R    = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_valid_payloads"}, dims=(N_pn,), datatype=numpy.uint64)
+    nof_valid_payloads_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "nof_valid_payloads"},
+        dims=(N_pn,),
+        datatype=numpy.uint64,
+    )
     # number of packets with invalid payloads
-    nof_payload_errors_R    = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_payload_errors"}, dims=(N_pn,), datatype=numpy.uint64)
+    nof_payload_errors_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "nof_payload_errors"},
+        dims=(N_pn,),
+        datatype=numpy.uint64,
+    )
     # latest BSTs
-    bst_R                   = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "bst_values"}, dims=(BST_MAX_BLOCKS, N_beamlets_max), datatype=numpy.uint64)
+    bst_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "bst_values"},
+        dims=(BST_MAX_BLOCKS, N_beamlets_max),
+        datatype=numpy.uint64,
+    )
     # reported timestamp
     # for each row in the latest BSTs
-    bst_timestamp_R         = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "bst_timestamps"}, dims=(BST_MAX_BLOCKS,), datatype=numpy.uint64)
+    bst_timestamp_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "bst_timestamps"},
+        dims=(BST_MAX_BLOCKS,),
+        datatype=numpy.uint64,
+    )
 
     # ----------
     # Summarising Attributes
     # ----------
-    FPGA_processing_error_R      = attribute(dtype=(bool,), max_dim_x=N_pn)
+    FPGA_processing_error_R = attribute(dtype=(bool,), max_dim_x=N_pn)
 
     def read_FPGA_processing_error_R(self):
         return self.sdp_proxy.TR_fpga_mask_RW & (
-                 ~self.read_attribute("FPGA_bst_offload_enable_R")
-               )
+            ~self.read_attribute("FPGA_bst_offload_enable_R")
+        )
 
     # --------
     # Overloaded functions
@@ -111,6 +176,7 @@ class BST(Statistics):
     # Commands
     # --------
 
+
 # ----------
 # Run server
 # ----------
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/common.py b/tangostationcontrol/tangostationcontrol/devices/sdp/common.py
index b9f0065a97fe5d65ef567cbe70221897c394ceb4..756482b0a0c4c348f6044b44d0b84e06cdb87976 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/common.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/common.py
@@ -1,10 +1,20 @@
-from tangostationcontrol.common.constants import N_subbands, N_subband_res, VALUES_PER_COMPLEX
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-import numpy
 from ctypes import c_short
 
-def subband_frequencies(subbands: numpy.ndarray, clock: int, nyquist_zones: numpy.ndarray) -> numpy.ndarray:
-    """ Obtain the frequencies of multiple subbands, given a clock and a nyquist zone for each subband. """
+import numpy
+from tangostationcontrol.common.constants import (
+    N_subbands,
+    N_subband_res,
+    VALUES_PER_COMPLEX,
+)
+
+
+def subband_frequencies(
+    subbands: numpy.ndarray, clock: int, nyquist_zones: numpy.ndarray
+) -> numpy.ndarray:
+    """Obtain the frequencies of multiple subbands, given a clock and a nyquist zone for each subband."""
 
     subband_width = clock / N_subband_res
     base_subbands = nyquist_zones * N_subbands
@@ -14,15 +24,21 @@ def subband_frequencies(subbands: numpy.ndarray, clock: int, nyquist_zones: nump
 
     return frequencies
 
+
 def subband_frequency(subband: int, clock: float, nyquist_zone: int) -> int:
-    """ Obtain the frequencies of a subband, given a clock and a nyquist zone. """
+    """Obtain the frequencies of a subband, given a clock and a nyquist zone."""
 
     # just use the interface for multiple subbands to avoid code duplication
-    return subband_frequencies(numpy.array(subband), clock, numpy.array(nyquist_zone)).item()
+    return subband_frequencies(
+        numpy.array(subband), clock, numpy.array(nyquist_zone)
+    ).item()
+
+
+def phases_to_weights(
+    phases: numpy.ndarray, unit_weight: float, amplitudes: numpy.ndarray = None
+) -> numpy.ndarray:
+    """Convert phases (in radians) into FPGA weights (complex numbers packed into uint32)."""
 
-def phases_to_weights(phases: numpy.ndarray, unit_weight: float, amplitudes: numpy.ndarray = None) -> numpy.ndarray:
-    """ Convert phases (in radians) into FPGA weights (complex numbers packed into uint32). """
-       
     # The FPGA accepts weights as a 16-bit (imag,real) complex pair packed into an uint32.
 
     # flatten array and restore its shape later, which makes running over all elements a lot easier
@@ -50,9 +66,12 @@ def phases_to_weights(phases: numpy.ndarray, unit_weight: float, amplitudes: num
 
     return weights.reshape(orig_shape)
 
-def real_imag_to_weights(real_imag_pairs: numpy.ndarray, unit_weight: float) -> numpy.ndarray:
-    """ Convert complex values (as (real, imag) pairs) into FPGA weights (complex numbers packed into uint32). """
-       
+
+def real_imag_to_weights(
+    real_imag_pairs: numpy.ndarray, unit_weight: float
+) -> numpy.ndarray:
+    """Convert complex values (as (real, imag) pairs) into FPGA weights (complex numbers packed into uint32)."""
+
     # The FPGA accepts weights as a 16-bit (imag,real) complex pair packed into an uint32.
 
     # Interleave into (real, imag) pairs, and store as int16
@@ -64,10 +83,11 @@ def real_imag_to_weights(real_imag_pairs: numpy.ndarray, unit_weight: float) ->
 
     return weights
 
+
 def weight_to_complex(weight: numpy.uint32, unit: int) -> complex:
     """Unpack an FPGA weight (uint32) into a complex number.
 
-       unit: the weight value representing a weight of 1.0."""
+    unit: the weight value representing a weight of 1.0."""
 
     # A weight is a (real, imag) pair, stored as int16 packed into an uint32 value
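
For the packing described above, an illustrative sketch of how a complex gain maps to and from a uint32 weight. Placing the real part in the lower 16 bits is an assumption here, and pack_weight/unpack_weight are hypothetical stand-ins for the module's phases_to_weights, real_imag_to_weights and weight_to_complex helpers:

import numpy
from ctypes import c_short

UNIT = 2**13  # example unit weight; the SDP device uses 2**13 for subband weights


def pack_weight(value: complex, unit: int) -> numpy.uint32:
    # Scale to int16 and pack an (imag, real) pair into one uint32,
    # assuming the real part occupies the lower 16 bits.
    real = int(round(value.real * unit)) & 0xFFFF
    imag = int(round(value.imag * unit)) & 0xFFFF
    return numpy.uint32((imag << 16) | real)


def unpack_weight(weight: numpy.uint32, unit: int) -> complex:
    # Recover the signed 16-bit halves and rescale back to a complex gain.
    real = c_short(int(weight) & 0xFFFF).value
    imag = c_short((int(weight) >> 16) & 0xFFFF).value
    return complex(real, imag) / unit


w = pack_weight(0.5 - 0.25j, UNIT)
print(hex(int(w)), unpack_weight(w, UNIT))  # 0xf8001000 (0.5-0.25j)
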
 
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/digitalbeam.py b/tangostationcontrol/tangostationcontrol/devices/sdp/digitalbeam.py
index a025f9dd4350a6aec932ea04a6980e9cceac81b0..77d3189a83f0ddf92328a43a8a831f919468a406 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/digitalbeam.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/digitalbeam.py
@@ -1,46 +1,52 @@
-# -*- coding: utf-8 -*-
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ DigitalBeam Device Server for LOFAR2.0
 
 """
 
+import datetime
+import logging
+
+import numpy
+
 # PyTango imports
 from tango import Util, DeviceProxy, DevSource
 from tango.server import attribute, AttrWriteType, device_property
+from tangostationcontrol.beam.delays import Delays
+from tangostationcontrol.common.constants import (
+    N_beamlets_ctrl,
+    MAX_ANTENNA,
+    N_xyz,
+    A_pn,
+    N_pn,
+    N_pol,
+)
 
 # Additional import
 from tangostationcontrol.common.entrypoint import entry
-from tangostationcontrol.common.constants import N_beamlets_ctrl, MAX_ANTENNA, N_xyz, A_pn, N_pn, N_pol
+from tangostationcontrol.common.lofar_logging import log_exceptions
 from tangostationcontrol.devices.beam_device import BeamDevice
 from tangostationcontrol.devices.device_decorators import TimeIt
-from tangostationcontrol.common.lofar_logging import log_exceptions
-from tangostationcontrol.beam.delays import Delays
-
-import numpy
-import datetime
 
-import logging
 logger = logging.getLogger()
 
 __all__ = ["DigitalBeam", "main"]
 
 
 class DigitalBeam(BeamDevice):
-    """ Manages Digital Beamforming on the SDP, in which the FPGA combines
-        its digital inputs from the RCUs into one or more beamlets, that
-        are emitted through the Beamlet device.
-
-        We distinguish:
-          * antennas:         the elements in the AntennaField that feeds the FPGA,
-          * inputs:           the antenna slots of the FPGAs (covering both X and Y as one),
-          * polarised_inputs: all the input slots of the FPGAs (separating X and Y).
-
-        The antennas are drawn from an AntennaField device. Only the antennas enabled
-        in the AntennaField's Antenna_Usage_Mask will be used in beamforming. Those
-        disabled in the mask will get a weight of 0.
+    """Manages Digital Beamforming on the SDP, in which the FPGA combines
+    its digital inputs from the RCUs into one or more beamlets, that
+    are emitted through the Beamlet device.
+
+    We distinguish:
+      * antennas:         the elements in the AntennaField that feed the FPGA,
+      * inputs:           the antenna slots of the FPGAs (covering both X and Y as one),
+      * polarised_inputs: all the input slots of the FPGAs (separating X and Y).
+
+    The antennas are drawn from an AntennaField device. Only the antennas enabled
+    in the AntennaField's Antenna_Usage_Mask will be used in beamforming. Those
+    disabled in the mask will get a weight of 0.
     """
 
     # -----------------
@@ -49,36 +55,54 @@ class DigitalBeam(BeamDevice):
 
     AntennaField_Device = device_property(
         dtype=str,
-        doc='Which AntennaField represents the FPGA inputs to beamform.',
+        doc="Which AntennaField represents the FPGA inputs to beamform.",
         mandatory=False,
-        default_value = "STAT/AntennaField/1"
+        default_value="STAT/AntennaField/1",
     )
 
     Beamlet_Device = device_property(
         dtype=str,
-        doc='Which Beamlet represents the FPGAs to steer.',
+        doc="Which Beamlet represents the FPGAs to steer.",
         mandatory=False,
-        default_value = "STAT/Beamlet/1"
+        default_value="STAT/Beamlet/1",
     )
 
     # ----------
     # Attributes
     # ----------
 
-    Duration_delays_R = attribute(access=AttrWriteType.READ,
-        dtype=numpy.float64, fget=lambda self: self._delays.statistics["last"] or 0)
+    Duration_delays_R = attribute(
+        access=AttrWriteType.READ,
+        dtype=numpy.float64,
+        fget=lambda self: self._delays.statistics["last"] or 0,
+    )
 
-    input_select_RW = attribute(doc='Selection of inputs to use for forming each beamlet. Allows selecting broken antennas.',
-                                dtype=((bool,),), max_dim_x=N_beamlets_ctrl, max_dim_y=MAX_ANTENNA, access=AttrWriteType.READ_WRITE, fisallowed="is_attribute_access_allowed")
+    input_select_RW = attribute(
+        doc="Selection of inputs to use for forming each beamlet. Allows selecting broken antennas.",
+        dtype=((bool,),),
+        max_dim_x=N_beamlets_ctrl,
+        max_dim_y=MAX_ANTENNA,
+        access=AttrWriteType.READ_WRITE,
+        fisallowed="is_attribute_access_allowed",
+    )
 
-    antenna_select_RW = attribute(doc='Selection of antennas desired to use for forming each beamlet (= a subset of input_select of the configured antennas). Unselects broken antennas.',
-                                  dtype=((bool,),), max_dim_x=N_beamlets_ctrl, max_dim_y=MAX_ANTENNA, access=AttrWriteType.READ_WRITE, fisallowed="is_attribute_access_allowed")
+    antenna_select_RW = attribute(
+        doc="Selection of antennas desired to use for forming each beamlet (= a subset of input_select of the configured antennas). Unselects broken antennas.",
+        dtype=((bool,),),
+        max_dim_x=N_beamlets_ctrl,
+        max_dim_y=MAX_ANTENNA,
+        access=AttrWriteType.READ_WRITE,
+        fisallowed="is_attribute_access_allowed",
+    )
 
-    nr_inputs_R = attribute(doc='Number of configured inputs from the associated antenna field.',
-                                  dtype=numpy.uint32, fget="nr_inputs")
+    nr_inputs_R = attribute(
+        doc="Number of configured inputs from the associated antenna field.",
+        dtype=numpy.uint32,
+        fget="nr_inputs",
+    )
 
     def nr_inputs(self):
-        """ Return the number of configured inputs. """
+        """Return the number of configured inputs."""
 
         return len(self.antennafield_proxy.Antenna_to_SDP_Mapping_R)
 
@@ -92,9 +116,13 @@ class DigitalBeam(BeamDevice):
         # select only the rows from self.__input_select for which a mapping onto antennas is defined.
         antenna_select = [[False] * N_beamlets_ctrl] * self.nr_inputs()
 
-        for antenna_nr, (fpga_nr, input_nr) in enumerate(self.antennafield_proxy.Antenna_to_SDP_Mapping_R):
+        for antenna_nr, (fpga_nr, input_nr) in enumerate(
+            self.antennafield_proxy.Antenna_to_SDP_Mapping_R
+        ):
             if input_nr >= 0:
-                antenna_select[antenna_nr] = self._input_select[fpga_nr * A_pn + input_nr]
+                antenna_select[antenna_nr] = self._input_select[
+                    fpga_nr * A_pn + input_nr
+                ]
 
         return antenna_select
 
@@ -103,7 +131,9 @@ class DigitalBeam(BeamDevice):
         # to select the antennas they would like to use.
         antenna_usage_mask = self.antennafield_proxy.Antenna_Usage_Mask_R
 
-        for antenna_nr, (fpga_nr, input_nr) in enumerate(self.antennafield_proxy.Antenna_to_SDP_Mapping_R):
+        for antenna_nr, (fpga_nr, input_nr) in enumerate(
+            self.antennafield_proxy.Antenna_to_SDP_Mapping_R
+        ):
             if input_nr >= 0:
                 if antenna_usage_mask[antenna_nr]:
                     # use antenna for the beamlets as supplied by the client
@@ -126,7 +156,7 @@ class DigitalBeam(BeamDevice):
 
         # Set a reference of RECV device that is correlated to this BEAM device
         util = Util.instance()
-        instance_number = self.get_name().split('/')[2]
+        instance_number = self.get_name().split("/")[2]
 
         self.antennafield_proxy = DeviceProxy(self.AntennaField_Device)
         self.antennafield_proxy.set_source(DevSource.DEV)
@@ -136,16 +166,19 @@ class DigitalBeam(BeamDevice):
 
         # Retrieve positions from RECV device
         reference_itrf = self.antennafield_proxy.Antenna_Field_Reference_ITRF_R
-        antenna_itrf   = self.antennafield_proxy.Antenna_Reference_ITRF_R.reshape(-1, N_xyz)
+        antenna_itrf = self.antennafield_proxy.Antenna_Reference_ITRF_R.reshape(
+            -1, N_xyz
+        )
 
         # Generate positions for all FPGA inputs.
         # Use reference position for any missing antennas so they always get a delay of 0
         input_itrf = numpy.array([reference_itrf] * MAX_ANTENNA)
-        for antenna_nr, (fpga_nr, input_nr) in enumerate(self.antennafield_proxy.Antenna_to_SDP_Mapping_R):
+        for antenna_nr, (fpga_nr, input_nr) in enumerate(
+            self.antennafield_proxy.Antenna_to_SDP_Mapping_R
+        ):
             if input_nr >= 0:
                 input_itrf[fpga_nr * A_pn + input_nr] = antenna_itrf[antenna_nr]
 
-
         # a delay calculator
         self.delay_calculator = Delays(reference_itrf)
 
@@ -153,8 +186,12 @@ class DigitalBeam(BeamDevice):
         self.relative_input_positions = input_itrf - reference_itrf
 
         # use all antennas in the mapping for all beamlets, unless specified otherwise
-        self.write_input_select_RW(numpy.zeros((MAX_ANTENNA, N_beamlets_ctrl), dtype=bool))
-        self.write_antenna_select_RW(numpy.ones((self.nr_inputs(), N_beamlets_ctrl), dtype=bool))
+        self.write_input_select_RW(
+            numpy.zeros((MAX_ANTENNA, N_beamlets_ctrl), dtype=bool)
+        )
+        self.write_antenna_select_RW(
+            numpy.ones((self.nr_inputs(), N_beamlets_ctrl), dtype=bool)
+        )
 
     # --------
     # internal functions
@@ -179,8 +216,8 @@ class DigitalBeam(BeamDevice):
         return delays
 
     def _map_inputs_on_polarised_inputs(self, arr):
-        """ Converts an array with dimensions [antenna][beamlet] -> [fpga_nr][input_nr][pol_nr][beamlet]
-            by repeating the values for both polarisations. """
+        """Converts an array with dimensions [antenna][beamlet] -> [fpga_nr][input_nr][pol_nr][beamlet]
+        by repeating the values for both polarisations."""
 
         assert arr.shape == (MAX_ANTENNA, N_beamlets_ctrl)
 
@@ -189,7 +226,7 @@ class DigitalBeam(BeamDevice):
 
         # double the delays to cover both polarisations
         # [[1,2,3], [4,5,6]] -> [[1,2,3,1,2,3], [4,5,6,4,5,6]]
-        result = numpy.hstack((arr,arr))
+        result = numpy.hstack((arr, arr))
 
         # move doubling of last dimension into first
         # [[1,2,3,1,2,3], [4,5,6,4,5,6]] -> [[1,2,3], [1,2,3], [4,5,6], [4,5,6]]
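
The doubling trick documented in the two comments above can be reproduced verbatim with a toy 2x3 array (2 inputs by 3 beamlets, standing in for MAX_ANTENNA by N_beamlets_ctrl):

import numpy

arr = numpy.array([[1, 2, 3],
                   [4, 5, 6]])           # toy sizes: 2 inputs x 3 beamlets

doubled = numpy.hstack((arr, arr))       # [[1 2 3 1 2 3], [4 5 6 4 5 6]]

# A row-major reshape folds the duplication back into the first axis,
# so each input row now appears twice: once per polarisation.
per_polarisation = doubled.reshape(-1, arr.shape[1])
print(per_polarisation)                  # [[1 2 3] [1 2 3] [4 5 6] [4 5 6]]
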
@@ -201,7 +238,9 @@ class DigitalBeam(BeamDevice):
         return result
 
     @TimeIt()
-    def _compute_weights(self, pointing_direction: numpy.array, timestamp: datetime.datetime) -> numpy.array:
+    def _compute_weights(
+        self, pointing_direction: numpy.array, timestamp: datetime.datetime
+    ) -> numpy.array:
         """
         Uploads beam weights based on a given pointing direction 2D array (96 tiles x 3 parameters)
         """
@@ -218,7 +257,12 @@ class DigitalBeam(BeamDevice):
         return beam_weights
 
     @TimeIt()
-    def _apply_weights(self, pointing_direction: numpy.array, timestamp: datetime.datetime, beam_weights: numpy.array):
+    def _apply_weights(
+        self,
+        pointing_direction: numpy.array,
+        timestamp: datetime.datetime,
+        beam_weights: numpy.array,
+    ):
         """
         Uploads beam weights based on a given pointing direction 2D array (96 tiles x 3 parameters)
         """
@@ -227,12 +271,12 @@ class DigitalBeam(BeamDevice):
             beam_weights,
             self.beamlet_proxy,
             "FPGA_bf_weights_xx_yy_RW",
-            self._map_inputs_on_polarised_inputs(self._input_select)
+            self._map_inputs_on_polarised_inputs(self._input_select),
         )
 
         # Record where we now point to, now that we've updated the weights.
         # Only record pointings per beamlet, not which antennas took part
-        self._pointing_direction_r    = pointing_direction
+        self._pointing_direction_r = pointing_direction
         self._pointing_timestamp_r[:] = timestamp.timestamp()
 
         logger.info("Pointing direction updated")
@@ -241,6 +285,7 @@ class DigitalBeam(BeamDevice):
     # Commands
     # --------
 
+
 # ----------
 # Run server
 # ----------
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py b/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py
index 687667ed5320c965f724cf7f9ae9d093c8dad9d3..c233ade06f54fb266d83a7a88d0dc72df90ffdb4 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py
@@ -1,30 +1,35 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the SDP project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ SDP Device Server for LOFAR2.0
 
 """
 
+import numpy
+from tango import AttrWriteType
+
 # PyTango imports
 from tango.server import device_property, attribute
-from tango import AttrWriteType
 
-# Additional import
-from tangostationcontrol.common.constants import S_pn, N_pn, CLK_200_MHZ, CLK_160_MHZ, N_subband_res, N_subbands, DEFAULT_SUBBAND, N_beamsets_ctrl, DEFAULT_POLLING_PERIOD
 from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
+
+# Additional import
+from tangostationcontrol.common.constants import (
+    S_pn,
+    N_pn,
+    CLK_200_MHZ,
+    CLK_160_MHZ,
+    N_subband_res,
+    N_subbands,
+    DEFAULT_SUBBAND,
+    N_beamsets_ctrl,
+    DEFAULT_POLLING_PERIOD,
+)
 from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.common.lofar_logging import device_logging_to_python
 from tangostationcontrol.devices.opcua_device import OPCUADevice
 from tangostationcontrol.devices.sdp.common import phases_to_weights, subband_frequency
 
-import numpy
-
 __all__ = ["SDP", "main"]
 
 
@@ -35,159 +40,404 @@ class SDP(OPCUADevice):
     # -----------------
 
     TR_fpga_mask_RW_default = device_property(
-        dtype='DevVarBooleanArray',
-        mandatory=False,
-        default_value=[True] * N_pn
+        dtype="DevVarBooleanArray", mandatory=False, default_value=[True] * N_pn
     )
 
     FPGA_processing_enable_RW_default = device_property(
-        dtype='DevVarBooleanArray',
-        mandatory=False,
-        default_value=[True] * N_pn
+        dtype="DevVarBooleanArray", mandatory=False, default_value=[True] * N_pn
     )
 
     FPGA_wg_enable_RW_default = device_property(
-        dtype='DevVarBooleanArray',
+        dtype="DevVarBooleanArray",
         mandatory=False,
-        default_value=[[False] * S_pn] * N_pn
+        default_value=[[False] * S_pn] * N_pn,
     )
 
     # If we enable the waveform generator, we want some sane defaults.
 
     FPGA_wg_amplitude_RW_default = device_property(
-        dtype='DevVarDoubleArray',
-        mandatory=False,
-        default_value=[[0.1] * S_pn] * N_pn
+        dtype="DevVarDoubleArray", mandatory=False, default_value=[[0.1] * S_pn] * N_pn
     )
 
     FPGA_wg_frequency_RW_default = device_property(
-        dtype='DevVarDoubleArray',
+        dtype="DevVarDoubleArray",
         mandatory=False,
         # Emit a signal on subband 102
-        default_value=[[DEFAULT_SUBBAND * CLK_200_MHZ / N_subband_res] * S_pn] * N_pn
+        default_value=[[DEFAULT_SUBBAND * CLK_200_MHZ / N_subband_res] * S_pn] * N_pn,
     )
 
     FPGA_wg_phase_RW_default = device_property(
-        dtype='DevVarDoubleArray',
-        mandatory=False,
-        default_value=[[0.0] * S_pn] * N_pn
+        dtype="DevVarDoubleArray", mandatory=False, default_value=[[0.0] * S_pn] * N_pn
     )
 
     FPGA_sdp_info_station_id_RW_default = device_property(
-        dtype='DevVarULongArray',
-        mandatory=False,
-        default_value=[0] * N_pn
+        dtype="DevVarULongArray", mandatory=False, default_value=[0] * N_pn
     )
 
     FPGA_signal_input_samples_delay_RW_default = device_property(
-        dtype='DevVarULongArray',
-        mandatory=False,
-        default_value=[[0] * S_pn] * N_pn
+        dtype="DevVarULongArray", mandatory=False, default_value=[[0] * S_pn] * N_pn
     )
 
     FPGA_subband_weights_RW_default = device_property(
-        dtype='DevVarULongArray',
+        dtype="DevVarULongArray",
         mandatory=False,
-        default_value=[[8192] * S_pn * N_subbands] * N_pn
+        default_value=[[8192] * S_pn * N_subbands] * N_pn,
     )
 
     clock_RW_default = device_property(
-        dtype='DevULong',
-        mandatory=False,
-        default_value = CLK_200_MHZ
+        dtype="DevULong", mandatory=False, default_value=CLK_200_MHZ
     )
 
-    TRANSLATOR_DEFAULT_SETTINGS = [
-        'TR_fpga_mask_RW'
-    ]
+    TRANSLATOR_DEFAULT_SETTINGS = ["TR_fpga_mask_RW"]
 
     # ----------
     # Attributes
     # ----------
 
-    FPGA_firmware_version_R = AttributeWrapper(comms_annotation=["FPGA_firmware_version_R"], datatype=str, dims=(N_pn,))
-    FPGA_boot_image_R = AttributeWrapper(comms_annotation=["FPGA_boot_image_R"], datatype=numpy.int32, dims=(N_pn,), doc="Active FPGA image (0=factory, 1=user)")
-    FPGA_boot_image_RW = AttributeWrapper(comms_annotation=["FPGA_boot_image_RW"], datatype=numpy.int32, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_global_node_index_R = AttributeWrapper(comms_annotation=["FPGA_global_node_index_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_hardware_version_R = AttributeWrapper(comms_annotation=["FPGA_hardware_version_R"], datatype=str, dims=(N_pn,))
-    FPGA_pps_present_R = AttributeWrapper(comms_annotation=["FPGA_pps_present_R"], datatype=bool, dims=(N_pn,))
-    FPGA_pps_capture_cnt_R = AttributeWrapper(comms_annotation=["FPGA_pps_capture_cnt_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_pps_expected_cnt_R = AttributeWrapper(comms_annotation=["FPGA_pps_expected_cnt_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_pps_expected_cnt_RW = AttributeWrapper(comms_annotation=["FPGA_pps_expected_cnt_RW"], datatype=numpy.uint32, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_processing_enable_R = AttributeWrapper(comms_annotation=["FPGA_processing_enable_R"], datatype=bool, dims=(N_pn,))
-    FPGA_processing_enable_RW = AttributeWrapper(comms_annotation=["FPGA_processing_enable_RW"], datatype=bool, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_ring_node_offset_R = AttributeWrapper(comms_annotation=["FPGA_ring_node_offset_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_ring_node_offset_RW = AttributeWrapper(comms_annotation=["FPGA_ring_node_offset_RW"], datatype=numpy.uint32, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_ring_nof_nodes_R = AttributeWrapper(comms_annotation=["FPGA_ring_nof_nodes_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_ring_nof_nodes_RW = AttributeWrapper(comms_annotation=["FPGA_ring_nof_nodes_RW"], datatype=numpy.uint32, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_ring_use_cable_to_next_rn_R = AttributeWrapper(comms_annotation=["FPGA_ring_use_cable_to_next_rn_R"], datatype=bool, dims=(N_pn,))
-    FPGA_ring_use_cable_to_next_rn_RW = AttributeWrapper(comms_annotation=["FPGA_ring_use_cable_to_next_rn_RW"], datatype=bool, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_ring_use_cable_to_previous_rn_R = AttributeWrapper(comms_annotation=["FPGA_ring_use_cable_to_previous_rn_R"], datatype=bool, dims=(N_pn,))
-    FPGA_ring_use_cable_to_previous_rn_RW = AttributeWrapper(comms_annotation=["FPGA_ring_use_cable_to_previous_rn_RW"], datatype=bool, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_scrap_R = AttributeWrapper(comms_annotation=["FPGA_scrap_R"], datatype=numpy.int32, dims=(N_pn, N_subbands))
-    FPGA_scrap_RW = AttributeWrapper(comms_annotation=["FPGA_scrap_RW"], datatype=numpy.int32, dims=(N_pn, N_subbands), access=AttrWriteType.READ_WRITE)
-    FPGA_sdp_info_antenna_band_index_R = AttributeWrapper(comms_annotation=["FPGA_sdp_info_antenna_band_index_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_sdp_info_block_period_R = AttributeWrapper(comms_annotation=["FPGA_sdp_info_block_period_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_sdp_info_f_adc_R = AttributeWrapper(comms_annotation=["FPGA_sdp_info_f_adc_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_sdp_info_fsub_type_R = AttributeWrapper(comms_annotation=["FPGA_sdp_info_fsub_type_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_sdp_info_nyquist_sampling_zone_index_R = AttributeWrapper(comms_annotation=["FPGA_sdp_info_nyquist_sampling_zone_index_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_sdp_info_nyquist_sampling_zone_index_RW = AttributeWrapper(comms_annotation=["FPGA_sdp_info_nyquist_sampling_zone_index_RW"], datatype=numpy.uint32, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_sdp_info_observation_id_R = AttributeWrapper(comms_annotation=["FPGA_sdp_info_observation_id_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_sdp_info_observation_id_RW = AttributeWrapper(comms_annotation=["FPGA_sdp_info_observation_id_RW"], datatype=numpy.uint32, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_sdp_info_station_id_R = AttributeWrapper(comms_annotation=["FPGA_sdp_info_station_id_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_sdp_info_station_id_RW = AttributeWrapper(comms_annotation=["FPGA_sdp_info_station_id_RW"], datatype=numpy.uint32, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_subband_spectral_inversion_R = AttributeWrapper(comms_annotation=["FPGA_subband_spectral_inversion_R"], datatype=bool, dims=(N_pn,))
-    FPGA_subband_spectral_inversion_RW = AttributeWrapper(comms_annotation=["FPGA_subband_spectral_inversion_RW"], datatype=bool, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_subband_weights_R = AttributeWrapper(comms_annotation=["FPGA_subband_weights_R"], datatype=numpy.uint32, dims=(N_pn, S_pn, N_subbands))
-    FPGA_subband_weights_RW = AttributeWrapper(comms_annotation=["FPGA_subband_weights_RW"], datatype=numpy.uint32, dims=(N_pn, S_pn, N_subbands), access=AttrWriteType.READ_WRITE)
-    FPGA_time_since_last_pps_R = AttributeWrapper(comms_annotation=["FPGA_time_since_last_pps_R"], datatype=numpy.float_, dims=(N_pn,))
-    FPGA_temp_R = AttributeWrapper(comms_annotation=["FPGA_temp_R"], datatype=numpy.float_, dims=(N_pn,))
-    FPGA_wg_amplitude_R = AttributeWrapper(comms_annotation=["FPGA_wg_amplitude_R"], datatype=numpy.float_, dims=(N_pn, S_pn))
-    FPGA_wg_amplitude_RW = AttributeWrapper(comms_annotation=["FPGA_wg_amplitude_RW"], datatype=numpy.float_, dims=(N_pn, S_pn), access=AttrWriteType.READ_WRITE)
-    FPGA_wg_enable_R = AttributeWrapper(comms_annotation=["FPGA_wg_enable_R"], datatype=bool, dims=(N_pn, S_pn))
-    FPGA_wg_enable_RW = AttributeWrapper(comms_annotation=["FPGA_wg_enable_RW"], datatype=bool, dims=(N_pn, S_pn), access=AttrWriteType.READ_WRITE)
-    FPGA_wg_frequency_R = AttributeWrapper(comms_annotation=["FPGA_wg_frequency_R"], datatype=numpy.float_, dims=(N_pn, S_pn))
-    FPGA_wg_frequency_RW = AttributeWrapper(comms_annotation=["FPGA_wg_frequency_RW"], datatype=numpy.float_, dims=(N_pn, S_pn), access=AttrWriteType.READ_WRITE)
-    FPGA_wg_phase_R = AttributeWrapper(comms_annotation=["FPGA_wg_phase_R"], datatype=numpy.float_, dims=(N_pn, S_pn))
-    FPGA_wg_phase_RW = AttributeWrapper(comms_annotation=["FPGA_wg_phase_RW"], datatype=numpy.float_, dims=(N_pn, S_pn), access=AttrWriteType.READ_WRITE)
-    TR_fpga_mask_R = AttributeWrapper(comms_annotation=["TR_fpga_mask_R"], datatype=bool, dims=(N_pn,))
-    TR_fpga_mask_RW = AttributeWrapper(comms_annotation=["TR_fpga_mask_RW"], datatype=bool, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    TR_fpga_communication_error_R = AttributeWrapper(comms_annotation=["TR_fpga_communication_error_R"], datatype=bool, dims=(N_pn,))
-    TR_sdp_config_first_fpga_nr_R = AttributeWrapper(comms_annotation=["TR_sdp_config_first_fpga_nr_R"], datatype=numpy.uint32)
-    TR_sdp_config_nof_beamsets_R = AttributeWrapper(comms_annotation=["TR_sdp_config_nof_beamsets_R"], datatype=numpy.uint32)
-    TR_sdp_config_nof_fpgas_R = AttributeWrapper(comms_annotation=["TR_sdp_config_nof_fpgas_R"], datatype=numpy.uint32)
-    TR_software_version_R = AttributeWrapper(comms_annotation=["TR_software_version_R"], datatype=str)
-    TR_start_time_R = AttributeWrapper(comms_annotation=["TR_start_time_R"], datatype=numpy.int64)
-    TR_tod_R = AttributeWrapper(comms_annotation=["TR_tod_R"], datatype=numpy.int64, dims=(2,))     # struct of (time_t, int64)
-    TR_tod_pps_delta_R = AttributeWrapper(comms_annotation=["TR_tod_pps_delta_R"], datatype=numpy.double)
+    FPGA_firmware_version_R = AttributeWrapper(
+        comms_annotation=["FPGA_firmware_version_R"], datatype=str, dims=(N_pn,)
+    )
+    FPGA_boot_image_R = AttributeWrapper(
+        comms_annotation=["FPGA_boot_image_R"],
+        datatype=numpy.int32,
+        dims=(N_pn,),
+        doc="Active FPGA image (0=factory, 1=user)",
+    )
+    FPGA_boot_image_RW = AttributeWrapper(
+        comms_annotation=["FPGA_boot_image_RW"],
+        datatype=numpy.int32,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_global_node_index_R = AttributeWrapper(
+        comms_annotation=["FPGA_global_node_index_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+    )
+    FPGA_hardware_version_R = AttributeWrapper(
+        comms_annotation=["FPGA_hardware_version_R"], datatype=str, dims=(N_pn,)
+    )
+    FPGA_pps_present_R = AttributeWrapper(
+        comms_annotation=["FPGA_pps_present_R"], datatype=bool, dims=(N_pn,)
+    )
+    FPGA_pps_capture_cnt_R = AttributeWrapper(
+        comms_annotation=["FPGA_pps_capture_cnt_R"], datatype=numpy.uint32, dims=(N_pn,)
+    )
+    FPGA_pps_expected_cnt_R = AttributeWrapper(
+        comms_annotation=["FPGA_pps_expected_cnt_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+    )
+    FPGA_pps_expected_cnt_RW = AttributeWrapper(
+        comms_annotation=["FPGA_pps_expected_cnt_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_processing_enable_R = AttributeWrapper(
+        comms_annotation=["FPGA_processing_enable_R"], datatype=bool, dims=(N_pn,)
+    )
+    FPGA_processing_enable_RW = AttributeWrapper(
+        comms_annotation=["FPGA_processing_enable_RW"],
+        datatype=bool,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_ring_node_offset_R = AttributeWrapper(
+        comms_annotation=["FPGA_ring_node_offset_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+    )
+    FPGA_ring_node_offset_RW = AttributeWrapper(
+        comms_annotation=["FPGA_ring_node_offset_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_ring_nof_nodes_R = AttributeWrapper(
+        comms_annotation=["FPGA_ring_nof_nodes_R"], datatype=numpy.uint32, dims=(N_pn,)
+    )
+    FPGA_ring_nof_nodes_RW = AttributeWrapper(
+        comms_annotation=["FPGA_ring_nof_nodes_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_ring_use_cable_to_next_rn_R = AttributeWrapper(
+        comms_annotation=["FPGA_ring_use_cable_to_next_rn_R"],
+        datatype=bool,
+        dims=(N_pn,),
+    )
+    FPGA_ring_use_cable_to_next_rn_RW = AttributeWrapper(
+        comms_annotation=["FPGA_ring_use_cable_to_next_rn_RW"],
+        datatype=bool,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_ring_use_cable_to_previous_rn_R = AttributeWrapper(
+        comms_annotation=["FPGA_ring_use_cable_to_previous_rn_R"],
+        datatype=bool,
+        dims=(N_pn,),
+    )
+    FPGA_ring_use_cable_to_previous_rn_RW = AttributeWrapper(
+        comms_annotation=["FPGA_ring_use_cable_to_previous_rn_RW"],
+        datatype=bool,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_scrap_R = AttributeWrapper(
+        comms_annotation=["FPGA_scrap_R"], datatype=numpy.int32, dims=(N_pn, N_subbands)
+    )
+    FPGA_scrap_RW = AttributeWrapper(
+        comms_annotation=["FPGA_scrap_RW"],
+        datatype=numpy.int32,
+        dims=(N_pn, N_subbands),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_sdp_info_antenna_band_index_R = AttributeWrapper(
+        comms_annotation=["FPGA_sdp_info_antenna_band_index_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+    )
+    FPGA_sdp_info_block_period_R = AttributeWrapper(
+        comms_annotation=["FPGA_sdp_info_block_period_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+    )
+    FPGA_sdp_info_f_adc_R = AttributeWrapper(
+        comms_annotation=["FPGA_sdp_info_f_adc_R"], datatype=numpy.uint32, dims=(N_pn,)
+    )
+    FPGA_sdp_info_fsub_type_R = AttributeWrapper(
+        comms_annotation=["FPGA_sdp_info_fsub_type_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+    )
+    FPGA_sdp_info_nyquist_sampling_zone_index_R = AttributeWrapper(
+        comms_annotation=["FPGA_sdp_info_nyquist_sampling_zone_index_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+    )
+    FPGA_sdp_info_nyquist_sampling_zone_index_RW = AttributeWrapper(
+        comms_annotation=["FPGA_sdp_info_nyquist_sampling_zone_index_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_sdp_info_observation_id_R = AttributeWrapper(
+        comms_annotation=["FPGA_sdp_info_observation_id_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+    )
+    FPGA_sdp_info_observation_id_RW = AttributeWrapper(
+        comms_annotation=["FPGA_sdp_info_observation_id_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_sdp_info_station_id_R = AttributeWrapper(
+        comms_annotation=["FPGA_sdp_info_station_id_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+    )
+    FPGA_sdp_info_station_id_RW = AttributeWrapper(
+        comms_annotation=["FPGA_sdp_info_station_id_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_subband_spectral_inversion_R = AttributeWrapper(
+        comms_annotation=["FPGA_subband_spectral_inversion_R"],
+        datatype=bool,
+        dims=(N_pn,),
+    )
+    FPGA_subband_spectral_inversion_RW = AttributeWrapper(
+        comms_annotation=["FPGA_subband_spectral_inversion_RW"],
+        datatype=bool,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_subband_weights_R = AttributeWrapper(
+        comms_annotation=["FPGA_subband_weights_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn, S_pn, N_subbands),
+    )
+    FPGA_subband_weights_RW = AttributeWrapper(
+        comms_annotation=["FPGA_subband_weights_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn, S_pn, N_subbands),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_time_since_last_pps_R = AttributeWrapper(
+        comms_annotation=["FPGA_time_since_last_pps_R"],
+        datatype=numpy.float_,
+        dims=(N_pn,),
+    )
+    FPGA_temp_R = AttributeWrapper(
+        comms_annotation=["FPGA_temp_R"], datatype=numpy.float_, dims=(N_pn,)
+    )
+    FPGA_wg_amplitude_R = AttributeWrapper(
+        comms_annotation=["FPGA_wg_amplitude_R"],
+        datatype=numpy.float_,
+        dims=(N_pn, S_pn),
+    )
+    FPGA_wg_amplitude_RW = AttributeWrapper(
+        comms_annotation=["FPGA_wg_amplitude_RW"],
+        datatype=numpy.float_,
+        dims=(N_pn, S_pn),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_wg_enable_R = AttributeWrapper(
+        comms_annotation=["FPGA_wg_enable_R"], datatype=bool, dims=(N_pn, S_pn)
+    )
+    FPGA_wg_enable_RW = AttributeWrapper(
+        comms_annotation=["FPGA_wg_enable_RW"],
+        datatype=bool,
+        dims=(N_pn, S_pn),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_wg_frequency_R = AttributeWrapper(
+        comms_annotation=["FPGA_wg_frequency_R"],
+        datatype=numpy.float_,
+        dims=(N_pn, S_pn),
+    )
+    FPGA_wg_frequency_RW = AttributeWrapper(
+        comms_annotation=["FPGA_wg_frequency_RW"],
+        datatype=numpy.float_,
+        dims=(N_pn, S_pn),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_wg_phase_R = AttributeWrapper(
+        comms_annotation=["FPGA_wg_phase_R"], datatype=numpy.float_, dims=(N_pn, S_pn)
+    )
+    FPGA_wg_phase_RW = AttributeWrapper(
+        comms_annotation=["FPGA_wg_phase_RW"],
+        datatype=numpy.float_,
+        dims=(N_pn, S_pn),
+        access=AttrWriteType.READ_WRITE,
+    )
+    TR_fpga_mask_R = AttributeWrapper(
+        comms_annotation=["TR_fpga_mask_R"], datatype=bool, dims=(N_pn,)
+    )
+    TR_fpga_mask_RW = AttributeWrapper(
+        comms_annotation=["TR_fpga_mask_RW"],
+        datatype=bool,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    TR_fpga_communication_error_R = AttributeWrapper(
+        comms_annotation=["TR_fpga_communication_error_R"], datatype=bool, dims=(N_pn,)
+    )
+    TR_sdp_config_first_fpga_nr_R = AttributeWrapper(
+        comms_annotation=["TR_sdp_config_first_fpga_nr_R"], datatype=numpy.uint32
+    )
+    TR_sdp_config_nof_beamsets_R = AttributeWrapper(
+        comms_annotation=["TR_sdp_config_nof_beamsets_R"], datatype=numpy.uint32
+    )
+    TR_sdp_config_nof_fpgas_R = AttributeWrapper(
+        comms_annotation=["TR_sdp_config_nof_fpgas_R"], datatype=numpy.uint32
+    )
+    TR_software_version_R = AttributeWrapper(
+        comms_annotation=["TR_software_version_R"], datatype=str
+    )
+    TR_start_time_R = AttributeWrapper(
+        comms_annotation=["TR_start_time_R"], datatype=numpy.int64
+    )
+    TR_tod_R = AttributeWrapper(
+        comms_annotation=["TR_tod_R"], datatype=numpy.int64, dims=(2,)
+    )  # struct of (time_t, int64)
+    TR_tod_pps_delta_R = AttributeWrapper(
+        comms_annotation=["TR_tod_pps_delta_R"], datatype=numpy.double
+    )
 
     # OPC-UA MP only points for AIT
-    FPGA_signal_input_mean_R = AttributeWrapper(comms_annotation=["FPGA_signal_input_mean_R"], datatype=numpy.double , dims=(N_pn, S_pn))
-    FPGA_signal_input_rms_R = AttributeWrapper(comms_annotation=["FPGA_signal_input_rms_R"], datatype=numpy.double, dims=(N_pn, S_pn))
-
-    FPGA_jesd204b_csr_rbd_count_R = AttributeWrapper(comms_annotation=["FPGA_jesd204b_csr_rbd_count_R"], datatype=numpy.uint32, dims=(N_pn, S_pn))
-    FPGA_jesd204b_csr_dev_syncn_R = AttributeWrapper(comms_annotation=["FPGA_jesd204b_csr_dev_syncn_R"], datatype=numpy.uint32, dims=(N_pn, S_pn))
-    FPGA_jesd204b_rx_err0_R = AttributeWrapper(comms_annotation=["FPGA_jesd204b_rx_err0_R"], datatype=numpy.uint32, dims=(N_pn, S_pn))
-    FPGA_jesd204b_rx_err1_R = AttributeWrapper(comms_annotation=["FPGA_jesd204b_rx_err1_R"], datatype=numpy.uint32, dims=(N_pn, S_pn))
-
-    FPGA_signal_input_bsn_R = AttributeWrapper(comms_annotation=["FPGA_signal_input_bsn_R"], datatype=numpy.int64, dims=(N_pn,))
-    FPGA_signal_input_nof_blocks_R = AttributeWrapper(comms_annotation=["FPGA_signal_input_nof_blocks_R"], datatype=numpy.int32, dims=(N_pn,))
-    FPGA_signal_input_nof_samples_R = AttributeWrapper(comms_annotation=["FPGA_signal_input_nof_samples_R"], datatype=numpy.int32, dims=(N_pn,))
-    FPGA_signal_input_samples_delay_R = AttributeWrapper(comms_annotation=["FPGA_signal_input_samples_delay_R"], datatype=numpy.uint32, dims=(N_pn, S_pn))
-    FPGA_signal_input_samples_delay_RW = AttributeWrapper(comms_annotation=["FPGA_signal_input_samples_delay_RW"], datatype=numpy.uint32, dims=(N_pn, S_pn), access=AttrWriteType.READ_WRITE)
-
-    FPGA_bst_offload_bsn_R = AttributeWrapper(comms_annotation=["FPGA_bst_offload_bsn_R"], datatype=numpy.int64, dims=(N_pn, N_beamsets_ctrl))
-
-    antenna_type_RW = attribute(doc='Type of antenna (LBA or HBA) attached to each input of the FPGAs',
-                                 dtype=((str,),), max_dim_y=N_pn, max_dim_x=S_pn,
-                                 access=AttrWriteType.READ_WRITE, fisallowed="is_attribute_access_allowed")
-    nyquist_zone_R = attribute(doc='Nyquist zone of each input.',
-                               dtype=((numpy.uint32,),), max_dim_y=N_pn, max_dim_x=S_pn,
-                               fisallowed="is_attribute_access_allowed",
-                               polling_period=DEFAULT_POLLING_PERIOD, abs_change=1)
-    clock_RW = attribute(doc='Configured sampling clock (Hz)',
-                         dtype=numpy.uint32, access=AttrWriteType.READ_WRITE, fisallowed="is_attribute_access_allowed",
-                         polling_period=DEFAULT_POLLING_PERIOD, abs_change=1)
+    FPGA_signal_input_mean_R = AttributeWrapper(
+        comms_annotation=["FPGA_signal_input_mean_R"],
+        datatype=numpy.double,
+        dims=(N_pn, S_pn),
+    )
+    FPGA_signal_input_rms_R = AttributeWrapper(
+        comms_annotation=["FPGA_signal_input_rms_R"],
+        datatype=numpy.double,
+        dims=(N_pn, S_pn),
+    )
+
+    FPGA_jesd204b_csr_rbd_count_R = AttributeWrapper(
+        comms_annotation=["FPGA_jesd204b_csr_rbd_count_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn, S_pn),
+    )
+    FPGA_jesd204b_csr_dev_syncn_R = AttributeWrapper(
+        comms_annotation=["FPGA_jesd204b_csr_dev_syncn_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn, S_pn),
+    )
+    FPGA_jesd204b_rx_err0_R = AttributeWrapper(
+        comms_annotation=["FPGA_jesd204b_rx_err0_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn, S_pn),
+    )
+    FPGA_jesd204b_rx_err1_R = AttributeWrapper(
+        comms_annotation=["FPGA_jesd204b_rx_err1_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn, S_pn),
+    )
+
+    FPGA_signal_input_bsn_R = AttributeWrapper(
+        comms_annotation=["FPGA_signal_input_bsn_R"], datatype=numpy.int64, dims=(N_pn,)
+    )
+    FPGA_signal_input_nof_blocks_R = AttributeWrapper(
+        comms_annotation=["FPGA_signal_input_nof_blocks_R"],
+        datatype=numpy.int32,
+        dims=(N_pn,),
+    )
+    FPGA_signal_input_nof_samples_R = AttributeWrapper(
+        comms_annotation=["FPGA_signal_input_nof_samples_R"],
+        datatype=numpy.int32,
+        dims=(N_pn,),
+    )
+    FPGA_signal_input_samples_delay_R = AttributeWrapper(
+        comms_annotation=["FPGA_signal_input_samples_delay_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn, S_pn),
+    )
+    FPGA_signal_input_samples_delay_RW = AttributeWrapper(
+        comms_annotation=["FPGA_signal_input_samples_delay_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn, S_pn),
+        access=AttrWriteType.READ_WRITE,
+    )
+
+    FPGA_bst_offload_bsn_R = AttributeWrapper(
+        comms_annotation=["FPGA_bst_offload_bsn_R"],
+        datatype=numpy.int64,
+        dims=(N_pn, N_beamsets_ctrl),
+    )
+
+    antenna_type_RW = attribute(
+        doc="Type of antenna (LBA or HBA) attached to each input of the FPGAs",
+        dtype=((str,),),
+        max_dim_y=N_pn,
+        max_dim_x=S_pn,
+        access=AttrWriteType.READ_WRITE,
+        fisallowed="is_attribute_access_allowed",
+    )
+    nyquist_zone_R = attribute(
+        doc="Nyquist zone of each input.",
+        dtype=((numpy.uint32,),),
+        max_dim_y=N_pn,
+        max_dim_x=S_pn,
+        fisallowed="is_attribute_access_allowed",
+        polling_period=DEFAULT_POLLING_PERIOD,
+        abs_change=1,
+    )
+    clock_RW = attribute(
+        doc="Configured sampling clock (Hz)",
+        dtype=numpy.uint32,
+        access=AttrWriteType.READ_WRITE,
+        fisallowed="is_attribute_access_allowed",
+        polling_period=DEFAULT_POLLING_PERIOD,
+        abs_change=1,
+    )
 
     def read_antenna_type_RW(self):
         return self._antenna_type
@@ -198,30 +448,34 @@ class SDP(OPCUADevice):
 
         # validate shape
         if value.shape != (N_pn, S_pn):
-            raise ValueError(f"Dimension mismatch. Expected ({N_pn}, {S_pn}), got {value.shape}.")
+            raise ValueError(
+                f"Dimension mismatch. Expected ({N_pn}, {S_pn}), got {value.shape}."
+            )
 
         # validate content
         for val in value.flatten():
             if val not in ["LBA", "HBA"]:
-                raise ValueError(f"Unsupported antenna type: {val}. Must be one of [LBA, HBA].")
+                raise ValueError(
+                    f"Unsupported antenna type: {val}. Must be one of [LBA, HBA]."
+                )
 
         # adopt new value
         self._antenna_type = value
 
     def _nyquist_zone(self, clock):
-        """ Return the Nyquist zone for the given clock (in Hz).
+        """Return the Nyquist zone for the given clock (in Hz).
 
-            The Nyquist zone determines the frequency offset of
-            the antennas.
+        The Nyquist zone determines the frequency offset of
+        the antennas.
 
-            NOTE: Only 160 and 200 MHz clocks are supported. """
+        NOTE: Only 160 and 200 MHz clocks are supported."""
 
         # (antenna type, clockMHz) -> Nyquist zone
         nyquist_zones = {
-           ("LBA", 160): 0,
-           ("LBA", 200): 0,
-           ("HBA", 160): 1,
-           ("HBA", 200): 2,
+            ("LBA", 160): 0,
+            ("LBA", 200): 0,
+            ("HBA", 160): 1,
+            ("HBA", 200): 2,
         }
 
         def antenna_type_to_nyquist_zone(antenna_type):
@@ -239,12 +493,14 @@ class SDP(OPCUADevice):
     def read_clock_RW(self):
         # We can only return a single value, so we assume the FPGA is configured coherently. Which is something
         # that is to be checked by an independent monitoring system anyway.
-        mask   = self.read_attribute("TR_fpga_mask_RW")
+        mask = self.read_attribute("TR_fpga_mask_RW")
         clocks = self.read_attribute("FPGA_pps_expected_cnt_RW")
-        clocks_in_mask = [clock for idx,clock in enumerate(clocks) if mask[idx]]
+        clocks_in_mask = [clock for idx, clock in enumerate(clocks) if mask[idx]]
 
         # We return first setting within the mask. If there are no FPGAs selected at all, just return a sane default.
-        return numpy.uint32(clocks_in_mask[0]) if clocks_in_mask else self.clock_RW_default
+        return (
+            numpy.uint32(clocks_in_mask[0]) if clocks_in_mask else self.clock_RW_default
+        )
 
     def write_clock_RW(self, clock):
         if clock not in (CLK_160_MHZ, CLK_200_MHZ):
@@ -254,34 +510,42 @@ class SDP(OPCUADevice):
         self.proxy.FPGA_pps_expected_cnt_RW = [clock] * N_pn
 
         # Also update the packet headers. We assume the first Nyquist zone of each FPGA is representative
-        self.proxy.FPGA_sdp_info_nyquist_sampling_zone_index_RW = self._nyquist_zone(clock)[:,0]
+        self.proxy.FPGA_sdp_info_nyquist_sampling_zone_index_RW = self._nyquist_zone(
+            clock
+        )[:, 0]
 
     # ----------
     # Summarising Attributes
     # ----------
-    FPGA_error_R                 = attribute(dtype=(bool,), max_dim_x=N_pn, fisallowed="is_attribute_access_allowed")
-    FPGA_processing_error_R      = attribute(dtype=(bool,), max_dim_x=N_pn, fisallowed="is_attribute_access_allowed")
-    FPGA_input_error_R           = attribute(dtype=(bool,), max_dim_x=N_pn, fisallowed="is_attribute_access_allowed")
+    FPGA_error_R = attribute(
+        dtype=(bool,), max_dim_x=N_pn, fisallowed="is_attribute_access_allowed"
+    )
+    FPGA_processing_error_R = attribute(
+        dtype=(bool,), max_dim_x=N_pn, fisallowed="is_attribute_access_allowed"
+    )
+    FPGA_input_error_R = attribute(
+        dtype=(bool,), max_dim_x=N_pn, fisallowed="is_attribute_access_allowed"
+    )
 
     def read_FPGA_error_R(self):
         return self.read_attribute("TR_fpga_mask_R") & (
-                 self.read_attribute("TR_fpga_communication_error_R")
-               | (self.read_attribute("FPGA_firmware_version_R") == "")
-               # we cannot assume all inputs of an FPGA are working until we have a mask for it
-               #| (self.read_attribute("FPGA_jesd204b_csr_dev_syncn_R") == 0).any(axis=1)
-               )
+            self.read_attribute("TR_fpga_communication_error_R")
+            | (self.read_attribute("FPGA_firmware_version_R") == "")
+            # we cannot assume all inputs of an FPGA are working until we have a mask for it
+            # | (self.read_attribute("FPGA_jesd204b_csr_dev_syncn_R") == 0).any(axis=1)
+        )
 
     def read_FPGA_processing_error_R(self):
         return self.read_attribute("TR_fpga_mask_R") & (
-                 ~self.read_attribute("FPGA_processing_enable_R")
-               | (self.read_attribute("FPGA_boot_image_R") <= 0)
-               )
+            ~self.read_attribute("FPGA_processing_enable_R")
+            | (self.read_attribute("FPGA_boot_image_R") <= 0)
+        )
 
     def read_FPGA_input_error_R(self):
         return self.read_attribute("TR_fpga_mask_R") & (
-                 self.read_attribute("FPGA_wg_enable_R").any(axis=1)
-               | (self.read_attribute("FPGA_signal_input_rms_R") == 0).any(axis=1)
-               )
+            self.read_attribute("FPGA_wg_enable_R").any(axis=1)
+            | (self.read_attribute("FPGA_signal_input_rms_R") == 0).any(axis=1)
+        )
 
     # --------
     # overloaded functions
@@ -297,16 +561,20 @@ class SDP(OPCUADevice):
 
     def _prepare_hardware(self):
         # FPGAs that are actually reachable and we care about
-        wait_for = ~(self.read_attribute("TR_fpga_communication_error_R")) & self.read_attribute("TR_fpga_mask_R")
+        wait_for = ~(
+            self.read_attribute("TR_fpga_communication_error_R")
+        ) & self.read_attribute("TR_fpga_mask_R")
 
-        # Order the correct firmare to be loaded 
+        # Order the correct firmware to be loaded
         self.proxy.FPGA_boot_image_RW = [1] * N_pn
 
         # Wait for the firmware to be loaded (ignoring masked out elements)
-        self.wait_attribute("FPGA_boot_image_R", lambda attr: ((attr == 1) | ~wait_for).all(), 60)
+        self.wait_attribute(
+            "FPGA_boot_image_R", lambda attr: ((attr == 1) | ~wait_for).all(), 60
+        )
 
     def _disable_hardware(self):
-        """ Disable the SDP hardware. """
+        """Disable the SDP hardware."""
         # Save actual mask values
         TR_fpga_mask = self.proxy.TR_fpga_mask_RW
         # Set the mask to all Trues
@@ -328,8 +596,14 @@ class SDP(OPCUADevice):
     SUBBAND_UNIT_WEIGHT = 2**13
 
     @staticmethod
-    def subband_weights(delay_seconds: float, phase_offset: float, amplitude_scaling: float, clock: int, nyquist_zone: int) -> numpy.ndarray:
-        """ Compute a FPGA_subband_weights_RW row for all subbands for a single input. """
+    def subband_weights(
+        delay_seconds: float,
+        phase_offset: float,
+        amplitude_scaling: float,
+        clock: int,
+        nyquist_zone: int,
+    ) -> numpy.ndarray:
+        """Compute a FPGA_subband_weights_RW row for all subbands for a single input."""
 
         subband_phases = numpy.zeros((N_subbands,), dtype=numpy.float64)
 
@@ -337,10 +611,17 @@ class SDP(OPCUADevice):
             frequency = subband_frequency(subband_nr, clock, nyquist_zone)
 
             # correct for the delay by rotating the signal *back*
-            subband_phases[subband_nr] = (-2.0 * numpy.pi) * frequency * delay_seconds + phase_offset
+            subband_phases[subband_nr] = (
+                -2.0 * numpy.pi
+            ) * frequency * delay_seconds + phase_offset
 
         # convert phases to their complex equivalent, as FPGA weights (cint16 packed into uint32)
-        return phases_to_weights(subband_phases, SDP.SUBBAND_UNIT_WEIGHT, numpy.array([amplitude_scaling] * N_subbands))
+        return phases_to_weights(
+            subband_phases,
+            SDP.SUBBAND_UNIT_WEIGHT,
+            numpy.array([amplitude_scaling] * N_subbands),
+        )
+
 
 # ----------
 # Run server
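
To make the phase computation in SDP.subband_weights concrete, an illustrative calculation for a single subband. The 1024-point subband resolution, 512 subbands per Nyquist zone, and the zone-offset form of the frequency follow common.subband_frequencies, but the exact constants used below are assumptions:

import numpy

clock = 200_000_000        # 200 MHz sampling clock
N_subband_res = 1024       # assumed value of N_subband_res
N_subbands = 512           # assumed number of subbands per Nyquist zone
subband_nr = 102
nyquist_zone = 0           # LBA; higher zones add an offset of zone * N_subbands
delay_seconds = 5e-9       # example geometric delay to compensate
phase_offset = 0.0

# Subband frequency, following common.subband_frequencies.
frequency = (nyquist_zone * N_subbands + subband_nr) * clock / N_subband_res

# Rotate the signal *back* over the delay, as in SDP.subband_weights.
phase = -2.0 * numpy.pi * frequency * delay_seconds + phase_offset

# The unit-amplitude complex weight that phases_to_weights would then pack into uint32.
weight = numpy.exp(1j * phase)
print(f"{frequency / 1e6:.3f} MHz -> phase {phase:.4f} rad -> weight {weight:.4f}")
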
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py b/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py
index 4a90b3c12c57bcd7499ff0c4c760af42aeaeea19..24e7e7885c16fe204577c2b0f05b7fb764f27f0c 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py
@@ -1,31 +1,26 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the SST project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ SST Device Server for LOFAR2.0
 
 """
 
+import numpy
+from tango import AttrWriteType
+
 # PyTango imports
 from tango.server import device_property, attribute
-from tango import AttrWriteType
 
-# Additional import
-from tangostationcontrol.common.entrypoint import entry
-from tangostationcontrol.common.constants import N_pn, MAX_INPUTS, N_subbands
 from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
 from tangostationcontrol.clients.opcua_client import OPCUAConnection
 from tangostationcontrol.clients.statistics.client import StatisticsClient
+from tangostationcontrol.common.constants import N_pn, MAX_INPUTS, N_subbands
+
+# Additional import
+from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.devices.sdp.statistics import Statistics
 from tangostationcontrol.statistics.collector import StationSSTCollector
 
-import numpy
-
 __all__ = ["SST", "main"]
 
 
@@ -38,41 +33,32 @@ class SST(Statistics):
     # -----------------
 
     FPGA_sst_offload_hdr_eth_destination_mac_RW_default = device_property(
-        dtype='DevVarStringArray',
-        mandatory=True
+        dtype="DevVarStringArray", mandatory=True
     )
 
     FPGA_sst_offload_hdr_ip_destination_address_RW_default = device_property(
-        dtype='DevVarStringArray',
-        mandatory=True
+        dtype="DevVarStringArray", mandatory=True
     )
 
     FPGA_sst_offload_hdr_udp_destination_port_RW_default = device_property(
-        dtype='DevVarUShortArray',
-        mandatory=True
+        dtype="DevVarUShortArray", mandatory=True
     )
 
     FPGA_sst_offload_enable_RW_default = device_property(
-        dtype='DevVarBooleanArray',
-        mandatory=False,
-        default_value=[True] * N_pn
+        dtype="DevVarBooleanArray", mandatory=False, default_value=[True] * N_pn
     )
 
     FPGA_sst_offload_weighted_subbands_RW_default = device_property(
-        dtype='DevVarBooleanArray',
-        mandatory=False,
-        default_value=[True] * N_pn
+        dtype="DevVarBooleanArray", mandatory=False, default_value=[True] * N_pn
     )
 
     FIRST_DEFAULT_SETTINGS = [
-        'FPGA_sst_offload_hdr_eth_destination_mac_RW',
-        'FPGA_sst_offload_hdr_ip_destination_address_RW',
-        'FPGA_sst_offload_hdr_udp_destination_port_RW',
-
-        'FPGA_sst_offload_weighted_subbands_RW',
-
+        "FPGA_sst_offload_hdr_eth_destination_mac_RW",
+        "FPGA_sst_offload_hdr_ip_destination_address_RW",
+        "FPGA_sst_offload_hdr_udp_destination_port_RW",
+        "FPGA_sst_offload_weighted_subbands_RW",
         # enable only after the offloading is configured correctly
-        'FPGA_sst_offload_enable_RW'
+        "FPGA_sst_offload_enable_RW",
     ]
 
     # ----------
@@ -80,44 +66,142 @@ class SST(Statistics):
     # ----------
 
     # FPGA control points for SSTs
-    FPGA_sst_offload_enable_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_enable_RW"], datatype=bool, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_sst_offload_enable_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_enable_R"], datatype=bool, dims=(N_pn,))
-    FPGA_sst_offload_hdr_eth_destination_mac_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_hdr_eth_destination_mac_RW"], datatype=str, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_sst_offload_hdr_eth_destination_mac_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_hdr_eth_destination_mac_R"], datatype=str, dims=(N_pn,))
-    FPGA_sst_offload_hdr_ip_destination_address_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_hdr_ip_destination_address_RW"], datatype=str, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_sst_offload_hdr_ip_destination_address_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_hdr_ip_destination_address_R"], datatype=str, dims=(N_pn,))
-    FPGA_sst_offload_hdr_udp_destination_port_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_hdr_udp_destination_port_RW"], datatype=numpy.uint16, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_sst_offload_hdr_udp_destination_port_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_hdr_udp_destination_port_R"], datatype=numpy.uint16, dims=(N_pn,))
-    FPGA_sst_offload_bsn_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_bsn_R"], datatype=numpy.int64, dims=(N_pn,))
-    FPGA_sst_offload_weighted_subbands_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_weighted_subbands_RW"], datatype=bool, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_sst_offload_weighted_subbands_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_weighted_subbands_R"], datatype=bool, dims=(N_pn,))
-
-    FPGA_sst_offload_nof_packets_R = AttributeWrapper(comms_annotation=["FPGA_sst_offload_nof_packets_R"], datatype=numpy.int32, dims=(N_pn,))
-    FPGA_sst_offload_nof_valid_R = AttributeWrapper(comms_annotation=["FPGA_sst_offload_nof_valid_R"], datatype=numpy.int32, dims=(N_pn,))
+    FPGA_sst_offload_enable_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_sst_offload_enable_RW"],
+        datatype=bool,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_sst_offload_enable_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_sst_offload_enable_R"],
+        datatype=bool,
+        dims=(N_pn,),
+    )
+    FPGA_sst_offload_hdr_eth_destination_mac_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_sst_offload_hdr_eth_destination_mac_RW"],
+        datatype=str,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_sst_offload_hdr_eth_destination_mac_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_sst_offload_hdr_eth_destination_mac_R"],
+        datatype=str,
+        dims=(N_pn,),
+    )
+    FPGA_sst_offload_hdr_ip_destination_address_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_sst_offload_hdr_ip_destination_address_RW"],
+        datatype=str,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_sst_offload_hdr_ip_destination_address_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_sst_offload_hdr_ip_destination_address_R"],
+        datatype=str,
+        dims=(N_pn,),
+    )
+    FPGA_sst_offload_hdr_udp_destination_port_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_sst_offload_hdr_udp_destination_port_RW"],
+        datatype=numpy.uint16,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_sst_offload_hdr_udp_destination_port_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_sst_offload_hdr_udp_destination_port_R"],
+        datatype=numpy.uint16,
+        dims=(N_pn,),
+    )
+    FPGA_sst_offload_bsn_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_sst_offload_bsn_R"],
+        datatype=numpy.int64,
+        dims=(N_pn,),
+    )
+    FPGA_sst_offload_weighted_subbands_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_sst_offload_weighted_subbands_RW"],
+        datatype=bool,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_sst_offload_weighted_subbands_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_sst_offload_weighted_subbands_R"],
+        datatype=bool,
+        dims=(N_pn,),
+    )
+
+    FPGA_sst_offload_nof_packets_R = AttributeWrapper(
+        comms_annotation=["FPGA_sst_offload_nof_packets_R"],
+        datatype=numpy.int32,
+        dims=(N_pn,),
+    )
+    FPGA_sst_offload_nof_valid_R = AttributeWrapper(
+        comms_annotation=["FPGA_sst_offload_nof_valid_R"],
+        datatype=numpy.int32,
+        dims=(N_pn,),
+    )
 
     # number of packets with valid payloads
-    nof_valid_payloads_R    = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_valid_payloads"}, dims=(N_pn,), datatype=numpy.uint64)
+    nof_valid_payloads_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "nof_valid_payloads"},
+        dims=(N_pn,),
+        datatype=numpy.uint64,
+    )
     # number of packets with invalid payloads
-    nof_payload_errors_R    = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_payload_errors"}, dims=(N_pn,), datatype=numpy.uint64)
+    nof_payload_errors_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "nof_payload_errors"},
+        dims=(N_pn,),
+        datatype=numpy.uint64,
+    )
     # latest SSTs
-    sst_R                   = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "sst_values"}, dims=(MAX_INPUTS, N_subbands), datatype=numpy.uint64)
+    sst_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "sst_values"},
+        dims=(MAX_INPUTS, N_subbands),
+        datatype=numpy.uint64,
+    )
     # reported timestamp
     # for each row in the latest SSTs
-    sst_timestamp_R         = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "sst_timestamps"}, dims=(MAX_INPUTS,), datatype=numpy.uint64)
+    sst_timestamp_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "sst_timestamps"},
+        dims=(MAX_INPUTS,),
+        datatype=numpy.uint64,
+    )
     # integration interval for each row in the latest SSTs
-    integration_interval_R  = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "integration_intervals"}, dims=(MAX_INPUTS,), datatype=numpy.float32)
+    integration_interval_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "integration_intervals"},
+        dims=(MAX_INPUTS,),
+        datatype=numpy.float32,
+    )
     # whether the subband data was calibrated by the SDP (that is, were subband weights applied)
-    subbands_calibrated_R   = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "subbands_calibrated"}, dims=(MAX_INPUTS,), datatype=bool)
+    subbands_calibrated_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "subbands_calibrated"},
+        dims=(MAX_INPUTS,),
+        datatype=bool,
+    )
 
     # ----------
     # Summarising Attributes
     # ----------
-    FPGA_processing_error_R      = attribute(dtype=(bool,), max_dim_x=N_pn)
+    FPGA_processing_error_R = attribute(dtype=(bool,), max_dim_x=N_pn)
 
     def read_FPGA_processing_error_R(self):
         return self.sdp_proxy.TR_fpga_mask_RW & (
-                 ~self.read_attribute("FPGA_sst_offload_enable_R")
-               )
+            ~self.read_attribute("FPGA_sst_offload_enable_R")
+        )
 
     # --------
     # Overloaded functions
@@ -127,6 +211,7 @@ class SST(Statistics):
     # Commands
     # --------
 
+
 # ----------
 # Run server
 # ----------
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/statistics.py b/tangostationcontrol/tangostationcontrol/devices/sdp/statistics.py
index b9e6665749cb11094412b3fa6c103c4046b95755..346fec1acb70fd87721d8b239313902bb99fc4bb 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/statistics.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/statistics.py
@@ -1,30 +1,23 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the SST project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ Base device for Statistics (SST/BST/XST)
 
 """
 
-# PyTango imports
-from tango.server import device_property
-from tango import DeviceProxy, DevSource
-
 # Additional import
 import asyncio
+import logging
 
-from tangostationcontrol.clients.statistics.client import StatisticsClient
+from tango import DeviceProxy, DevSource
+
+# PyTango imports
+from tango.server import device_property
 from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
-from tangostationcontrol.devices.opcua_device import OPCUADevice
-from tangostationcontrol.common.lofar_logging import log_exceptions
+from tangostationcontrol.clients.statistics.client import StatisticsClient
 from tangostationcontrol.common.constants import MAX_ETH_FRAME_SIZE
-
-import logging
+from tangostationcontrol.common.lofar_logging import log_exceptions
+from tangostationcontrol.devices.opcua_device import OPCUADevice
 
 logger = logging.getLogger()
 
@@ -47,67 +40,129 @@ class Statistics(OPCUADevice):
     # Device Properties
     # -----------------
 
-    Statistics_Client_UDP_Port = device_property(
-        dtype='DevUShort',
-        mandatory=True
-    )
+    Statistics_Client_UDP_Port = device_property(dtype="DevUShort", mandatory=True)
 
-    Statistics_Client_TCP_Port = device_property(
-        dtype='DevUShort',
-        mandatory=True
-    )
+    Statistics_Client_TCP_Port = device_property(dtype="DevUShort", mandatory=True)
 
     # ----------
     # Attributes
     # ----------
 
     # number of UDP packets and bytes that were received
-    nof_packets_received_R  = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "udp", "parameter": "nof_packets_received"}, datatype=numpy.uint64)
-    nof_bytes_received_R  = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "udp", "parameter": "nof_bytes_received"}, datatype=numpy.uint64)
+    nof_packets_received_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "udp", "parameter": "nof_packets_received"},
+        datatype=numpy.uint64,
+    )
+    nof_bytes_received_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "udp", "parameter": "nof_bytes_received"},
+        datatype=numpy.uint64,
+    )
     # number of UDP packets that were dropped because we couldn't keep up with processing
-    nof_packets_dropped_R   = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "udp", "parameter": "nof_packets_dropped"}, datatype=numpy.uint64)
+    nof_packets_dropped_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "udp", "parameter": "nof_packets_dropped"},
+        datatype=numpy.uint64,
+    )
     # last packet we processed
-    last_packet_R           = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "udp", "parameter": "last_packet"}, dims=(MAX_ETH_FRAME_SIZE,), datatype=numpy.uint8)
+    last_packet_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "udp", "parameter": "last_packet"},
+        dims=(MAX_ETH_FRAME_SIZE,),
+        datatype=numpy.uint8,
+    )
     # when last packet was received
-    last_packet_timestamp_R = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "udp", "parameter": "last_packet_timestamp"}, datatype=numpy.uint64)
-
+    last_packet_timestamp_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "udp", "parameter": "last_packet_timestamp"},
+        datatype=numpy.uint64,
+    )
 
     # queue fill percentage, as reported by the consumer
-    queue_collector_fill_percentage_R = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "queue", "parameter": "collector_fill_percentage"}, datatype=numpy.uint64)
-    queue_replicator_fill_percentage_R = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "queue", "parameter": "replicator_fill_percentage"}, dims=(64,), datatype=numpy.uint64)
+    queue_collector_fill_percentage_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "queue", "parameter": "collector_fill_percentage"},
+        datatype=numpy.uint64,
+    )
+    queue_replicator_fill_percentage_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "queue", "parameter": "replicator_fill_percentage"},
+        dims=(64,),
+        datatype=numpy.uint64,
+    )
 
-    replicator_clients_R = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "replicator", "parameter": "clients"}, dims=(MAX_STATISTICS_CLIENTS,), datatype=str)
-    replicator_nof_bytes_sent_R = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "replicator", "parameter": "nof_bytes_sent"}, datatype=numpy.uint64)
+    replicator_clients_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "replicator", "parameter": "clients"},
+        dims=(MAX_STATISTICS_CLIENTS,),
+        datatype=str,
+    )
+    replicator_nof_bytes_sent_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "replicator", "parameter": "nof_bytes_sent"},
+        datatype=numpy.uint64,
+    )
 
-    replicator_nof_packets_sent_R = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "replicator", "parameter": "nof_packets_sent"}, datatype=numpy.uint64)
-    replicator_nof_tasks_pending_R = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "replicator", "parameter": "nof_tasks_pending"}, datatype=numpy.uint64)
+    replicator_nof_packets_sent_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "replicator", "parameter": "nof_packets_sent"},
+        datatype=numpy.uint64,
+    )
+    replicator_nof_tasks_pending_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "replicator", "parameter": "nof_tasks_pending"},
+        datatype=numpy.uint64,
+    )
 
     # number of UDP packets that were processed
-    nof_packets_processed_R = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_packets"}, datatype=numpy.uint64)
+    nof_packets_processed_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "nof_packets"},
+        datatype=numpy.uint64,
+    )
     # number of invalid (non-SST) packets received
-    nof_invalid_packets_R   = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_invalid_packets"}, datatype=numpy.uint64)
+    nof_invalid_packets_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "nof_invalid_packets"},
+        datatype=numpy.uint64,
+    )
     # last packet that could not be parsed
-    last_invalid_packet_R   = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "last_invalid_packet"}, dims=(MAX_ETH_FRAME_SIZE,), datatype=numpy.uint8)
+    last_invalid_packet_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "last_invalid_packet"},
+        dims=(MAX_ETH_FRAME_SIZE,),
+        datatype=numpy.uint8,
+    )
     # what the last exception was
-    last_invalid_packet_exception_R = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "last_invalid_packet_exception"}, datatype=str)
+    last_invalid_packet_exception_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={
+            "type": "statistics",
+            "parameter": "last_invalid_packet_exception",
+        },
+        datatype=str,
+    )
 
     # --------
     # Overloaded functions
     # --------
 
     def configure_for_off(self):
-        """ user code here. is called when the state is set to OFF """
+        """user code here. is called when the state is set to OFF"""
 
         try:
             self.statistics_client.sync_stop()
         except Exception as e:
-            logger.exception("Exception while stopping statistics_client in configure_for_off. Exception ignored")
+            logger.exception(
+                "Exception while stopping statistics_client in configure_for_off. Exception ignored"
+            )
 
         super().configure_for_off()
 
     @log_exceptions()
     def configure_for_initialise(self):
-        """ user code here. is called when the sate is set to INIT """
+        """user code here. is called when the sate is set to INIT"""
         """Initialises the attributes and properties of the statistics device."""
 
         super().configure_for_initialise()
@@ -115,7 +170,7 @@ class Statistics(OPCUADevice):
         # Options for UDPReceiver
         udp_options = {
             "udp_port": self.Statistics_Client_UDP_Port,
-            "udp_host": "0.0.0.0"
+            "udp_host": "0.0.0.0",
         }
 
         # Options for TCPReplicator
@@ -125,10 +180,18 @@ class Statistics(OPCUADevice):
         }
 
         self.statistics_collector = self.STATISTICS_COLLECTOR_CLASS()
-        self.statistics_client = StatisticsClient(self.statistics_collector, udp_options, tcp_options, self.Fault, self.opcua_connection.event_loop) # can share event loop
+        self.statistics_client = StatisticsClient(
+            self.statistics_collector,
+            udp_options,
+            tcp_options,
+            self.Fault,
+            self.opcua_connection.event_loop,
+        )  # can share event loop
 
         # schedule the opc-ua initialisation, and wait for it to finish
-        future = asyncio.run_coroutine_threadsafe(self._connect_statistics(), self.statistics_client.event_loop)
+        future = asyncio.run_coroutine_threadsafe(
+            self._connect_statistics(), self.statistics_client.event_loop
+        )
         _ = future.result()
 
         # proxy the SDP device in case we need the FPGA mask
@@ -144,7 +207,11 @@ class Statistics(OPCUADevice):
             except Exception as e:
                 # use the pass function instead of setting read/write fails
                 i.set_pass_func(self)
-                logger.warning("error while setting the sst attribute {} read/write function. {}. using pass function instead".format(i, e))
+                logger.warning(
+                    "Error while setting the statistics attribute {} read/write function: {}. Using pass function instead".format(
+                        i, e
+                    )
+                )
 
         await self.statistics_client.start()
 
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py b/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py
index 5b1628702e32f63c9a364dfbcd116d27095f784f..df01df22ca4554ad2ff92628e40e31d9c657ef03 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py
@@ -1,33 +1,35 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the XST project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ XST Device Server for LOFAR2.0
 
 """
 
-# PyTango imports
-from tango.server import device_property, attribute
+import numpy
+from lofar_station_client.statistics.collector import XSTCollector
 from tango import AttrWriteType
 
-from lofar_station_client.statistics.collector import XSTCollector
+# PyTango imports
+from tango.server import device_property, attribute
 
-# Additional import
-from tangostationcontrol.common.entrypoint import entry
-from tangostationcontrol.common.constants import N_pn, P_sq, DEFAULT_SUBBAND, MAX_PARALLEL_SUBBANDS, MAX_BLOCKS, BLOCK_LENGTH, VALUES_PER_COMPLEX, MAX_INPUTS
 from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
 from tangostationcontrol.clients.opcua_client import OPCUAConnection
 from tangostationcontrol.clients.statistics.client import StatisticsClient
+from tangostationcontrol.common.constants import (
+    N_pn,
+    P_sq,
+    DEFAULT_SUBBAND,
+    MAX_PARALLEL_SUBBANDS,
+    MAX_BLOCKS,
+    BLOCK_LENGTH,
+    VALUES_PER_COMPLEX,
+    MAX_INPUTS,
+)
 
+# Additional import
+from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.devices.sdp.statistics import Statistics
 
-import numpy
-
 __all__ = ["XST", "main"]
 
 
@@ -40,54 +42,43 @@ class XST(Statistics):
     # -----------------
 
     FPGA_xst_offload_hdr_eth_destination_mac_RW_default = device_property(
-        dtype='DevVarStringArray',
-        mandatory=True
+        dtype="DevVarStringArray", mandatory=True
     )
 
     FPGA_xst_offload_hdr_ip_destination_address_RW_default = device_property(
-        dtype='DevVarStringArray',
-        mandatory=True
+        dtype="DevVarStringArray", mandatory=True
     )
 
     FPGA_xst_offload_hdr_udp_destination_port_RW_default = device_property(
-        dtype='DevVarUShortArray',
-        mandatory=True
+        dtype="DevVarUShortArray", mandatory=True
     )
 
     FPGA_xst_processing_enable_RW_default = device_property(
-        dtype='DevVarBooleanArray',
-        mandatory=False,
-        default_value=[True] * N_pn
+        dtype="DevVarBooleanArray", mandatory=False, default_value=[True] * N_pn
     )
 
     FPGA_xst_subband_select_RW_default = device_property(
-        dtype='DevVarULongArray',
+        dtype="DevVarULongArray",
         mandatory=False,
-        default_value=[[0,DEFAULT_SUBBAND,0,0,0,0,0,0]] * N_pn
+        default_value=[[0, DEFAULT_SUBBAND, 0, 0, 0, 0, 0, 0]] * N_pn,
     )
 
     FPGA_xst_integration_interval_RW_default = device_property(
-        dtype='DevVarDoubleArray',
-        mandatory=False,
-        default_value=[1.0] * N_pn
+        dtype="DevVarDoubleArray", mandatory=False, default_value=[1.0] * N_pn
     )
 
     FPGA_xst_offload_enable_RW_default = device_property(
-        dtype='DevVarBooleanArray',
-        mandatory=False,
-        default_value=[True] * N_pn
+        dtype="DevVarBooleanArray", mandatory=False, default_value=[True] * N_pn
     )
 
     FIRST_DEFAULT_SETTINGS = [
-        'FPGA_xst_offload_hdr_eth_destination_mac_RW',
-        'FPGA_xst_offload_hdr_ip_destination_address_RW',
-        'FPGA_xst_offload_hdr_udp_destination_port_RW',
-
-        'FPGA_xst_subband_select_RW',
-        'FPGA_xst_integration_interval_RW',
-
+        "FPGA_xst_offload_hdr_eth_destination_mac_RW",
+        "FPGA_xst_offload_hdr_ip_destination_address_RW",
+        "FPGA_xst_offload_hdr_udp_destination_port_RW",
+        "FPGA_xst_subband_select_RW",
+        "FPGA_xst_integration_interval_RW",
         # enable only after the offloading is configured correctly
-        'FPGA_xst_offload_enable_RW'
+        "FPGA_xst_offload_enable_RW",
     ]
 
     # ----------
@@ -95,129 +86,604 @@ class XST(Statistics):
     # ----------
 
     # FPGA control points for XSTs
-    FPGA_xst_integration_interval_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_integration_interval_RW"], datatype=numpy.double, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_xst_integration_interval_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_integration_interval_R"], datatype=numpy.double, dims=(N_pn,))
-    FPGA_xst_offload_enable_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_enable_RW"], datatype=bool, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_xst_offload_enable_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_enable_R"], datatype=bool, dims=(N_pn,))
-    FPGA_xst_offload_hdr_eth_destination_mac_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_hdr_eth_destination_mac_RW"], datatype=str, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_xst_offload_hdr_eth_destination_mac_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_hdr_eth_destination_mac_R"], datatype=str, dims=(N_pn,))
-    FPGA_xst_offload_hdr_ip_destination_address_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_hdr_ip_destination_address_RW"], datatype=str, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_xst_offload_hdr_ip_destination_address_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_hdr_ip_destination_address_R"], datatype=str, dims=(N_pn,))
-    FPGA_xst_offload_hdr_udp_destination_port_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_hdr_udp_destination_port_RW"], datatype=numpy.uint16, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_xst_offload_hdr_udp_destination_port_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_hdr_udp_destination_port_R"], datatype=numpy.uint16, dims=(N_pn,))
-    FPGA_xst_offload_bsn_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_bsn_R"], datatype=numpy.int64, dims=(N_pn,))
-    FPGA_xst_processing_enable_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_processing_enable_RW"], datatype=bool, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_xst_processing_enable_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_processing_enable_R"], datatype=bool, dims=(N_pn,))
-    FPGA_xst_subband_select_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_subband_select_RW"], datatype=numpy.uint32, dims=(MAX_PARALLEL_SUBBANDS,N_pn), access=AttrWriteType.READ_WRITE)
-    FPGA_xst_subband_select_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_subband_select_R"], datatype=numpy.uint32, dims=(MAX_PARALLEL_SUBBANDS,N_pn))
-
-    FPGA_xst_offload_nof_crosslets_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_nof_crosslets_RW"], datatype=numpy.uint32, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_xst_offload_nof_crosslets_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_nof_crosslets_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_xst_ring_nof_transport_hops_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_nof_transport_hops_RW"], datatype=numpy.uint32, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_xst_ring_nof_transport_hops_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_nof_transport_hops_R"], datatype=numpy.uint32, dims=(N_pn,))
-
-    FPGA_xst_offload_nof_packets_R = AttributeWrapper(comms_annotation=["FPGA_xst_offload_nof_packets_R"], datatype=numpy.int32, dims=(N_pn,))
-    FPGA_xst_offload_nof_valid_R = AttributeWrapper(comms_annotation=["FPGA_xst_offload_nof_valid_R"], datatype=numpy.int32, dims=(N_pn,))
-
-    FPGA_xst_ring_rx_clear_total_counts_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_clear_total_counts_RW"], datatype=bool, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
-    FPGA_xst_ring_rx_clear_total_counts_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_clear_total_counts_R"], datatype=bool, dims=(N_pn,))
-    FPGA_xst_rx_align_stream_enable_RW = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_rx_align_stream_enable_RW"], datatype=bool, dims=(N_pn,P_sq), access=AttrWriteType.READ_WRITE)
-    FPGA_xst_rx_align_stream_enable_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_rx_align_stream_enable_R"], datatype=bool, dims=(N_pn,P_sq))
-    FPGA_xst_ring_rx_total_nof_packets_received_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_total_nof_packets_received_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_xst_ring_rx_total_nof_packets_discarded_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_total_nof_packets_discarded_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_xst_ring_rx_total_nof_sync_received_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_total_nof_sync_received_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_xst_ring_rx_total_nof_sync_discarded_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_total_nof_sync_discarded_R"], datatype=numpy.uint32, dims=(N_pn,))
-    FPGA_xst_ring_rx_bsn_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_bsn_R"], datatype=numpy.int64, dims=(N_pn, N_pn))
-    FPGA_xst_ring_rx_nof_packets_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_nof_packets_R"], datatype=numpy.int32, dims=(N_pn, N_pn))
-    FPGA_xst_ring_rx_nof_valid_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_nof_valid_R"], datatype=numpy.int32, dims=(N_pn, N_pn))
-    FPGA_xst_ring_rx_latency_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_latency_R"], datatype=numpy.int32, dims=(N_pn, N_pn))
-    FPGA_xst_rx_align_bsn_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_rx_align_bsn_R"], datatype=numpy.int64, dims=(N_pn,P_sq))
-    FPGA_xst_rx_align_nof_packets_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_rx_align_nof_packets_R"], datatype=numpy.int32, dims=(N_pn,P_sq))
-    FPGA_xst_rx_align_nof_valid_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_rx_align_nof_valid_R"], datatype=numpy.int32, dims=(N_pn,P_sq))
-    FPGA_xst_rx_align_latency_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_rx_align_latency_R"], datatype=numpy.int32, dims=(N_pn,P_sq))
-    FPGA_xst_rx_align_nof_replaced_packets_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_rx_align_nof_replaced_packets_R"], datatype=numpy.int32, dims=(N_pn,P_sq))
-    FPGA_xst_aligned_bsn_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_aligned_bsn_R"], datatype=numpy.int64, dims=(N_pn,))
-    FPGA_xst_aligned_nof_packets_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_aligned_nof_packets_R"], datatype=numpy.int32, dims=(N_pn,))
-    FPGA_xst_aligned_nof_valid_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_aligned_nof_valid_R"], datatype=numpy.int32, dims=(N_pn,))
-    FPGA_xst_aligned_latency_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_aligned_latency_R"], datatype=numpy.int32, dims=(N_pn,))
-    FPGA_xst_ring_tx_bsn_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_tx_bsn_R"], datatype=numpy.int64, dims=(N_pn,N_pn))
-    FPGA_xst_ring_tx_nof_packets_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_tx_nof_packets_R"], datatype=numpy.int32, dims=(N_pn,N_pn))
-    FPGA_xst_ring_tx_nof_valid_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_tx_nof_valid_R"], datatype=numpy.int32, dims=(N_pn,N_pn))
-    FPGA_xst_ring_tx_latency_R = AttributeWrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_tx_latency_R"], datatype=numpy.int32, dims=(N_pn,N_pn))
+    FPGA_xst_integration_interval_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_integration_interval_RW"],
+        datatype=numpy.double,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_xst_integration_interval_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_integration_interval_R"],
+        datatype=numpy.double,
+        dims=(N_pn,),
+    )
+    FPGA_xst_offload_enable_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_offload_enable_RW"],
+        datatype=bool,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_xst_offload_enable_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_offload_enable_R"],
+        datatype=bool,
+        dims=(N_pn,),
+    )
+    FPGA_xst_offload_hdr_eth_destination_mac_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_offload_hdr_eth_destination_mac_RW"],
+        datatype=str,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_xst_offload_hdr_eth_destination_mac_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_offload_hdr_eth_destination_mac_R"],
+        datatype=str,
+        dims=(N_pn,),
+    )
+    FPGA_xst_offload_hdr_ip_destination_address_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_offload_hdr_ip_destination_address_RW"],
+        datatype=str,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_xst_offload_hdr_ip_destination_address_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_offload_hdr_ip_destination_address_R"],
+        datatype=str,
+        dims=(N_pn,),
+    )
+    FPGA_xst_offload_hdr_udp_destination_port_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_offload_hdr_udp_destination_port_RW"],
+        datatype=numpy.uint16,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_xst_offload_hdr_udp_destination_port_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_offload_hdr_udp_destination_port_R"],
+        datatype=numpy.uint16,
+        dims=(N_pn,),
+    )
+    FPGA_xst_offload_bsn_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_offload_bsn_R"],
+        datatype=numpy.int64,
+        dims=(N_pn,),
+    )
+    FPGA_xst_processing_enable_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_processing_enable_RW"],
+        datatype=bool,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_xst_processing_enable_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_processing_enable_R"],
+        datatype=bool,
+        dims=(N_pn,),
+    )
+    FPGA_xst_subband_select_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_subband_select_RW"],
+        datatype=numpy.uint32,
+        dims=(MAX_PARALLEL_SUBBANDS, N_pn),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_xst_subband_select_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_subband_select_R"],
+        datatype=numpy.uint32,
+        dims=(MAX_PARALLEL_SUBBANDS, N_pn),
+    )
+
+    FPGA_xst_offload_nof_crosslets_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_offload_nof_crosslets_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_xst_offload_nof_crosslets_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_offload_nof_crosslets_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+    )
+    FPGA_xst_ring_nof_transport_hops_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_nof_transport_hops_RW"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_xst_ring_nof_transport_hops_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_nof_transport_hops_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+    )
+
+    FPGA_xst_offload_nof_packets_R = AttributeWrapper(
+        comms_annotation=["FPGA_xst_offload_nof_packets_R"],
+        datatype=numpy.int32,
+        dims=(N_pn,),
+    )
+    FPGA_xst_offload_nof_valid_R = AttributeWrapper(
+        comms_annotation=["FPGA_xst_offload_nof_valid_R"],
+        datatype=numpy.int32,
+        dims=(N_pn,),
+    )
+
+    FPGA_xst_ring_rx_clear_total_counts_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_rx_clear_total_counts_RW"],
+        datatype=bool,
+        dims=(N_pn,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_xst_ring_rx_clear_total_counts_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_rx_clear_total_counts_R"],
+        datatype=bool,
+        dims=(N_pn,),
+    )
+    FPGA_xst_rx_align_stream_enable_RW = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_rx_align_stream_enable_RW"],
+        datatype=bool,
+        dims=(N_pn, P_sq),
+        access=AttrWriteType.READ_WRITE,
+    )
+    FPGA_xst_rx_align_stream_enable_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_rx_align_stream_enable_R"],
+        datatype=bool,
+        dims=(N_pn, P_sq),
+    )
+    FPGA_xst_ring_rx_total_nof_packets_received_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_rx_total_nof_packets_received_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+    )
+    FPGA_xst_ring_rx_total_nof_packets_discarded_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_rx_total_nof_packets_discarded_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+    )
+    FPGA_xst_ring_rx_total_nof_sync_received_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_rx_total_nof_sync_received_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+    )
+    FPGA_xst_ring_rx_total_nof_sync_discarded_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_rx_total_nof_sync_discarded_R"],
+        datatype=numpy.uint32,
+        dims=(N_pn,),
+    )
+    FPGA_xst_ring_rx_bsn_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_rx_bsn_R"],
+        datatype=numpy.int64,
+        dims=(N_pn, N_pn),
+    )
+    FPGA_xst_ring_rx_nof_packets_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_rx_nof_packets_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, N_pn),
+    )
+    FPGA_xst_ring_rx_nof_valid_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_rx_nof_valid_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, N_pn),
+    )
+    FPGA_xst_ring_rx_latency_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_rx_latency_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, N_pn),
+    )
+    FPGA_xst_rx_align_bsn_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_rx_align_bsn_R"],
+        datatype=numpy.int64,
+        dims=(N_pn, P_sq),
+    )
+    FPGA_xst_rx_align_nof_packets_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_rx_align_nof_packets_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, P_sq),
+    )
+    FPGA_xst_rx_align_nof_valid_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_rx_align_nof_valid_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, P_sq),
+    )
+    FPGA_xst_rx_align_latency_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_rx_align_latency_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, P_sq),
+    )
+    FPGA_xst_rx_align_nof_replaced_packets_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_rx_align_nof_replaced_packets_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, P_sq),
+    )
+    FPGA_xst_aligned_bsn_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_aligned_bsn_R"],
+        datatype=numpy.int64,
+        dims=(N_pn,),
+    )
+    FPGA_xst_aligned_nof_packets_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_aligned_nof_packets_R"],
+        datatype=numpy.int32,
+        dims=(N_pn,),
+    )
+    FPGA_xst_aligned_nof_valid_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_aligned_nof_valid_R"],
+        datatype=numpy.int32,
+        dims=(N_pn,),
+    )
+    FPGA_xst_aligned_latency_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_aligned_latency_R"],
+        datatype=numpy.int32,
+        dims=(N_pn,),
+    )
+    FPGA_xst_ring_tx_bsn_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_tx_bsn_R"],
+        datatype=numpy.int64,
+        dims=(N_pn, N_pn),
+    )
+    FPGA_xst_ring_tx_nof_packets_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_tx_nof_packets_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, N_pn),
+    )
+    FPGA_xst_ring_tx_nof_valid_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_tx_nof_valid_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, N_pn),
+    )
+    FPGA_xst_ring_tx_latency_R = AttributeWrapper(
+        comms_id=OPCUAConnection,
+        comms_annotation=["FPGA_xst_ring_tx_latency_R"],
+        datatype=numpy.int32,
+        dims=(N_pn, N_pn),
+    )
 
     # number of packets with valid payloads
-    nof_valid_payloads_R    = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_valid_payloads"}, dims=(N_pn,), datatype=numpy.uint64)
+    nof_valid_payloads_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "nof_valid_payloads"},
+        dims=(N_pn,),
+        datatype=numpy.uint64,
+    )
     # number of packets with invalid payloads
-    nof_payload_errors_R    = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_payload_errors"}, dims=(N_pn,), datatype=numpy.uint64)
+    nof_payload_errors_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "nof_payload_errors"},
+        dims=(N_pn,),
+        datatype=numpy.uint64,
+    )
     # latest XSTs
-    xst_blocks_R            = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "xst_blocks", "reshape": True}, dims=(MAX_PARALLEL_SUBBANDS, MAX_BLOCKS, BLOCK_LENGTH, BLOCK_LENGTH, VALUES_PER_COMPLEX), datatype=numpy.int64)
+    xst_blocks_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={
+            "type": "statistics",
+            "parameter": "xst_blocks",
+            "reshape": True,
+        },
+        dims=(
+            MAX_PARALLEL_SUBBANDS,
+            MAX_BLOCKS,
+            BLOCK_LENGTH,
+            BLOCK_LENGTH,
+            VALUES_PER_COMPLEX,
+        ),
+        datatype=numpy.int64,
+    )
     # whether the values in the block are conjugated and transposed
-    xst_conjugated_R        = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "xst_conjugated", "reshape": True}, dims=(MAX_PARALLEL_SUBBANDS, MAX_BLOCKS), datatype=bool)
+    xst_conjugated_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={
+            "type": "statistics",
+            "parameter": "xst_conjugated",
+            "reshape": True,
+        },
+        dims=(MAX_PARALLEL_SUBBANDS, MAX_BLOCKS),
+        datatype=bool,
+    )
     # reported timestamp for each subband in the latest XSTs
-    xst_timestamp_R         = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "xst_timestamps"}, dims=(MAX_PARALLEL_SUBBANDS,), datatype=numpy.uint64)
+    xst_timestamp_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "xst_timestamps"},
+        dims=(MAX_PARALLEL_SUBBANDS,),
+        datatype=numpy.uint64,
+    )
     # which subband the XSTs describe
-    xst_subbands_R          = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "xst_subbands"}, dims=(MAX_PARALLEL_SUBBANDS,), datatype=numpy.uint16)
+    xst_subbands_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={"type": "statistics", "parameter": "xst_subbands"},
+        dims=(MAX_PARALLEL_SUBBANDS,),
+        datatype=numpy.uint16,
+    )
     # integration interval for each subband in the latest XSTs
-    xst_integration_interval_R  = AttributeWrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "xst_integration_intervals"}, dims=(MAX_PARALLEL_SUBBANDS,), datatype=numpy.float32)
+    xst_integration_interval_R = AttributeWrapper(
+        comms_id=StatisticsClient,
+        comms_annotation={
+            "type": "statistics",
+            "parameter": "xst_integration_intervals",
+        },
+        dims=(MAX_PARALLEL_SUBBANDS,),
+        datatype=numpy.float32,
+    )
 
     # xst_R, but as a matrix of subband x (input x input)
-    xst_real_R              = attribute(max_dim_x=MAX_INPUTS * MAX_INPUTS, max_dim_y=MAX_PARALLEL_SUBBANDS, dtype=((numpy.float32,),))
-    xst_imag_R              = attribute(max_dim_x=MAX_INPUTS * MAX_INPUTS, max_dim_y=MAX_PARALLEL_SUBBANDS, dtype=((numpy.float32,),))
-    xst_power_R             = attribute(max_dim_x=MAX_INPUTS * MAX_INPUTS, max_dim_y=MAX_PARALLEL_SUBBANDS, dtype=((numpy.float32,),))
-    xst_phase_R             = attribute(max_dim_x=MAX_INPUTS * MAX_INPUTS, max_dim_y=MAX_PARALLEL_SUBBANDS, dtype=((numpy.float32,),))
+    xst_real_R = attribute(
+        max_dim_x=MAX_INPUTS * MAX_INPUTS,
+        max_dim_y=MAX_PARALLEL_SUBBANDS,
+        dtype=((numpy.float32,),),
+    )
+    xst_imag_R = attribute(
+        max_dim_x=MAX_INPUTS * MAX_INPUTS,
+        max_dim_y=MAX_PARALLEL_SUBBANDS,
+        dtype=((numpy.float32,),),
+    )
+    xst_power_R = attribute(
+        max_dim_x=MAX_INPUTS * MAX_INPUTS,
+        max_dim_y=MAX_PARALLEL_SUBBANDS,
+        dtype=((numpy.float32,),),
+    )
+    xst_phase_R = attribute(
+        max_dim_x=MAX_INPUTS * MAX_INPUTS,
+        max_dim_y=MAX_PARALLEL_SUBBANDS,
+        dtype=((numpy.float32,),),
+    )
 
     def read_xst_real_R(self):
-        return numpy.real(self.statistics_client.collector.xst_values()).reshape(MAX_PARALLEL_SUBBANDS, MAX_INPUTS * MAX_INPUTS)
+        return numpy.real(self.statistics_client.collector.xst_values()).reshape(
+            MAX_PARALLEL_SUBBANDS, MAX_INPUTS * MAX_INPUTS
+        )
 
     def read_xst_imag_R(self):
-        return numpy.imag(self.statistics_client.collector.xst_values()).reshape(MAX_PARALLEL_SUBBANDS, MAX_INPUTS * MAX_INPUTS)
+        return numpy.imag(self.statistics_client.collector.xst_values()).reshape(
+            MAX_PARALLEL_SUBBANDS, MAX_INPUTS * MAX_INPUTS
+        )
 
     def read_xst_power_R(self):
-        return numpy.abs(self.statistics_client.collector.xst_values()).reshape(MAX_PARALLEL_SUBBANDS, MAX_INPUTS * MAX_INPUTS)
+        return numpy.abs(self.statistics_client.collector.xst_values()).reshape(
+            MAX_PARALLEL_SUBBANDS, MAX_INPUTS * MAX_INPUTS
+        )
 
     def read_xst_phase_R(self):
-        return numpy.angle(self.statistics_client.collector.xst_values()).reshape(MAX_PARALLEL_SUBBANDS, MAX_INPUTS * MAX_INPUTS)
+        return numpy.angle(self.statistics_client.collector.xst_values()).reshape(
+            MAX_PARALLEL_SUBBANDS, MAX_INPUTS * MAX_INPUTS
+        )
 
     # xst_R, but as a matrix of input x input, for each specific subband index
-    xst_0_real_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_real_R(0))
-    xst_0_imag_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_imag_R(0))
-    xst_0_power_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_power_R(0))
-    xst_0_phase_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_phase_R(0))
-
-    xst_1_real_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_real_R(1))
-    xst_1_imag_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_imag_R(1))
-    xst_1_power_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_power_R(1))
-    xst_1_phase_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_phase_R(1))
-
-    xst_2_real_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_real_R(2))
-    xst_2_imag_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_imag_R(2))
-    xst_2_power_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_power_R(2))
-    xst_2_phase_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_phase_R(2))
-
-    xst_3_real_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_real_R(3))
-    xst_3_imag_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_imag_R(3))
-    xst_3_power_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_power_R(3))
-    xst_3_phase_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_phase_R(3))
-
-    xst_4_real_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_real_R(4))
-    xst_4_imag_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_imag_R(4))
-    xst_4_power_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_power_R(4))
-    xst_4_phase_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_phase_R(4))
-
-    xst_5_real_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_real_R(5))
-    xst_5_imag_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_imag_R(5))
-    xst_5_power_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_power_R(5))
-    xst_5_phase_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_phase_R(5))
-
-    xst_6_real_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_real_R(6))
-    xst_6_imag_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_imag_R(6))
-    xst_6_power_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_power_R(6))
-    xst_6_phase_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_phase_R(6))
-
-    xst_7_real_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_real_R(7))
-    xst_7_imag_R            = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_imag_R(7))
-    xst_7_power_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_power_R(7))
-    xst_7_phase_R           = attribute(max_dim_x=MAX_INPUTS, max_dim_y=MAX_INPUTS, dtype=((numpy.float32,),), fget = lambda self: self.read_xst_N_phase_R(7))
+    xst_0_real_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_real_R(0),
+    )
+    xst_0_imag_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_imag_R(0),
+    )
+    xst_0_power_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_power_R(0),
+    )
+    xst_0_phase_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_phase_R(0),
+    )
+
+    xst_1_real_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_real_R(1),
+    )
+    xst_1_imag_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_imag_R(1),
+    )
+    xst_1_power_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_power_R(1),
+    )
+    xst_1_phase_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_phase_R(1),
+    )
+
+    xst_2_real_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_real_R(2),
+    )
+    xst_2_imag_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_imag_R(2),
+    )
+    xst_2_power_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_power_R(2),
+    )
+    xst_2_phase_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_phase_R(2),
+    )
+
+    xst_3_real_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_real_R(3),
+    )
+    xst_3_imag_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_imag_R(3),
+    )
+    xst_3_power_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_power_R(3),
+    )
+    xst_3_phase_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_phase_R(3),
+    )
+
+    xst_4_real_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_real_R(4),
+    )
+    xst_4_imag_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_imag_R(4),
+    )
+    xst_4_power_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_power_R(4),
+    )
+    xst_4_phase_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_phase_R(4),
+    )
+
+    xst_5_real_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_real_R(5),
+    )
+    xst_5_imag_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_imag_R(5),
+    )
+    xst_5_power_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_power_R(5),
+    )
+    xst_5_phase_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_phase_R(5),
+    )
+
+    xst_6_real_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_real_R(6),
+    )
+    xst_6_imag_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_imag_R(6),
+    )
+    xst_6_power_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_power_R(6),
+    )
+    xst_6_phase_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_phase_R(6),
+    )
+
+    xst_7_real_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_real_R(7),
+    )
+    xst_7_imag_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_imag_R(7),
+    )
+    xst_7_power_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_power_R(7),
+    )
+    xst_7_phase_R = attribute(
+        max_dim_x=MAX_INPUTS,
+        max_dim_y=MAX_INPUTS,
+        dtype=((numpy.float32,),),
+        fget=lambda self: self.read_xst_N_phase_R(7),
+    )
 
     def read_xst_N_real_R(self, subband_idx):
         return numpy.real(self.statistics_client.collector.xst_values([subband_idx])[0])
@@ -229,18 +695,20 @@ class XST(Statistics):
         return numpy.abs(self.statistics_client.collector.xst_values([subband_idx])[0])
 
     def read_xst_N_phase_R(self, subband_idx):
-        return numpy.angle(self.statistics_client.collector.xst_values([subband_idx])[0])
+        return numpy.angle(
+            self.statistics_client.collector.xst_values([subband_idx])[0]
+        )
 
     # ----------
     # Summarising Attributes
     # ----------
-    FPGA_processing_error_R      = attribute(dtype=(bool,), max_dim_x=N_pn)
+    FPGA_processing_error_R = attribute(dtype=(bool,), max_dim_x=N_pn)
 
     def read_FPGA_processing_error_R(self):
         return self.sdp_proxy.TR_fpga_mask_RW & (
-                 ~self.read_attribute("FPGA_xst_offload_enable_R")
-               | ~self.read_attribute("FPGA_xst_processing_enable_R")
-               )
+            ~self.read_attribute("FPGA_xst_offload_enable_R")
+            | ~self.read_attribute("FPGA_xst_processing_enable_R")
+        )
 
     # --------
     # Overloaded functions
@@ -250,6 +718,7 @@ class XST(Statistics):
     # Commands
     # --------
 
+
 # ----------
 # Run server
 # ----------
diff --git a/tangostationcontrol/tangostationcontrol/devices/snmp_device.py b/tangostationcontrol/tangostationcontrol/devices/snmp_device.py
index f400d0cf99e933b7a2ab2d62c614d086300c278d..06ce1393d8335083f8b12d8eb8fbc827566dd4f4 100644
--- a/tangostationcontrol/tangostationcontrol/devices/snmp_device.py
+++ b/tangostationcontrol/tangostationcontrol/devices/snmp_device.py
@@ -1,27 +1,26 @@
-# -*- coding: utf-8 -*-
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ SNMP Device Server for LOFAR2.0
 
 """
 
-# Additional import
-from tangostationcontrol.devices.lofar_device import LOFARDevice
-from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
-
-from tango.server import device_property
-import os
-
 import logging
-from tangostationcontrol.clients.snmp_client import SNMPClient, MIBLoader
+import os
 
 import pkg_resources
-
 from pysmi import debug
+from tango.server import device_property
+from tangostationcontrol.clients.snmp_client import SNMPClient, MIBLoader
+from tangostationcontrol.common.lofar_logging import (
+    device_logging_to_python,
+    log_exceptions,
+)
+
+# Additional import
+from tangostationcontrol.devices.lofar_device import LOFARDevice
 
-debug.setLogger(debug.Debug('searcher', "compiler", "borrower", "reader"))
+debug.setLogger(debug.Debug("searcher", "compiler", "borrower", "reader"))
 
 logger = logging.getLogger()
 
@@ -33,36 +32,28 @@ class SNMPDevice(LOFARDevice):
     # -----------------
     # Device Properties
     # -----------------
-    SNMP_community = device_property(
-        dtype='DevString',
-        mandatory=True
-    )
-
-    SNMP_host = device_property(
-        dtype='DevString',
-        mandatory=True
-    )
-
-    SNMP_mib_dir = device_property(
-        dtype='DevString',
-        mandatory=True
-    )
-
-    SNMP_timeout = device_property(
-        dtype='DevDouble',
-        mandatory=True
-    )
-
-    SNMP_version = device_property(
-        dtype='DevULong',
-        mandatory=True
-    )
+    SNMP_community = device_property(dtype="DevString", mandatory=True)
+
+    SNMP_host = device_property(dtype="DevString", mandatory=True)
+
+    SNMP_mib_dir = device_property(dtype="DevString", mandatory=True)
+
+    SNMP_timeout = device_property(dtype="DevDouble", mandatory=True)
+
+    SNMP_version = device_property(dtype="DevULong", mandatory=True)
 
     @log_exceptions()
     def configure_for_initialise(self):
 
         # set up the SNMP client
-        self.snmp_manager = SNMPClient(self.SNMP_community, self.SNMP_host, self.SNMP_timeout, self.SNMP_version, self.Fault, self)
+        self.snmp_manager = SNMPClient(
+            self.SNMP_community,
+            self.SNMP_host,
+            self.SNMP_timeout,
+            self.SNMP_version,
+            self.Fault,
+            self,
+        )
 
         # map an access helper class
         for i in self.attr_list():
@@ -71,7 +62,11 @@ class SNMPDevice(LOFARDevice):
             except Exception as e:
                 # use the pass function instead of setting read/write fails
                 i.set_pass_func(self)
-                logger.warning("error while setting the SNMP attribute {} read/write function. {}".format(i, e))
+                logger.warning(
+                    "error while setting the SNMP attribute {} read/write function. {}".format(
+                        i, e
+                    )
+                )
 
         self.snmp_manager.start()
 
@@ -86,7 +81,9 @@ class SNMPDevice(LOFARDevice):
         super().configure_for_off()
 
     def get_mib_dir(self):
-        mib_filename_path = pkg_resources.resource_filename('tangostationcontrol', self.SNMP_mib_dir)
+        mib_filename_path = pkg_resources.resource_filename(
+            "tangostationcontrol", self.SNMP_mib_dir
+        )
         mib_path = os.path.dirname(mib_filename_path)
 
         return mib_path
@@ -103,4 +100,5 @@ class SNMPDevice(LOFARDevice):
                 self.loader.load_pymib(i.comms_annotation["mib"])
             except Exception as e:
                 raise Exception(
-                    f"Failed to load MIB file: {i.comms_annotation.get('mib')} for attribute {i.name} in directory {self.get_mib_dir()} ") from e
+                    f"Failed to load MIB file: {i.comms_annotation.get('mib')} for attribute {i.name} in directory {self.get_mib_dir()} "
+                ) from e
diff --git a/tangostationcontrol/tangostationcontrol/devices/temperature_manager.py b/tangostationcontrol/tangostationcontrol/devices/temperature_manager.py
index b725a9d4d315abdf97a863515ed91325994b4417..522f0eaae2310673c9dc001460a7bc6e7801fc10 100644
--- a/tangostationcontrol/tangostationcontrol/devices/temperature_manager.py
+++ b/tangostationcontrol/tangostationcontrol/devices/temperature_manager.py
@@ -1,31 +1,40 @@
-# -*- coding: utf-8 -*-
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ overtemperature managing Device Server for LOFAR2.0
 
 """
-import numpy as np
+import logging
 
-from tango import Util, DeviceProxy, AttributeInfoEx, AttrDataFormat, EventType, DevSource, DebugIt
+import numpy as np
+from tango import (
+    Util,
+    DeviceProxy,
+    AttributeInfoEx,
+    AttrDataFormat,
+    EventType,
+    DevSource,
+    DebugIt,
+)
 from tango.server import attribute, device_property
 from tango.server import command
 
 # Additional import
 from tangostationcontrol.common.constants import DEFAULT_POLLING_PERIOD
 from tangostationcontrol.common.entrypoint import entry
-from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
+from tangostationcontrol.common.lofar_logging import (
+    device_logging_to_python,
+    log_exceptions,
+)
 from tangostationcontrol.devices.lofar_device import LOFARDevice
 
-import logging
 logger = logging.getLogger()
 
 __all__ = ["TemperatureManager", "main"]
 
 
 class AttrInfo:
-    def __init__(self, attr : AttributeInfoEx, subscription_id, proxy):
+    def __init__(self, attr: AttributeInfoEx, subscription_id, proxy):
         self.attr_name = attr.name
         self.subscription_id = subscription_id
         self.proxy = proxy
@@ -55,16 +64,18 @@ class TemperatureManager(LOFARDevice):
     # Device Properties
     # -----------------
 
-    Alarm_Error_List = device_property(
-        dtype=[str],
-        mandatory=False,
-        default_value=[]
-    )
+    Alarm_Error_List = device_property(dtype=[str], mandatory=False, default_value=[])
 
     Shutdown_Device_List = device_property(
         dtype=[str],
         mandatory=False,
-        default_value=["STAT/SDP/1", "STAT/UNB2/1", "STAT/RECV/1", "STAT/APSCT/1", "STAT/APSPU/1"]
+        default_value=[
+            "STAT/SDP/1",
+            "STAT/UNB2/1",
+            "STAT/RECV/1",
+            "STAT/APSCT/1",
+            "STAT/APSPU/1",
+        ],
     )
 
     # ----------
@@ -83,7 +94,7 @@ class TemperatureManager(LOFARDevice):
 
     @log_exceptions()
     def configure_for_initialise(self):
-        instance_number = self.get_name().split('/')[2]
+        instance_number = self.get_name().split("/")[2]
         util = Util.instance()
         ds_inst = util.get_ds_inst_name()
 
@@ -98,14 +109,23 @@ class TemperatureManager(LOFARDevice):
 
             # make sure the attribute is polled, otherwise we wont receive events
             if not proxy.is_attribute_polled(f"{attribute_name}"):
-                raise Exception(f"Error, no polling_period set for attribute {proxy.name()}/{attribute_name}. We will not get any events without a polling_period")
+                raise Exception(
+                    f"Error, no polling_period set for attribute {proxy.name()}/{attribute_name}. We will not get any events without a polling_period"
+                )
 
             # subscribe to change events
-            subscription_id = proxy.subscribe_event(f"{attribute_name}", EventType.CHANGE_EVENT, self.auto_alarm_handler, stateless=True)
+            subscription_id = proxy.subscribe_event(
+                f"{attribute_name}",
+                EventType.CHANGE_EVENT,
+                self.auto_alarm_handler,
+                stateless=True,
+            )
 
             # store some info about the attribute
             atr_cfg = proxy.get_attribute_config(attribute_name)
-            self.temp_error_attrs[f"{proxy.name().lower()}/{attribute_name.lower()}"] = (AttrInfo(atr_cfg, subscription_id, proxy))
+            self.temp_error_attrs[
+                f"{proxy.name().lower()}/{attribute_name.lower()}"
+            ] = AttrInfo(atr_cfg, subscription_id, proxy)
 
         super().configure_for_initialise()
 
@@ -122,7 +142,11 @@ class TemperatureManager(LOFARDevice):
             dev_attr.proxy.unsubscribe_event(dev_attr.subscription_id)
             del dev_attr
 
-    is_alarming_R = attribute(dtype=bool, polling_period=DEFAULT_POLLING_PERIOD, fisallowed="is_attribute_access_allowed")
+    is_alarming_R = attribute(
+        dtype=bool,
+        polling_period=DEFAULT_POLLING_PERIOD,
+        fisallowed="is_attribute_access_allowed",
+    )
 
     def read_is_alarming_R(self):
         # return whether any of the devices are alarming
@@ -142,12 +166,16 @@ class TemperatureManager(LOFARDevice):
             return
 
         # get the correct device/attribute
-        dev_attr = self.temp_error_attrs[f"{event.device.name().lower()}/{event.attr_value.name.lower()}"]
+        dev_attr = self.temp_error_attrs[
+            f"{event.device.name().lower()}/{event.attr_value.name.lower()}"
+        ]
 
         dev_attr.update_alarm_state(event.attr_value)
 
         if dev_attr.is_alarming:
-            logger.warning(f"Detected a temperature alarm for {event.device}: {event.attr_value.name} := {event.attr_value.value}")
+            logger.warning(
+                f"Detected a temperature alarm for {event.device}: {event.attr_value.name} := {event.attr_value.value}"
+            )
             self.auto_shutdown_hardware()
 
     # --------
@@ -165,9 +193,14 @@ class TemperatureManager(LOFARDevice):
                 proxy = DeviceProxy(dev_name)
                 proxy.disable_hardware()
             except Exception as e:
-                logger.warning(f"Automatic hardware shutdown of device {dev_name} has failed: {e.args[0]}")
+                logger.warning(
+                    f"Automatic hardware shutdown of device {dev_name} has failed: {e.args[0]}"
+                )
         # TODO(Stefano): Add "STAT/PSOC/1" to the shutdown list and develop its behaviour
-        logger.warning(f"Temperature alarm triggered auto shutdown of all hardware devices")      
+        logger.warning(
+            f"Temperature alarm triggered auto shutdown of all hardware devices"
+        )
+
 
 # ----------
 # Run server
diff --git a/tangostationcontrol/tangostationcontrol/devices/tilebeam.py b/tangostationcontrol/tangostationcontrol/devices/tilebeam.py
index ba399c1c7d07f4480e56e4db523a3d5afaa369c2..ed86c9c5a32be0bda60fcf7591370fa2032aa697 100644
--- a/tangostationcontrol/tangostationcontrol/devices/tilebeam.py
+++ b/tangostationcontrol/tangostationcontrol/devices/tilebeam.py
@@ -1,15 +1,14 @@
-# -*- coding: utf-8 -*-
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ TileBeam Device Server for LOFAR2.0
 
 """
 
-import numpy
 import datetime
+import logging
 
+import numpy
 from tango import DeviceProxy, DevSource
 from tango import Util
 
@@ -17,20 +16,21 @@ from tango import Util
 from tangostationcontrol.beam.delays import Delays
 from tangostationcontrol.common.constants import N_xyz, N_elements, N_pol
 from tangostationcontrol.common.entrypoint import entry
-from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
+from tangostationcontrol.common.lofar_logging import (
+    device_logging_to_python,
+    log_exceptions,
+)
 from tangostationcontrol.devices.beam_device import BeamDevice
 from tangostationcontrol.devices.device_decorators import TimeIt
 
-import logging
 logger = logging.getLogger()
 
-
 __all__ = ["TileBeam", "main"]
 
 
 @device_logging_to_python()
 class TileBeam(BeamDevice):
-    """ Tracks a Tile Beam for all tiles in a single AntennaField. """
+    """Tracks a Tile Beam for all tiles in a single AntennaField."""
 
     # -----------------
     # Device Properties
@@ -48,9 +48,10 @@ class TileBeam(BeamDevice):
     def configure_for_initialise(self):
         # Set a reference of AntennaField device that is correlated to this BEAM device
         util = Util.instance()
-        instance_number = self.get_name().split('/')[2]
+        instance_number = self.get_name().split("/")[2]
         self.antennafield_proxy = DeviceProxy(
-            f"{util.get_ds_inst_name()}/AntennaField/{instance_number}")
+            f"{util.get_ds_inst_name()}/AntennaField/{instance_number}"
+        )
         self.antennafield_proxy.set_source(DevSource.DEV)
 
         # We maintain the same number of tiles as the AntennaField
@@ -59,13 +60,22 @@ class TileBeam(BeamDevice):
 
         # Retrieve positions from AntennaField device
         Antenna_Reference_itrf = self.antennafield_proxy.Antenna_Reference_itrf_R
-        HBAT_antenna_itrf_offsets = self.antennafield_proxy.HBAT_antenna_itrf_offsets_R.reshape(self._nr_tiles, N_elements, N_xyz)
+        HBAT_antenna_itrf_offsets = (
+            self.antennafield_proxy.HBAT_antenna_itrf_offsets_R.reshape(
+                self._nr_tiles, N_elements, N_xyz
+            )
+        )
 
         # a delay calculator for each tile
-        self.HBAT_delay_calculators = [Delays(reference_itrf) for reference_itrf in Antenna_Reference_itrf]
+        self.HBAT_delay_calculators = [
+            Delays(reference_itrf) for reference_itrf in Antenna_Reference_itrf
+        ]
 
         # absolute positions of each antenna element
-        self.HBAT_antenna_positions = [Antenna_Reference_itrf[tile] + HBAT_antenna_itrf_offsets[tile] for tile in range(self._nr_tiles)]
+        self.HBAT_antenna_positions = [
+            Antenna_Reference_itrf[tile] + HBAT_antenna_itrf_offsets[tile]
+            for tile in range(self._nr_tiles)
+        ]
 
     # --------
     # internal functions
@@ -86,12 +96,16 @@ class TileBeam(BeamDevice):
             d.set_measure_time(timestamp)
 
             # calculate the delays based on the set reference position, the set time and now the set direction and antenna positions
-            delays[tile] = d.delays(pointing_direction[tile], self.HBAT_antenna_positions[tile])
+            delays[tile] = d.delays(
+                pointing_direction[tile], self.HBAT_antenna_positions[tile]
+            )
 
         return delays
 
     @TimeIt()
-    def _compute_weights(self, pointing_direction: numpy.array, timestamp: datetime.datetime) -> numpy.array:
+    def _compute_weights(
+        self, pointing_direction: numpy.array, timestamp: datetime.datetime
+    ) -> numpy.array:
         """
         Uploads beam weights based on a given pointing direction 2D array (96 tiles x 3 parameters)
         """
@@ -106,23 +120,31 @@ class TileBeam(BeamDevice):
         return bf_delay_steps
 
     @TimeIt()
-    def _apply_weights(self, pointing_direction: numpy.array, timestamp: datetime.datetime, bf_delay_steps: numpy.array):
+    def _apply_weights(
+        self,
+        pointing_direction: numpy.array,
+        timestamp: datetime.datetime,
+        bf_delay_steps: numpy.array,
+    ):
         # Write weights to RECV through the AntennaToRecvMapper
-        self.antennafield_proxy.HBAT_bf_delay_steps_RW = bf_delay_steps.reshape(self._nr_tiles, N_elements * N_pol)
+        self.antennafield_proxy.HBAT_bf_delay_steps_RW = bf_delay_steps.reshape(
+            self._nr_tiles, N_elements * N_pol
+        )
 
         # Record where we now point to, now that we've updated the weights.
         # Only the entries within the mask have been updated
         mask = self.antennafield_proxy.ANT_mask_RW
         for rcu in range(self._nr_tiles):
             if mask[rcu]:
-                self._pointing_direction_r[rcu]    = pointing_direction[rcu]
-                self._pointing_timestamp_r[rcu]    = timestamp.timestamp()
+                self._pointing_direction_r[rcu] = pointing_direction[rcu]
+                self._pointing_timestamp_r[rcu] = timestamp.timestamp()
         logger.info("Pointing direction updated")
 
     # --------
     # Commands
     # --------
 
+
 # ----------
 # Run server
 # ----------
diff --git a/tangostationcontrol/tangostationcontrol/devices/unb2.py b/tangostationcontrol/tangostationcontrol/devices/unb2.py
index 08a316ba9f034dc5ccada13a9c7078a1e31bbeb7..2176be920f510caa4493b9b2afcf0dbc83e8817b 100644
--- a/tangostationcontrol/tangostationcontrol/devices/unb2.py
+++ b/tangostationcontrol/tangostationcontrol/devices/unb2.py
@@ -1,33 +1,34 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the SDP project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ SDP Device Server for LOFAR2.0
 
 """
 
-# PyTango imports
-from tango.server import command, attribute, device_property
+import numpy
 from tango import AttrWriteType, DebugIt
-# Additional import
 
+# PyTango imports
+from tango.server import command, attribute, device_property
 from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
-from tangostationcontrol.common.constants import N_unb, N_fpga, N_ddr, N_qsfp, DEFAULT_POLLING_PERIOD
+from tangostationcontrol.common.constants import (
+    N_unb,
+    N_fpga,
+    N_ddr,
+    N_qsfp,
+    DEFAULT_POLLING_PERIOD,
+)
 from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.common.lofar_logging import device_logging_to_python
 from tangostationcontrol.common.states import DEFAULT_COMMAND_STATES
 from tangostationcontrol.devices.device_decorators import only_in_states
 from tangostationcontrol.devices.opcua_device import OPCUADevice
 
-import numpy
+# Additional import
 
 __all__ = ["UNB2", "main"]
 
+
 @device_logging_to_python()
 class UNB2(OPCUADevice):
     # -----------------
@@ -35,162 +36,373 @@ class UNB2(OPCUADevice):
     # -----------------
 
     UNB2_mask_RW_default = device_property(
-        dtype='DevVarBooleanArray',
-        mandatory=False,
-        default_value=[True] * N_unb
+        dtype="DevVarBooleanArray", mandatory=False, default_value=[True] * N_unb
     )
 
     UNB2TR_monitor_rate_RW_default = device_property(
-        dtype='DevLong64',
-        mandatory=False,
-        default_value=1
+        dtype="DevLong64", mandatory=False, default_value=1
     )
 
     # ----- Timing values
 
     UNB2_On_Off_timeout = device_property(
-        doc='Maximum amount of time to wait after turning Uniboard(s) on or off',
-        dtype='DevFloat',
+        doc="Maximum amount of time to wait after turning Uniboard(s) on or off",
+        dtype="DevFloat",
         mandatory=False,
-        default_value=10.0
+        default_value=10.0,
     )
 
     # ----------
     # Attributes
     # ----------
 
-    TRANSLATOR_DEFAULT_SETTINGS = [
-        'UNB2_mask_RW',
-        'UNB2TR_monitor_rate_RW'
-    ]
-
-    UNB2TR_I2C_bus_DDR4_error_R  = AttributeWrapper(comms_annotation=["UNB2TR_I2C_bus_DDR4_error_R"], datatype=numpy.int64, dims=(N_unb, N_fpga))
-    UNB2TR_I2C_bus_error_R       = AttributeWrapper(comms_annotation=["UNB2TR_I2C_bus_error_R"    ], datatype=numpy.int64, dims=(N_unb,))
-    UNB2TR_I2C_bus_FPGA_PS_error_R = AttributeWrapper(comms_annotation=["UNB2TR_I2C_bus_FPGA_PS_error_R"], datatype=numpy.int64, dims=(N_unb, N_fpga))
-    UNB2TR_I2C_bus_PS_error_R    = AttributeWrapper(comms_annotation=["UNB2TR_I2C_bus_PS_error_R" ], datatype=numpy.int64, dims=(N_unb,))
-    UNB2TR_I2C_bus_QSFP_error_R  = AttributeWrapper(comms_annotation=["UNB2TR_I2C_bus_QSFP_error_R"], datatype=numpy.int64, dims=(N_unb, N_qsfp))
-    UNB2TR_monitor_rate_RW       = AttributeWrapper(comms_annotation=["UNB2TR_monitor_rate_RW"    ],datatype=numpy.int64  , access=AttrWriteType.READ_WRITE)
-    UNB2TR_translator_busy_R     = AttributeWrapper(comms_annotation=["UNB2TR_translator_busy_R"  ],datatype=bool)
-    UNB2_DC_DC_48V_12V_IOUT_R    = AttributeWrapper(comms_annotation=["UNB2_DC_DC_48V_12V_IOUT_R" ], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_DC_DC_48V_12V_TEMP_R    = AttributeWrapper(comms_annotation=["UNB2_DC_DC_48V_12V_TEMP_R" ], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_DC_DC_48V_12V_VIN_R     = AttributeWrapper(comms_annotation=["UNB2_DC_DC_48V_12V_VIN_R"  ], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_DC_DC_48V_12V_VOUT_R    = AttributeWrapper(comms_annotation=["UNB2_DC_DC_48V_12V_VOUT_R" ], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_FPGA_DDR4_SLOT_TEMP_R   = AttributeWrapper(comms_annotation=["UNB2_FPGA_DDR4_SLOT_TEMP_R"], datatype=numpy.float64, dims=(N_unb, N_fpga * N_ddr))
-    UNB2_FPGA_POL_CORE_IOUT_R    = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_CORE_IOUT_R" ], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_CORE_TEMP_R    = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_CORE_TEMP_R" ], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_CORE_VOUT_R    = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_CORE_VOUT_R" ], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_ERAM_IOUT_R    = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_ERAM_IOUT_R" ], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_ERAM_TEMP_R    = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_ERAM_TEMP_R" ], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_ERAM_VOUT_R    = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_ERAM_VOUT_R" ], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_HGXB_IOUT_R    = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_HGXB_IOUT_R" ], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_HGXB_TEMP_R    = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_HGXB_TEMP_R" ], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_HGXB_VOUT_R    = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_HGXB_VOUT_R" ], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_PGM_IOUT_R     = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_PGM_IOUT_R"  ], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_PGM_TEMP_R     = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_PGM_TEMP_R"  ], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_PGM_VOUT_R     = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_PGM_VOUT_R"  ], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_RXGXB_IOUT_R   = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_RXGXB_IOUT_R"], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_RXGXB_TEMP_R   = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_RXGXB_TEMP_R"], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_RXGXB_VOUT_R   = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_RXGXB_VOUT_R"], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_TXGXB_IOUT_R   = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_TXGXB_IOUT_R"], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_TXGXB_TEMP_R   = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_TXGXB_TEMP_R"], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_POL_TXGXB_VOUT_R   = AttributeWrapper(comms_annotation=["UNB2_FPGA_POL_TXGXB_VOUT_R"], datatype=numpy.float64, dims=(N_unb, N_fpga))
-    UNB2_FPGA_QSFP_CAGE_LOS_R    = AttributeWrapper(comms_annotation=["UNB2_FPGA_QSFP_CAGE_LOS_R" ], datatype=numpy.int64, dims=(N_unb, N_qsfp))
-    UNB2_FPGA_QSFP_CAGE_TEMP_R   = AttributeWrapper(comms_annotation=["UNB2_FPGA_QSFP_CAGE_TEMP_R"], datatype=numpy.float64, dims=(N_unb, N_qsfp))
-    UNB2_Front_Panel_LED_colour_R = AttributeWrapper(comms_annotation=["UNB2_Front_Panel_LED_colour_R"], datatype=numpy.int64, dims=(N_unb,))
-    UNB2_Front_Panel_LED_colour_RW = AttributeWrapper(comms_annotation=["UNB2_Front_Panel_LED_colour_RW"], datatype=numpy.int64, dims=(N_unb,), access=AttrWriteType.READ_WRITE)
-    UNB2_mask_RW                 = AttributeWrapper(comms_annotation=["UNB2_mask_RW"              ], datatype=bool, dims=(N_unb,), access=AttrWriteType.READ_WRITE)
-    UNB2_PCB_ID_R                = AttributeWrapper(comms_annotation=["UNB2_PCB_ID_R"             ], datatype=numpy.int64, dims=(N_unb,))
-    UNB2_PCB_number_R            = AttributeWrapper(comms_annotation=["UNB2_PCB_number_R"         ], datatype=str, dims=(N_unb,))
-    UNB2_PCB_version_R           = AttributeWrapper(comms_annotation=["UNB2_PCB_version_R"        ], datatype=str, dims=(N_unb,))
-    UNB2_POL_CLOCK_IOUT_R        = AttributeWrapper(comms_annotation=["UNB2_POL_CLOCK_IOUT_R"     ], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_POL_CLOCK_TEMP_R        = AttributeWrapper(comms_annotation=["UNB2_POL_CLOCK_TEMP_R"     ], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_POL_CLOCK_VOUT_R        = AttributeWrapper(comms_annotation=["UNB2_POL_CLOCK_VOUT_R"     ], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_POL_QSFP_N01_IOUT_R     = AttributeWrapper(comms_annotation=["UNB2_POL_QSFP_N01_IOUT_R"  ], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_POL_QSFP_N01_TEMP_R     = AttributeWrapper(comms_annotation=["UNB2_POL_QSFP_N01_TEMP_R"  ], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_POL_QSFP_N01_VOUT_R     = AttributeWrapper(comms_annotation=["UNB2_POL_QSFP_N01_VOUT_R"  ], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_POL_QSFP_N23_IOUT_R     = AttributeWrapper(comms_annotation=["UNB2_POL_QSFP_N23_IOUT_R"  ], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_POL_QSFP_N23_TEMP_R     = AttributeWrapper(comms_annotation=["UNB2_POL_QSFP_N23_TEMP_R"  ], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_POL_QSFP_N23_VOUT_R     = AttributeWrapper(comms_annotation=["UNB2_POL_QSFP_N23_VOUT_R"  ], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_POL_SWITCH_1V2_IOUT_R   = AttributeWrapper(comms_annotation=["UNB2_POL_SWITCH_1V2_IOUT_R"], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_POL_SWITCH_1V2_TEMP_R   = AttributeWrapper(comms_annotation=["UNB2_POL_SWITCH_1V2_TEMP_R"], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_POL_SWITCH_1V2_VOUT_R   = AttributeWrapper(comms_annotation=["UNB2_POL_SWITCH_1V2_VOUT_R"], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_POL_SWITCH_PHY_IOUT_R   = AttributeWrapper(comms_annotation=["UNB2_POL_SWITCH_PHY_IOUT_R"], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_POL_SWITCH_PHY_TEMP_R   = AttributeWrapper(comms_annotation=["UNB2_POL_SWITCH_PHY_TEMP_R"], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_POL_SWITCH_PHY_VOUT_R   = AttributeWrapper(comms_annotation=["UNB2_POL_SWITCH_PHY_VOUT_R"], datatype=numpy.float64, dims=(N_unb,))
-    UNB2_PWR_on_R                = AttributeWrapper(comms_annotation=["UNB2_PWR_on_R"             ], datatype=bool, dims=(N_unb,))
+    TRANSLATOR_DEFAULT_SETTINGS = ["UNB2_mask_RW", "UNB2TR_monitor_rate_RW"]
+
+    UNB2TR_I2C_bus_DDR4_error_R = AttributeWrapper(
+        comms_annotation=["UNB2TR_I2C_bus_DDR4_error_R"],
+        datatype=numpy.int64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2TR_I2C_bus_error_R = AttributeWrapper(
+        comms_annotation=["UNB2TR_I2C_bus_error_R"], datatype=numpy.int64, dims=(N_unb,)
+    )
+    UNB2TR_I2C_bus_FPGA_PS_error_R = AttributeWrapper(
+        comms_annotation=["UNB2TR_I2C_bus_FPGA_PS_error_R"],
+        datatype=numpy.int64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2TR_I2C_bus_PS_error_R = AttributeWrapper(
+        comms_annotation=["UNB2TR_I2C_bus_PS_error_R"],
+        datatype=numpy.int64,
+        dims=(N_unb,),
+    )
+    UNB2TR_I2C_bus_QSFP_error_R = AttributeWrapper(
+        comms_annotation=["UNB2TR_I2C_bus_QSFP_error_R"],
+        datatype=numpy.int64,
+        dims=(N_unb, N_qsfp),
+    )
+    UNB2TR_monitor_rate_RW = AttributeWrapper(
+        comms_annotation=["UNB2TR_monitor_rate_RW"],
+        datatype=numpy.int64,
+        access=AttrWriteType.READ_WRITE,
+    )
+    UNB2TR_translator_busy_R = AttributeWrapper(
+        comms_annotation=["UNB2TR_translator_busy_R"], datatype=bool
+    )
+    UNB2_DC_DC_48V_12V_IOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_DC_DC_48V_12V_IOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_DC_DC_48V_12V_TEMP_R = AttributeWrapper(
+        comms_annotation=["UNB2_DC_DC_48V_12V_TEMP_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_DC_DC_48V_12V_VIN_R = AttributeWrapper(
+        comms_annotation=["UNB2_DC_DC_48V_12V_VIN_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_DC_DC_48V_12V_VOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_DC_DC_48V_12V_VOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_FPGA_DDR4_SLOT_TEMP_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_DDR4_SLOT_TEMP_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga * N_ddr),
+    )
+    UNB2_FPGA_POL_CORE_IOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_CORE_IOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_CORE_TEMP_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_CORE_TEMP_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_CORE_VOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_CORE_VOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_ERAM_IOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_ERAM_IOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_ERAM_TEMP_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_ERAM_TEMP_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_ERAM_VOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_ERAM_VOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_HGXB_IOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_HGXB_IOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_HGXB_TEMP_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_HGXB_TEMP_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_HGXB_VOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_HGXB_VOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_PGM_IOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_PGM_IOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_PGM_TEMP_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_PGM_TEMP_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_PGM_VOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_PGM_VOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_RXGXB_IOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_RXGXB_IOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_RXGXB_TEMP_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_RXGXB_TEMP_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_RXGXB_VOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_RXGXB_VOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_TXGXB_IOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_TXGXB_IOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_TXGXB_TEMP_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_TXGXB_TEMP_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_POL_TXGXB_VOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_POL_TXGXB_VOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_fpga),
+    )
+    UNB2_FPGA_QSFP_CAGE_LOS_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_QSFP_CAGE_LOS_R"],
+        datatype=numpy.int64,
+        dims=(N_unb, N_qsfp),
+    )
+    UNB2_FPGA_QSFP_CAGE_TEMP_R = AttributeWrapper(
+        comms_annotation=["UNB2_FPGA_QSFP_CAGE_TEMP_R"],
+        datatype=numpy.float64,
+        dims=(N_unb, N_qsfp),
+    )
+    UNB2_Front_Panel_LED_colour_R = AttributeWrapper(
+        comms_annotation=["UNB2_Front_Panel_LED_colour_R"],
+        datatype=numpy.int64,
+        dims=(N_unb,),
+    )
+    UNB2_Front_Panel_LED_colour_RW = AttributeWrapper(
+        comms_annotation=["UNB2_Front_Panel_LED_colour_RW"],
+        datatype=numpy.int64,
+        dims=(N_unb,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    UNB2_mask_RW = AttributeWrapper(
+        comms_annotation=["UNB2_mask_RW"],
+        datatype=bool,
+        dims=(N_unb,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    UNB2_PCB_ID_R = AttributeWrapper(
+        comms_annotation=["UNB2_PCB_ID_R"], datatype=numpy.int64, dims=(N_unb,)
+    )
+    UNB2_PCB_number_R = AttributeWrapper(
+        comms_annotation=["UNB2_PCB_number_R"], datatype=str, dims=(N_unb,)
+    )
+    UNB2_PCB_version_R = AttributeWrapper(
+        comms_annotation=["UNB2_PCB_version_R"], datatype=str, dims=(N_unb,)
+    )
+    UNB2_POL_CLOCK_IOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_POL_CLOCK_IOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_POL_CLOCK_TEMP_R = AttributeWrapper(
+        comms_annotation=["UNB2_POL_CLOCK_TEMP_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_POL_CLOCK_VOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_POL_CLOCK_VOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_POL_QSFP_N01_IOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_POL_QSFP_N01_IOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_POL_QSFP_N01_TEMP_R = AttributeWrapper(
+        comms_annotation=["UNB2_POL_QSFP_N01_TEMP_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_POL_QSFP_N01_VOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_POL_QSFP_N01_VOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_POL_QSFP_N23_IOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_POL_QSFP_N23_IOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_POL_QSFP_N23_TEMP_R = AttributeWrapper(
+        comms_annotation=["UNB2_POL_QSFP_N23_TEMP_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_POL_QSFP_N23_VOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_POL_QSFP_N23_VOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_POL_SWITCH_1V2_IOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_POL_SWITCH_1V2_IOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_POL_SWITCH_1V2_TEMP_R = AttributeWrapper(
+        comms_annotation=["UNB2_POL_SWITCH_1V2_TEMP_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_POL_SWITCH_1V2_VOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_POL_SWITCH_1V2_VOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_POL_SWITCH_PHY_IOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_POL_SWITCH_PHY_IOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_POL_SWITCH_PHY_TEMP_R = AttributeWrapper(
+        comms_annotation=["UNB2_POL_SWITCH_PHY_TEMP_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_POL_SWITCH_PHY_VOUT_R = AttributeWrapper(
+        comms_annotation=["UNB2_POL_SWITCH_PHY_VOUT_R"],
+        datatype=numpy.float64,
+        dims=(N_unb,),
+    )
+    UNB2_PWR_on_R = AttributeWrapper(
+        comms_annotation=["UNB2_PWR_on_R"], datatype=bool, dims=(N_unb,)
+    )
 
     # ----------
     # Summarising Attributes
     # ----------
-    UNB2_error_R                  = attribute(dtype=(bool,), max_dim_x=N_unb, fisallowed="is_attribute_access_allowed")
+    UNB2_error_R = attribute(
+        dtype=(bool,), max_dim_x=N_unb, fisallowed="is_attribute_access_allowed"
+    )
 
     def read_UNB2_error_R(self):
         return self.read_attribute("UNB2_mask_RW") & (
-                 (self.read_attribute("UNB2TR_I2C_bus_error_R") > 0)
-               | self.alarm_val("UNB2_PCB_ID_R")
-               | (self.read_attribute("UNB2TR_I2C_bus_DDR4_error_R") > 0).any(axis=1)
-               | (self.read_attribute("UNB2TR_I2C_bus_FPGA_PS_error_R") > 0).any(axis=1)
-               | (self.read_attribute("UNB2TR_I2C_bus_QSFP_error_R") > 0).any(axis=1)
-               )
+            (self.read_attribute("UNB2TR_I2C_bus_error_R") > 0)
+            | self.alarm_val("UNB2_PCB_ID_R")
+            | (self.read_attribute("UNB2TR_I2C_bus_DDR4_error_R") > 0).any(axis=1)
+            | (self.read_attribute("UNB2TR_I2C_bus_FPGA_PS_error_R") > 0).any(axis=1)
+            | (self.read_attribute("UNB2TR_I2C_bus_QSFP_error_R") > 0).any(axis=1)
+        )
 
-    UNB2_IOUT_error_R          = attribute(dtype=(bool,), max_dim_x=N_unb, fisallowed="is_attribute_access_allowed")
-    UNB2_TEMP_error_R          = attribute(dtype=(bool,), max_dim_x=N_unb, fisallowed="is_attribute_access_allowed", polling_period=DEFAULT_POLLING_PERIOD)
-    UNB2_VOUT_error_R          = attribute(dtype=(bool,), max_dim_x=N_unb, fisallowed="is_attribute_access_allowed")
+    UNB2_IOUT_error_R = attribute(
+        dtype=(bool,), max_dim_x=N_unb, fisallowed="is_attribute_access_allowed"
+    )
+    UNB2_TEMP_error_R = attribute(
+        dtype=(bool,),
+        max_dim_x=N_unb,
+        fisallowed="is_attribute_access_allowed",
+        polling_period=DEFAULT_POLLING_PERIOD,
+    )
+    UNB2_VOUT_error_R = attribute(
+        dtype=(bool,), max_dim_x=N_unb, fisallowed="is_attribute_access_allowed"
+    )
 
     def read_UNB2_IOUT_error_R(self):
         return self.read_attribute("UNB2_mask_RW") & (
-                 self.alarm_val("UNB2_DC_DC_48V_12V_IOUT_R")
-               | self.alarm_val("UNB2_FPGA_POL_CORE_IOUT_R").any(axis=1)
-               | self.alarm_val("UNB2_FPGA_POL_ERAM_IOUT_R").any(axis=1)
-               | self.alarm_val("UNB2_FPGA_POL_HGXB_IOUT_R").any(axis=1)
-               | self.alarm_val("UNB2_FPGA_POL_PGM_IOUT_R").any(axis=1)
-               | self.alarm_val("UNB2_FPGA_POL_RXGXB_IOUT_R").any(axis=1)
-               | self.alarm_val("UNB2_FPGA_POL_TXGXB_IOUT_R").any(axis=1)
-               | self.alarm_val("UNB2_POL_CLOCK_IOUT_R")
-               | self.alarm_val("UNB2_POL_QSFP_N01_IOUT_R")
-               | self.alarm_val("UNB2_POL_QSFP_N23_IOUT_R")
-               | self.alarm_val("UNB2_POL_SWITCH_1V2_IOUT_R")
-               | self.alarm_val("UNB2_POL_SWITCH_PHY_IOUT_R")
-               )
+            self.alarm_val("UNB2_DC_DC_48V_12V_IOUT_R")
+            | self.alarm_val("UNB2_FPGA_POL_CORE_IOUT_R").any(axis=1)
+            | self.alarm_val("UNB2_FPGA_POL_ERAM_IOUT_R").any(axis=1)
+            | self.alarm_val("UNB2_FPGA_POL_HGXB_IOUT_R").any(axis=1)
+            | self.alarm_val("UNB2_FPGA_POL_PGM_IOUT_R").any(axis=1)
+            | self.alarm_val("UNB2_FPGA_POL_RXGXB_IOUT_R").any(axis=1)
+            | self.alarm_val("UNB2_FPGA_POL_TXGXB_IOUT_R").any(axis=1)
+            | self.alarm_val("UNB2_POL_CLOCK_IOUT_R")
+            | self.alarm_val("UNB2_POL_QSFP_N01_IOUT_R")
+            | self.alarm_val("UNB2_POL_QSFP_N23_IOUT_R")
+            | self.alarm_val("UNB2_POL_SWITCH_1V2_IOUT_R")
+            | self.alarm_val("UNB2_POL_SWITCH_PHY_IOUT_R")
+        )
 
     def read_UNB2_TEMP_error_R(self):
         # Don't apply the mask here --- we always want to know if things get too hot!
-        return ( self.alarm_val("UNB2_DC_DC_48V_12V_TEMP_R")
-               | self.alarm_val("UNB2_FPGA_POL_CORE_TEMP_R").any(axis=1)
-               | self.alarm_val("UNB2_FPGA_POL_ERAM_TEMP_R").any(axis=1)
-               | self.alarm_val("UNB2_FPGA_POL_HGXB_TEMP_R").any(axis=1)
-               | self.alarm_val("UNB2_FPGA_POL_PGM_TEMP_R").any(axis=1)
-               | self.alarm_val("UNB2_FPGA_POL_RXGXB_TEMP_R").any(axis=1)
-               | self.alarm_val("UNB2_FPGA_POL_TXGXB_TEMP_R").any(axis=1)
-               | self.alarm_val("UNB2_POL_CLOCK_TEMP_R")
-               | self.alarm_val("UNB2_POL_QSFP_N01_TEMP_R")
-               | self.alarm_val("UNB2_POL_QSFP_N23_TEMP_R")
-               | self.alarm_val("UNB2_POL_SWITCH_1V2_TEMP_R")
-               | self.alarm_val("UNB2_POL_SWITCH_PHY_TEMP_R")
-               )
+        return (
+            self.alarm_val("UNB2_DC_DC_48V_12V_TEMP_R")
+            | self.alarm_val("UNB2_FPGA_POL_CORE_TEMP_R").any(axis=1)
+            | self.alarm_val("UNB2_FPGA_POL_ERAM_TEMP_R").any(axis=1)
+            | self.alarm_val("UNB2_FPGA_POL_HGXB_TEMP_R").any(axis=1)
+            | self.alarm_val("UNB2_FPGA_POL_PGM_TEMP_R").any(axis=1)
+            | self.alarm_val("UNB2_FPGA_POL_RXGXB_TEMP_R").any(axis=1)
+            | self.alarm_val("UNB2_FPGA_POL_TXGXB_TEMP_R").any(axis=1)
+            | self.alarm_val("UNB2_POL_CLOCK_TEMP_R")
+            | self.alarm_val("UNB2_POL_QSFP_N01_TEMP_R")
+            | self.alarm_val("UNB2_POL_QSFP_N23_TEMP_R")
+            | self.alarm_val("UNB2_POL_SWITCH_1V2_TEMP_R")
+            | self.alarm_val("UNB2_POL_SWITCH_PHY_TEMP_R")
+        )
 
     def read_UNB2_VOUT_error_R(self):
         return self.read_attribute("UNB2_mask_RW") & (
-                 self.alarm_val("UNB2_DC_DC_48V_12V_VOUT_R")
-               | self.alarm_val("UNB2_FPGA_POL_CORE_VOUT_R").any(axis=1)
-               | self.alarm_val("UNB2_FPGA_POL_ERAM_VOUT_R").any(axis=1)
-               | self.alarm_val("UNB2_FPGA_POL_HGXB_VOUT_R").any(axis=1)
-               | self.alarm_val("UNB2_FPGA_POL_PGM_VOUT_R").any(axis=1)
-               | self.alarm_val("UNB2_FPGA_POL_RXGXB_VOUT_R").any(axis=1)
-               | self.alarm_val("UNB2_FPGA_POL_TXGXB_VOUT_R").any(axis=1)
-               | self.alarm_val("UNB2_POL_CLOCK_VOUT_R")
-               | self.alarm_val("UNB2_POL_QSFP_N01_VOUT_R")
-               | self.alarm_val("UNB2_POL_QSFP_N23_VOUT_R")
-               | self.alarm_val("UNB2_POL_SWITCH_1V2_VOUT_R")
-               | self.alarm_val("UNB2_POL_SWITCH_PHY_VOUT_R")
-               )
+            self.alarm_val("UNB2_DC_DC_48V_12V_VOUT_R")
+            | self.alarm_val("UNB2_FPGA_POL_CORE_VOUT_R").any(axis=1)
+            | self.alarm_val("UNB2_FPGA_POL_ERAM_VOUT_R").any(axis=1)
+            | self.alarm_val("UNB2_FPGA_POL_HGXB_VOUT_R").any(axis=1)
+            | self.alarm_val("UNB2_FPGA_POL_PGM_VOUT_R").any(axis=1)
+            | self.alarm_val("UNB2_FPGA_POL_RXGXB_VOUT_R").any(axis=1)
+            | self.alarm_val("UNB2_FPGA_POL_TXGXB_VOUT_R").any(axis=1)
+            | self.alarm_val("UNB2_POL_CLOCK_VOUT_R")
+            | self.alarm_val("UNB2_POL_QSFP_N01_VOUT_R")
+            | self.alarm_val("UNB2_POL_QSFP_N23_VOUT_R")
+            | self.alarm_val("UNB2_POL_SWITCH_1V2_VOUT_R")
+            | self.alarm_val("UNB2_POL_SWITCH_PHY_VOUT_R")
+        )
 
     # --------
     # overloaded functions
     # --------
 
     def _prepare_hardware(self):
-        """ Initialise the UNB2 hardware. """
+        """Initialise the UNB2 hardware."""
 
         # Cycle UNB2s
         self.UNB2_off()
@@ -199,7 +411,7 @@ class UNB2(OPCUADevice):
         self.wait_attribute("UNB2TR_translator_busy_R", False, self.UNB2_On_Off_timeout)
 
     def _disable_hardware(self):
-        """ Disable the UNB2 hardware. """
+        """Disable the UNB2 hardware."""
 
         # Save actual mask values
         UNB2_mask = self.proxy.UNB2_mask_RW
@@ -235,6 +447,7 @@ class UNB2(OPCUADevice):
         """
         self.opcua_connection.call_method(["UNB2_on"])
 
+
 # ----------
 # Run server
 # ----------
diff --git a/tangostationcontrol/tangostationcontrol/examples/HW_device_template.py b/tangostationcontrol/tangostationcontrol/examples/HW_device_template.py
index de311503cbec99264e72178884c5d0e458d1ac3c..474584f35011de442ee28c6ccadc9c6548442717 100644
--- a/tangostationcontrol/tangostationcontrol/examples/HW_device_template.py
+++ b/tangostationcontrol/tangostationcontrol/examples/HW_device_template.py
@@ -1,21 +1,14 @@
-# -*- coding: utf-8 -*-
-#
-# This file wraps around a tango device class and provides a number of abstractions useful for hardware devices. It works together
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-"""
-
-"""
+import logging
 
 # PyTango imports
 from tango.server import run
-# Additional import
-
 from tangostationcontrol.devices.lofar_device import LOFARDevice
 
-import logging
+# Additional import
+
 logger = logging.getLogger()
 
 __all__ = ["HWDev"]
@@ -57,24 +50,24 @@ class HWDev(LOFARDevice):
     # overloaded functions
     # --------
     def configure_for_fault(self):
-        """ user code here. is called when the state is set to FAULT """
+        """user code here. is called when the state is set to FAULT"""
         pass
 
     def configure_for_off(self):
-        """ user code here. is called when the state is set to OFF """
+        """user code here. is called when the state is set to OFF"""
         pass
 
     def configure_for_on(self):
-        """ user code here. is called when the state is set to ON """
+        """user code here. is called when the state is set to ON"""
 
         pass
 
     def configure_for_standby(self):
-        """ user code here. is called when the state is set to STANDBY """
+        """user code here. is called when the state is set to STANDBY"""
         pass
 
     def configure_for_initialise(self):
-        """ user code here. is called when the sate is set to INIT """
+        """user code here. is called when the sate is set to INIT"""
         pass
 
 
diff --git a/tangostationcontrol/tangostationcontrol/examples/__init__.py b/tangostationcontrol/tangostationcontrol/examples/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/examples/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/examples/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/examples/load_from_disk/__init__.py b/tangostationcontrol/tangostationcontrol/examples/load_from_disk/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/examples/load_from_disk/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/examples/load_from_disk/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/examples/load_from_disk/ini_client.py b/tangostationcontrol/tangostationcontrol/examples/load_from_disk/ini_client.py
index 34a5ec1c053d08633fc3b5e0aa2c9a2b6465e8f7..d08462cb297474d08180ec0ab7e432ab90673f0c 100644
--- a/tangostationcontrol/tangostationcontrol/examples/load_from_disk/ini_client.py
+++ b/tangostationcontrol/tangostationcontrol/examples/load_from_disk/ini_client.py
@@ -1,13 +1,16 @@
-from tangostationcontrol.clients.comms_client import CommClient
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import configparser
+import logging
+
 import numpy
+from tangostationcontrol.clients.comms_client import CommClient
 
-import logging
 logger = logging.getLogger()
 
 __all__ = ["IniClient"]
 
-
 NUMPY_TO_INI_DICT = {
     numpy.int64: int,
     numpy.double: float,
@@ -63,7 +66,9 @@ class IniClient(CommClient):
         return True  # if successful, return true. otherwise return false
 
     def disconnect(self):
-        self.connected = False  # always force a reconnect, regardless of a successful disconnect
+        self.connected = (
+            False  # always force a reconnect, regardless of a successful disconnect
+        )
         logger.debug("disconnected from the 'client' ")
 
     def _setup_annotation(self, annotation):
@@ -84,16 +89,17 @@ class IniClient(CommClient):
 
         # as this is an example, just print the annotation
         logger.debug("annotation: {}".format(annotation))
-        name = annotation.get('name')
+        name = annotation.get("name")
         if name is None:
-            ValueError("ini client requires a variable `name` in the annotation to set/get")
-        section = annotation.get('section')
+            raise ValueError(
+                "ini client requires a variable `name` in the annotation to set/get"
+            )
+        section = annotation.get("section")
         if section is None:
             ValueError("requires a `section` specified in the annotation to open")
 
         return section, name
 
-
     def _setup_value_conversion(self, attribute):
         """
         gives the client access to the AttributeWrapper object in order to access all
@@ -134,7 +140,7 @@ class IniClient(CommClient):
 
             self.config.read_file(self.config_file)
             self.config.set(section, name, write_value)
-            fp = open(self.filename, 'w')
+            fp = open(self.filename, "w")
             self.config.write(fp)
 
         return read_function, write_function
@@ -152,11 +158,14 @@ class IniClient(CommClient):
         dim_y, dim_x, dtype = self._setup_value_conversion(attribute)
 
         # configure and return the read/write functions
-        read_function, write_function = self._setup_mapping(name, section, dtype, dim_y, dim_x)
+        read_function, write_function = self._setup_mapping(
+            name, section, dtype, dim_y, dim_x
+        )
 
         # return the read/write functions
         return read_function, write_function
 
+
 def data_handler(string, dtype):
     value = []
 
@@ -169,7 +178,11 @@ def data_handler(string, dtype):
             elif "False" == i:
                 value.append(False)
             else:
-                raise ValueError("String to bool failed. String is not True/False, but is: '{}'".format(i))
+                raise ValueError(
+                    "String to bool failed. String is not True/False, but is: '{}'".format(
+                        i
+                    )
+                )
 
         value = dtype(value)
 
@@ -187,7 +200,6 @@ def data_handler(string, dtype):
             val = dtype(i)
             value.append(val)
 
-
         # convert values from builtin type to numpy type
         value = dtype(value)
 
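The data_handler above converts a comma-separated ini value back into a numpy array of the requested dtype. A minimal standalone sketch of that conversion, using only configparser and numpy (the section and option names mirror the example file written by ini_device.py, the values are illustrative):

# Sketch only: reproduces the numeric branch of data_handler outside the client.
import configparser

import numpy

config = configparser.ConfigParser()
config.read_string("[spectrum]\ndouble_spectrum_R = 1.2, 2.3, 3.4, 4.5\n")

raw = config.get("spectrum", "double_spectrum_R")
values = numpy.array([numpy.double(item) for item in raw.split(",")])

assert values.shape == (4,)
assert values[0] == numpy.double(1.2)
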
diff --git a/tangostationcontrol/tangostationcontrol/examples/load_from_disk/ini_device.py b/tangostationcontrol/tangostationcontrol/examples/load_from_disk/ini_device.py
index e9d7f228ae308b729cfcdcbb7cfdb64863fe7476..43f0c238b430fa59c675939dd89fdc0e9318403e 100644
--- a/tangostationcontrol/tangostationcontrol/examples/load_from_disk/ini_device.py
+++ b/tangostationcontrol/tangostationcontrol/examples/load_from_disk/ini_device.py
@@ -1,59 +1,49 @@
-# -*- coding: utf-8 -*-
-#
-# This file wraps around a tango device class and provides a number of abstractions useful for hardware devices. It works together
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-"""
+import configparser
+import logging
 
-"""
+import numpy
+from tango import AttrWriteType
 
 # PyTango imports
 from tango.server import run
-from tango import AttrWriteType
-
-import configparser
-import numpy
 
 # Additional import
 from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
 from tangostationcontrol.devices.lofar_device import LOFARDevice
 from tangostationcontrol.examples.load_from_disk.ini_client import IniClient
 
-import logging
 logger = logging.getLogger()
 
-
 __all__ = ["IniDevice"]
 
 
 def write_ini_file(filename):
-    with open(filename, 'w') as configfile:
-
+    with open(filename, "w") as configfile:
         config = configparser.ConfigParser()
-        config['scalar'] = {}
-        config['scalar']['double_scalar_R'] = '1.2'
-        config['scalar']['bool_scalar_R'] = 'True'
-        config['scalar']['int_scalar_R'] = '5'
-        config['scalar']['str_scalar_R'] = 'this is a test'
-
-        config['spectrum'] = {}
-        config['spectrum']['double_spectrum_R'] = '1.2, 2.3, 3.4, 4.5'
-        config['spectrum']['bool_spectrum_R'] = 'True, True, False, False'
-        config['spectrum']['int_spectrum_R'] = '1, 2, 3, 4'
-        config['spectrum']['str_spectrum_R'] = '"a", "b", "c", "d"'
-
-        config['image'] = {}
-        config['image']['double_image_R'] = '1.2, 2.3, 3.4, 4.5, 5.6, 6.7'
-        config['image']['bool_image_R'] = 'True, True, False, False, True, False'
-        config['image']['int_image_R'] = '1, 2, 3, 4, 5, 6'
-        config['image']['str_image_R'] = '"a", "b", "c", "d", "e", "f"'
+        config["scalar"] = {}
+        config["scalar"]["double_scalar_R"] = "1.2"
+        config["scalar"]["bool_scalar_R"] = "True"
+        config["scalar"]["int_scalar_R"] = "5"
+        config["scalar"]["str_scalar_R"] = "this is a test"
+
+        config["spectrum"] = {}
+        config["spectrum"]["double_spectrum_R"] = "1.2, 2.3, 3.4, 4.5"
+        config["spectrum"]["bool_spectrum_R"] = "True, True, False, False"
+        config["spectrum"]["int_spectrum_R"] = "1, 2, 3, 4"
+        config["spectrum"]["str_spectrum_R"] = '"a", "b", "c", "d"'
+
+        config["image"] = {}
+        config["image"]["double_image_R"] = "1.2, 2.3, 3.4, 4.5, 5.6, 6.7"
+        config["image"]["bool_image_R"] = "True, True, False, False, True, False"
+        config["image"]["int_image_R"] = "1, 2, 3, 4, 5, 6"
+        config["image"]["str_image_R"] = '"a", "b", "c", "d", "e", "f"'
 
         config.write(configfile)
 
 
-
 class IniDevice(LOFARDevice):
     """
     This class is the minimal (read empty) implementation of a class using 'LOFARDevice'
@@ -69,38 +59,136 @@ class IniDevice(LOFARDevice):
     ...
 
     """
-    double_scalar_RW = AttributeWrapper(comms_annotation={"section": "scalar", "name": "double_scalar_RW"}, datatype=numpy.double, access=AttrWriteType.READ_WRITE)
-    double_scalar_R = AttributeWrapper(comms_annotation={"section": "scalar", "name": "double_scalar_R"}, datatype=numpy.double)
-    bool_scalar_RW = AttributeWrapper(comms_annotation={"section": "scalar", "name": "bool_scalar_RW"}, datatype=bool, access=AttrWriteType.READ_WRITE)
-    bool_scalar_R = AttributeWrapper(comms_annotation={"section": "scalar", "name": "bool_scalar_R"}, datatype=bool)
-    int_scalar_RW = AttributeWrapper(comms_annotation={"section": "scalar", "name": "int_scalar_RW"}, datatype=numpy.int64, access=AttrWriteType.READ_WRITE)
-    int_scalar_R = AttributeWrapper(comms_annotation={"section": "scalar", "name": "int_scalar_R"}, datatype=numpy.int64)
-    str_scalar_RW = AttributeWrapper(comms_annotation={"section": "scalar", "name": "str_scalar_RW"}, datatype=str, access=AttrWriteType.READ_WRITE)
-    str_scalar_R = AttributeWrapper(comms_annotation={"section": "scalar", "name": "str_scalar_R"}, datatype=str)
-
-    double_spectrum_RW = AttributeWrapper(comms_annotation={"section": "spectrum", "name": "double_spectrum_RW"}, datatype=numpy.double, dims=(4,), access=AttrWriteType.READ_WRITE)
-    double_spectrum_R = AttributeWrapper(comms_annotation={"section": "spectrum", "name": "double_spectrum_R"}, datatype=numpy.double, dims=(4,))
-    bool_spectrum_RW = AttributeWrapper(comms_annotation={"section": "spectrum", "name": "bool_spectrum_RW"}, datatype=bool, dims=(4,), access=AttrWriteType.READ_WRITE)
-    bool_spectrum_R = AttributeWrapper(comms_annotation={"section": "spectrum", "name": "bool_spectrum_R"}, datatype=bool, dims=(4,))
-    int_spectrum_RW = AttributeWrapper(comms_annotation={"section": "spectrum", "name": "int_spectrum_RW"}, datatype=numpy.int64, dims=(4,), access=AttrWriteType.READ_WRITE)
-    int_spectrum_R = AttributeWrapper(comms_annotation={"section": "spectrum", "name": "int_spectrum_R"}, datatype=numpy.int64, dims=(4,))
-    str_spectrum_RW = AttributeWrapper(comms_annotation={"section": "spectrum", "name": "str_spectrum_RW"}, datatype=str, dims=(4,), access=AttrWriteType.READ_WRITE)
-    str_spectrum_R = AttributeWrapper(comms_annotation={"section": "spectrum", "name": "str_spectrum_R"}, datatype=str, dims=(4,))
-
-    double_image_RW = AttributeWrapper(comms_annotation={"section": "image", "name": "double_image_RW"}, datatype=numpy.double, dims=(3, 2), access=AttrWriteType.READ_WRITE)
-    double_image_R = AttributeWrapper(comms_annotation={"section": "image", "name": "double_image_R"}, datatype=numpy.double, dims=(3, 2))
-    bool_image_RW = AttributeWrapper(comms_annotation={"section": "image", "name": "bool_image_RW"}, datatype=bool, dims=(3, 2), access=AttrWriteType.READ_WRITE)
-    bool_image_R = AttributeWrapper(comms_annotation={"section": "image", "name": "bool_image_R"}, datatype=bool, dims=(3, 2))
-    int_image_RW = AttributeWrapper(comms_annotation={"section": "image", "name": "int_image_RW"}, datatype=numpy.int64, dims=(3, 2), access=AttrWriteType.READ_WRITE)
-    int_image_R = AttributeWrapper(comms_annotation={"section": "image", "name": "int_image_R"}, datatype=numpy.int64, dims=(3, 2))
-    str_image_RW = AttributeWrapper(comms_annotation={"section": "image", "name": "str_image_RW"}, datatype=str, dims=(3, 2), access=AttrWriteType.READ_WRITE)
-    str_image_R = AttributeWrapper(comms_annotation={"section": "image", "name": "str_image_R"}, datatype=str, dims=(3, 2))
+    double_scalar_RW = AttributeWrapper(
+        comms_annotation={"section": "scalar", "name": "double_scalar_RW"},
+        datatype=numpy.double,
+        access=AttrWriteType.READ_WRITE,
+    )
+    double_scalar_R = AttributeWrapper(
+        comms_annotation={"section": "scalar", "name": "double_scalar_R"},
+        datatype=numpy.double,
+    )
+    bool_scalar_RW = AttributeWrapper(
+        comms_annotation={"section": "scalar", "name": "bool_scalar_RW"},
+        datatype=bool,
+        access=AttrWriteType.READ_WRITE,
+    )
+    bool_scalar_R = AttributeWrapper(
+        comms_annotation={"section": "scalar", "name": "bool_scalar_R"}, datatype=bool
+    )
+    int_scalar_RW = AttributeWrapper(
+        comms_annotation={"section": "scalar", "name": "int_scalar_RW"},
+        datatype=numpy.int64,
+        access=AttrWriteType.READ_WRITE,
+    )
+    int_scalar_R = AttributeWrapper(
+        comms_annotation={"section": "scalar", "name": "int_scalar_R"},
+        datatype=numpy.int64,
+    )
+    str_scalar_RW = AttributeWrapper(
+        comms_annotation={"section": "scalar", "name": "str_scalar_RW"},
+        datatype=str,
+        access=AttrWriteType.READ_WRITE,
+    )
+    str_scalar_R = AttributeWrapper(
+        comms_annotation={"section": "scalar", "name": "str_scalar_R"}, datatype=str
+    )
+
+    double_spectrum_RW = AttributeWrapper(
+        comms_annotation={"section": "spectrum", "name": "double_spectrum_RW"},
+        datatype=numpy.double,
+        dims=(4,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    double_spectrum_R = AttributeWrapper(
+        comms_annotation={"section": "spectrum", "name": "double_spectrum_R"},
+        datatype=numpy.double,
+        dims=(4,),
+    )
+    bool_spectrum_RW = AttributeWrapper(
+        comms_annotation={"section": "spectrum", "name": "bool_spectrum_RW"},
+        datatype=bool,
+        dims=(4,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    bool_spectrum_R = AttributeWrapper(
+        comms_annotation={"section": "spectrum", "name": "bool_spectrum_R"},
+        datatype=bool,
+        dims=(4,),
+    )
+    int_spectrum_RW = AttributeWrapper(
+        comms_annotation={"section": "spectrum", "name": "int_spectrum_RW"},
+        datatype=numpy.int64,
+        dims=(4,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    int_spectrum_R = AttributeWrapper(
+        comms_annotation={"section": "spectrum", "name": "int_spectrum_R"},
+        datatype=numpy.int64,
+        dims=(4,),
+    )
+    str_spectrum_RW = AttributeWrapper(
+        comms_annotation={"section": "spectrum", "name": "str_spectrum_RW"},
+        datatype=str,
+        dims=(4,),
+        access=AttrWriteType.READ_WRITE,
+    )
+    str_spectrum_R = AttributeWrapper(
+        comms_annotation={"section": "spectrum", "name": "str_spectrum_R"},
+        datatype=str,
+        dims=(4,),
+    )
+
+    double_image_RW = AttributeWrapper(
+        comms_annotation={"section": "image", "name": "double_image_RW"},
+        datatype=numpy.double,
+        dims=(3, 2),
+        access=AttrWriteType.READ_WRITE,
+    )
+    double_image_R = AttributeWrapper(
+        comms_annotation={"section": "image", "name": "double_image_R"},
+        datatype=numpy.double,
+        dims=(3, 2),
+    )
+    bool_image_RW = AttributeWrapper(
+        comms_annotation={"section": "image", "name": "bool_image_RW"},
+        datatype=bool,
+        dims=(3, 2),
+        access=AttrWriteType.READ_WRITE,
+    )
+    bool_image_R = AttributeWrapper(
+        comms_annotation={"section": "image", "name": "bool_image_R"},
+        datatype=bool,
+        dims=(3, 2),
+    )
+    int_image_RW = AttributeWrapper(
+        comms_annotation={"section": "image", "name": "int_image_RW"},
+        datatype=numpy.int64,
+        dims=(3, 2),
+        access=AttrWriteType.READ_WRITE,
+    )
+    int_image_R = AttributeWrapper(
+        comms_annotation={"section": "image", "name": "int_image_R"},
+        datatype=numpy.int64,
+        dims=(3, 2),
+    )
+    str_image_RW = AttributeWrapper(
+        comms_annotation={"section": "image", "name": "str_image_RW"},
+        datatype=str,
+        dims=(3, 2),
+        access=AttrWriteType.READ_WRITE,
+    )
+    str_image_R = AttributeWrapper(
+        comms_annotation={"section": "image", "name": "str_image_R"},
+        datatype=str,
+        dims=(3, 2),
+    )
 
     # --------
     # overloaded functions
     # --------
     def configure_for_initialise(self):
-        """ user code here. is called when the sate is set to INIT """
+        """user code here. is called when the sate is set to INIT"""
         """Initialises the attributes and properties of the Hardware."""
 
         # set up the OPC ua client
@@ -114,7 +202,11 @@ class IniDevice(LOFARDevice):
                 # use the pass function instead of setting read/write fails
                 i.set_pass_func()
 
-                logger.warning("error while setting the ini attribute {} read/write function. {}".format(i, e))
+                logger.warning(
+                    "error while setting the ini attribute {} read/write function. {}".format(
+                        i, e
+                    )
+                )
 
         self.ini_client.start()
 
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/base.py b/tangostationcontrol/tangostationcontrol/integration_test/base.py
index ed1eb2239af74251e22b42adf8ea5e2596688076..cc42e12a2cfda188697e0cf6eb9345998bb79531 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/base.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/base.py
@@ -1,17 +1,11 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
-
-from tangostationcontrol.common.lofar_logging import configure_logger
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import unittest
+
 import asynctest
 import testscenarios
+from tangostationcontrol.common.lofar_logging import configure_logger
 
 """Setup logging for integration tests"""
 configure_logger(debug=True)
@@ -30,6 +24,7 @@ class IntegrationTestCase(BaseIntegrationTestCase):
     def setUp(self):
         super().setUp()
 
+
 class IntegrationAsyncTestCase(testscenarios.WithScenarios, asynctest.TestCase):
     """Integration test case base class for all asyncio unit tests."""
 
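IntegrationAsyncTestCase mixes in testscenarios.WithScenarios. A small illustrative sketch of what that mixin provides, using plain unittest instead of asynctest (class, scenario and attribute names are made up):

# Sketch only: WithScenarios runs every test once per scenario and injects the
# scenario's attributes onto the test instance.
import unittest

import testscenarios


class ExampleScenarioTest(testscenarios.WithScenarios, unittest.TestCase):
    scenarios = [
        ("small", {"value": 1}),
        ("large", {"value": 1000}),
    ]

    def test_value_is_positive(self):
        self.assertGreater(self.value, 0)


if __name__ == "__main__":
    unittest.main()
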
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/configuration/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/configuration/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/configuration/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/configuration/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/configuration/configDB/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/configuration/configDB/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/configuration/configDB/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/configuration/configDB/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/configuration/test_device_configuration.py b/tangostationcontrol/tangostationcontrol/integration_test/configuration/test_device_configuration.py
index 2833eecf46cf06b7fb2bffc428ac1f137f73dab8..6d948153b92b2327c08699648d6f9b9417aa67fc 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/configuration/test_device_configuration.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/configuration/test_device_configuration.py
@@ -1,22 +1,16 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from tango import DevState
+import json
 
+import pkg_resources
+from tango import DevState
 from tangostationcontrol.integration_test.default.devices.base import AbstractTestBases
 
-import json
-import pkg_resources
 
 class TestDeviceConfiguration(AbstractTestBases.TestDeviceBase):
 
-    TEST_CONFIGURATION = '''{
+    TEST_CONFIGURATION = """{
                             "servers": {
                                 "AntennaField": {
                                     "STAT": {
@@ -37,25 +31,31 @@ class TestDeviceConfiguration(AbstractTestBases.TestDeviceBase):
                                     }
                                 }
                             }
-                        }'''
-    DB_DEFAULT_CONFIG_FILE = 'LOFAR_ConfigDb.json'
-    DB_FILE_LIST = ['test_environment_ConfigDb.json', 'archiver-devices.json', 
-                    'simulators_ConfigDb.json', 'dummy_positions_ConfigDb.json']
+                        }"""
+    DB_DEFAULT_CONFIG_FILE = "LOFAR_ConfigDb.json"
+    DB_FILE_LIST = [
+        "test_environment_ConfigDb.json",
+        "archiver-devices.json",
+        "simulators_ConfigDb.json",
+        "dummy_positions_ConfigDb.json",
+    ]
 
     def setUp(self):
         super().setUp("STAT/Configuration/1")
         # Ensure that each test begins with the default station configuration
         self.upload_default_lofar_configuration()
         self.addCleanup(self.upload_default_lofar_configuration)
-    
+
     def upload_default_lofar_configuration(self):
         self.proxy.off()
         self.proxy.warm_boot()
         """ Upload station configuration with original default values """
-        f = pkg_resources.resource_stream(__name__, f'configDB/{self.DB_DEFAULT_CONFIG_FILE}')
+        f = pkg_resources.resource_stream(
+            __name__, f"configDB/{self.DB_DEFAULT_CONFIG_FILE}"
+        )
         self.proxy.station_configuration_RW = json.dumps(json.load(f))
         for json_file in self.DB_FILE_LIST:
-            f = pkg_resources.resource_stream(__name__, f'configDB/{json_file}')
+            f = pkg_resources.resource_stream(__name__, f"configDB/{json_file}")
             self.proxy.update_station_configuration(json.dumps(json.load(f)))
         self.proxy.off()
 
@@ -66,39 +66,82 @@ class TestDeviceConfiguration(AbstractTestBases.TestDeviceBase):
         station_configuration = self.proxy.station_configuration_RW
         dbdata = json.loads(station_configuration)
         self.assertEqual(type(dbdata), dict)
-        self.assertGreater(len(dbdata['servers']), 0)
+        self.assertGreater(len(dbdata["servers"]), 0)
         # Verify if Configuration Device exists
-        self.assertTrue('configuration' in dbdata['servers'], msg=f'{dbdata}') # server-name
-        self.assertTrue('stat' in dbdata['servers']['configuration'], msg=f'{dbdata}') # server-instance
-        self.assertTrue('configuration' in dbdata['servers']['configuration']['stat'], msg=f'{dbdata}') # server-class
-        self.assertTrue('stat/configuration/1' in dbdata['servers']['configuration']['stat']['configuration'], msg=f'{dbdata}') # device
-    
+        self.assertTrue(
+            "configuration" in dbdata["servers"], msg=f"{dbdata}"
+        )  # server-name
+        self.assertTrue(
+            "stat" in dbdata["servers"]["configuration"], msg=f"{dbdata}"
+        )  # server-instance
+        self.assertTrue(
+            "configuration" in dbdata["servers"]["configuration"]["stat"],
+            msg=f"{dbdata}",
+        )  # server-class
+        self.assertTrue(
+            "stat/configuration/1"
+            in dbdata["servers"]["configuration"]["stat"]["configuration"],
+            msg=f"{dbdata}",
+        )  # device
+
     def test_load_station_configuration(self):
         self.proxy.warm_boot()
         """ Test whether the station control configuration is correctly loaded into Tango Database """
         dbdata = json.loads(self.proxy.station_configuration_RW)
         # So far configuration Database is set from files in the DB_FILE_LIST
-        self.assertTrue('stat/configuration/1' in dbdata['servers']['configuration']['stat']['configuration'], msg=f'{dbdata}') # configuration device
-        self.assertFalse('stat/observation/11' in dbdata['servers']['observation']['stat']['observation'], msg=f'{dbdata}') # observation device
-        self.assertTrue('stat/antennafield/1' in dbdata['servers']['antennafield']['stat']['antennafield'], msg=f'{dbdata}') # antennafield device
-        antennafield_properties = dbdata['servers']['antennafield']['stat']['antennafield']['stat/antennafield/1']['properties']
+        self.assertTrue(
+            "stat/configuration/1"
+            in dbdata["servers"]["configuration"]["stat"]["configuration"],
+            msg=f"{dbdata}",
+        )  # configuration device
+        self.assertFalse(
+            "stat/observation/11"
+            in dbdata["servers"]["observation"]["stat"]["observation"],
+            msg=f"{dbdata}",
+        )  # observation device
+        self.assertTrue(
+            "stat/antennafield/1"
+            in dbdata["servers"]["antennafield"]["stat"]["antennafield"],
+            msg=f"{dbdata}",
+        )  # antennafield device
+        antennafield_properties = dbdata["servers"]["antennafield"]["stat"][
+            "antennafield"
+        ]["stat/antennafield/1"]["properties"]
         self.assertTrue("recv_devices" in antennafield_properties)
-        self.assertEqual(antennafield_properties['recv_devices'][0], "STAT/RECV/1")
+        self.assertEqual(antennafield_properties["recv_devices"][0], "STAT/RECV/1")
         # Load a full new configuration, and consequently erase the previous one
         self.proxy.station_configuration_RW = self.TEST_CONFIGURATION
         updated_dbdata = json.loads(self.proxy.station_configuration_RW)
         # Test whether default 'protected' devices have not been deleted
-        self.assertTrue('stat/configuration/1' in updated_dbdata['servers']['configuration']['stat']['configuration'], msg=f'{updated_dbdata}')
+        self.assertTrue(
+            "stat/configuration/1"
+            in updated_dbdata["servers"]["configuration"]["stat"]["configuration"],
+            msg=f"{updated_dbdata}",
+        )
         # Test whether new device has been added
-        self.assertTrue('stat/observation/11' in updated_dbdata['servers']['observation']['stat']['observation'], msg=f'{updated_dbdata}') # observation device
+        self.assertTrue(
+            "stat/observation/11"
+            in updated_dbdata["servers"]["observation"]["stat"]["observation"],
+            msg=f"{updated_dbdata}",
+        )  # observation device
         # Test whether old device has been deleted
-        self.assertFalse('recv' in updated_dbdata['servers'].keys(), msg=f'{updated_dbdata}') # recv device
+        self.assertFalse(
+            "recv" in updated_dbdata["servers"].keys(), msg=f"{updated_dbdata}"
+        )  # recv device
         # Test whether old attribute has been updated
-        self.assertTrue('stat/antennafield/1' in updated_dbdata['servers']['antennafield']['stat']['antennafield'], msg=f'{updated_dbdata}')
-        antennafield_properties = updated_dbdata['servers']['antennafield']['stat']['antennafield']['stat/antennafield/1']['properties']
-        self.assertTrue("recv_devices" in antennafield_properties, msg=f'{antennafield_properties}')
-        self.assertEqual(antennafield_properties['recv_devices'][0], "STAT/MOCKRECV/1")
-    
+        self.assertTrue(
+            "stat/antennafield/1"
+            in updated_dbdata["servers"]["antennafield"]["stat"]["antennafield"],
+            msg=f"{updated_dbdata}",
+        )
+        antennafield_properties = updated_dbdata["servers"]["antennafield"]["stat"][
+            "antennafield"
+        ]["stat/antennafield/1"]["properties"]
+        self.assertTrue(
+            "recv_devices" in antennafield_properties, msg=f"{antennafield_properties}"
+        )
+        self.assertEqual(antennafield_properties["recv_devices"][0], "STAT/MOCKRECV/1")
+
     def test_update_station_configuration(self):
         self.proxy.warm_boot()
         """ Test whether the station control configuration is correctly updated into Tango Database """
@@ -107,13 +150,32 @@ class TestDeviceConfiguration(AbstractTestBases.TestDeviceBase):
         self.proxy.update_station_configuration(self.TEST_CONFIGURATION)
         updated_dbdata = json.loads(self.proxy.station_configuration_RW)
         # Test whether default 'protected' devices have not been deleted
-        self.assertTrue('stat/configuration/1' in updated_dbdata['servers']['configuration']['stat']['configuration'], msg=f'{updated_dbdata}')
+        self.assertTrue(
+            "stat/configuration/1"
+            in updated_dbdata["servers"]["configuration"]["stat"]["configuration"],
+            msg=f"{updated_dbdata}",
+        )
         # Test whether new device has been added
-        self.assertTrue('stat/observation/11' in updated_dbdata['servers']['observation']['stat']['observation'], msg=f'{updated_dbdata}') # observation device
+        self.assertTrue(
+            "stat/observation/11"
+            in updated_dbdata["servers"]["observation"]["stat"]["observation"],
+            msg=f"{updated_dbdata}",
+        )  # observation device
         # Test whether old device has NOT been deleted
-        self.assertTrue('stat/recv/1' in updated_dbdata['servers']['recv']['stat']['recv'], msg=f'{updated_dbdata}') # recv device
+        self.assertTrue(
+            "stat/recv/1" in updated_dbdata["servers"]["recv"]["stat"]["recv"],
+            msg=f"{updated_dbdata}",
+        )  # recv device
         # Test whether old attribute has been updated
-        self.assertTrue('stat/antennafield/1' in updated_dbdata['servers']['antennafield']['stat']['antennafield'], msg=f'{updated_dbdata}')
-        antennafield_properties = updated_dbdata['servers']['antennafield']['stat']['antennafield']['stat/antennafield/1']['properties']
-        self.assertTrue("recv_devices" in antennafield_properties, msg=f'{antennafield_properties}')
-        self.assertEqual(antennafield_properties['recv_devices'][0], "STAT/MOCKRECV/1")
+        self.assertTrue(
+            "stat/antennafield/1"
+            in updated_dbdata["servers"]["antennafield"]["stat"]["antennafield"],
+            msg=f"{updated_dbdata}",
+        )
+        antennafield_properties = updated_dbdata["servers"]["antennafield"]["stat"][
+            "antennafield"
+        ]["stat/antennafield/1"]["properties"]
+        self.assertTrue(
+            "recv_devices" in antennafield_properties, msg=f"{antennafield_properties}"
+        )
+        self.assertEqual(antennafield_properties["recv_devices"][0], "STAT/MOCKRECV/1")
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/default/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/client/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/default/client/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/client/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/client/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_opcua_client_against_server.py b/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_opcua_client_against_server.py
index 090444cae29b140a8388fed6612ba603ba140ca0..db3cbc85798ff1baf263f08aeafac17cfc6ed74d 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_opcua_client_against_server.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_opcua_client_against_server.py
@@ -1,3 +1,6 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import asyncua
 import numpy
 
@@ -7,11 +10,11 @@ from tangostationcontrol.integration_test import base
 
 
 class TestClientServer(base.IntegrationAsyncTestCase):
-    """ Test the OPCUAConnection against an OPCUA server we instantiate ourselves. """
+    """Test the OPCUAConnection against an OPCUA server we instantiate ourselves."""
 
     async def setup_server(self, port):
-        """ Setup a server on a dedicated port for the test, to allow
-            the tests to be run in parallel. """
+        """Setup a server on a dedicated port for the test, to allow
+        the tests to be run in parallel."""
 
         # where we will run the server
         self.endpoint = f"opc.tcp://127.0.0.1:{port}"
@@ -49,7 +52,13 @@ class TestClientServer(base.IntegrationAsyncTestCase):
             be caught"""
             raise ArithmeticError("Expected test exception")
 
-        multiply_method = await obj.add_method(idx, "multiply", multiply, [asyncua.ua.VariantType.Double, asyncua.ua.VariantType.Int64], [asyncua.ua.VariantType.Double])
+        multiply_method = await obj.add_method(
+            idx,
+            "multiply",
+            multiply,
+            [asyncua.ua.VariantType.Double, asyncua.ua.VariantType.Int64],
+            [asyncua.ua.VariantType.Double],
+        )
         procedure_method = await obj.add_method(idx, "procedure", procedure, [], [])
         throws_method = await obj.add_method(idx, "throws", throws, [], [])
 
@@ -71,7 +80,9 @@ class TestClientServer(base.IntegrationAsyncTestCase):
     async def test_opcua_connection(self):
         await self.setup_server(14840)
 
-        test_client = OPCUAConnection(self.endpoint, self.namespace, 5, self.fault_func, self.loop)
+        test_client = OPCUAConnection(
+            self.endpoint, self.namespace, 5, self.fault_func, self.loop
+        )
         try:
             await test_client.start()
         finally:
@@ -80,7 +91,9 @@ class TestClientServer(base.IntegrationAsyncTestCase):
     async def test_read_attribute(self):
         await self.setup_server(14841)
 
-        test_client = OPCUAConnection(self.endpoint, self.namespace, 5, self.fault_func, self.loop)
+        test_client = OPCUAConnection(
+            self.endpoint, self.namespace, 5, self.fault_func, self.loop
+        )
         try:
             await test_client.start()
 
@@ -90,7 +103,9 @@ class TestClientServer(base.IntegrationAsyncTestCase):
                 dim_y = 0
                 datatype = numpy.double
 
-            prot_attr = await test_client.setup_protocol_attribute(["double_R"], Attribute())
+            prot_attr = await test_client.setup_protocol_attribute(
+                ["double_R"], Attribute()
+            )
 
             # read it from the server
             self.assertEqual(42.0, await prot_attr.read_function())
@@ -100,7 +115,9 @@ class TestClientServer(base.IntegrationAsyncTestCase):
     async def test_write_attribute(self):
         await self.setup_server(14842)
 
-        test_client = OPCUAConnection(self.endpoint, self.namespace, 5, self.fault_func, self.loop)
+        test_client = OPCUAConnection(
+            self.endpoint, self.namespace, 5, self.fault_func, self.loop
+        )
         try:
             await test_client.start()
 
@@ -110,7 +127,9 @@ class TestClientServer(base.IntegrationAsyncTestCase):
                 dim_y = 0
                 datatype = numpy.double
 
-            prot_attr = await test_client.setup_protocol_attribute(["double_RW"], Attribute())
+            prot_attr = await test_client.setup_protocol_attribute(
+                ["double_RW"], Attribute()
+            )
 
             # write it to the server and read it back to verify
             await prot_attr.write_function(123.0)
@@ -122,7 +141,9 @@ class TestClientServer(base.IntegrationAsyncTestCase):
     async def test_method_without_args(self):
         await self.setup_server(14843)
 
-        test_client = OPCUAConnection(self.endpoint, self.namespace, 5, self.fault_func, self.loop)
+        test_client = OPCUAConnection(
+            self.endpoint, self.namespace, 5, self.fault_func, self.loop
+        )
         try:
             await test_client.start()
 
@@ -133,31 +154,44 @@ class TestClientServer(base.IntegrationAsyncTestCase):
     async def test_method_with_args(self):
         await self.setup_server(14843)
 
-        test_client = OPCUAConnection(self.endpoint, self.namespace, 5, self.fault_func, self.loop)
+        test_client = OPCUAConnection(
+            self.endpoint, self.namespace, 5, self.fault_func, self.loop
+        )
         try:
             await test_client.start()
 
-            self.assertEqual(21.0, await test_client._call_method(["multiply"], numpy.double(3.0), numpy.int64(7)))
+            self.assertEqual(
+                21.0,
+                await test_client._call_method(
+                    ["multiply"], numpy.double(3.0), numpy.int64(7)
+                ),
+            )
         finally:
             await test_client.stop()
 
     async def test_method_with_wrong_arg_types(self):
         await self.setup_server(14844)
 
-        test_client = OPCUAConnection(self.endpoint, self.namespace, 5, self.fault_func, self.loop)
+        test_client = OPCUAConnection(
+            self.endpoint, self.namespace, 5, self.fault_func, self.loop
+        )
         try:
             await test_client.start()
 
             with self.assertRaises(RuntimeError):
                 # correct signature is multiply(double,int64)
-                _ = await test_client._call_method(["multiply"], numpy.double(3.0), numpy.double(7))
+                _ = await test_client._call_method(
+                    ["multiply"], numpy.double(3.0), numpy.double(7)
+                )
         finally:
             await test_client.stop()
 
     async def test_errorring_method(self):
         await self.setup_server(14845)
 
-        test_client = OPCUAConnection(self.endpoint, self.namespace, 5, self.fault_func, self.loop)
+        test_client = OPCUAConnection(
+            self.endpoint, self.namespace, 5, self.fault_func, self.loop
+        )
         try:
             await test_client.start()
 
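Each of these tests starts its own asyncua server before connecting the project's OPCUAConnection. A condensed sketch of that server-plus-read pattern using asyncua alone; the endpoint, namespace and node names are illustrative and the port is chosen outside the range the tests use:

# Sketch only: a self-contained asyncua server plus client round trip,
# mirroring the setup_server()/read pattern of the tests above.
import asyncio

import asyncua


async def main():
    endpoint = "opc.tcp://127.0.0.1:14850"  # illustrative port
    namespace = "http://example.org/test/"  # illustrative namespace

    server = asyncua.Server()
    await server.init()
    server.set_endpoint(endpoint)
    idx = await server.register_namespace(namespace)
    obj = await server.nodes.objects.add_object(idx, "TestObject")
    await obj.add_variable(idx, "double_R", 42.0)

    async with server:
        async with asyncua.Client(endpoint) as client:
            node = await client.nodes.objects.get_child(
                [f"{idx}:TestObject", f"{idx}:double_R"]
            )
            assert await node.read_value() == 42.0


asyncio.run(main())
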
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_sdptr_sim.py b/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_sdptr_sim.py
index a09f407e2982d7b873021f03b1eb9e78fe336e44..38e6ca03114ab17057db0f0df4d3dfe0abe052d2 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_sdptr_sim.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_sdptr_sim.py
@@ -1,11 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from asyncua import Client
 
@@ -13,7 +7,6 @@ from tangostationcontrol.integration_test import base
 
 
 class TestSDPTRSim(base.IntegrationAsyncTestCase):
-
     def setUp(self):
         super(TestSDPTRSim, self).setUp()
 
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_tcp_replicator.py b/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_tcp_replicator.py
index bd4238d76656ec5214f651ed6df11fe08b46028d..e6f4470e66b1ad24fb5ff80481aa0f14c5b48a7d 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_tcp_replicator.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_tcp_replicator.py
@@ -1,29 +1,20 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import logging
-import time
 import socket
 import sys
+import time
 
 import timeout_decorator
-
 from tangostationcontrol.clients.tcp_replicator import TCPReplicator
 from tangostationcontrol.common.constants import MAX_ETH_FRAME_SIZE
-
 from tangostationcontrol.integration_test import base
 
 logger = logging.getLogger()
 
 
 class TestTCPReplicator(base.IntegrationTestCase):
-
     def setUp(self):
 
         super(TestTCPReplicator, self).setUp()
@@ -60,7 +51,7 @@ class TestTCPReplicator(base.IntegrationTestCase):
         replicator = TCPReplicator(test_options)
         self.assertTrue(replicator.is_alive())
 
-        replicator.transmit("Hello World!".encode('utf-8'))
+        replicator.transmit("Hello World!".encode("utf-8"))
 
     def test_start_connect_close(self):
         test_options = {
@@ -73,13 +64,13 @@ class TestTCPReplicator(base.IntegrationTestCase):
         time.sleep(2)
 
         s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        s.connect(("127.0.0.1", test_options['tcp_port']))
+        s.connect(("127.0.0.1", test_options["tcp_port"]))
 
         time.sleep(2)
 
         replicator.join()
 
-        self.assertEqual(b'', s.recv(MAX_ETH_FRAME_SIZE))
+        self.assertEqual(b"", s.recv(MAX_ETH_FRAME_SIZE))
 
     def test_client_disconnected_lost(self):
         test_options = {
@@ -94,17 +85,17 @@ class TestTCPReplicator(base.IntegrationTestCase):
         s_disconnected = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         s_open = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         s_lost = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        s_disconnected.connect(("127.0.0.1", test_options['tcp_port']))
-        s_open.connect(("127.0.0.1", test_options['tcp_port']))
+        s_disconnected.connect(("127.0.0.1", test_options["tcp_port"]))
+        s_open.connect(("127.0.0.1", test_options["tcp_port"]))
 
         time.sleep(2)
         s_disconnected.shutdown(socket.SHUT_RDWR)
         s_disconnected.close()
-        s_lost.close() # close without shutdown does not inform the server and creates a stale connection
+        s_lost.close()  # close without shutdown does not inform the server and creates a stale connection
 
         for _ in range(10):
             replicator.put("hello world".encode("utf-8"))
-            self.assertEqual(b'hello world', s_open.recv(MAX_ETH_FRAME_SIZE))
+            self.assertEqual(b"hello world", s_open.recv(MAX_ETH_FRAME_SIZE))
 
         replicator.join()
 
@@ -122,7 +113,7 @@ class TestTCPReplicator(base.IntegrationTestCase):
         time.sleep(2)
 
         s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        s.connect(("127.0.0.1", test_options['tcp_port']))
+        s.connect(("127.0.0.1", test_options["tcp_port"]))
 
         time.sleep(2)
 
@@ -147,10 +138,10 @@ class TestTCPReplicator(base.IntegrationTestCase):
         time.sleep(2)
 
         s1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        s1.connect(("127.0.0.1", test_options['tcp_port']))
+        s1.connect(("127.0.0.1", test_options["tcp_port"]))
 
         s2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        s2.connect(("127.0.0.1", test_options['tcp_port']))
+        s2.connect(("127.0.0.1", test_options["tcp_port"]))
 
         time.sleep(3)
 
@@ -179,10 +170,10 @@ class TestTCPReplicator(base.IntegrationTestCase):
         time.sleep(2)
 
         s1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        s1.connect(("127.0.0.1", test_options['tcp_port']))
+        s1.connect(("127.0.0.1", test_options["tcp_port"]))
 
         s2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        s2.connect(("127.0.0.1", test_options['tcp_port']))
+        s2.connect(("127.0.0.1", test_options["tcp_port"]))
 
         time.sleep(3)
 
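On the receiving side these tests only need a bare TCP socket. A self-contained sketch of that receive pattern, with a throwaway local server standing in for TCPReplicator (the port is picked by the OS, and the buffer size is an assumption, not the project constant):

# Sketch only: the bare TCP receive loop used by the sockets in the tests
# above, paired with a throwaway local server so the snippet is self-contained.
import socket
import threading

RECV_BUFFER_SIZE = 9000  # assumption: roughly one jumbo Ethernet frame


server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(("127.0.0.1", 0))  # let the OS pick a free port
server.listen(1)
port = server.getsockname()[1]


def serve_once():
    conn, _ = server.accept()
    conn.sendall(b"hello world")
    conn.close()


threading.Thread(target=serve_once, daemon=True).start()

with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as client:
    client.connect(("127.0.0.1", port))
    received = b""
    while True:
        chunk = client.recv(RECV_BUFFER_SIZE)
        if chunk == b"":  # an empty read means the peer closed the connection
            break
        received += chunk

assert received == b"hello world"
server.close()
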
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_unb2_sim.py b/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_unb2_sim.py
index 261441901c589a26256b586dc571f7b063c08408..0b3d2f864c8b5e25a20ff4ca237dd1a7cef670f8 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_unb2_sim.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/client/test_unb2_sim.py
@@ -1,11 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from asyncua import Client
 
@@ -13,7 +7,6 @@ from tangostationcontrol.integration_test import base
 
 
 class TestUNB2Sim(base.IntegrationAsyncTestCase):
-
     def setUp(self):
         super(TestUNB2Sim, self).setUp()
 
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/common/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/default/common/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/common/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/common/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/common/test_configuration.py b/tangostationcontrol/tangostationcontrol/integration_test/default/common/test_configuration.py
index dc19fd5fdea9d110aafb2144c81214581dcf462f..24f4df0ab30a2ed078742bb1e64c5ba4e1931342 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/common/test_configuration.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/common/test_configuration.py
@@ -1,11 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tango import Database
 
@@ -13,47 +7,98 @@ from tangostationcontrol.common.configuration import StationConfiguration
 
 from tangostationcontrol.integration_test.base import BaseIntegrationTestCase
 
+
 class TestStationConfiguration(BaseIntegrationTestCase):
 
     sc = StationConfiguration(Database())
 
     def test_query_to_tuples(self):
-        """ Test whether Tango DB data are correctly converted into tuples """
-        raw_result = ['device1', 'property_name1', 'value1', 'device1', 'property_name2', 'value2']
+        """Test whether Tango DB data are correctly converted into tuples"""
+        raw_result = [
+            "device1",
+            "property_name1",
+            "value1",
+            "device1",
+            "property_name2",
+            "value2",
+        ]
         num_col = 3
-        record1 = ('device1', 'property_name1', 'value1')
-        record2 = ('device1', 'property_name2', 'value2')
+        record1 = ("device1", "property_name1", "value1")
+        record2 = ("device1", "property_name2", "value2")
         expected_result = [record1, record2]
         self.assertEqual(self.sc.query_to_tuples(raw_result, num_col), expected_result)
-    
+
     def test_add_to_devices_dict(self):
-        """ Test whether data retrieved from DB are correctly inserted into devices dictionary """
-        data = [('device1', 'property_name1', 'value1'), ('device1', 'property_name2', 'value2')]
-        expected_result = {'device1': {'properties': {  'property_name1': ['value1'],
-                                                        'property_name2': ['value2']}}}
+        """Test whether data retrieved from DB are correctly inserted into devices dictionary"""
+        data = [
+            ("device1", "property_name1", "value1"),
+            ("device1", "property_name2", "value2"),
+        ]
+        expected_result = {
+            "device1": {
+                "properties": {
+                    "property_name1": ["value1"],
+                    "property_name2": ["value2"],
+                }
+            }
+        }
         self.assertEqual(self.sc.add_to_devices_dict({}, data), expected_result)
 
     def test_add_to_attrs_dict(self):
-        """ Test whether data retrieved from DB are correctly inserted into attributes dictionary """
-        # Two attributes 
-        data_2attrs = [('device1', 'attribute1', 'attr_property_name1', 'value1'), 
-                ('device1', 'attribute2', 'attr_property_name1', 'value2')]
-        expected_result = {'device1': {'attribute_properties': {'attribute1': {'attr_property_name1': ['value1']},
-                                                                'attribute2': {'attr_property_name1': ['value2']}}}}
+        """Test whether data retrieved from DB are correctly inserted into attributes dictionary"""
+        # Two attributes
+        data_2attrs = [
+            ("device1", "attribute1", "attr_property_name1", "value1"),
+            ("device1", "attribute2", "attr_property_name1", "value2"),
+        ]
+        expected_result = {
+            "device1": {
+                "attribute_properties": {
+                    "attribute1": {"attr_property_name1": ["value1"]},
+                    "attribute2": {"attr_property_name1": ["value2"]},
+                }
+            }
+        }
         self.assertEqual(self.sc.add_to_attrs_dict({}, data_2attrs), expected_result)
         # One attribute, two property values
-        data_1attr = [('device1', 'attribute1', 'attr_property_name1', 'value1'), 
-                ('device1', 'attribute1', 'attr_property_name1', 'value2')]
-        expected_result = {'device1': {'attribute_properties': {'attribute1': 
-                                        {'attr_property_name1': ['value1','value2']}}}}
+        data_1attr = [
+            ("device1", "attribute1", "attr_property_name1", "value1"),
+            ("device1", "attribute1", "attr_property_name1", "value2"),
+        ]
+        expected_result = {
+            "device1": {
+                "attribute_properties": {
+                    "attribute1": {"attr_property_name1": ["value1", "value2"]}
+                }
+            }
+        }
         self.assertEqual(self.sc.add_to_attrs_dict({}, data_1attr), expected_result)
-    
+
     def test_add_to_server_dict(self):
-        """ Test whether data retrieved from DB are correctly inserted into server dictionary """
-        data = [('server_name/server_instance', 'server_class', 'device1')]
-        devices_dict = {'device1': {'properties': {  'property_name1': ['value1'],
-                                                     'property_name2': ['value2']}}}
-        expected_result = {'server_name': {'server_instance': {'server_class': 
-                            {'device1': {'properties': {'property_name1': ['value1'],
-                                                        'property_name2': ['value2']}}}}}}
-        self.assertEqual(self.sc.add_to_server_dict({}, devices_dict, data), expected_result)
+        """Test whether data retrieved from DB are correctly inserted into server dictionary"""
+        data = [("server_name/server_instance", "server_class", "device1")]
+        devices_dict = {
+            "device1": {
+                "properties": {
+                    "property_name1": ["value1"],
+                    "property_name2": ["value2"],
+                }
+            }
+        }
+        expected_result = {
+            "server_name": {
+                "server_instance": {
+                    "server_class": {
+                        "device1": {
+                            "properties": {
+                                "property_name1": ["value1"],
+                                "property_name2": ["value2"],
+                            }
+                        }
+                    }
+                }
+            }
+        }
+        self.assertEqual(
+            self.sc.add_to_server_dict({}, devices_dict, data), expected_result
+        )
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/base.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/base.py
index 973bd3d8726e7b9300c7ee045898489ca9c4f8f7..accc07fd22089ec82ed32522a1c1841e7a164d60 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/base.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/base.py
@@ -1,28 +1,21 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tango._tango import DevState
-
 from tangostationcontrol.devices.opcua_device import OPCUADevice
-from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
 from tangostationcontrol.integration_test import base
+from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
 
 
 class AbstractTestBases:
-    """ Holder for abstract test base classes. If we define these at the top level,
-        the test runner will execute them. """
+    """Holder for abstract test base classes. If we define these at the top level,
+    the test runner will execute them."""
 
     class TestDeviceBase(base.IntegrationTestCase):
         __test__ = False
 
-        def setUp(self, name = ""):
-            #if name == "":
+        def setUp(self, name=""):
+            # if name == "":
             #    raise unittest.SkipTest("This is a base class for other tests")
 
             """Intentionally recreate the device object in each test"""
@@ -35,7 +28,9 @@ class AbstractTestBases:
 
             # make a backup of the properties, in case they're changed
             # NB: "or {}" is needed to deal with devices that have no properties.
-            self.original_properties = self.proxy.get_property(self.proxy.get_property_list("*") or {}) or {}
+            self.original_properties = (
+                self.proxy.get_property(self.proxy.get_property_list("*") or {}) or {}
+            )
 
             self.addCleanup(TestDeviceProxy.test_device_turn_off, self.name)
             self.addCleanup(self.restore_properties)
@@ -88,7 +83,7 @@ class AbstractTestBases:
         def test_device_read_all_attributes(self):
             """Test if we can read all of the exposed attributes in the ON state.
 
-               This test covers the reading logic of all attributes. """
+            This test covers the reading logic of all attributes."""
 
             self.proxy.initialise()
             self.proxy.on()
@@ -97,4 +92,6 @@ class AbstractTestBases:
                 try:
                     _ = self.proxy.read_attribute(attribute_name).value
                 except Exception as e:
-                    raise AssertionError(f"Could not read attribute {attribute_name} from device {self.name}") from e
+                    raise AssertionError(
+                        f"Could not read attribute {attribute_name} from device {self.name}"
+                    ) from e
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_antennafield.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_antennafield.py
index 50aa6f24da10c11e642635a2ca6e4c330bccf765..fb754b5f88e6c0b939b7007c0f64ee6479355d99 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_antennafield.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_antennafield.py
@@ -1,30 +1,38 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from tango._tango import DevState
 import numpy
-
-from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
+from tango._tango import DevState
+from tangostationcontrol.common.constants import (
+    N_elements,
+    MAX_ANTENNA,
+    N_pol,
+    N_rcu,
+    N_rcu_inp,
+    DEFAULT_N_HBA_TILES,
+    CLK_200_MHZ,
+    N_pn,
+    A_pn,
+    N_subbands,
+)
 from tangostationcontrol.devices.antennafield import AntennaQuality, AntennaUse
 from tangostationcontrol.devices.sdp.common import weight_to_complex
 from tangostationcontrol.devices.sdp.sdp import SDP
+from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
+
 from .base import AbstractTestBases
-from tangostationcontrol.common.constants import N_elements, MAX_ANTENNA, N_pol, N_rcu, N_rcu_inp, DEFAULT_N_HBA_TILES, CLK_200_MHZ, N_pn, A_pn, N_subbands
 
-class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
 
+class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
     def setUp(self):
         super().setUp("STAT/AntennaField/1")
-        self.proxy.put_property({
-            "RECV_devices": ["STAT/RECV/1"],
-            "Power_to_RECV_mapping": [1, 1, 1, 0] + [-1] * ((DEFAULT_N_HBA_TILES * 2) - 4)
-        })
+        self.proxy.put_property(
+            {
+                "RECV_devices": ["STAT/RECV/1"],
+                "Power_to_RECV_mapping": [1, 1, 1, 0]
+                + [-1] * ((DEFAULT_N_HBA_TILES * 2) - 4),
+            }
+        )
         self.recv_proxy = self.setup_recv_proxy()
         self.sdp_proxy = self.setup_sdp_proxy()
 
@@ -37,11 +45,13 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
         self.recv_proxy.RCU_band_select_RW = [[1] * N_rcu_inp] * N_rcu
 
     def restore_antennafield(self):
-        self.proxy.put_property({
-            "RECV_devices": ["STAT/RECV/1"],
-            "Power_to_RECV_mapping": [-1, -1] * DEFAULT_N_HBA_TILES,
-            "Control_to_RECV_mapping": [-1, -1] * DEFAULT_N_HBA_TILES
-        })
+        self.proxy.put_property(
+            {
+                "RECV_devices": ["STAT/RECV/1"],
+                "Power_to_RECV_mapping": [-1, -1] * DEFAULT_N_HBA_TILES,
+                "Control_to_RECV_mapping": [-1, -1] * DEFAULT_N_HBA_TILES,
+            }
+        )
 
     @staticmethod
     def shutdown_recv():
@@ -73,7 +83,7 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
         self.assertSequenceEqual(result["RECV_devices"], ["STAT/RECV/1"])
 
     def test_ANT_mask_RW_configured_after_Antenna_Usage_Mask(self):
-        """ Verify if ANT_mask_RW values are correctly configured from Antenna_Usage_Mask values"""
+        """Verify if ANT_mask_RW values are correctly configured from Antenna_Usage_Mask values"""
 
         antennafield_proxy = self.proxy
         numpy.testing.assert_equal(
@@ -81,21 +91,25 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
         )
 
         antenna_qualities = numpy.array([AntennaQuality.OK] * MAX_ANTENNA)
-        antenna_use = numpy.array([AntennaUse.ON] + [AntennaUse.AUTO] * (MAX_ANTENNA - 1))
+        antenna_use = numpy.array(
+            [AntennaUse.ON] + [AntennaUse.AUTO] * (MAX_ANTENNA - 1)
+        )
         antenna_properties = {
-            'Antenna_Quality': antenna_qualities, 'Antenna_Use': antenna_use
+            "Antenna_Quality": antenna_qualities,
+            "Antenna_Use": antenna_use,
         }
         mapping_properties = {
             "RECV_devices": ["STAT/RECV/1"],
             "Power_to_RECV_mapping": [-1, -1] * DEFAULT_N_HBA_TILES,
             # Two inputs of recv device connected, only defined for 48 inputs
             # each pair is one input
-            "Control_to_RECV_mapping":  [1, 0 , 1, 1] + [-1, -1] * (DEFAULT_N_HBA_TILES - 2)
+            "Control_to_RECV_mapping": [1, 0, 1, 1]
+            + [-1, -1] * (DEFAULT_N_HBA_TILES - 2),
         }
         antennafield_proxy.off()
         antennafield_proxy.put_property(antenna_properties)
         antennafield_proxy.put_property(mapping_properties)
-        antennafield_proxy.boot()   # initialises hardware values as well
+        antennafield_proxy.boot()  # initialises hardware values as well
 
         # Verify all antennas are indicated to work
         numpy.testing.assert_equal(
@@ -106,25 +120,32 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
         # As well as dimensions of ANT_mask_RW must match control mapping
         numpy.testing.assert_equal(
             numpy.array([True] * 2 + [False] * (DEFAULT_N_HBA_TILES - 2)),
-            antennafield_proxy.ANT_mask_RW
+            antennafield_proxy.ANT_mask_RW,
         )
 
         # Verify recv proxy values unaffected as default for ANT_mask_RW is true
         numpy.testing.assert_equal(
             numpy.array([True] * 2 + [True] * (MAX_ANTENNA - 2)),
-            self.recv_proxy.ANT_mask_RW.flatten()
+            self.recv_proxy.ANT_mask_RW.flatten(),
         )
 
-    def test_ANT_mask_RW_configured_after_Antenna_Usage_Mask_only_one_functioning_antenna(self):
+    def test_ANT_mask_RW_configured_after_Antenna_Usage_Mask_only_one_functioning_antenna(
+        self,
+    ):
         """Verify if ANT_mask_RW values are correctly configured from Antenna_Usage_Mask values (only second antenna is OK)"""
 
         antennafield_proxy = self.proxy
 
         # Broken antennas except second
-        antenna_qualities = numpy.array([AntennaQuality.BROKEN] + [AntennaQuality.OK] + [AntennaQuality.BROKEN] * (MAX_ANTENNA - 2))
+        antenna_qualities = numpy.array(
+            [AntennaQuality.BROKEN]
+            + [AntennaQuality.OK]
+            + [AntennaQuality.BROKEN] * (MAX_ANTENNA - 2)
+        )
         antenna_use = numpy.array([AntennaUse.AUTO] * MAX_ANTENNA)
         antenna_properties = {
-            'Antenna_Quality': antenna_qualities, 'Antenna_Use': antenna_use
+            "Antenna_Quality": antenna_qualities,
+            "Antenna_Use": antenna_use,
         }
 
         # Configure control mapping to control all 96 inputs of recv device
@@ -132,31 +153,31 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
             "RECV_devices": ["STAT/RECV/1"],
             "Power_to_RECV_mapping": [-1, -1] * DEFAULT_N_HBA_TILES,
             "Control_to_RECV_mapping":
-                # [1, 0,  1, 1,  1, 2,  1, x  ...  1, 95]
-                numpy.array([[1, x] for x in range(0, MAX_ANTENNA)]).flatten()
+            # [1, 0,  1, 1,  1, 2,  1, x  ...  1, 95]
+            numpy.array([[1, x] for x in range(0, MAX_ANTENNA)]).flatten(),
         }
 
         # Cycle device and set properties
         antennafield_proxy.off()
         antennafield_proxy.put_property(antenna_properties)
         antennafield_proxy.put_property(mapping_properties)
-        antennafield_proxy.boot()   # initialises hardware values as well
+        antennafield_proxy.boot()  # initialises hardware values as well
 
         # Antenna_Usage_Mask_R should be false except one
         numpy.testing.assert_equal(
             numpy.array([False] + [True] + [False] * (MAX_ANTENNA - 2)),
-            antennafield_proxy.Antenna_Usage_Mask_R
+            antennafield_proxy.Antenna_Usage_Mask_R,
         )
         # device.boot() writes Antenna_Usage_Mask_R to ANT_mask_RW
         numpy.testing.assert_equal(
             numpy.array([False] + [True] + [False] * (MAX_ANTENNA - 2)),
-            antennafield_proxy.ANT_mask_RW
+            antennafield_proxy.ANT_mask_RW,
         )
         # ANT_mask_RW on antennafield writes to configured recv devices for all
         # mapped inputs
         numpy.testing.assert_equal(
             numpy.array([False] + [True] + [False] * (MAX_ANTENNA - 2)),
-            self.recv_proxy.ANT_mask_RW.flatten()
+            self.recv_proxy.ANT_mask_RW.flatten(),
         )
 
     def test_antennafield_set_mapped_attribute_ignore_all(self):
@@ -166,7 +187,7 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
         mapping_properties = {
             "RECV_devices": ["STAT/RECV/1"],
             "Power_to_RECV_mapping": [-1, -1] * DEFAULT_N_HBA_TILES,
-            "Control_to_RECV_mapping": [-1, -1] * DEFAULT_N_HBA_TILES
+            "Control_to_RECV_mapping": [-1, -1] * DEFAULT_N_HBA_TILES,
         }
 
         # Cycle device and put properties
@@ -176,15 +197,18 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
         antennafield_proxy.boot()
 
         # Set HBAT_PWR_on_RW to false on recv device and read results
-        self.recv_proxy.write_attribute("HBAT_PWR_on_RW", [[False] * N_elements * N_pol] * MAX_ANTENNA)
+        self.recv_proxy.write_attribute(
+            "HBAT_PWR_on_RW", [[False] * N_elements * N_pol] * MAX_ANTENNA
+        )
         current_values = self.recv_proxy.read_attribute("HBAT_PWR_on_RW").value
 
         # write true through antennafield
-        antennafield_proxy.write_attribute("HBAT_PWR_on_RW", [[True] * N_elements * N_pol] * DEFAULT_N_HBA_TILES)
+        antennafield_proxy.write_attribute(
+            "HBAT_PWR_on_RW", [[True] * N_elements * N_pol] * DEFAULT_N_HBA_TILES
+        )
         # Test that original recv values for HBAT_PWR_on_RW match current
         numpy.testing.assert_equal(
-            current_values,
-            self.recv_proxy.read_attribute("HBAT_PWR_on_RW").value
+            current_values, self.recv_proxy.read_attribute("HBAT_PWR_on_RW").value
         )
 
         # Verify device did not enter FAULT state
@@ -197,7 +221,8 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
             "RECV_devices": ["STAT/RECV/1"],
             "Power_to_RECV_mapping": [-1, -1] * DEFAULT_N_HBA_TILES,
             # Each pair is one mapping so 2 inputs are connected
-            "Control_to_RECV_mapping": [1, 0, 1, 1] + [-1, -1] * (DEFAULT_N_HBA_TILES - 2)
+            "Control_to_RECV_mapping": [1, 0, 1, 1]
+            + [-1, -1] * (DEFAULT_N_HBA_TILES - 2),
         }
 
         antennafield_proxy = self.proxy
@@ -205,15 +230,20 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
         antennafield_proxy.put_property(mapping_properties)
         antennafield_proxy.boot()
 
-        self.recv_proxy.write_attribute("HBAT_PWR_on_RW", [[False] * N_elements * N_pol] * MAX_ANTENNA)
+        self.recv_proxy.write_attribute(
+            "HBAT_PWR_on_RW", [[False] * N_elements * N_pol] * MAX_ANTENNA
+        )
 
         try:
             antennafield_proxy.write_attribute(
                 "HBAT_PWR_on_RW", [[True] * N_elements * N_pol] * DEFAULT_N_HBA_TILES
             )
             numpy.testing.assert_equal(
-                numpy.array([[True] * N_elements * N_pol] * 2 + [[False] * N_elements * N_pol] * (MAX_ANTENNA - 2)),
-                self.recv_proxy.read_attribute("HBAT_PWR_on_RW").value
+                numpy.array(
+                    [[True] * N_elements * N_pol] * 2
+                    + [[False] * N_elements * N_pol] * (MAX_ANTENNA - 2)
+                ),
+                self.recv_proxy.read_attribute("HBAT_PWR_on_RW").value,
             )
         finally:
             # Always restore recv again
@@ -231,8 +261,8 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
             "RECV_devices": ["STAT/RECV/1"],
             "Power_to_RECV_mapping": [-1, -1] * DEFAULT_N_HBA_TILES,
             "Control_to_RECV_mapping":
-                # [1, 0, 1, 1, 1, 2, 1, x ... 1, 95]
-                numpy.array([[1, x] for x in range(0, MAX_ANTENNA)]).flatten()
+            # [1, 0, 1, 1, 1, 2, 1, x ... 1, 95]
+            numpy.array([[1, x] for x in range(0, MAX_ANTENNA)]).flatten(),
         }
 
         antennafield_proxy = self.proxy
@@ -240,7 +270,9 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
         antennafield_proxy.put_property(mapping_properties)
         antennafield_proxy.boot()
 
-        self.recv_proxy.write_attribute("HBAT_PWR_on_RW", [[False] * N_elements * N_pol] * MAX_ANTENNA)
+        self.recv_proxy.write_attribute(
+            "HBAT_PWR_on_RW", [[False] * N_elements * N_pol] * MAX_ANTENNA
+        )
 
         try:
             antennafield_proxy.write_attribute(
@@ -248,7 +280,7 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
             )
             numpy.testing.assert_equal(
                 numpy.array([[True] * N_elements * N_pol] * MAX_ANTENNA),
-                self.recv_proxy.read_attribute("HBAT_PWR_on_RW").value
+                self.recv_proxy.read_attribute("HBAT_PWR_on_RW").value,
             )
         finally:
             # Always restore recv again
@@ -266,8 +298,8 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
             "RECV_devices": ["STAT/RECV/1"],
             "Power_to_RECV_mapping": [-1, -1] * DEFAULT_N_HBA_TILES,
             "Control_to_RECV_mapping":
-                # [1, 0, 1, 1, 1, 2, 1, x ... 1, 95]
-                numpy.array([[1, x] for x in range(0, MAX_ANTENNA)]).flatten()
+            # [1, 0, 1, 1, 1, 2, 1, x ... 1, 95]
+            numpy.array([[1, x] for x in range(0, MAX_ANTENNA)]).flatten(),
         }
 
         antennafield_proxy = self.proxy
@@ -275,7 +307,9 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
         antennafield_proxy.put_property(mapping_properties)
         antennafield_proxy.boot()
 
-        self.recv_proxy.write_attribute("RCU_band_select_RW", [[False] * N_rcu_inp] * N_rcu)
+        self.recv_proxy.write_attribute(
+            "RCU_band_select_RW", [[False] * N_rcu_inp] * N_rcu
+        )
 
         try:
             antennafield_proxy.write_attribute(
@@ -283,7 +317,7 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
             )
             numpy.testing.assert_equal(
                 numpy.array([[True] * N_rcu_inp] * N_rcu),
-                self.recv_proxy.read_attribute("RCU_band_select_RW").value
+                self.recv_proxy.read_attribute("RCU_band_select_RW").value,
             )
         finally:
             # Always restore recv again
@@ -297,7 +331,7 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
     def test_calibrate_recv(self):
         calibration_properties = {
             "Antenna_Type": ["LBA"],
-            "Antenna_Cables": ["50m","80m"] * (DEFAULT_N_HBA_TILES // 2),
+            "Antenna_Cables": ["50m", "80m"] * (DEFAULT_N_HBA_TILES // 2),
         }
 
         antennafield_proxy = self.proxy
@@ -312,8 +346,16 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
         rcu_attenuator_db = antennafield_proxy.RCU_attenuator_dB_RW
 
         # values should be the same for the same cable length
-        self.assertEqual(1, len(set(rcu_attenuator_db[0::2])), msg=f"rcu_attenuator_db={rcu_attenuator_db}")
-        self.assertEqual(1, len(set(rcu_attenuator_db[1::2])), msg=f"rcu_attenuator_db={rcu_attenuator_db}")
+        self.assertEqual(
+            1,
+            len(set(rcu_attenuator_db[0::2])),
+            msg=f"rcu_attenuator_db={rcu_attenuator_db}",
+        )
+        self.assertEqual(
+            1,
+            len(set(rcu_attenuator_db[1::2])),
+            msg=f"rcu_attenuator_db={rcu_attenuator_db}",
+        )
         # value should be larger for the shorter cable, as those signals need damping
         self.assertGreater(rcu_attenuator_db[0], rcu_attenuator_db[1])
         # longest cable should require no damping
@@ -322,8 +364,9 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
     def test_calibrate_sdp(self):
         calibration_properties = {
             "Antenna_Type": ["LBA"],
-            "Antenna_Cables": ["50m","80m"] * (DEFAULT_N_HBA_TILES // 2),
-            "Antenna_to_SDP_Mapping":  [0, 1, 0, 0] + [-1, -1] * (DEFAULT_N_HBA_TILES - 2),
+            "Antenna_Cables": ["50m", "80m"] * (DEFAULT_N_HBA_TILES // 2),
+            "Antenna_to_SDP_Mapping": [0, 1, 0, 0]
+            + [-1, -1] * (DEFAULT_N_HBA_TILES - 2),
         }
 
         antennafield_proxy = self.proxy
@@ -340,29 +383,55 @@ class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
         signal_input_samples_delay = self.sdp_proxy.FPGA_signal_input_samples_delay_RW
 
         # delays should be equal for both polarisations
-        self.assertEqual(signal_input_samples_delay[0,0], signal_input_samples_delay[0,1])
-        self.assertEqual(signal_input_samples_delay[0,2], signal_input_samples_delay[0,3])
+        self.assertEqual(
+            signal_input_samples_delay[0, 0], signal_input_samples_delay[0, 1]
+        )
+        self.assertEqual(
+            signal_input_samples_delay[0, 2], signal_input_samples_delay[0, 3]
+        )
 
         # antenna #0 is shorter, so should have a greater delay
-        self.assertGreater(signal_input_samples_delay[0,2], signal_input_samples_delay[0,0], msg=f"{signal_input_samples_delay}")
+        self.assertGreater(
+            signal_input_samples_delay[0, 2],
+            signal_input_samples_delay[0, 0],
+            msg=f"{signal_input_samples_delay}",
+        )
         # antenna #1 is longest, so should have delay 0
-        self.assertEqual(0, signal_input_samples_delay[0,0])
+        self.assertEqual(0, signal_input_samples_delay[0, 0])
 
         # the subband weights depend on the frequency of the subband,
         # and on the exact delay and loss differences between the cables.
         # rather than repeating the computations from the code,
         # we implement this as a regression test.
-        subband_weights = self.sdp_proxy.FPGA_subband_weights_RW.reshape(N_pn, A_pn, N_pol, N_subbands)
+        subband_weights = self.sdp_proxy.FPGA_subband_weights_RW.reshape(
+            N_pn, A_pn, N_pol, N_subbands
+        )
 
         def to_complex(weight):
             return weight_to_complex(weight, SDP.SUBBAND_UNIT_WEIGHT)
 
         # weight should be equal for both polarisations, different per antenna
-        self.assertAlmostEqual(0.929 + 0j,     to_complex(subband_weights[0, 0, 0,   0]), places=3)
-        self.assertAlmostEqual(0.309 + 0.876j, to_complex(subband_weights[0, 0, 0, 511]), places=3)
-        self.assertAlmostEqual(0.929 + 0j,     to_complex(subband_weights[0, 0, 1,   0]), places=3)
-        self.assertAlmostEqual(0.309 + 0.876j, to_complex(subband_weights[0, 0, 1, 511]), places=3)
-        self.assertAlmostEqual(0.989 + 0j,     to_complex(subband_weights[0, 1, 0,   0]), places=3)
-        self.assertAlmostEqual(0.883 - 0.444j, to_complex(subband_weights[0, 1, 0, 511]), places=3)
-        self.assertAlmostEqual(0.989 + 0j,     to_complex(subband_weights[0, 1, 1,   0]), places=3)
-        self.assertAlmostEqual(0.883 - 0.444j, to_complex(subband_weights[0, 1, 1, 511]), places=3)
+        self.assertAlmostEqual(
+            0.929 + 0j, to_complex(subband_weights[0, 0, 0, 0]), places=3
+        )
+        self.assertAlmostEqual(
+            0.309 + 0.876j, to_complex(subband_weights[0, 0, 0, 511]), places=3
+        )
+        self.assertAlmostEqual(
+            0.929 + 0j, to_complex(subband_weights[0, 0, 1, 0]), places=3
+        )
+        self.assertAlmostEqual(
+            0.309 + 0.876j, to_complex(subband_weights[0, 0, 1, 511]), places=3
+        )
+        self.assertAlmostEqual(
+            0.989 + 0j, to_complex(subband_weights[0, 1, 0, 0]), places=3
+        )
+        self.assertAlmostEqual(
+            0.883 - 0.444j, to_complex(subband_weights[0, 1, 0, 511]), places=3
+        )
+        self.assertAlmostEqual(
+            0.989 + 0j, to_complex(subband_weights[0, 1, 1, 0]), places=3
+        )
+        self.assertAlmostEqual(
+            0.883 - 0.444j, to_complex(subband_weights[0, 1, 1, 511]), places=3
+        )
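
A note on the Control_to_RECV_mapping literals reformatted above: each consecutive pair picks a RECV device (here always 1, i.e. STAT/RECV/1; -1 marks an unconnected antenna) and an input on that device, which is what inline comments such as "# [1, 0,  1, 1,  1, 2,  1, x  ...  1, 95]" spell out. A minimal standalone sketch of that reading, with DEFAULT_N_HBA_TILES assumed to be 48 rather than imported from tangostationcontrol:

    # Sketch only: how the flattened mapping pairs used above group per antenna.
    import numpy

    DEFAULT_N_HBA_TILES = 48  # assumed value, mirroring the tests

    # Antennas 0 and 1 mapped to inputs 0 and 1 of RECV device 1; rest unmapped.
    mapping = numpy.array(
        [1, 0, 1, 1] + [-1, -1] * (DEFAULT_N_HBA_TILES - 2)
    ).reshape(-1, 2)

    connected = mapping[:, 0] >= 0
    print(int(connected.sum()))  # 2 mapped antennas
    print(mapping[connected])    # [[1 0]
                                 #  [1 1]]
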
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_apsct.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_apsct.py
index d973581e88cceb7510d2d257b006aa81ebbfbdfb..745b84597147fe52d08e0026b4903a7609594606 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_apsct.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_apsct.py
@@ -1,16 +1,9 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from .base import AbstractTestBases
 
 
 class TestDeviceAPSCT(AbstractTestBases.TestDeviceBase):
-
     def setUp(self):
         super().setUp("STAT/APSCT/1")
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_apspu.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_apspu.py
index 5ebadc24029ce4a8e4a8b56805685d69aec73f2b..d2726e67682df81923f8c3c6ff7dac12902b7a1b 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_apspu.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_apspu.py
@@ -1,16 +1,9 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from .base import AbstractTestBases
 
 
 class TestDeviceAPSPU(AbstractTestBases.TestDeviceBase):
-
     def setUp(self):
         super().setUp("STAT/APSPU/1")
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_beamlet.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_beamlet.py
index d16cbb9abbdd75f443ac9e89367b00cf2e7ae082..7e548276b4f947095bcc6702b40a37992d829445 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_beamlet.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_beamlet.py
@@ -1,26 +1,26 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
-from .base import AbstractTestBases
+import time
+from ctypes import c_short
 
+import numpy
+import numpy.testing
+from tango import DevState
+from tangostationcontrol.common.constants import (
+    N_beamlets_ctrl,
+    S_pn,
+    CLK_200_MHZ,
+    CLK_160_MHZ,
+    MAX_INPUTS,
+    N_pn,
+)
 from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
-from tangostationcontrol.common.constants import N_beamlets_ctrl, S_pn, CLK_200_MHZ, CLK_160_MHZ, MAX_INPUTS, N_pn
 
-from tango import DevState
+from .base import AbstractTestBases
 
-import numpy
-import numpy.testing
-import time
-from ctypes import c_short
 
 class TestDeviceBeamlet(AbstractTestBases.TestDeviceBase):
-
     def setUp(self):
         """Intentionally recreate the device object in each test"""
         super().setUp("STAT/Beamlet/1")
@@ -60,16 +60,18 @@ class TestDeviceBeamlet(AbstractTestBases.TestDeviceBase):
 
         # With a unit weight of 2**14, we thus expect beamformer weights of -2**14 + 0j,
         # which is 49152 when read as an uint32.
-        self.assertEqual(-2**14, c_short(49152).value) # check our calculations
-        expected_bf_weights = numpy.array([49152] * MAX_INPUTS * N_beamlets_ctrl, dtype=numpy.uint32)
+        self.assertEqual(-(2**14), c_short(49152).value)  # check our calculations
+        expected_bf_weights = numpy.array(
+            [49152] * MAX_INPUTS * N_beamlets_ctrl, dtype=numpy.uint32
+        )
 
         numpy.testing.assert_almost_equal(expected_bf_weights, calculated_bf_weights)
-    
+
     def test_subband_select_change(self):
         # Setup configuration
         sdp_proxy = self.setup_sdp()
 
-        # Change subband 
+        # Change subband
         self.proxy.off()
         self.proxy.initialise()
         self.assertEqual(DevState.STANDBY, self.proxy.state())
@@ -80,38 +82,49 @@ class TestDeviceBeamlet(AbstractTestBases.TestDeviceBase):
         # The subband frequency of HBA subband 10 is 201953125 Hz
         # so its period is approx. 4.95 ns and half a period is 2.4758e-9 s
         delays = numpy.array([[2.4758e-9] * MAX_INPUTS] * N_beamlets_ctrl)
-        calculated_bf_weights_subband_10 = self.proxy.calculate_bf_weights(delays.flatten())
-        
-        self.assertEqual(-2**14, c_short(49152).value) # check our calculations
-        expected_bf_weights_10 = numpy.array([49152] * MAX_INPUTS * N_beamlets_ctrl, dtype=numpy.uint32)
-        numpy.testing.assert_almost_equal(expected_bf_weights_10, calculated_bf_weights_subband_10)
+        calculated_bf_weights_subband_10 = self.proxy.calculate_bf_weights(
+            delays.flatten()
+        )
+
+        self.assertEqual(-(2**14), c_short(49152).value)  # check our calculations
+        expected_bf_weights_10 = numpy.array(
+            [49152] * MAX_INPUTS * N_beamlets_ctrl, dtype=numpy.uint32
+        )
+        numpy.testing.assert_almost_equal(
+            expected_bf_weights_10, calculated_bf_weights_subband_10
+        )
 
     def test_sdp_clock_change(self):
         # Setup configuration
         sdp_proxy = self.setup_sdp()
 
         self.proxy.initialise()
-        self.proxy.subband_select_RW = numpy.array(list(range(317)) + [316] + list(range(318,N_beamlets_ctrl)), dtype=numpy.uint32)
+        self.proxy.subband_select_RW = numpy.array(
+            list(range(317)) + [316] + list(range(318, N_beamlets_ctrl)),
+            dtype=numpy.uint32,
+        )
         self.proxy.on()
 
         # any non-zero delay should result in different weights for different clocks
         delays = numpy.array([[2.5e-9] * MAX_INPUTS] * N_beamlets_ctrl)
 
         sdp_proxy.clock_RW = CLK_200_MHZ
-        time.sleep(1) # wait for beamlet device to process change event
+        time.sleep(1)  # wait for beamlet device to process change event
         calculated_bf_weights_200 = self.proxy.calculate_bf_weights(delays.flatten())
 
         sdp_proxy.clock_RW = CLK_160_MHZ
-        time.sleep(1) # wait for beamlet device to process change event
+        time.sleep(1)  # wait for beamlet device to process change event
         calculated_bf_weights_160 = self.proxy.calculate_bf_weights(delays.flatten())
 
         sdp_proxy.clock_RW = CLK_200_MHZ
-        time.sleep(1) # wait for beamlet device to process change event
+        time.sleep(1)  # wait for beamlet device to process change event
         calculated_bf_weights_200_v2 = self.proxy.calculate_bf_weights(delays.flatten())
 
         # outcome should be changed back and forth across clock changes
         self.assertTrue((calculated_bf_weights_200 != calculated_bf_weights_160).all())
-        self.assertTrue((calculated_bf_weights_200 == calculated_bf_weights_200_v2).all())
+        self.assertTrue(
+            (calculated_bf_weights_200 == calculated_bf_weights_200_v2).all()
+        )
 
         # change subbands
         self.proxy.off()
@@ -119,10 +132,13 @@ class TestDeviceBeamlet(AbstractTestBases.TestDeviceBase):
         self.proxy.subband_select_RW = [317] * N_beamlets_ctrl
         self.proxy.on()
         calculated_bf_weights_200_v3 = self.proxy.calculate_bf_weights(delays.flatten())
-        self.assertTrue((calculated_bf_weights_200_v2 != calculated_bf_weights_200_v3).all())
+        self.assertTrue(
+            (calculated_bf_weights_200_v2 != calculated_bf_weights_200_v3).all()
+        )
 
         sdp_proxy.clock_RW = CLK_160_MHZ
-        time.sleep(1) # wait for beamlet device to process change event
+        time.sleep(1)  # wait for beamlet device to process change event
         calculated_bf_weights_160_v2 = self.proxy.calculate_bf_weights(delays.flatten())
-        self.assertTrue((calculated_bf_weights_160 != calculated_bf_weights_160_v2).all())
-
+        self.assertTrue(
+            (calculated_bf_weights_160 != calculated_bf_weights_160_v2).all()
+        )
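
The c_short(49152) sanity checks above rest on a two's-complement identity: a beamformer weight with real part -2**14 reads back as 49152 once the 16-bit value is interpreted as unsigned. A quick standalone confirmation using only ctypes (nothing station-specific is needed):

    from ctypes import c_short, c_ushort

    # 0xC000 is -2**14 as a signed 16-bit value and 49152 as an unsigned one.
    assert c_short(49152).value == -(2**14)
    assert c_ushort(-(2**14)).value == 49152
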
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_boot.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_boot.py
index 7bd56cba750ac29bce04b45f01872b15671ff2a3..49ce1140e7bbf49a9a1c7d693bf0ff223e6da679 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_boot.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_boot.py
@@ -1,23 +1,15 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import time
 
 from tango import DevState
-
-from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
-from tangostationcontrol.integration_test import base
 from tangostationcontrol.common.constants import DEFAULT_POLLING_PERIOD
+from tangostationcontrol.integration_test import base
+from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
 
 
 class TestDeviceBoot(base.IntegrationTestCase):
-
     def setUp(self):
         self.proxy = TestDeviceProxy("STAT/Boot/1")
 
@@ -30,7 +22,9 @@ class TestDeviceBoot(base.IntegrationTestCase):
         """Test if we can reinitialise the station"""
 
         # This attribute needs to be polled for the TemperatureManager test to successfully initialise
-        TestDeviceProxy("STAT/RECV/1").poll_attribute("HBAT_LED_on_RW", DEFAULT_POLLING_PERIOD)
+        TestDeviceProxy("STAT/RECV/1").poll_attribute(
+            "HBAT_LED_on_RW", DEFAULT_POLLING_PERIOD
+        )
 
         self.proxy.reboot()
 
@@ -40,4 +34,8 @@ class TestDeviceBoot(base.IntegrationTestCase):
             time.sleep(1)
 
         # check whether initialisation succeeded
-        self.assertEqual(100, self.proxy.progress_R, msg=f"Initialisation of station failed. Status: {self.proxy.status_R}")
+        self.assertEqual(
+            100,
+            self.proxy.progress_R,
+            msg=f"Initialisation of station failed. Status: {self.proxy.status_R}",
+        )
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_bst.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_bst.py
index c049a7befb1d9ebbaeeb2714b088058f36a983aa..f4efefad6cedc9789c831801df6f93ca70308c59 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_bst.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_bst.py
@@ -1,15 +1,10 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
-from .base import AbstractTestBases
 from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
 
+from .base import AbstractTestBases
+
 
 class TestDeviceBST(AbstractTestBases.TestDeviceBase):
     def setUp(self):
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_ccd.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_ccd.py
index 3f259be1c3d11269fc387fe85e42b5f05b9e8764..b2e6560544f93478bb7817af7509f89c86b3b761 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_ccd.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_ccd.py
@@ -1,16 +1,9 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from .base import AbstractTestBases
 
 
 class TestDeviceCCD(AbstractTestBases.TestDeviceBase):
-
     def setUp(self):
         super().setUp("STAT/CCD/1")
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_digitalbeam.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_digitalbeam.py
index c83722787db23f90b91c90306ee474a1784d3b75..8bbca5307eba4d27018efc44b93009c98896beb3 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_digitalbeam.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_digitalbeam.py
@@ -1,26 +1,31 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+import time
 
-from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
+import numpy
+from tangostationcontrol.common.constants import (
+    MAX_ANTENNA,
+    N_beamlets_ctrl,
+    N_pn,
+    CLK_200_MHZ,
+    CLK_160_MHZ,
+    DEFAULT_N_HBA_TILES,
+)
 from tangostationcontrol.devices.antennafield import AntennaQuality, AntennaUse
-from tangostationcontrol.common.constants import MAX_ANTENNA, N_beamlets_ctrl, N_pn, CLK_200_MHZ, CLK_160_MHZ, DEFAULT_N_HBA_TILES
+from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
 
 from .base import AbstractTestBases
 
-import numpy
-import time
 
 class TestDeviceDigitalBeam(AbstractTestBases.TestDeviceBase):
 
     antenna_qualities_ok = numpy.array([AntennaQuality.OK] * MAX_ANTENNA)
-    antenna_qualities_only_second = numpy.array([AntennaQuality.BROKEN] + [AntennaQuality.OK] + [AntennaQuality.BROKEN] * (MAX_ANTENNA - 2))
+    antenna_qualities_only_second = numpy.array(
+        [AntennaQuality.BROKEN]
+        + [AntennaQuality.OK]
+        + [AntennaQuality.BROKEN] * (MAX_ANTENNA - 2)
+    )
     antenna_use_ok = numpy.array([AntennaUse.AUTO] * MAX_ANTENNA)
 
     antennafield_iden = "STAT/AntennaField/1"
@@ -50,7 +55,7 @@ class TestDeviceDigitalBeam(AbstractTestBases.TestDeviceBase):
         beamlet_proxy.off()
         beamlet_proxy.initialise()
         return beamlet_proxy
-    
+
     def setup_beamlet_proxy(self):
         beamlet_proxy = TestDeviceProxy(self.beamlet_iden)
         beamlet_proxy.off()
@@ -70,13 +75,15 @@ class TestDeviceDigitalBeam(AbstractTestBases.TestDeviceBase):
         # setup AntennaField
         NR_TILES = DEFAULT_N_HBA_TILES
         antennafield_proxy = TestDeviceProxy(self.antennafield_iden)
-        control_mapping = [[1,i] for i in range(NR_TILES)]
-        antennafield_proxy.put_property({
-            "RECV_devices": [self.recv_iden],
-            "Control_to_RECV_mapping": numpy.array(control_mapping).flatten(),
-            "Antenna_Quality": antenna_qualities,
-            "Antenna_Use": antenna_use
-        })
+        control_mapping = [[1, i] for i in range(NR_TILES)]
+        antennafield_proxy.put_property(
+            {
+                "RECV_devices": [self.recv_iden],
+                "Control_to_RECV_mapping": numpy.array(control_mapping).flatten(),
+                "Antenna_Quality": antenna_qualities,
+                "Antenna_Use": antenna_use,
+            }
+        )
         antennafield_proxy.off()
         antennafield_proxy.boot()
         return antennafield_proxy
@@ -84,9 +91,7 @@ class TestDeviceDigitalBeam(AbstractTestBases.TestDeviceBase):
     def test_pointing_to_zenith_clock_change(self):
         self.addCleanup(TestDeviceProxy.test_device_turn_off, self.beamlet_iden)
         self.addCleanup(TestDeviceProxy.test_device_turn_off, self.sdp_iden)
-        self.addCleanup(
-            TestDeviceProxy.test_device_turn_off, self.antennafield_iden
-        )
+        self.addCleanup(TestDeviceProxy.test_device_turn_off, self.antennafield_iden)
 
         self.sdp_proxy = self.setup_sdp_proxy()
         self.setup_antennafield_proxy(self.antenna_qualities_ok, self.antenna_use_ok)
@@ -103,10 +108,14 @@ class TestDeviceDigitalBeam(AbstractTestBases.TestDeviceBase):
         self.proxy.on()
 
         # Point to Zenith
-        self.proxy.set_pointing(numpy.array([["AZELGEO", "0deg", "90deg"]] * N_beamlets_ctrl).flatten())
+        self.proxy.set_pointing(
+            numpy.array([["AZELGEO", "0deg", "90deg"]] * N_beamlets_ctrl).flatten()
+        )
 
         # beam weights should now be non-zero, we don't actually check their values for correctness
-        FPGA_bf_weights_xx_yy_clock200 = self.beamlet_proxy.FPGA_bf_weights_xx_yy_RW.flatten()
+        FPGA_bf_weights_xx_yy_clock200 = (
+            self.beamlet_proxy.FPGA_bf_weights_xx_yy_RW.flatten()
+        )
         self.assertNotEqual(0, sum(FPGA_bf_weights_xx_yy_clock200))
 
         self.beamlet_proxy = self.initialise_beamlet_proxy()
@@ -116,22 +125,27 @@ class TestDeviceDigitalBeam(AbstractTestBases.TestDeviceBase):
         self.sdp_proxy.clock_RW = CLK_160_MHZ
         time.sleep(1)
 
-        FPGA_bf_weights_xx_yy_clock160 = self.beamlet_proxy.FPGA_bf_weights_xx_yy_RW.flatten()
+        FPGA_bf_weights_xx_yy_clock160 = (
+            self.beamlet_proxy.FPGA_bf_weights_xx_yy_RW.flatten()
+        )
         # Assert some values are different
-        self.assertNotEqual(sum(FPGA_bf_weights_xx_yy_clock160), sum(FPGA_bf_weights_xx_yy_clock200))
-    
+        self.assertNotEqual(
+            sum(FPGA_bf_weights_xx_yy_clock160), sum(FPGA_bf_weights_xx_yy_clock200)
+        )
+
     def test_pointing_to_zenith_subband_change(self):
         self.addCleanup(TestDeviceProxy.test_device_turn_off, self.beamlet_iden)
         self.addCleanup(TestDeviceProxy.test_device_turn_off, self.sdp_iden)
-        self.addCleanup(
-            TestDeviceProxy.test_device_turn_off, self.antennafield_iden
-        )
+        self.addCleanup(TestDeviceProxy.test_device_turn_off, self.antennafield_iden)
 
         self.sdp_proxy = self.setup_sdp_proxy()
         self.setup_antennafield_proxy(self.antenna_qualities_ok, self.antenna_use_ok)
 
         self.beamlet_proxy = self.initialise_beamlet_proxy()
-        self.beamlet_proxy.subband_select_RW = numpy.array(list(range(317)) + [316] + list(range(318,N_beamlets_ctrl)), dtype=numpy.uint32)
+        self.beamlet_proxy.subband_select_RW = numpy.array(
+            list(range(317)) + [316] + list(range(318, N_beamlets_ctrl)),
+            dtype=numpy.uint32,
+        )
         self.beamlet_proxy.on()
 
         self.proxy.initialise()
@@ -139,9 +153,13 @@ class TestDeviceDigitalBeam(AbstractTestBases.TestDeviceBase):
         self.proxy.on()
 
         # Point to Zenith
-        self.proxy.set_pointing(numpy.array([["AZELGEO", "0deg", "90deg"]] * N_beamlets_ctrl).flatten())
+        self.proxy.set_pointing(
+            numpy.array([["AZELGEO", "0deg", "90deg"]] * N_beamlets_ctrl).flatten()
+        )
         # Store values with first subband configuration
-        FPGA_bf_weights_xx_yy_subband_v1 = self.beamlet_proxy.FPGA_bf_weights_xx_yy_RW.flatten()
+        FPGA_bf_weights_xx_yy_subband_v1 = (
+            self.beamlet_proxy.FPGA_bf_weights_xx_yy_RW.flatten()
+        )
 
         # Restart beamlet proxy
         self.beamlet_proxy = self.initialise_beamlet_proxy()
@@ -149,17 +167,19 @@ class TestDeviceDigitalBeam(AbstractTestBases.TestDeviceBase):
         self.beamlet_proxy.on()
 
         # Store values with second subband configuration
-        FPGA_bf_weights_xx_yy_subband_v2 = self.beamlet_proxy.FPGA_bf_weights_xx_yy_RW.flatten()
+        FPGA_bf_weights_xx_yy_subband_v2 = (
+            self.beamlet_proxy.FPGA_bf_weights_xx_yy_RW.flatten()
+        )
         # Assert some values are different
-        self.assertNotEqual(sum(FPGA_bf_weights_xx_yy_subband_v1), sum(FPGA_bf_weights_xx_yy_subband_v2))
+        self.assertNotEqual(
+            sum(FPGA_bf_weights_xx_yy_subband_v1), sum(FPGA_bf_weights_xx_yy_subband_v2)
+        )
 
     def test_set_pointing_masked_enable(self):
         """Verify that only selected inputs are written"""
 
         self.addCleanup(TestDeviceProxy.test_device_turn_off, self.sdp_iden)
-        self.addCleanup(
-            TestDeviceProxy.test_device_turn_off, self.antennafield_iden
-        )
+        self.addCleanup(TestDeviceProxy.test_device_turn_off, self.antennafield_iden)
 
         self.setup_sdp_proxy()
         self.setup_antennafield_proxy(self.antenna_qualities_ok, self.antenna_use_ok)
@@ -172,24 +192,26 @@ class TestDeviceDigitalBeam(AbstractTestBases.TestDeviceBase):
         self.beamlet_proxy.FPGA_bf_weights_xx_yy_RW = all_zeros
 
         # Enable all inputs
-        self.proxy.input_select_RW = numpy.array([[True] * N_beamlets_ctrl] * MAX_ANTENNA)
+        self.proxy.input_select_RW = numpy.array(
+            [[True] * N_beamlets_ctrl] * MAX_ANTENNA
+        )
 
         self.proxy.set_pointing(
             numpy.array([["AZELGEO", "0deg", "90deg"]] * N_beamlets_ctrl).flatten()
         )
 
         # Verify all zeros are replaced with other values for all inputs
-        self.assertTrue(numpy.any(numpy.not_equal(
-            all_zeros, self.beamlet_proxy.FPGA_bf_weights_xx_yy_RW
-        )))
+        self.assertTrue(
+            numpy.any(
+                numpy.not_equal(all_zeros, self.beamlet_proxy.FPGA_bf_weights_xx_yy_RW)
+            )
+        )
 
     def test_set_pointing_masked_disable(self):
         """Verify that only diabled inputs are unchanged"""
 
         self.addCleanup(TestDeviceProxy.test_device_turn_off, self.sdp_iden)
-        self.addCleanup(
-            TestDeviceProxy.test_device_turn_off, self.antennafield_iden
-        )
+        self.addCleanup(TestDeviceProxy.test_device_turn_off, self.antennafield_iden)
 
         self.setup_sdp_proxy()
         self.setup_antennafield_proxy(self.antenna_qualities_ok, self.antenna_use_ok)
@@ -202,7 +224,9 @@ class TestDeviceDigitalBeam(AbstractTestBases.TestDeviceBase):
         self.beamlet_proxy.FPGA_bf_weights_xx_yy_RW = non_zeros
 
         # Disable all inputs
-        self.proxy.input_select_RW = numpy.array([[False] * N_beamlets_ctrl] * MAX_ANTENNA)
+        self.proxy.input_select_RW = numpy.array(
+            [[False] * N_beamlets_ctrl] * MAX_ANTENNA
+        )
 
         self.proxy.set_pointing(
             numpy.array([["AZELGEO", "0deg", "90deg"]] * N_beamlets_ctrl).flatten()
@@ -216,31 +240,54 @@ class TestDeviceDigitalBeam(AbstractTestBases.TestDeviceBase):
     def test_input_select_with_all_antennas_ok(self):
         """Verify if input and antenna select are correctly calculated following Antennafield.Antenna_Usage_Mask"""
 
-        self.addCleanup(
-            TestDeviceProxy.test_device_turn_off, self.antennafield_iden
-        )
+        self.addCleanup(TestDeviceProxy.test_device_turn_off, self.antennafield_iden)
 
-        antennafield_proxy = self.setup_antennafield_proxy(self.antenna_qualities_ok, self.antenna_use_ok)
-        numpy.testing.assert_equal(numpy.array([True] * MAX_ANTENNA), antennafield_proxy.Antenna_Usage_Mask_R)
+        antennafield_proxy = self.setup_antennafield_proxy(
+            self.antenna_qualities_ok, self.antenna_use_ok
+        )
+        numpy.testing.assert_equal(
+            numpy.array([True] * MAX_ANTENNA), antennafield_proxy.Antenna_Usage_Mask_R
+        )
         self.setUp()
         self.proxy.warm_boot()
-        expected_input_select = numpy.array([[True] * N_beamlets_ctrl ] * DEFAULT_N_HBA_TILES + [[False] * N_beamlets_ctrl] * DEFAULT_N_HBA_TILES)    # first 48 rows are True
+        expected_input_select = numpy.array(
+            [[True] * N_beamlets_ctrl] * DEFAULT_N_HBA_TILES
+            + [[False] * N_beamlets_ctrl] * DEFAULT_N_HBA_TILES
+        )  # first 48 rows are True
         numpy.testing.assert_equal(expected_input_select, self.proxy.input_select_RW)
-        expected_antenna_select = numpy.array([[True] * N_beamlets_ctrl ] * DEFAULT_N_HBA_TILES)
-        numpy.testing.assert_equal(expected_antenna_select, self.proxy.antenna_select_RW)
+        expected_antenna_select = numpy.array(
+            [[True] * N_beamlets_ctrl] * DEFAULT_N_HBA_TILES
+        )
+        numpy.testing.assert_equal(
+            expected_antenna_select, self.proxy.antenna_select_RW
+        )
 
     def test_input_select_with_only_second_antenna_ok(self):
         """Verify if input and antenna select are correctly calculated following Antennafield.Antenna_Usage_Mask"""
 
-        self.addCleanup(
-            TestDeviceProxy.test_device_turn_off, self.antennafield_iden
-        )
+        self.addCleanup(TestDeviceProxy.test_device_turn_off, self.antennafield_iden)
 
-        antennafield_proxy = self.setup_antennafield_proxy(self.antenna_qualities_only_second, self.antenna_use_ok)
-        numpy.testing.assert_equal(numpy.array([False] + [True] + [False] * (MAX_ANTENNA - 2)), antennafield_proxy.Antenna_Usage_Mask_R)
+        antennafield_proxy = self.setup_antennafield_proxy(
+            self.antenna_qualities_only_second, self.antenna_use_ok
+        )
+        numpy.testing.assert_equal(
+            numpy.array([False] + [True] + [False] * (MAX_ANTENNA - 2)),
+            antennafield_proxy.Antenna_Usage_Mask_R,
+        )
         self.setUp()
         self.proxy.warm_boot()
-        expected_input_select = numpy.array([[False] * N_beamlets_ctrl ] + [[True] * N_beamlets_ctrl] + [[False] * N_beamlets_ctrl] * (DEFAULT_N_HBA_TILES - 2) + [[False] * N_beamlets_ctrl] * DEFAULT_N_HBA_TILES)    # first 48 rows are True
+        expected_input_select = numpy.array(
+            [[False] * N_beamlets_ctrl]
+            + [[True] * N_beamlets_ctrl]
+            + [[False] * N_beamlets_ctrl] * (DEFAULT_N_HBA_TILES - 2)
+            + [[False] * N_beamlets_ctrl] * DEFAULT_N_HBA_TILES
+        )  # only the second antenna's row is True
         numpy.testing.assert_equal(expected_input_select, self.proxy.input_select_RW)
-        expected_antenna_select = numpy.array([[False] * N_beamlets_ctrl ] + [[True] * N_beamlets_ctrl] + [[False] * N_beamlets_ctrl] * (DEFAULT_N_HBA_TILES - 2))
-        numpy.testing.assert_equal(expected_antenna_select, self.proxy.antenna_select_RW)
+        expected_antenna_select = numpy.array(
+            [[False] * N_beamlets_ctrl]
+            + [[True] * N_beamlets_ctrl]
+            + [[False] * N_beamlets_ctrl] * (DEFAULT_N_HBA_TILES - 2)
+        )
+        numpy.testing.assert_equal(
+            expected_antenna_select, self.proxy.antenna_select_RW
+        )
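
The expected_input_select and expected_antenna_select literals in the last two tests encode one rule: for the first DEFAULT_N_HBA_TILES inputs the selection repeats the per-antenna usage mask across every beamlet, and the remaining inputs stay False. A compact sketch of that expectation, with the constants assumed locally instead of imported from tangostationcontrol:

    import numpy

    MAX_ANTENNA = 96          # assumed, mirroring the tests
    DEFAULT_N_HBA_TILES = 48  # assumed
    N_beamlets_ctrl = 488     # assumed

    # Usage mask with only the second antenna OK, as in the second test.
    usage_mask = numpy.array([False, True] + [False] * (MAX_ANTENNA - 2))

    expected_input_select = numpy.zeros((MAX_ANTENNA, N_beamlets_ctrl), dtype=bool)
    expected_input_select[:DEFAULT_N_HBA_TILES] = usage_mask[:DEFAULT_N_HBA_TILES, None]

    # Matches test_input_select_with_only_second_antenna_ok: only row 1 is True.
    assert expected_input_select[1].all()
    assert not expected_input_select[0].any()
    assert not expected_input_select[2:].any()
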
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_observation.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_observation.py
index d4f0e729cd613a596715bec9d71a1419f48820ef..ff215384c3c446581e68126bfc63caf6990e4778 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_observation.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_observation.py
@@ -1,34 +1,122 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-import numpy
-from json import loads
 from datetime import datetime
-from tango import DevState, DevFailed
+from json import loads
 
+import numpy
+from tango import DevState, DevFailed
+from tangostationcontrol.common.constants import (
+    N_beamlets_ctrl,
+    MAX_ANTENNA,
+    DEFAULT_N_HBA_TILES,
+)
+from tangostationcontrol.devices.antennafield import AntennaQuality, AntennaUse
 from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
 from tangostationcontrol.test.devices.test_observation_base import TestObservationBase
-from tangostationcontrol.devices.antennafield import AntennaQuality, AntennaUse
-from tangostationcontrol.common.constants import N_beamlets_ctrl,  MAX_ANTENNA, DEFAULT_N_HBA_TILES
+
 from .base import AbstractTestBases
 
+
 class TestDeviceObservation(AbstractTestBases.TestDeviceBase):
 
     ANTENNA_TO_SDP_MAPPING = [
-      "0", "0", "0", "1", "0", "2", "0", "3", "0", "4", "0", "5",
-      "1", "0", "1", "1", "1", "2", "1", "3", "1", "4", "1", "5",
-      "2", "0", "2", "1", "2", "2", "2", "3", "2", "4", "2", "5",
-      "3", "0", "3", "1", "3", "2", "3", "3", "3", "4", "3", "5",
-      "4", "0", "4", "1", "4", "2", "4", "3", "4", "4", "4", "5",
-      "5", "0", "5", "1", "5", "2", "5", "3", "5", "4", "5", "5",
-      "6", "0", "6", "1", "6", "2", "6", "3", "6", "4", "6", "5",
-      "7", "0", "7", "1", "7", "2", "7", "3", "7", "4", "7", "5",
+        "0",
+        "0",
+        "0",
+        "1",
+        "0",
+        "2",
+        "0",
+        "3",
+        "0",
+        "4",
+        "0",
+        "5",
+        "1",
+        "0",
+        "1",
+        "1",
+        "1",
+        "2",
+        "1",
+        "3",
+        "1",
+        "4",
+        "1",
+        "5",
+        "2",
+        "0",
+        "2",
+        "1",
+        "2",
+        "2",
+        "2",
+        "3",
+        "2",
+        "4",
+        "2",
+        "5",
+        "3",
+        "0",
+        "3",
+        "1",
+        "3",
+        "2",
+        "3",
+        "3",
+        "3",
+        "4",
+        "3",
+        "5",
+        "4",
+        "0",
+        "4",
+        "1",
+        "4",
+        "2",
+        "4",
+        "3",
+        "4",
+        "4",
+        "4",
+        "5",
+        "5",
+        "0",
+        "5",
+        "1",
+        "5",
+        "2",
+        "5",
+        "3",
+        "5",
+        "4",
+        "5",
+        "5",
+        "6",
+        "0",
+        "6",
+        "1",
+        "6",
+        "2",
+        "6",
+        "3",
+        "6",
+        "4",
+        "6",
+        "5",
+        "7",
+        "0",
+        "7",
+        "1",
+        "7",
+        "2",
+        "7",
+        "3",
+        "7",
+        "4",
+        "7",
+        "5",
     ]
 
     def setUp(self):
@@ -59,13 +147,18 @@ class TestDeviceObservation(AbstractTestBases.TestDeviceBase):
     def setup_antennafield_proxy(self):
         # setup AntennaField
         antennafield_proxy = TestDeviceProxy("STAT/AntennaField/1")
-        control_mapping = [[1,i] for i in range(DEFAULT_N_HBA_TILES)]
+        control_mapping = [[1, i] for i in range(DEFAULT_N_HBA_TILES)]
         antenna_qualities = numpy.array([AntennaQuality.OK] * MAX_ANTENNA)
         antenna_use = numpy.array([AntennaUse.AUTO] * MAX_ANTENNA)
-        antennafield_proxy.put_property({"RECV_devices": ["STAT/RECV/1"],
-                                 "Control_to_RECV_mapping": numpy.array(control_mapping).flatten(),
-                                 "Antenna_to_SDP_Mapping": self.ANTENNA_TO_SDP_MAPPING,
-                                 'Antenna_Quality': antenna_qualities, 'Antenna_Use': antenna_use})
+        antennafield_proxy.put_property(
+            {
+                "RECV_devices": ["STAT/RECV/1"],
+                "Control_to_RECV_mapping": numpy.array(control_mapping).flatten(),
+                "Antenna_to_SDP_Mapping": self.ANTENNA_TO_SDP_MAPPING,
+                "Antenna_Quality": antenna_qualities,
+                "Antenna_Use": antenna_use,
+            }
+        )
         antennafield_proxy.off()
         antennafield_proxy.boot()
         return antennafield_proxy
@@ -119,23 +212,32 @@ class TestDeviceObservation(AbstractTestBases.TestDeviceBase):
         self.proxy.Initialise()
 
         with self.assertRaises(DevFailed):
-            self.proxy.write_attribute(
-                "observation_settings_RW", self.VALID_JSON)
+            self.proxy.write_attribute("observation_settings_RW", self.VALID_JSON)
 
     def test_attribute_match(self):
         """Test that JSON data is exposed to attributes"""
 
-        #failing
+        # failing
         data = loads(self.VALID_JSON)
         stop_timestamp = datetime.fromisoformat(data["stop_time"]).timestamp()
         observation_id = data["observation_id"]
         antenna_mask = data["antenna_mask"]
         filter = data["filter"]
         num_saps = len(data["SAPs"])
-        saps_subband = [ data["SAPs"][i]['subbands'] for i in range(0, num_saps)]
-        pointing_direction = data["SAPs"][0]['pointing']
-        saps_pointing = [(pointing_direction['direction_type'], f"{pointing_direction['angle1']}deg", f"{pointing_direction['angle2']}deg")]
-        tile_beam = [str(data['tile_beam']['direction_type']), f"{data['tile_beam']['angle1']}deg", f"{data['tile_beam']['angle2']}deg"]
+        saps_subband = [data["SAPs"][i]["subbands"] for i in range(0, num_saps)]
+        pointing_direction = data["SAPs"][0]["pointing"]
+        saps_pointing = [
+            (
+                pointing_direction["direction_type"],
+                f"{pointing_direction['angle1']}deg",
+                f"{pointing_direction['angle2']}deg",
+            )
+        ]
+        tile_beam = [
+            str(data["tile_beam"]["direction_type"]),
+            f"{data['tile_beam']['angle1']}deg",
+            f"{data['tile_beam']['angle2']}deg",
+        ]
         first_beamlet = data["first_beamlet"]
 
         self.proxy.off()
@@ -157,10 +259,16 @@ class TestDeviceObservation(AbstractTestBases.TestDeviceBase):
         """Test that attributes antenna_mask and filter are correctly applied"""
         self.setup_recv_proxy()
         antennafield_proxy = self.setup_antennafield_proxy()
-        antennafield_proxy.ANT_mask_RW = [True] * DEFAULT_N_HBA_TILES # set all masks to True
-        self.assertListEqual(antennafield_proxy.ANT_mask_RW.tolist(), [True] * DEFAULT_N_HBA_TILES)
+        antennafield_proxy.ANT_mask_RW = [
+            True
+        ] * DEFAULT_N_HBA_TILES  # set all masks to True
+        self.assertListEqual(
+            antennafield_proxy.ANT_mask_RW.tolist(), [True] * DEFAULT_N_HBA_TILES
+        )
         antennafield_proxy.RCU_band_select_RW = [0] * DEFAULT_N_HBA_TILES
-        self.assertListEqual(antennafield_proxy.RCU_band_select_RW.tolist(), [0] * DEFAULT_N_HBA_TILES)
+        self.assertListEqual(
+            antennafield_proxy.RCU_band_select_RW.tolist(), [0] * DEFAULT_N_HBA_TILES
+        )
         self.proxy.off()
         self.proxy.observation_settings_RW = self.VALID_JSON
         self.proxy.Initialise()
@@ -168,7 +276,9 @@ class TestDeviceObservation(AbstractTestBases.TestDeviceBase):
         expected_masks = [True, True, True] + [False] * 6 + [True] + [False] * 38
         self.assertListEqual(antennafield_proxy.ANT_mask_RW.tolist(), expected_masks)
         expected_bands = [2, 2, 2] + [0] * 6 + [2] + [0] * 38
-        self.assertListEqual(antennafield_proxy.RCU_band_select_RW.tolist(), expected_bands)
+        self.assertListEqual(
+            antennafield_proxy.RCU_band_select_RW.tolist(), expected_bands
+        )
 
     def test_apply_subbands(self):
         """Test that attribute sap subbands is correctly applied"""
@@ -180,44 +290,62 @@ class TestDeviceObservation(AbstractTestBases.TestDeviceBase):
         self.proxy.observation_settings_RW = self.VALID_JSON
         self.proxy.Initialise()
         self.proxy.On()
-        expected_subbands = [10,20,30] + [0] * (N_beamlets_ctrl-3)
-        self.assertListEqual(beamlet_proxy.subband_select_RW.tolist(), expected_subbands)
+        expected_subbands = [10, 20, 30] + [0] * (N_beamlets_ctrl - 3)
+        self.assertListEqual(
+            beamlet_proxy.subband_select_RW.tolist(), expected_subbands
+        )
 
     def test_apply_pointing(self):
         """Test that attribute sap pointing is correctly applied"""
         digitalbeam_proxy = self.setup_digitalbeam_proxy()
-        default_pointing = [("AZELGEO","0deg","90deg")]*N_beamlets_ctrl
+        default_pointing = [("AZELGEO", "0deg", "90deg")] * N_beamlets_ctrl
         digitalbeam_proxy.Pointing_direction_RW = default_pointing
-        self.assertListEqual(list(digitalbeam_proxy.Pointing_direction_RW), default_pointing)
+        self.assertListEqual(
+            list(digitalbeam_proxy.Pointing_direction_RW), default_pointing
+        )
         self.proxy.off()
         self.proxy.observation_settings_RW = self.VALID_JSON
         self.proxy.Initialise()
         self.proxy.On()
-        expected_pointing = [("J2000","1.5deg","0deg")] + [("AZELGEO","0deg","90deg")] * (N_beamlets_ctrl - 1)
-        self.assertListEqual(list(digitalbeam_proxy.Pointing_direction_RW), expected_pointing)
+        expected_pointing = [("J2000", "1.5deg", "0deg")] + [
+            ("AZELGEO", "0deg", "90deg")
+        ] * (N_beamlets_ctrl - 1)
+        self.assertListEqual(
+            list(digitalbeam_proxy.Pointing_direction_RW), expected_pointing
+        )
 
     def test_apply_antenna_select(self):
         """Test that antenna selection is correctly applied"""
         digitalbeam_proxy = self.setup_digitalbeam_proxy()
         default_selection = [[False] * N_beamlets_ctrl] * MAX_ANTENNA
         digitalbeam_proxy.antenna_select_RW = default_selection
-        self.assertListEqual(digitalbeam_proxy.antenna_select_RW.tolist()[9], default_selection[9])
+        self.assertListEqual(
+            digitalbeam_proxy.antenna_select_RW.tolist()[9], default_selection[9]
+        )
         self.proxy.off()
         self.proxy.observation_settings_RW = self.VALID_JSON
         self.proxy.Initialise()
         self.proxy.On()
-        self.assertListEqual(digitalbeam_proxy.antenna_select_RW.tolist()[9], [True] * N_beamlets_ctrl)
-        self.assertListEqual(digitalbeam_proxy.antenna_select_RW.tolist()[10], [False] * N_beamlets_ctrl)
+        self.assertListEqual(
+            digitalbeam_proxy.antenna_select_RW.tolist()[9], [True] * N_beamlets_ctrl
+        )
+        self.assertListEqual(
+            digitalbeam_proxy.antenna_select_RW.tolist()[10], [False] * N_beamlets_ctrl
+        )
 
     def test_apply_tilebeam(self):
-        #failing
+        # failing
         """Test that attribute tilebeam is correctly applied"""
         tilebeam_proxy = self.setup_tilebeam_proxy()
-        pointing_direction = [("J2000","0deg","0deg")] * DEFAULT_N_HBA_TILES
+        pointing_direction = [("J2000", "0deg", "0deg")] * DEFAULT_N_HBA_TILES
         tilebeam_proxy.Pointing_direction_RW = pointing_direction
-        self.assertListEqual(list(tilebeam_proxy.Pointing_direction_RW[0]), ["J2000","0deg","0deg"])
+        self.assertListEqual(
+            list(tilebeam_proxy.Pointing_direction_RW[0]), ["J2000", "0deg", "0deg"]
+        )
         self.proxy.off()
         self.proxy.observation_settings_RW = self.VALID_JSON
         self.proxy.Initialise()
         self.proxy.On()
-        self.assertListEqual(list(tilebeam_proxy.Pointing_direction_RW[0]), ["J2000","1.5deg","0deg"])
+        self.assertListEqual(
+            list(tilebeam_proxy.Pointing_direction_RW[0]), ["J2000", "1.5deg", "0deg"]
+        )
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_observation_control.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_observation_control.py
index 4b5489505c7c3c1c16d0739de3632cfc7236ae91..7ddc895816bca22e8d1fdbee5686ebc3b877268c 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_observation_control.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_observation_control.py
@@ -1,11 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import json
 from datetime import datetime
@@ -14,23 +8,111 @@ from datetime import timedelta
 import numpy
 from tango import DevFailed
 from tango import DevState
-
 from tangostationcontrol.common.constants import DEFAULT_N_HBA_TILES
 from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
 from tangostationcontrol.test.devices.test_observation_base import TestObservationBase
+
 from .base import AbstractTestBases
 
 
 class TestObservationControlDevice(AbstractTestBases.TestDeviceBase):
     ANTENNA_TO_SDP_MAPPING = [
-        "0", "0", "0", "1", "0", "2", "0", "3", "0", "4", "0", "5",
-        "1", "0", "1", "1", "1", "2", "1", "3", "1", "4", "1", "5",
-        "2", "0", "2", "1", "2", "2", "2", "3", "2", "4", "2", "5",
-        "3", "0", "3", "1", "3", "2", "3", "3", "3", "4", "3", "5",
-        "4", "0", "4", "1", "4", "2", "4", "3", "4", "4", "4", "5",
-        "5", "0", "5", "1", "5", "2", "5", "3", "5", "4", "5", "5",
-        "6", "0", "6", "1", "6", "2", "6", "3", "6", "4", "6", "5",
-        "7", "0", "7", "1", "7", "2", "7", "3", "7", "4", "7", "5",
+        "0",
+        "0",
+        "0",
+        "1",
+        "0",
+        "2",
+        "0",
+        "3",
+        "0",
+        "4",
+        "0",
+        "5",
+        "1",
+        "0",
+        "1",
+        "1",
+        "1",
+        "2",
+        "1",
+        "3",
+        "1",
+        "4",
+        "1",
+        "5",
+        "2",
+        "0",
+        "2",
+        "1",
+        "2",
+        "2",
+        "2",
+        "3",
+        "2",
+        "4",
+        "2",
+        "5",
+        "3",
+        "0",
+        "3",
+        "1",
+        "3",
+        "2",
+        "3",
+        "3",
+        "3",
+        "4",
+        "3",
+        "5",
+        "4",
+        "0",
+        "4",
+        "1",
+        "4",
+        "2",
+        "4",
+        "3",
+        "4",
+        "4",
+        "4",
+        "5",
+        "5",
+        "0",
+        "5",
+        "1",
+        "5",
+        "2",
+        "5",
+        "3",
+        "5",
+        "4",
+        "5",
+        "5",
+        "6",
+        "0",
+        "6",
+        "1",
+        "6",
+        "2",
+        "6",
+        "3",
+        "6",
+        "4",
+        "6",
+        "5",
+        "7",
+        "0",
+        "7",
+        "1",
+        "7",
+        "2",
+        "7",
+        "3",
+        "7",
+        "4",
+        "7",
+        "5",
     ]
 
     def setUp(self):
@@ -62,9 +144,13 @@ class TestObservationControlDevice(AbstractTestBases.TestDeviceBase):
         # setup AntennaField
         antennafield_proxy = TestDeviceProxy("STAT/AntennaField/1")
         control_mapping = [[1, i] for i in range(DEFAULT_N_HBA_TILES)]
-        antennafield_proxy.put_property({"RECV_devices": ["STAT/RECV/1"],
-                                         "Power_to_RECV_mapping": numpy.array(control_mapping).flatten(),
-                                         "Antenna_to_SDP_Mapping": self.ANTENNA_TO_SDP_MAPPING})
+        antennafield_proxy.put_property(
+            {
+                "RECV_devices": ["STAT/RECV/1"],
+                "Power_to_RECV_mapping": numpy.array(control_mapping).flatten(),
+                "Antenna_to_SDP_Mapping": self.ANTENNA_TO_SDP_MAPPING,
+            }
+        )
         antennafield_proxy.off()
         antennafield_proxy.warm_boot()
         antennafield_proxy.set_defaults()
@@ -120,28 +206,29 @@ class TestObservationControlDevice(AbstractTestBases.TestDeviceBase):
         """Test invalid parameter detection"""
 
         parameters = json.loads(self.VALID_JSON)
-        parameters['observation_id'] = -1
+        parameters["observation_id"] = -1
 
         self.on_device_assert(self.proxy)
         self.assertRaises(
-            DevFailed, self.proxy.start_observation, json.dumps(parameters))
+            DevFailed, self.proxy.start_observation, json.dumps(parameters)
+        )
 
     def test_check_and_convert_parameters_invalid_time(self):
         """Test invalid parameter detection"""
 
         parameters = json.loads(self.VALID_JSON)
-        parameters['stop_time'] = (datetime.now() - timedelta(seconds=1)).isoformat()
+        parameters["stop_time"] = (datetime.now() - timedelta(seconds=1)).isoformat()
 
         self.on_device_assert(self.proxy)
         self.assertRaises(
-            DevFailed, self.proxy.start_observation, json.dumps(parameters))
+            DevFailed, self.proxy.start_observation, json.dumps(parameters)
+        )
 
     def test_check_and_convert_parameters_invalid_empty(self):
         """Test empty parameter detection"""
 
         self.on_device_assert(self.proxy)
-        self.assertRaises(
-            DevFailed, self.proxy.start_observation, "{}")
+        self.assertRaises(DevFailed, self.proxy.start_observation, "{}")
 
     def test_start_observation(self):
         """Test starting an observation"""
@@ -159,7 +246,7 @@ class TestObservationControlDevice(AbstractTestBases.TestDeviceBase):
         """Test starting multiple observations"""
 
         second_observation_json = json.loads(self.VALID_JSON)
-        second_observation_json['observation_id'] = 54321
+        second_observation_json["observation_id"] = 54321
 
         self.on_device_assert(self.proxy)
 
@@ -214,7 +301,7 @@ class TestObservationControlDevice(AbstractTestBases.TestDeviceBase):
         """Test starting and stopping multiple observations"""
 
         second_observation_json = json.loads(self.VALID_JSON)
-        second_observation_json['observation_id'] = 54321
+        second_observation_json["observation_id"] = 54321
 
         self.on_device_assert(self.proxy)
 
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_psoc.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_psoc.py
index e1c3f2b20ad275542b186337adceaa2eed13eec7..718006af022ecf280882df071fe2d27c3fcd3f96 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_psoc.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_psoc.py
@@ -1,21 +1,13 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tangostationcontrol.integration_test.default.devices.base import AbstractTestBases
 
 
 class TestDevicePSOC(AbstractTestBases.TestDeviceBase):
-
     def setUp(self):
         super().setUp("STAT/PSOC/1")
 
     def test_device_read_all_attributes(self):
         """Mask reading attributes not possible without SNMP"""
         # TODO(Corne): Unmask this test once SNMP simulator in place
-
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_recv.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_recv.py
index e96c385a7f976bc3ecb76d48b509b61d80454819..7f228a29647b35ad27229530637a4dc8bd1a54c3 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_recv.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_recv.py
@@ -1,16 +1,9 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from .base import AbstractTestBases
 
 
 class TestDeviceRECV(AbstractTestBases.TestDeviceBase):
-
     def setUp(self):
         super().setUp("STAT/RECV/1")
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_sdp.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_sdp.py
index 12b1d031b83b1aca9c4e6fc5dd163fe5cab9613f..ca133780298f01cc8107c6fdaf38f5da6c8fc3c8 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_sdp.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_sdp.py
@@ -1,17 +1,12 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from .base import AbstractTestBases
 from tangostationcontrol.common.constants import N_pn
 
-class TestDeviceSDP(AbstractTestBases.TestDeviceBase):
+from .base import AbstractTestBases
 
+
+class TestDeviceSDP(AbstractTestBases.TestDeviceBase):
     def setUp(self):
         """Intentionally recreate the device object in each test"""
         super().setUp("STAT/SDP/1")
@@ -21,4 +16,6 @@ class TestDeviceSDP(AbstractTestBases.TestDeviceBase):
 
         self.proxy.warm_boot()
 
-        self.assertListEqual([True]*N_pn, list(self.proxy.TR_fpga_communication_error_R))
+        self.assertListEqual(
+            [True] * N_pn, list(self.proxy.TR_fpga_communication_error_R)
+        )
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_sst.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_sst.py
index 80f8d70ffac1346a09a070c1bc645eb0f691c03d..b54bdc5fc1a21a61205df5a132188c6441229ed8 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_sst.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_sst.py
@@ -1,12 +1,6 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
 import socket
 import sys
 import time
@@ -18,7 +12,6 @@ from .base import AbstractTestBases
 
 
 class TestDeviceSST(AbstractTestBases.TestDeviceBase):
-
     def setUp(self):
         """Intentionally recreate the device object in each test"""
         super().setUp("STAT/SST/1")
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_temperature_manager.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_temperature_manager.py
index 27fb3f8103c6b8f1f26a6cef5cca254023471ba0..f83444eb5d7fd01828ac3000f4ad9f5d169bc04b 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_temperature_manager.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_temperature_manager.py
@@ -1,25 +1,26 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
-from .base import AbstractTestBases
-from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
-from tango._tango import DevState
-from tango import DeviceProxy
-
-from tangostationcontrol.common.constants import DEFAULT_POLLING_PERIOD, N_elements, N_pol, N_rcu, N_rcu_inp
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
+import logging
 import time
 
-import logging
+from tango import DeviceProxy
+from tango._tango import DevState
+from tangostationcontrol.common.constants import (
+    DEFAULT_POLLING_PERIOD,
+    N_elements,
+    N_pol,
+    N_rcu,
+    N_rcu_inp,
+)
+from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
+
+from .base import AbstractTestBases
+
 logger = logging.getLogger()
 
-class TestDeviceTemperatureManager(AbstractTestBases.TestDeviceBase):
 
+class TestDeviceTemperatureManager(AbstractTestBases.TestDeviceBase):
     def setUp(self):
         """Intentionally recreate the device object in each test"""
         self.recv_proxy = self.setup_recv_proxy()
@@ -48,7 +49,7 @@ class TestDeviceTemperatureManager(AbstractTestBases.TestDeviceBase):
         return sdp_proxy
 
     def test_alarm(self):
-        # Exclude other devices which raise a TimeoutError, since they wait for the attribute *_translator_busy_R to become False 
+        # Exclude other devices which raise a TimeoutError, since they wait for the attribute *_translator_busy_R to become False
         # (set instead to True in this test environment)
         self.proxy.put_property({"Alarm_Error_List": ["RECV, HBAT_LED_on_RW"]})
         self.proxy.put_property({"Shutdown_Device_List": ["STAT/SDP/1"]})
@@ -68,15 +69,22 @@ class TestDeviceTemperatureManager(AbstractTestBases.TestDeviceBase):
                 dev.off()
                 dev.warm_boot()
 
-        self.assertEqual(self.proxy.get_property('Shutdown_Device_List')['Shutdown_Device_List'][0], "STAT/SDP/1")
+        self.assertEqual(
+            self.proxy.get_property("Shutdown_Device_List")["Shutdown_Device_List"][0],
+            "STAT/SDP/1",
+        )
 
         # Here we trigger our own change event by just using an RW attribute
-        self.recv_proxy.HBAT_LED_on_RW = [[False] * N_elements * N_pol] * N_rcu * N_rcu_inp
+        self.recv_proxy.HBAT_LED_on_RW = (
+            [[False] * N_elements * N_pol] * N_rcu * N_rcu_inp
+        )
         time.sleep(2)
 
         self.assertFalse(self.proxy.is_alarming_R)
 
-        self.recv_proxy.HBAT_LED_on_RW = [[True] * N_elements * N_pol] * N_rcu * N_rcu_inp
+        self.recv_proxy.HBAT_LED_on_RW = (
+            [[True] * N_elements * N_pol] * N_rcu * N_rcu_inp
+        )
         time.sleep(2)
 
         # the TEMP_MANAGER_is_alarming_R should now be True, since it should have detected the temperature alarm.
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_tilebeam.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_tilebeam.py
index e88a5acfc881856d1248cd376ec9e9c810e16562..09cf57722a520cd362ccecb5f5321f1fdc66f9a0 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_tilebeam.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_tilebeam.py
@@ -1,20 +1,19 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-import time
-import numpy
 import datetime
 import json
+import time
 
-from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
+import numpy
+from tangostationcontrol.common.constants import (
+    DEFAULT_N_HBA_TILES,
+    MAX_ANTENNA,
+    N_elements,
+    N_pol,
+)
 from tangostationcontrol.devices.antennafield import AntennaQuality, AntennaUse
-from tangostationcontrol.common.constants import DEFAULT_N_HBA_TILES, MAX_ANTENNA, N_elements, N_pol
+from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
 
 from .base import AbstractTestBases
 
@@ -28,7 +27,9 @@ class NumpyEncoder(json.JSONEncoder):
 
 class TestDeviceTileBeam(AbstractTestBases.TestDeviceBase):
 
-    POINTING_DIRECTION = numpy.array([["J2000","0deg","0deg"]] * DEFAULT_N_HBA_TILES).flatten()
+    POINTING_DIRECTION = numpy.array(
+        [["J2000", "0deg", "0deg"]] * DEFAULT_N_HBA_TILES
+    ).flatten()
 
     def setUp(self):
         super().setUp("STAT/TileBeam/1")
@@ -44,12 +45,17 @@ class TestDeviceTileBeam(AbstractTestBases.TestDeviceBase):
     def setup_antennafield_proxy(self):
         # setup AntennaField
         antennafield_proxy = TestDeviceProxy("STAT/AntennaField/1")
-        control_mapping = [[1,i] for i in range(DEFAULT_N_HBA_TILES)]
+        control_mapping = [[1, i] for i in range(DEFAULT_N_HBA_TILES)]
         antenna_qualities = numpy.array([AntennaQuality.OK] * MAX_ANTENNA)
         antenna_use = numpy.array([AntennaUse.AUTO] * MAX_ANTENNA)
-        antennafield_proxy.put_property({"RECV_devices": ["STAT/RECV/1"],
-                                 "Control_to_RECV_mapping": numpy.array(control_mapping).flatten(),
-                                 'Antenna_Quality': antenna_qualities, 'Antenna_Use': antenna_use})
+        antennafield_proxy.put_property(
+            {
+                "RECV_devices": ["STAT/RECV/1"],
+                "Control_to_RECV_mapping": numpy.array(control_mapping).flatten(),
+                "Antenna_Quality": antenna_qualities,
+                "Antenna_Use": antenna_use,
+            }
+        )
         antennafield_proxy.off()
         antennafield_proxy.boot()
 
@@ -78,19 +84,23 @@ class TestDeviceTileBeam(AbstractTestBases.TestDeviceBase):
         self.proxy.Tracking_enabled_RW = False
 
         # Verify attribute is present (all zeros if never used before)
-        delays_r1 = numpy.array(antennafield_proxy.read_attribute('HBAT_BF_delay_steps_RW').value)
+        delays_r1 = numpy.array(
+            antennafield_proxy.read_attribute("HBAT_BF_delay_steps_RW").value
+        )
         self.assertIsNotNone(delays_r1)
 
         time.sleep(3)
 
         # Verify writing operation does not lead to errors
         self.proxy.set_pointing(self.POINTING_DIRECTION)  # write values to RECV
-        delays_r2 = numpy.array(antennafield_proxy.read_attribute('HBAT_BF_delay_steps_RW').value)
+        delays_r2 = numpy.array(
+            antennafield_proxy.read_attribute("HBAT_BF_delay_steps_RW").value
+        )
 
         self.assertIsNotNone(delays_r2)
 
         # Verify delays changed (to be discussed)
-        #self.assertFalse((delays_r1==delays_r2).all())
+        # self.assertFalse((delays_r1==delays_r2).all())
 
     def test_pointing_to_zenith(self):
         self.setup_recv_proxy()
@@ -101,13 +111,21 @@ class TestDeviceTileBeam(AbstractTestBases.TestDeviceBase):
         self.proxy.Tracking_enabled_RW = False
 
         # Point to Zenith
-        self.proxy.set_pointing(numpy.array([["AZELGEO","0deg","90deg"]] * DEFAULT_N_HBA_TILES).flatten())
+        self.proxy.set_pointing(
+            numpy.array([["AZELGEO", "0deg", "90deg"]] * DEFAULT_N_HBA_TILES).flatten()
+        )
 
-        calculated_HBAT_delay_steps = numpy.array(antennafield_proxy.read_attribute('HBAT_BF_delay_steps_RW').value)
+        calculated_HBAT_delay_steps = numpy.array(
+            antennafield_proxy.read_attribute("HBAT_BF_delay_steps_RW").value
+        )
 
-        expected_HBAT_delay_steps = numpy.array([[15] * N_elements * N_pol] * DEFAULT_N_HBA_TILES, dtype=numpy.int64)
+        expected_HBAT_delay_steps = numpy.array(
+            [[15] * N_elements * N_pol] * DEFAULT_N_HBA_TILES, dtype=numpy.int64
+        )
 
-        numpy.testing.assert_equal(calculated_HBAT_delay_steps, expected_HBAT_delay_steps)
+        numpy.testing.assert_equal(
+            calculated_HBAT_delay_steps, expected_HBAT_delay_steps
+        )
 
     def test_pointing_across_horizon(self):
         self.setup_recv_proxy()
@@ -118,24 +136,37 @@ class TestDeviceTileBeam(AbstractTestBases.TestDeviceBase):
         self.proxy.Tracking_enabled_RW = False
 
         # point at north on the horizon
-        self.proxy.set_pointing(["AZELGEO","0deg","0deg"] * DEFAULT_N_HBA_TILES)
+        self.proxy.set_pointing(["AZELGEO", "0deg", "0deg"] * DEFAULT_N_HBA_TILES)
 
         # obtain delays of the X polarisation of all the elements of the first tile
-        north_beam_delay_steps = antennafield_proxy.HBAT_BF_delay_steps_RW[0].reshape(4,4,2)[:,:,0]
+        north_beam_delay_steps = antennafield_proxy.HBAT_BF_delay_steps_RW[0].reshape(
+            4, 4, 2
+        )[:, :, 0]
 
         # delays must differ under rotation, or our test will give a false positive
-        self.assertNotEqual(north_beam_delay_steps.tolist(), numpy.rot90(north_beam_delay_steps).tolist())
+        self.assertNotEqual(
+            north_beam_delay_steps.tolist(),
+            numpy.rot90(north_beam_delay_steps).tolist(),
+        )
 
-        for angle in (90,180,270):
+        for angle in (90, 180, 270):
             # point at angle degrees (90=E, 180=S, 270=W)
-            self.proxy.set_pointing(["AZELGEO",f"{angle}deg","0deg"] * DEFAULT_N_HBA_TILES)
+            self.proxy.set_pointing(
+                ["AZELGEO", f"{angle}deg", "0deg"] * DEFAULT_N_HBA_TILES
+            )
 
             # obtain delays of the X polarisation of all the elements of the first tile
-            angled_beam_delay_steps = antennafield_proxy.HBAT_BF_delay_steps_RW[0].reshape(4,4,2)[:,:,0]
+            angled_beam_delay_steps = antennafield_proxy.HBAT_BF_delay_steps_RW[
+                0
+            ].reshape(4, 4, 2)[:, :, 0]
 
-            expected_delay_steps = numpy.rot90(north_beam_delay_steps, k=-(angle/90))
+            expected_delay_steps = numpy.rot90(north_beam_delay_steps, k=-(angle / 90))
 
-            self.assertListEqual(expected_delay_steps.tolist(), angled_beam_delay_steps.tolist(), msg=f"angle={angle}")
+            self.assertListEqual(
+                expected_delay_steps.tolist(),
+                angled_beam_delay_steps.tolist(),
+                msg=f"angle={angle}",
+            )
 
     def test_delays_same_as_LOFAR_ref_pointing(self):
         self.setup_recv_proxy()
@@ -146,28 +177,40 @@ class TestDeviceTileBeam(AbstractTestBases.TestDeviceBase):
         self.proxy.Tracking_enabled_RW = False
 
         # Point to LOFAR 1 ref pointing (0.929342, 0.952579, J2000)
-        pointings = numpy.array([["J2000", "0.929342rad", "0.952579rad"]] * DEFAULT_N_HBA_TILES).flatten()
+        pointings = numpy.array(
+            [["J2000", "0.929342rad", "0.952579rad"]] * DEFAULT_N_HBA_TILES
+        ).flatten()
         # Need to set the time to '2022-01-18 11:19:35'
         timestamp = datetime.datetime(2022, 1, 18, 11, 19, 35).timestamp()
 
-        parameters = {
-            "pointing_direction": pointings,
-            "timestamp": timestamp
-        }
+        parameters = {"pointing_direction": pointings, "timestamp": timestamp}
 
         json_string = json.dumps(parameters, cls=NumpyEncoder)
         self.proxy.set_pointing_for_specific_time(json_string)
 
-        calculated_HBAT_delay_steps = numpy.array(antennafield_proxy.read_attribute('HBAT_BF_delay_steps_RW').value) # dims (DEFAULT_N_HBA_TILES, 32)
+        calculated_HBAT_delay_steps = numpy.array(
+            antennafield_proxy.read_attribute("HBAT_BF_delay_steps_RW").value
+        )  # dims (DEFAULT_N_HBA_TILES, 32)
 
         # Check all delay steps are zero with small margin
         # [24, 25, 27, 28, 17, 18, 20, 21, 10, 11, 13, 14, 3, 4, 5, 7] These are the real values from LOFAR.
         # The [3] = 28 diff is explained that we match the closest delay step and LOFAR 1 wants the one with
         # in 0.2ns but if it can't it will do a int(delay / 0.5ns) so we get slightly different results but
         # they can be explained.
-        expected_HBAT_delay_steps = numpy.repeat(numpy.array([24, 25, 27, 29, 17, 18, 20, 21, 10, 11, 13, 14, 3, 4, 5, 7], dtype=numpy.int64), 2)
-        numpy.testing.assert_equal(calculated_HBAT_delay_steps[0], expected_HBAT_delay_steps)
-        numpy.testing.assert_equal(calculated_HBAT_delay_steps[DEFAULT_N_HBA_TILES - 1], expected_HBAT_delay_steps)
+        expected_HBAT_delay_steps = numpy.repeat(
+            numpy.array(
+                [24, 25, 27, 29, 17, 18, 20, 21, 10, 11, 13, 14, 3, 4, 5, 7],
+                dtype=numpy.int64,
+            ),
+            2,
+        )
+        numpy.testing.assert_equal(
+            calculated_HBAT_delay_steps[0], expected_HBAT_delay_steps
+        )
+        numpy.testing.assert_equal(
+            calculated_HBAT_delay_steps[DEFAULT_N_HBA_TILES - 1],
+            expected_HBAT_delay_steps,
+        )
 
     def test_tilebeam_tracking(self):
         self.setup_recv_proxy()
@@ -178,9 +221,12 @@ class TestDeviceTileBeam(AbstractTestBases.TestDeviceBase):
         self.assertTrue(self.proxy.Tracking_enabled_R)
 
         # point somewhere
-        new_pointings = [("J2000",f"{tile}deg","0deg") for tile in range(DEFAULT_N_HBA_TILES)]
+        new_pointings = [
+            ("J2000", f"{tile}deg", "0deg") for tile in range(DEFAULT_N_HBA_TILES)
+        ]
         self.proxy.Pointing_direction_RW = new_pointings
 
         # check pointing
-        self.assertListEqual(new_pointings[0:2], list(self.proxy.Pointing_direction_R[0:2]))
-
+        self.assertListEqual(
+            new_pointings[0:2], list(self.proxy.Pointing_direction_R[0:2])
+        )
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_unb2.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_unb2.py
index d5731630188879e5f79f94f98951f8d6c1637ace..85f482a2d33e634ad038102ea163bacae33bcc79 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_unb2.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_unb2.py
@@ -1,17 +1,10 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from .base import AbstractTestBases
 
 
 class TestDeviceUNB2(AbstractTestBases.TestDeviceBase):
-
     def setUp(self):
         """Intentionally recreate the device object in each test"""
         super().setUp("STAT/UNB2/1")
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_xst.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_xst.py
index abccb44def53f76a0838bef18c6778622f9212db..238f32f1b4187470db697946f0db788ec11a0b31 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_xst.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_xst.py
@@ -1,18 +1,12 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
+from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
 
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
 from .base import AbstractTestBases
 
-from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
 
 class TestDeviceSST(AbstractTestBases.TestDeviceBase):
-
     def setUp(self):
         """Intentionally recreate the device object in each test"""
         super().setUp("STAT/XST/1")
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_lofar_device.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_lofar_device.py
index 96a7d0e4d7582fb3d639ae2c436c4a5cdefb19d3..7bf662901b1b34a5f6a69837a069e7e122add737 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_lofar_device.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_lofar_device.py
@@ -1,23 +1,16 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import time
-from tango import DevState
 
+from tango import DevState
 from tangostationcontrol.common.constants import DEFAULT_POLLING_PERIOD
-
-from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
 from tangostationcontrol.integration_test import base
+from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
 
 
 class TestProxyAttributeAccess(base.IntegrationTestCase):
-    """ Test whether DeviceProxy's can always access attributes immediately after turning them on. """
+    """Test whether DeviceProxy's can always access attributes immediately after turning them on."""
 
     # We use RECV as our victim. Any device would do.
     DEVICE_NAME = "STAT/RECV/1"
@@ -49,31 +42,27 @@ class TestProxyAttributeAccess(base.IntegrationTestCase):
         _ = self.proxy.read_attribute(self.ATTRIBUTE_NAME)
 
     def test_fast_setup_polled_attribute(self):
-        """ Setup a device as fast as possible and access its attributes immediately. """
+        """Setup a device as fast as possible and access its attributes immediately."""
 
         self.poll_attribute()
         self.read_attribute()
 
-
     def test_slow_setup_polled_attribute(self):
-        """ Have the device be off for a while, allowing Tango to poll. Then,
-            Setup a device as fast as possible and access its attributes immediately. """
+        """Have the device be off for a while, allowing Tango to poll. Then,
+        set up a device as fast as possible and access its attributes immediately."""
 
         self.poll_attribute()
-        time.sleep(3) # allow Tango to poll the attribute in OFF state
+        time.sleep(3)  # allow Tango to poll the attribute in OFF state
         self.read_attribute()
 
-
     def test_fast_setup_nonpolled_attribute(self):
-        """ Setup a device as fast as possible and access its attributes immediately. """
+        """Setup a device as fast as possible and access its attributes immediately."""
         self.dont_poll_attribute()
         self.read_attribute()
 
-
     def test_slow_setup_nonpolled_attribute(self):
-        """ Have the device be off for a while, allowing Tango to poll. Then,
-            Setup a device as fast as possible and access its attributes immediately. """
+        """Have the device be off for a while, allowing Tango to poll. Then,
+        set up a device as fast as possible and access its attributes immediately."""
         self.dont_poll_attribute()
-        time.sleep(3) # allow Tango to poll the attribute in OFF state
+        time.sleep(3)  # allow Tango to poll the attribute in OFF state
         self.read_attribute()
-
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_observation.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_observation.py
index cb9011a00e0f3448a4a84a74d1fef8ac15178291..e25063f594ee574117ebb888c9f1204a61b7bc51 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_observation.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_observation.py
@@ -1,25 +1,17 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from tangostationcontrol.test.devices.test_observation_base import TestObservationBase
-from tangostationcontrol.integration_test import base
-from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
-
-from lofar_station_client.observation.observation import Observation
-
-from os import environ
 from json import loads
+from os import environ
 
+from lofar_station_client.observation.observation import Observation
 from tango import DevState
+from tangostationcontrol.integration_test import base
+from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
+from tangostationcontrol.test.devices.test_observation_base import TestObservationBase
 
-class TestObservation(base.IntegrationTestCase):
 
+class TestObservation(base.IntegrationTestCase):
     def setUp(self):
         self.observation_control_proxy = TestDeviceProxy("STAT/ObservationControl/1")
         self.observation_control_proxy.off()
@@ -32,7 +24,9 @@ class TestObservation(base.IntegrationTestCase):
         specification_dict = loads(TestObservationBase.VALID_JSON)
 
         # create an observation class using the dict and as host just get it using a util function
-        observation = Observation(specification=specification_dict, host=environ["TANGO_HOST"])
+        observation = Observation(
+            specification=specification_dict, host=environ["TANGO_HOST"]
+        )
 
         # Assert the observation is running after starting it
         observation.start()
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_tango_database.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_tango_database.py
index c61611723258c4ad74141dcf519820923416537f..07f4703825cd24d066200f8b09fcd228def1dcbd 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_tango_database.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_tango_database.py
@@ -1,11 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tango import Database
 
@@ -13,7 +7,6 @@ from tangostationcontrol.integration_test import base
 
 
 class TestTangoDatabase(base.IntegrationTestCase):
-
     def setUp(self):
         """Intentionally recreate the device object in each test"""
         super(TestTangoDatabase, self).setUp()
@@ -28,4 +21,6 @@ class TestTangoDatabase(base.IntegrationTestCase):
 
         # Ensure this value is close to actual amount of servers defined by
         # integration_ConfigDb.json
-        self.assertGreater(len(d.get_server_list()), 16, msg=f"Servers: {d.get_server_list()}")
+        self.assertGreater(
+            len(d.get_server_list()), 16, msg=f"Servers: {d.get_server_list()}"
+        )
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/prometheus/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/default/prometheus/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/prometheus/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/prometheus/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/prometheus/test_tango_prometheus_client.py b/tangostationcontrol/tangostationcontrol/integration_test/default/prometheus/test_tango_prometheus_client.py
index f8e9ce9bd15bf7318116912a7c81b4c4d0ad7d64..d7e2aa83570831d256a8c881e484e196318134fb 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/prometheus/test_tango_prometheus_client.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/prometheus/test_tango_prometheus_client.py
@@ -1,37 +1,38 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import importlib
-import sys, os
-import numpy
+import os
+import sys
 
+import numpy
 from tango import Database
-
 from tangostationcontrol.integration_test.base import BaseIntegrationTestCase
 from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
 
-module_name = 'ArchiverPolicy'
-file_path = os.path.join(os.path.realpath('..'), 'docker-compose/tango-prometheus-exporter/code/tango-prometheus-client.py')
+module_name = "ArchiverPolicy"
+file_path = os.path.join(
+    os.path.realpath(".."),
+    "docker-compose/tango-prometheus-exporter/code/tango-prometheus-client.py",
+)
 spec = importlib.util.spec_from_file_location(module_name, file_path)
 tpc_policy = importlib.util.module_from_spec(spec)
 sys.modules[module_name] = tpc_policy
 spec.loader.exec_module(tpc_policy)
 
-module_name = 'CustomCollector'
+module_name = "CustomCollector"
 spec = importlib.util.spec_from_file_location(module_name, file_path)
 tpc_cc = importlib.util.module_from_spec(spec)
 sys.modules[module_name] = tpc_cc
 spec.loader.exec_module(tpc_cc)
 
+
 class TestPrometheusClient(BaseIntegrationTestCase):
 
-    config_path = os.path.join(os.path.realpath('..'), 'docker-compose/tango-prometheus-exporter/lofar2-policy.json')
+    config_path = os.path.join(
+        os.path.realpath(".."),
+        "docker-compose/tango-prometheus-exporter/lofar2-policy.json",
+    )
     CONFIG = tpc_policy.ArchiverPolicy.load_config(config_path)
 
     def setUp(self):
@@ -39,12 +40,12 @@ class TestPrometheusClient(BaseIntegrationTestCase):
 
     def initialise_collector(self):
         db = Database()
-        station = db.get_property("station","name")["name"][0]
+        station = db.get_property("station", "name")["name"][0]
         custom_collector = tpc_cc.CustomCollector(self.CONFIG, station)
         self.assertIsNotNone(custom_collector)
         return custom_collector
 
-    def setup_recv_proxy(self, device_name='stat/recv/1'):
+    def setup_recv_proxy(self, device_name="stat/recv/1"):
         # setup RECV
         recv_proxy = TestDeviceProxy(device_name)
         recv_proxy.off()
@@ -53,73 +54,99 @@ class TestPrometheusClient(BaseIntegrationTestCase):
         return recv_proxy
 
     def test_tango_db_devices(self):
-        """ Test if device names are correctly retrieved from Tango DB """
+        """Test if device names are correctly retrieved from Tango DB"""
         policy = tpc_policy.ArchiverPolicy(self.CONFIG)
         db_devices = policy.device_list()
         self.assertNotEqual(len(db_devices), 0)
 
     def test_policy_devices(self):
-        """ Test if device names are correctly filtered with policy file """
+        """Test if device names are correctly filtered with policy file"""
         policy = tpc_policy.ArchiverPolicy(self.CONFIG)
         db_devices = policy.device_list()
         policy_devices = policy.devices()
         self.assertLessEqual(len(policy_devices), len(db_devices))
-        config_retrieved_devices = [*policy.config['devices'].keys()]   # list of device names from policy file
+        config_retrieved_devices = [
+            *policy.config["devices"].keys()
+        ]  # list of device names from policy file
         for d in config_retrieved_devices:
-            if '*' not in d:                                            # filter out wildcards
+            if "*" not in d:  # filter out wildcards
                 self.assertIn(d, policy_devices)
 
     def test_archiver_policy_attribute_list(self):
-        """ Test if the full set of archiving policy for the given device is retrieved """
-        device_name = 'stat/recv/1'
+        """Test if the full set of archiving policy for the given device is retrieved"""
+        device_name = "stat/recv/1"
         recv_proxy = self.setup_recv_proxy(device_name)
         policy = tpc_policy.ArchiverPolicy(self.CONFIG)
-        attribute_list = policy.attribute_list(device_name, recv_proxy.get_attribute_list())
-        include = policy.config['devices']['stat/recv/1']['include'] # attribute that must be included
+        attribute_list = policy.attribute_list(
+            device_name, recv_proxy.get_attribute_list()
+        )
+        include = policy.config["devices"]["stat/recv/1"][
+            "include"
+        ]  # attribute that must be included
         for i in include:
-            if '*' not in i:    # exclude wildcard
+            if "*" not in i:  # exclude wildcard
                 self.assertIn(i, attribute_list)
-        exclude = policy.config['devices']['stat/recv/1']['exclude'] # attribute that must be excluded
+        exclude = policy.config["devices"]["stat/recv/1"][
+            "exclude"
+        ]  # attribute that must be excluded
         for e in exclude:
-            if '*' not in e:    # exclude wildcard
+            if "*" not in e:  # exclude wildcard
                 self.assertNotIn(e, attribute_list)
 
     def test_label_metric_list(self):
-        """ Test whether the metric label list matches up with the ones defined in the GaugeMetricFamily constructor"""
+        """Test whether the metric label list matches up with the ones defined in the GaugeMetricFamily constructor"""
         collector = self.initialise_collector()
         attribute_metrics, scraping_metrics = collector.collect()
-        expected_attribute_labels = ['station', 'device', 'name', 'str_value', 'type', 'x', 'y', 'idx']
-        expected_scraping_labels = ['station', 'device']
-        numpy.testing.assert_equal([*attribute_metrics.samples[0].labels.keys()], expected_attribute_labels)
-        numpy.testing.assert_equal([*scraping_metrics.samples[0].labels.keys()], expected_scraping_labels)
+        expected_attribute_labels = [
+            "station",
+            "device",
+            "name",
+            "str_value",
+            "type",
+            "x",
+            "y",
+            "idx",
+        ]
+        expected_scraping_labels = ["station", "device"]
+        numpy.testing.assert_equal(
+            [*attribute_metrics.samples[0].labels.keys()], expected_attribute_labels
+        )
+        numpy.testing.assert_equal(
+            [*scraping_metrics.samples[0].labels.keys()], expected_scraping_labels
+        )
 
     def test_collector_metrics_with_devices_in_off(self):
-        """ Test if the metrics are exposed even if devices are in OFF state """
-        device_name = 'stat/recv/1'
+        """Test if the metrics are exposed even if devices are in OFF state"""
+        device_name = "stat/recv/1"
         recv_proxy = TestDeviceProxy(device_name)
         recv_proxy.off()
         collector = self.initialise_collector()
-        expected_attrs = ['State', 'Status']    # only state attributes are scraped when device is in OFF
+        expected_attrs = [
+            "State",
+            "Status",
+        ]  # only state attributes are scraped when device is in OFF
         metrics = collector.device_metrics(device_name)
         actual_attrs = [metrics[0][0][2], metrics[1][0][2]]
         numpy.testing.assert_equal(sorted(actual_attrs), expected_attrs)
 
     def test_collector_metrics(self):
-        """ Test if the metrics are correctly exposed """
-        device_name = 'stat/recv/1'
+        """Test if the metrics are correctly exposed"""
+        device_name = "stat/recv/1"
         recv_proxy = self.setup_recv_proxy(device_name)
         collector = self.initialise_collector()
         expected_attr_values = recv_proxy.ANT_error_R
-        numpy.testing.assert_equal(expected_attr_values, numpy.array([[True, True, True]] * 32))
+        numpy.testing.assert_equal(
+            expected_attr_values, numpy.array([[True, True, True]] * 32)
+        )
         attribute_metrics, scraping_metrics = collector.collect()
-        metric_samples = attribute_metrics.samples  
+        metric_samples = attribute_metrics.samples
         # Test attribute metrics ANT_error_R
         samples_values = []
         for s in metric_samples:
-            if (s.labels['name'] == 'ANT_error_R'):
+            if s.labels["name"] == "ANT_error_R":
                 # NB: this does flatten the array, as we ignore the x/y labels
-                samples_values.append(numpy.bool(s.value)) 
+                samples_values.append(numpy.bool(s.value))
         numpy.testing.assert_equal(samples_values, expected_attr_values.flatten())
         # Test scraping metrics
         total_scraping_time = scraping_metrics.samples[-1].value
-        self.assertLess(total_scraping_time, 10)    # Set acceptable scraping time ?
+        self.assertLess(total_scraping_time, 10)  # Set acceptable scraping time?
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/statistics/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/default/statistics/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/statistics/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/statistics/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/statistics/test_writer_sst.py b/tangostationcontrol/tangostationcontrol/integration_test/default/statistics/test_writer_sst.py
index 43506f467fcdff7d68ab47964a7d8eb91022a4bf..0d94b0e1d062f1e4e33d12a524d69534153f01f5 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/statistics/test_writer_sst.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/statistics/test_writer_sst.py
@@ -1,18 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
-
-from tangostationcontrol.integration_test.base import BaseIntegrationTestCase
-from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
-
-from tangostationcontrol.statistics.collector import StationSSTCollector
-from tangostationcontrol.statistics import reader
-from tangostationcontrol.statistics.writer import entry
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import sys
 from os import getcwd
@@ -20,8 +7,13 @@ from os.path import dirname, isfile, join
 from tempfile import TemporaryDirectory
 from unittest import mock
 
-from tango import DevState
 import numpy
+from tango import DevState
+from tangostationcontrol.integration_test.base import BaseIntegrationTestCase
+from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
+from tangostationcontrol.statistics import reader
+from tangostationcontrol.statistics.collector import StationSSTCollector
+from tangostationcontrol.statistics.writer import entry
 
 
 class TestStatisticsWriterSST(BaseIntegrationTestCase):
@@ -52,20 +44,24 @@ class TestStatisticsWriterSST(BaseIntegrationTestCase):
         self.assertIsNotNone(self.recv_proxy.RCU_DTH_on_R)
 
     def test_header_info(self):
-        """ Test whether the header info are inserted and collected in the proper way"""
+        """Test whether the header info are inserted and collected in the proper way"""
         with TemporaryDirectory() as tmpdir:
             new_sys_argv = [
                 sys.argv[0],
-                "--mode", "SST",
+                "--mode",
+                "SST",
                 "--no-tango",
-                "--file", join(
+                "--file",
+                join(
                     dirname(dirname(dirname(dirname(__file__)))),
-                    "test/statistics", "SDP_SST_statistics_packets.bin"
+                    "test/statistics",
+                    "SDP_SST_statistics_packets.bin",
                 ),
-                "--output_dir", tmpdir
+                "--output_dir",
+                tmpdir,
             ]
 
-            with mock.patch.object(entry.sys, 'argv', new_sys_argv):
+            with mock.patch.object(entry.sys, "argv", new_sys_argv):
                 with self.assertRaises(SystemExit):
                     entry.main()
 
@@ -75,26 +71,25 @@ class TestStatisticsWriterSST(BaseIntegrationTestCase):
             # test statistics reader
             new_sys_argv = [
                 sys.argv[0],
-                "--files", f"{tmpdir}/SST_2021-09-20-12-17-40.h5",
-                "--start_time", "2021-09-20#07:40:08.937+00:00",
-                "--end_time", "2021-10-04#07:50:08.937+00:00"
+                "--files",
+                f"{tmpdir}/SST_2021-09-20-12-17-40.h5",
+                "--start_time",
+                "2021-09-20#07:40:08.937+00:00",
+                "--end_time",
+                "2021-10-04#07:50:08.937+00:00",
             ]
-            with mock.patch.object(reader.sys, 'argv', new_sys_argv):
+            with mock.patch.object(reader.sys, "argv", new_sys_argv):
                 stat_parser = reader.setup_stat_parser()
                 SSTstatistics = stat_parser.list_statistics()
                 self.assertIsNotNone(SSTstatistics)
-                stat = stat_parser.get_statistic(
-                    '2021-09-20T12:17:40.000+00:00'
-                )
+                stat = stat_parser.get_statistic("2021-09-20T12:17:40.000+00:00")
 
                 # Open VERSION file
                 version_file = open(self.VERSION_FILE_PATH, "rt")
 
                 self.assertIsNotNone(stat)
                 # Remove last character, should be \n as asserted by CI/CD
-                self.assertEqual(
-                    version_file.readline()[:-1], stat.station_version_id
-                )
+                self.assertEqual(version_file.readline()[:-1], stat.station_version_id)
                 self.assertEqual("0.1", stat.writer_version_id)
 
     def test_insert_tango_SST_statistics(self):
@@ -105,29 +100,31 @@ class TestStatisticsWriterSST(BaseIntegrationTestCase):
         collector.parse_device_attributes()
         numpy.testing.assert_equal(
             collector.parameters["rcu_attenuator_dB"],
-            self.recv_proxy.rcu_attenuator_dB_r
+            self.recv_proxy.rcu_attenuator_dB_r,
         )
         numpy.testing.assert_equal(
             collector.parameters["rcu_band_select"],
-            self.recv_proxy.rcu_band_select_r.tolist()
+            self.recv_proxy.rcu_band_select_r.tolist(),
         )
         numpy.testing.assert_equal(
-            collector.parameters["rcu_dth_on"],
-            self.recv_proxy.rcu_dth_on_r.tolist()
+            collector.parameters["rcu_dth_on"], self.recv_proxy.rcu_dth_on_r.tolist()
         )
 
         with TemporaryDirectory() as tmpdir:
             new_sys_argv = [
                 sys.argv[0],
-                "--mode", "SST",
-                "--file", join(
+                "--mode",
+                "SST",
+                "--file",
+                join(
                     dirname(dirname(dirname(dirname(__file__)))),
-                    "test/statistics", "SDP_SST_statistics_packets.bin"
+                    "test/statistics",
+                    "SDP_SST_statistics_packets.bin",
                 ),
-                ""
-                "--output_dir", tmpdir
+                "" "--output_dir",
+                tmpdir,
             ]
-            with mock.patch.object(entry.sys, 'argv', new_sys_argv):
+            with mock.patch.object(entry.sys, "argv", new_sys_argv):
                 with self.assertRaises(SystemExit):
                     entry.main()
 
@@ -137,18 +134,19 @@ class TestStatisticsWriterSST(BaseIntegrationTestCase):
             # test statistics reader
             new_sys_argv = [
                 sys.argv[0],
-                "--files", f"{tmpdir}/SST_2021-09-20-12-17-40.h5",
-                "--start_time", "2021-09-20#07:40:08.937+00:00",
-                "--end_time", "2021-10-04#07:50:08.937+00:00"
+                "--files",
+                f"{tmpdir}/SST_2021-09-20-12-17-40.h5",
+                "--start_time",
+                "2021-09-20#07:40:08.937+00:00",
+                "--end_time",
+                "2021-10-04#07:50:08.937+00:00",
             ]
-            with mock.patch.object(reader.sys, 'argv', new_sys_argv):
+            with mock.patch.object(reader.sys, "argv", new_sys_argv):
                 stat_parser = reader.setup_stat_parser()
                 SSTstatistics = stat_parser.list_statistics()
                 self.assertIsNotNone(SSTstatistics)
                 # same as stat_parser.statistics[0]
-                stat = stat_parser.get_statistic(
-                    '2021-09-20T12:17:40.000+00:00'
-                )
+                stat = stat_parser.get_statistic("2021-09-20T12:17:40.000+00:00")
                 self.assertIsNotNone(stat)
                 self.assertEqual(121, stat.data_id_signal_input_index)
                 # Test RECV attributes
@@ -160,16 +158,20 @@ class TestStatisticsWriterSST(BaseIntegrationTestCase):
         with TemporaryDirectory() as tmpdir:
             new_sys_argv = [
                 sys.argv[0],
-                "--mode", "SST",
+                "--mode",
+                "SST",
                 "--no-tango",
-                "--file", join(
+                "--file",
+                join(
                     dirname(dirname(dirname(dirname(__file__)))),
-                    "test/statistics", "SDP_SST_statistics_packets.bin"
+                    "test/statistics",
+                    "SDP_SST_statistics_packets.bin",
                 ),
-                "--output_dir", tmpdir
+                "--output_dir",
+                tmpdir,
             ]
 
-            with mock.patch.object(entry.sys, 'argv', new_sys_argv):
+            with mock.patch.object(entry.sys, "argv", new_sys_argv):
                 with self.assertRaises(SystemExit):
                     entry.main()
 
@@ -179,18 +181,19 @@ class TestStatisticsWriterSST(BaseIntegrationTestCase):
             # test statistics reader
             new_sys_argv = [
                 sys.argv[0],
-                "--files", f"{tmpdir}/SST_2021-09-20-12-17-40.h5",
-                "--start_time", "2021-09-20#07:40:08.937+00:00",
-                "--end_time", "2021-10-04#07:50:08.937+00:00"
+                "--files",
+                f"{tmpdir}/SST_2021-09-20-12-17-40.h5",
+                "--start_time",
+                "2021-09-20#07:40:08.937+00:00",
+                "--end_time",
+                "2021-10-04#07:50:08.937+00:00",
             ]
-            with mock.patch.object(reader.sys, 'argv', new_sys_argv):
+            with mock.patch.object(reader.sys, "argv", new_sys_argv):
                 stat_parser = reader.setup_stat_parser()
                 SSTstatistics = stat_parser.list_statistics()
                 self.assertIsNotNone(SSTstatistics)
                 # same as stat_parser.statistics[0]
-                stat = stat_parser.get_statistic(
-                    '2021-09-20T12:17:40.000+00:00'
-                )
+                stat = stat_parser.get_statistic("2021-09-20T12:17:40.000+00:00")
                 self.assertIsNotNone(stat)
                 self.assertEqual(121, stat.data_id_signal_input_index)
                 # Test RECV attributes
@@ -206,13 +209,18 @@ class TestStatisticsWriterSST(BaseIntegrationTestCase):
         with TemporaryDirectory() as tmpdir:
             new_sys_argv = [
                 sys.argv[0],
-                "--mode", "SST",
-                "--file", join(
+                "--mode",
+                "SST",
+                "--file",
+                join(
                     dirname(dirname(dirname(dirname(__file__)))),
-                    "test/statistics", "SDP_SST_statistics_packets.bin"
-                ), "--output_dir", tmpdir
+                    "test/statistics",
+                    "SDP_SST_statistics_packets.bin",
+                ),
+                "--output_dir",
+                tmpdir,
             ]
-            with mock.patch.object(entry.sys, 'argv', new_sys_argv):
+            with mock.patch.object(entry.sys, "argv", new_sys_argv):
                 with self.assertRaises(SystemExit):
                     entry.main()
 
@@ -222,15 +230,20 @@ class TestStatisticsWriterSST(BaseIntegrationTestCase):
             # test statistics reader
             new_sys_argv = [
                 sys.argv[0],
-                "--files", f"{tmpdir}/SST_2021-09-20-12-17-40.h5",
-                "--start_time", "2021-09-20#07:40:08.937+00:00",
-                "--end_time", "2021-10-04#07:50:08.937+00:00"
+                "--files",
+                f"{tmpdir}/SST_2021-09-20-12-17-40.h5",
+                "--start_time",
+                "2021-09-20#07:40:08.937+00:00",
+                "--end_time",
+                "2021-10-04#07:50:08.937+00:00",
             ]
-            with mock.patch.object(reader.sys, 'argv', new_sys_argv):
+            with mock.patch.object(reader.sys, "argv", new_sys_argv):
                 stat_parser = reader.setup_stat_parser()
                 SSTstatistics = stat_parser.list_statistics()
                 self.assertIsNotNone(SSTstatistics)
-                stat = stat_parser.get_statistic('2021-09-20T12:17:40.000+00:00')  # same as stat_parser.statistics[0]
+                stat = stat_parser.get_statistic(
+                    "2021-09-20T12:17:40.000+00:00"
+                )  # same as stat_parser.statistics[0]
                 self.assertIsNotNone(stat)
                 self.assertEqual(121, stat.data_id_signal_input_index)
                 # Test RECV attributes
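The tests above drive the writer and reader command-line entry points by swapping out sys.argv with mock.patch.object before calling main(). A minimal, self-contained sketch of that pattern, using a hypothetical argparse-based main() rather than the project's actual entry points:

    import argparse
    import sys
    from unittest import mock


    def main():
        # hypothetical CLI: parse the arguments that the patched sys.argv provides
        parser = argparse.ArgumentParser()
        parser.add_argument("--mode", required=True)
        parser.add_argument("--output_dir", required=True)
        return parser.parse_args()


    fake_argv = [sys.argv[0], "--mode", "SST", "--output_dir", "/tmp"]
    with mock.patch.object(sys, "argv", fake_argv):
        # argparse reads sys.argv[1:], so main() sees only the fake arguments
        print(main())  # Namespace(mode='SST', output_dir='/tmp')

The tests patch the sys reference imported by the module under test (entry.sys, reader.sys); the sketch patches sys directly, which works the same way for argparse.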
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py
index 9c0b61647a2b51eb44041a5f89057807cfb36c62..08af5e3473911b5cf4965b87c1d26340486ecba7 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py
@@ -1,39 +1,34 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
-
-from tangostationcontrol.integration_test.base import BaseIntegrationTestCase
-from tangostationcontrol.toolkit.archiver import Archiver
-from tangostationcontrol.toolkit.retriever import RetrieverTimescale
-from tangostationcontrol.toolkit.archiver_util import attribute_fqdn
-from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import time
 from datetime import datetime
+
 from tango import DevState
+from tangostationcontrol.integration_test.base import BaseIntegrationTestCase
+from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
+from tangostationcontrol.toolkit.archiver import Archiver
+from tangostationcontrol.toolkit.archiver_util import attribute_fqdn
+from tangostationcontrol.toolkit.retriever import RetrieverTimescale
 
 
 class TestArchiver(BaseIntegrationTestCase):
-
     def setUp(self):
         super().setUp()
         self.archiver = Archiver()
         self.assertIsNotNone(self.archiver)
 
     def test_archiver_initialisation(self):
-        """Test archiver main attributes"""            
-        self.assertEqual(self.archiver.cm_name,"archiving/hdbppts/confmanager01")
+        """Test archiver main attributes"""
+        self.assertEqual(self.archiver.cm_name, "archiving/hdbppts/confmanager01")
         self.assertTrue(len(self.archiver.es_list))  # subscribers list not empty
 
     def test_hdbpp_library(self):
         """Test if the correct hdbpp library is retrieved (TimescaleDB)"""
         cm_name = self.archiver.cm_name
-        self.assertEqual('libhdb++timescale.so', self.archiver.get_hdbpp_libname(cm_name))
+        self.assertEqual(
+            "libhdb++timescale.so", self.archiver.get_hdbpp_libname(cm_name)
+        )
 
     def test_next_subscriber(self):
         """Test if there is an available subscriber"""
@@ -41,8 +36,8 @@ class TestArchiver(BaseIntegrationTestCase):
 
     def test_archiver_configuration(self):
         """Test archiver configuration file"""
-        dev_config_dict = self.archiver.get_configuration('lofar2_dev')
-        prod_config_dict = self.archiver.get_configuration('lofar2_prod')
+        dev_config_dict = self.archiver.get_configuration("lofar2_dev")
+        prod_config_dict = self.archiver.get_configuration("lofar2_prod")
         self.assertIsNotNone(dev_config_dict)
         self.assertIsNotNone(prod_config_dict)
         self.assertNotEqual(dev_config_dict, prod_config_dict)
@@ -52,39 +47,44 @@ class TestArchiver(BaseIntegrationTestCase):
         # Start RECV Device
         recv_proxy = TestDeviceProxy("STAT/RECV/1")
         recv_proxy.off()
-        time.sleep(1)   # To be deleted with L2SS-592
+        time.sleep(1)  # To be deleted with L2SS-592
         recv_proxy.initialise()
-        time.sleep(1)   # To be deleted with L2SS-592
+        time.sleep(1)  # To be deleted with L2SS-592
         self.assertEqual(DevState.STANDBY, recv_proxy.state())
         recv_proxy.set_defaults()
         recv_proxy.on()
         self.assertEqual(DevState.ON, recv_proxy.state())
 
-        polling_period=1000
-        archive_event_period=3000
-        attr_fullname = 'stat/recv/1/recvtr_translator_busy_r'  # boolean, but lofar view returns int
-        self.archiver.add_attribute_to_archiver(attr_fullname, polling_period, archive_event_period)
+        polling_period = 1000
+        archive_event_period = 3000
+        attr_fullname = "stat/recv/1/recvtr_translator_busy_r"  # boolean, but lofar view returns int
+        self.archiver.add_attribute_to_archiver(
+            attr_fullname, polling_period, archive_event_period
+        )
         time.sleep(3)
         # Test if the attribute has been correctly added to event subscriber
-        self.assertTrue(self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname)))
+        self.assertTrue(
+            self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))
+        )
 
         # Retrieve data from DB views
         self.retriever = RetrieverTimescale()
         self.assertIsNotNone(self.retriever)
-        records = self._wait_for_archiving(attr_fullname, archive_event_period) 
-        self.assertTrue(len(records)>0)
-        item = records[-1]                                        # last table record
-        self.assertEqual('stat/recv/1',item.device)               # column device
-        self.assertEqual('recvtr_translator_busy_r',item.name)    # column attribute
-        self.assertEqual(datetime,type(item.data_time))           # column datetime
-        self.assertEqual(int,type(item.value))                    # column value
-
+        records = self._wait_for_archiving(attr_fullname, archive_event_period)
+        self.assertTrue(len(records) > 0)
+        item = records[-1]  # last table record
+        self.assertEqual("stat/recv/1", item.device)  # column device
+        self.assertEqual("recvtr_translator_busy_r", item.name)  # column attribute
+        self.assertEqual(datetime, type(item.data_time))  # column datetime
+        self.assertEqual(int, type(item.value))  # column value
 
         # Remove attribute at the end of the test
         self.archiver.remove_attribute_from_archiver(attr_fullname)
         time.sleep(3)
         # Test if the attribute has been correctly removed
-        self.assertFalse(self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname)))
+        self.assertFalse(
+            self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))
+        )
 
         recv_proxy.off()
 
@@ -93,40 +93,45 @@ class TestArchiver(BaseIntegrationTestCase):
         # Start SDP Device
         sdp_proxy = TestDeviceProxy("STAT/SDP/1")
         sdp_proxy.off()
-        time.sleep(1)   # To be deleted with L2SS-592
+        time.sleep(1)  # To be deleted with L2SS-592
         sdp_proxy.initialise()
-        time.sleep(1)   # To be deleted with L2SS-592
+        time.sleep(1)  # To be deleted with L2SS-592
         self.assertEqual(DevState.STANDBY, sdp_proxy.state())
         sdp_proxy.set_defaults()
         sdp_proxy.on()
         self.assertEqual(DevState.ON, sdp_proxy.state())
 
-        polling_period=1000
-        archive_event_period=3000
-        attr_fullname = 'stat/sdp/1/fpga_temp_r'  # double
-        self.archiver.add_attribute_to_archiver(attr_fullname, polling_period, archive_event_period)
+        polling_period = 1000
+        archive_event_period = 3000
+        attr_fullname = "stat/sdp/1/fpga_temp_r"  # double
+        self.archiver.add_attribute_to_archiver(
+            attr_fullname, polling_period, archive_event_period
+        )
         time.sleep(3)
         # Test if the attribute has been correctly added to event subscriber
-        self.assertTrue(self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname)))
+        self.assertTrue(
+            self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))
+        )
 
         # Retrieve data from DB views
         self.retriever = RetrieverTimescale()
         self.assertIsNotNone(self.retriever)
-        records = self._wait_for_archiving(attr_fullname, archive_event_period)  
-        self.assertTrue(len(records)>0)
-        item = records[-1]                                                  # last table record
-        self.assertEqual('stat/sdp/1',item.device)                          # column device
-        self.assertEqual('fpga_temp_r',item.name)                           # column attribute
-        self.assertEqual(datetime,type(item.data_time))                     # column datetime
-        self.assertEqual(int,type(item.x))                                  # column index
-        self.assertEqual(float,type(item.value))                            # column value
-
+        records = self._wait_for_archiving(attr_fullname, archive_event_period)
+        self.assertTrue(len(records) > 0)
+        item = records[-1]  # last table record
+        self.assertEqual("stat/sdp/1", item.device)  # column device
+        self.assertEqual("fpga_temp_r", item.name)  # column attribute
+        self.assertEqual(datetime, type(item.data_time))  # column datetime
+        self.assertEqual(int, type(item.x))  # column index
+        self.assertEqual(float, type(item.value))  # column value
 
         # Remove attribute at the end of the test
         self.archiver.remove_attribute_from_archiver(attr_fullname)
         time.sleep(3)
         # Test if the attribute has been correctly removed
-        self.assertFalse(self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname)))
+        self.assertFalse(
+            self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))
+        )
 
         sdp_proxy.off()
 
@@ -135,55 +140,60 @@ class TestArchiver(BaseIntegrationTestCase):
         # Start RECV Device
         recv_proxy = TestDeviceProxy("STAT/RECV/1")
         recv_proxy.off()
-        time.sleep(1)   # To be deleted with L2SS-592
+        time.sleep(1)  # To be deleted with L2SS-592
         recv_proxy.initialise()
-        time.sleep(1)   # To be deleted with L2SS-592
+        time.sleep(1)  # To be deleted with L2SS-592
         self.assertEqual(DevState.STANDBY, recv_proxy.state())
         recv_proxy.set_defaults()
         recv_proxy.on()
         self.assertEqual(DevState.ON, recv_proxy.state())
 
-        polling_period=1000
-        archive_event_period=5000
-        attr_fullname = 'stat/recv/1/hbat_pwr_on_rw'  # boolean 96x32
-        self.archiver.add_attribute_to_archiver(attr_fullname, polling_period, archive_event_period)
+        polling_period = 1000
+        archive_event_period = 5000
+        attr_fullname = "stat/recv/1/hbat_pwr_on_rw"  # boolean 96x32
+        self.archiver.add_attribute_to_archiver(
+            attr_fullname, polling_period, archive_event_period
+        )
         time.sleep(3)
         # Test if the attribute has been correctly added to event subscriber
-        self.assertTrue(self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname)))
+        self.assertTrue(
+            self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))
+        )
 
         # Retrieve data from DB views
         self.retriever = RetrieverTimescale()
         self.assertIsNotNone(self.retriever)
-        records = self._wait_for_archiving(attr_fullname, archive_event_period)  
-        self.assertTrue(len(records)>0)
-        item = records[-1]                                                  # last table record
-        self.assertEqual('stat/recv/1',item.device)                         # column device
-        self.assertEqual('hbat_pwr_on_rw',item.name)                        # column attribute
-        self.assertEqual(datetime,type(item.data_time))                     # column datetime
-        self.assertEqual(int,type(item.x))                                  # column index x
-        self.assertEqual(int,type(item.y))                                  # column index y
-        self.assertEqual(int,type(item.value))                              # column value (bool stored as int)
-        self.assertLessEqual(item.value,1)                                  # column value (must be 0 or 1)
-
+        records = self._wait_for_archiving(attr_fullname, archive_event_period)
+        self.assertTrue(len(records) > 0)
+        item = records[-1]  # last table record
+        self.assertEqual("stat/recv/1", item.device)  # column device
+        self.assertEqual("hbat_pwr_on_rw", item.name)  # column attribute
+        self.assertEqual(datetime, type(item.data_time))  # column datetime
+        self.assertEqual(int, type(item.x))  # column index x
+        self.assertEqual(int, type(item.y))  # column index y
+        self.assertEqual(int, type(item.value))  # column value (bool stored as int)
+        self.assertLessEqual(item.value, 1)  # column value (must be 0 or 1)
 
         # Remove attribute at the end of the test
         self.archiver.remove_attribute_from_archiver(attr_fullname)
         time.sleep(3)
         # Test if the attribute has been correctly removed
-        self.assertFalse(self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname)))
+        self.assertFalse(
+            self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname))
+        )
 
         recv_proxy.off()
 
     def test_get_maximum_device_load(self):
-        """ Test if the maximum device load is correctly computed """
+        """Test if the maximum device load is correctly computed"""
         # Start RECV Device
         device_name = "STAT/RECV/1"
         # Start RECV Device
         recv_proxy = TestDeviceProxy(device_name)
         recv_proxy.off()
-        time.sleep(1)   # To be deleted with L2SS-592
+        time.sleep(1)  # To be deleted with L2SS-592
         recv_proxy.initialise()
-        time.sleep(1)   # To be deleted with L2SS-592
+        time.sleep(1)  # To be deleted with L2SS-592
         self.assertEqual(DevState.STANDBY, recv_proxy.state())
         recv_proxy.set_defaults()
         recv_proxy.on()
@@ -193,23 +203,23 @@ class TestArchiver(BaseIntegrationTestCase):
         self.archiver.apply_configuration(config_dict)
         time.sleep(3)
         max_load = self.archiver.get_maximum_device_load(device_name)
-        self.assertGreater(max_load,0)
+        self.assertGreater(max_load, 0)
 
     def test_archive_right_number_of_attributes(self):
-        """ Test if the right number of attributes are archived, following the JSON configuration file"""
+        """Test if the right number of attributes are archived, following the JSON configuration file"""
         # Start SDP Device
         device_name = "STAT/SDP/1"
         sdp_proxy = TestDeviceProxy("STAT/SDP/1")
         sdp_proxy.off()
-        time.sleep(1)   # To be deleted with L2SS-592
+        time.sleep(1)  # To be deleted with L2SS-592
         sdp_proxy.initialise()
-        time.sleep(1)   # To be deleted with L2SS-592
+        time.sleep(1)  # To be deleted with L2SS-592
         self.assertEqual(DevState.STANDBY, sdp_proxy.state())
         sdp_proxy.set_defaults()
         sdp_proxy.on()
         self.assertEqual(DevState.ON, sdp_proxy.state())
 
-        config_dict = self.archiver.get_configuration('lofar2_dev')
+        config_dict = self.archiver.get_configuration("lofar2_dev")
         self.archiver.apply_configuration(config_dict)
         # 4 SDP_attributes contain the suffix '_error_R'
         # 1 SDP_attribute contains the suffix '_mask_RW'
@@ -220,11 +230,13 @@ class TestArchiver(BaseIntegrationTestCase):
         sdp_archived_attrs = [a for a in archived_attrs if device_name.lower() in a]
         self.assertEqual(9, len(sdp_archived_attrs))
 
-    def _wait_for_archiving(self, attr_fullname: str, archive_event_period: int, max_wait: int = 10):
+    def _wait_for_archiving(
+        self, attr_fullname: str, archive_event_period: int, max_wait: int = 10
+    ):
         wait = 0
         records = self.retriever.get_lofar_attribute(attr_fullname)
-        while (not (len(records) > 0) and wait < max_wait):
+        while not (len(records) > 0) and wait < max_wait:
             time.sleep(archive_event_period)
             records = self.retriever.get_lofar_attribute(attr_fullname)
-            wait+=1
+            wait += 1
         return records
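_wait_for_archiving above is a poll-until-data-or-timeout helper: fetch, check, sleep, repeat. The same idea as a generic function, with hypothetical fetch and readiness callables instead of the archiver's retriever:

    import time
    from typing import Callable, TypeVar

    T = TypeVar("T")


    def poll_until(fetch: Callable[[], T], ready: Callable[[T], bool],
                   interval: float = 1.0, max_tries: int = 10) -> T:
        """Call fetch() until ready(result) holds or max_tries is exhausted."""
        result = fetch()
        tries = 0
        while not ready(result) and tries < max_tries:
            time.sleep(interval)
            result = fetch()
            tries += 1
        return result


    # usage sketch: give up after three short retries when nothing ever arrives
    records = poll_until(lambda: [], lambda r: len(r) > 0, interval=0.1, max_tries=3)
    print(records)  # []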
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver_util.py b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver_util.py
index 37fab6821952b5d0b19988c92d48ab77b2fb8c4e..37e429dd332f3b661885a108f122d56fdb625fd3 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver_util.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver_util.py
@@ -1,45 +1,51 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from tangostationcontrol.integration_test.base import BaseIntegrationTestCase
-from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
-from tangostationcontrol.toolkit.archiver_util import get_attributes_from_suffix, retrieve_attributes_from_wildcards
-from tango import DevState
 import json
+
 import pkg_resources
+from tango import DevState
+from tangostationcontrol.integration_test.base import BaseIntegrationTestCase
+from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
+from tangostationcontrol.toolkit.archiver_util import (
+    get_attributes_from_suffix,
+    retrieve_attributes_from_wildcards,
+)
 
 
 class TestArchiverUtil(BaseIntegrationTestCase):
-
     def setUp(self):
         super().setUp()
-        self.config_dict = json.load(pkg_resources.resource_stream('tangostationcontrol.toolkit', f'archiver_config/lofar2_dev.json'))
+        self.config_dict = json.load(
+            pkg_resources.resource_stream(
+                "tangostationcontrol.toolkit", f"archiver_config/lofar2_dev.json"
+            )
+        )
 
     def test_get_attributes_from_suffix(self):
         """Test if attributes are correctly matched with the defined global suffixes"""
-        device_name = 'STAT/RECV/1'
-        attribute_name = 'ANT_mask_RW'
-        dev_suffixes = self.config_dict['global']['suffixes']
+        device_name = "STAT/RECV/1"
+        attribute_name = "ANT_mask_RW"
+        dev_suffixes = self.config_dict["global"]["suffixes"]
         # Start RECV Device
         recv_proxy = TestDeviceProxy(device_name)
         recv_proxy.off()
         self.assertEqual(DevState.OFF, recv_proxy.state())
-        self.assertIn(attribute_name, get_attributes_from_suffix(device_name,dev_suffixes))
+        self.assertIn(
+            attribute_name, get_attributes_from_suffix(device_name, dev_suffixes)
+        )
 
     def test_retrieve_attributes_from_wildcards(self):
         """Test if attributes are correctly retrieved with wildcards matching"""
-        device_name = 'STAT/SDP/1'
-        attribute_names = ['FPGA_scrap_R','FPGA_scrap_RW']
-        exclude_list = self.config_dict['devices'][device_name]['exclude']
+        device_name = "STAT/SDP/1"
+        attribute_names = ["FPGA_scrap_R", "FPGA_scrap_RW"]
+        exclude_list = self.config_dict["devices"][device_name]["exclude"]
         # Start SDP Device
         sdp_proxy = TestDeviceProxy(device_name)
         sdp_proxy.off()
         self.assertEqual(DevState.OFF, sdp_proxy.state())
         for a in attribute_names:
-            self.assertIn(f"{device_name}/{a}".lower(), retrieve_attributes_from_wildcards(device_name,exclude_list))
+            self.assertIn(
+                f"{device_name}/{a}".lower(),
+                retrieve_attributes_from_wildcards(device_name, exclude_list),
+            )
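test_retrieve_attributes_from_wildcards checks that attribute FQDNs survive filtering against an exclude list of wildcard patterns. The underlying matching, independent of the archiver utilities, can be sketched with fnmatch (the attribute names and exclude pattern below are made up):

    from fnmatch import fnmatch

    attributes = [
        "stat/sdp/1/fpga_scrap_r",
        "stat/sdp/1/fpga_scrap_rw",
        "stat/sdp/1/fpga_temp_r",
    ]
    exclude = ["*_temp_*"]  # hypothetical exclude pattern

    # keep every attribute that matches none of the exclude wildcards
    kept = [a for a in attributes if not any(fnmatch(a, p) for p in exclude)]
    print(kept)  # the temperature attribute is filtered out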
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/device_proxy.py b/tangostationcontrol/tangostationcontrol/integration_test/device_proxy.py
index 4c519a41184e933e6d9bc720df75f494f7b87051..d61e4f00a6a3afee2f8c2925dc61baa6deabedd0 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/device_proxy.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/device_proxy.py
@@ -1,3 +1,6 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import logging
 import time
 
@@ -7,7 +10,6 @@ logger = logging.getLogger()
 
 
 class TestDeviceProxy(DeviceProxy):
-
     def __init__(self, *args, **kwargs):
         super(TestDeviceProxy, self).__init__(*args, **kwargs)
 
@@ -22,7 +24,6 @@ class TestDeviceProxy(DeviceProxy):
         # See also https://www.tango-controls.org/community/forum/c/development/python/attribute-direct-reading-from-device-when-polling-is-turned-on/
         self.set_source(DevSource.DEV)
 
-
     @staticmethod
     def test_device_turn_off(endpoint):
         d = TestDeviceProxy(endpoint)
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/dummy/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/dummy/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/dummy/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/dummy/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/dummy/test_dummy.py b/tangostationcontrol/tangostationcontrol/integration_test/dummy/test_dummy.py
index 61956173c0142948f0b1137e4d5bf28489eee8d1..af30935e7edc4875d943c9dd9c831b9f01318dcc 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/dummy/test_dummy.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/dummy/test_dummy.py
@@ -1,11 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tangostationcontrol.integration_test import base
 
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/observations/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/observations/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/observations/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/observations/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/observations/test_archiver.py b/tangostationcontrol/tangostationcontrol/integration_test/observations/test_archiver.py
index b5d8c93b4fc550d2e524f64a43a021e8e8818028..69a686b142a986b26dc66d06b5ca213ad07a70d2 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/observations/test_archiver.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/observations/test_archiver.py
@@ -1,26 +1,20 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tangostationcontrol.integration_test.base import BaseIntegrationTestCase
 from tangostationcontrol.toolkit.archiver import Archiver
 from tangostationcontrol.toolkit.archiver_configurator import get_multimember_devices
 
-class TestArchiver(BaseIntegrationTestCase):
 
+class TestArchiver(BaseIntegrationTestCase):
     def setUp(self):
         super().setUp()
         self.archiver = Archiver()
         self.assertIsNotNone(self.archiver)
 
     def test_archiver_initialisation(self):
-        """Test archiver main attributes"""         
-        self.assertEqual(self.archiver.cm_name,"archiving/hdbppts/confmanager01")
+        """Test archiver main attributes"""
+        self.assertEqual(self.archiver.cm_name, "archiving/hdbppts/confmanager01")
         self.assertTrue(len(self.archiver.es_list))  # subscribers list not empty
         """Test if there is an available subscriber"""
         self.assertIsNotNone(self.archiver.get_next_subscriber())
@@ -29,7 +23,10 @@ class TestArchiver(BaseIntegrationTestCase):
         """Test if multimember devices are correctly identified"""
         config_dict = self.archiver.get_configuration()
         self.assertIsNotNone(config_dict)
-        env_dict = config_dict['devices']
+        env_dict = config_dict["devices"]
         matched_devices_dict = get_multimember_devices(env_dict)
         obs_devices_list = sorted(list(matched_devices_dict.keys()))
-        self.assertListEqual(obs_devices_list, ['STAT/Observation/1', 'STAT/Observation/2', 'STAT/Observation/3'])
+        self.assertListEqual(
+            obs_devices_list,
+            ["STAT/Observation/1", "STAT/Observation/2", "STAT/Observation/3"],
+        )
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/recv_cluster/__init__.py b/tangostationcontrol/tangostationcontrol/integration_test/recv_cluster/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/recv_cluster/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/recv_cluster/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/recv_cluster/test_recv_cluster.py b/tangostationcontrol/tangostationcontrol/integration_test/recv_cluster/test_recv_cluster.py
index 92db0ffbf8607f375c1b5a9993889f0089e44087..1f1402a7b9e697fc649df9f88a923a4c944bb503 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/recv_cluster/test_recv_cluster.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/recv_cluster/test_recv_cluster.py
@@ -1,22 +1,15 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-import time
-import numpy
 import logging
 import statistics
+import time
 
+import numpy
 from tango import DevState
-
+from tangostationcontrol.devices.antennafield import AntennaQuality, AntennaUse
 from tangostationcontrol.integration_test import base
 from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
-from tangostationcontrol.devices.antennafield import AntennaQuality, AntennaUse
 
 logger = logging.getLogger()
 
@@ -47,7 +40,7 @@ class TestRecvCluster(base.IntegrationTestCase):
         for i in range(1, 5):
             recv_proxies.append(TestDeviceProxy(f"STAT/RECV/{i}"))
             antenna_field_proxies.append(TestDeviceProxy(f"STAT/AntennaField/{i}"))
-            beam_proxies.append(TestDeviceProxy(f"STAT/TileBeam/{i}"))   
+            beam_proxies.append(TestDeviceProxy(f"STAT/TileBeam/{i}"))
 
         # Recv and AntennaField devices must be ready before TileBeam
         for proxy in recv_proxies:
@@ -58,15 +51,20 @@ class TestRecvCluster(base.IntegrationTestCase):
             self.assertTrue(proxy.state() is DevState.ON)
 
         for n in range(1, 5):
-            proxy = antenna_field_proxies[n-1]
+            proxy = antenna_field_proxies[n - 1]
             # setup AntennaField
             NR_TILES = 48
-            control_mapping = [[1,i] for i in range(NR_TILES)]
+            control_mapping = [[1, i] for i in range(NR_TILES)]
             antenna_qualities = numpy.array([AntennaQuality.OK] * 96)
             antenna_use = numpy.array([AntennaUse.AUTO] * 96)
-            proxy.put_property({"RECV_devices": [f"STAT/RECV/{n}"],
-                                    "Control_to_RECV_mapping": numpy.array(control_mapping).flatten(),
-                                    'Antenna_Quality': antenna_qualities, 'Antenna_Use': antenna_use})
+            proxy.put_property(
+                {
+                    "RECV_devices": [f"STAT/RECV/{n}"],
+                    "Control_to_RECV_mapping": numpy.array(control_mapping).flatten(),
+                    "Antenna_Quality": antenna_qualities,
+                    "Antenna_Use": antenna_use,
+                }
+            )
             proxy.off()
             proxy.boot()
             self.assertEqual(NR_TILES, proxy.nr_antennas_R)
@@ -88,5 +86,7 @@ class TestRecvCluster(base.IntegrationTestCase):
             stop_time = time.monotonic_ns()
             results.append(stop_time - start_time)
 
-        logging.error(f"Median {statistics.median(results) / 1.e9} Stdev "
-                      f"{statistics.stdev(results) / 1.e9}")
+        logging.error(
+            f"Median {statistics.median(results) / 1.e9} Stdev "
+            f"{statistics.stdev(results) / 1.e9}"
+        )
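The cluster test above benchmarks repeated attribute reads with time.monotonic_ns() and reports the median and standard deviation in seconds. Reduced to the bare timing pattern (the workload below is a stand-in for the proxy reads):

    import statistics
    import time

    results = []
    for _ in range(5):
        start_time = time.monotonic_ns()
        sum(range(100_000))  # stand-in for reading attributes from the proxies
        results.append(time.monotonic_ns() - start_time)

    print(f"Median {statistics.median(results) / 1.e9} Stdev "
          f"{statistics.stdev(results) / 1.e9}")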
diff --git a/tangostationcontrol/tangostationcontrol/statistics/__init__.py b/tangostationcontrol/tangostationcontrol/statistics/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/statistics/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/statistics/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/statistics/collector.py b/tangostationcontrol/tangostationcontrol/statistics/collector.py
index 0f68d1adb2ab2679a665ce5d1f20ae434a6efa13..2b322df80455a9e303d779ef71b64274b436c09c 100644
--- a/tangostationcontrol/tangostationcontrol/statistics/collector.py
+++ b/tangostationcontrol/tangostationcontrol/statistics/collector.py
@@ -1,20 +1,13 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import abc
 import logging
 
 from lofar_station_client.statistics.collector import SSTCollector
-
 from tango import DevFailed
-from tango import DeviceProxy
 from tango import DevState
+from tango import DeviceProxy
 
 logger = logging.getLogger()
 
@@ -32,7 +25,6 @@ class DeviceCollectorInterface(abc.ABC):
 
 
 class StationSSTCollector(DeviceCollectorInterface, SSTCollector):
-
     def __init__(self, device: DeviceProxy = None):
         """Manually combine the constructors with appropriate arguments"""
 
@@ -55,12 +47,8 @@ class StationSSTCollector(DeviceCollectorInterface, SSTCollector):
             self.parameters["rcu_dth_on"] = None
         else:
             try:
-                self.parameters[
-                    "rcu_attenuator_dB"
-                ] = self.device.RCU_Attenuator_dB_R
-                self.parameters[
-                    "rcu_band_select"
-                ] = self.device.RCU_Band_Select_R
+                self.parameters["rcu_attenuator_dB"] = self.device.RCU_Attenuator_dB_R
+                self.parameters["rcu_band_select"] = self.device.RCU_Band_Select_R
                 self.parameters["rcu_dth_on"] = self.device.RCU_DTH_on_R
             except DevFailed as e:
                 logger.warning("Device: %s not responding.", self.device.name())
diff --git a/tangostationcontrol/tangostationcontrol/statistics/reader.py b/tangostationcontrol/tangostationcontrol/statistics/reader.py
index fd5055868379d88b98718bd4bad79b5ef1375c78..80a492f6849ed7344c911300f5ca5840a51be35b 100644
--- a/tangostationcontrol/tangostationcontrol/statistics/reader.py
+++ b/tangostationcontrol/tangostationcontrol/statistics/reader.py
@@ -1,15 +1,20 @@
-import h5py
-import numpy
-import datetime
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import argparse
+import datetime
 import os
+import sys  # noqa: F401
+
+import h5py
+import numpy
 import psutil
 import pytz
-import sys # noqa: F401
 
 process = psutil.Process(os.getpid())
 
 import logging
+
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger("hdf5_explorer")
 logger.setLevel(logging.DEBUG)
@@ -29,8 +34,11 @@ def timeit(method):
         sizeMb = process.memory_info().rss / 1024 / 1024
         sizeMbStr = "{0:,}".format(round(sizeMb, 2))
 
-        logger.debug(f'Time taken = {e - s},  {method.__name__}  ,size = {sizeMbStr} MB')
+        logger.debug(
+            f"Time taken = {e - s},  {method.__name__}  ,size = {sizeMbStr} MB"
+        )
         return RESULT
+
     return timed
 
 
@@ -74,7 +82,7 @@ class StatisticsParser:
             files = [files]
 
         for file in files:
-            hdf5_file = h5py.File(file, 'r')
+            hdf5_file = h5py.File(file, "r")
 
             # go through all the groups
             logger.debug(f"Parsing hdf5 statistics file")
@@ -99,18 +107,22 @@ class StatisticsParser:
 
                     # append to the statistics list
                     self.statistics.append(statistic)
-                    self.statistics_dict[statistic.timestamp.isoformat(timespec="milliseconds")] = statistic
+                    self.statistics_dict[
+                        statistic.timestamp.isoformat(timespec="milliseconds")
+                    ] = statistic
 
                 except Exception:
                     "B001 Do not use bare `except:`, it also catches unexpected"
                     "events like memory errors, interrupts, system exit"
-                    logger.warning(f"Encountered an error while parsing statistic. Skipped: {group_key}")
+                    logger.warning(
+                        f"Encountered an error while parsing statistic. Skipped: {group_key}"
+                    )
 
             logger.debug(f"Parsed {len(self.statistics)} statistics")
 
     @timeit
     def collect_values(self):
-        """"
+        """ "
         Collects all of the statistics values in to a single giant numpy array
         Uses a lot more memory (Basically double since the values make up the bulk of memory)
         """
@@ -133,7 +145,9 @@ class StatisticsParser:
             if i.timestamp == datetime.datetime.fromisoformat(timestamp):
                 return i
 
-        raise ValueError(f"No statistic with timestamp {timestamp} found, make sure to use the isoformat")
+        raise ValueError(
+            f"No statistic with timestamp {timestamp} found, make sure to use the isoformat"
+        )
 
     def list_statistics(self):
         """
@@ -156,13 +170,45 @@ class StatisticsData:
 
     # we will be creating potentially tens of thousands of these object. Using __slots__ makes them faster and uses less memory. At the cost of
     # having to list all self attributes here.
-    __slots__ = ("version_id", "station_version_id", "writer_version_id", "timestamp", "station_id", "source_info_t_adc", "source_info_subband_calibrated_flag", "source_info_payload_error",
-                "source_info_payload_error", "source_info_payload_error", "source_info_nyquist_zone_index", "source_info_gn_index",
-                "source_info_fsub_type", "source_info_beam_repositioning_flag", "source_info_antenna_band_index", "source_info__raw",
-                "observation_id", "nof_statistics_per_packet", "nof_signal_inputs", "nof_bytes_per_statistic", "marker", "integration_interval_raw",
-                "integration_interval", "data_id__raw", "block_serial_number", "block_period_raw", "block_period", "data_id_signal_input_index",
-                "data_id_subband_index", "data_id_first_baseline", "data_id_beamlet_index", "nof_valid_payloads", "nof_payload_errors", "values",
-                "rcu_attenuator_dB", "rcu_band_select", "rcu_dth_on")
+    __slots__ = (
+        "version_id",
+        "station_version_id",
+        "writer_version_id",
+        "timestamp",
+        "station_id",
+        "source_info_t_adc",
+        "source_info_subband_calibrated_flag",
+        "source_info_payload_error",
+        "source_info_payload_error",
+        "source_info_payload_error",
+        "source_info_nyquist_zone_index",
+        "source_info_gn_index",
+        "source_info_fsub_type",
+        "source_info_beam_repositioning_flag",
+        "source_info_antenna_band_index",
+        "source_info__raw",
+        "observation_id",
+        "nof_statistics_per_packet",
+        "nof_signal_inputs",
+        "nof_bytes_per_statistic",
+        "marker",
+        "integration_interval_raw",
+        "integration_interval",
+        "data_id__raw",
+        "block_serial_number",
+        "block_period_raw",
+        "block_period",
+        "data_id_signal_input_index",
+        "data_id_subband_index",
+        "data_id_first_baseline",
+        "data_id_beamlet_index",
+        "nof_valid_payloads",
+        "nof_payload_errors",
+        "values",
+        "rcu_attenuator_dB",
+        "rcu_band_select",
+        "rcu_dth_on",
+    )
 
     def __init__(self, file, group_key):
 
@@ -175,24 +221,40 @@ class StatisticsData:
         self.writer_version_id = file[group_key].attrs["writer_version_id"]
 
         # convert string timestamp to datetime object
-        self.timestamp = datetime.datetime.fromisoformat(file[group_key].attrs["timestamp"])
+        self.timestamp = datetime.datetime.fromisoformat(
+            file[group_key].attrs["timestamp"]
+        )
 
         self.source_info_t_adc = file[group_key].attrs["source_info_t_adc"]
-        self.source_info_subband_calibrated_flag = file[group_key].attrs["source_info_subband_calibrated_flag"]
-        self.source_info_payload_error = file[group_key].attrs["source_info_payload_error"]
-        self.source_info_nyquist_zone_index = file[group_key].attrs["source_info_payload_error"]
+        self.source_info_subband_calibrated_flag = file[group_key].attrs[
+            "source_info_subband_calibrated_flag"
+        ]
+        self.source_info_payload_error = file[group_key].attrs[
+            "source_info_payload_error"
+        ]
+        self.source_info_nyquist_zone_index = file[group_key].attrs[
+            "source_info_payload_error"
+        ]
         self.source_info_gn_index = file[group_key].attrs["source_info_gn_index"]
         self.source_info_fsub_type = file[group_key].attrs["source_info_fsub_type"]
-        self.source_info_beam_repositioning_flag = file[group_key].attrs["source_info_beam_repositioning_flag"]
-        self.source_info_antenna_band_index = file[group_key].attrs["source_info_antenna_band_index"]
+        self.source_info_beam_repositioning_flag = file[group_key].attrs[
+            "source_info_beam_repositioning_flag"
+        ]
+        self.source_info_antenna_band_index = file[group_key].attrs[
+            "source_info_antenna_band_index"
+        ]
         self.source_info__raw = file[group_key].attrs["source_info__raw"]
 
         self.observation_id = file[group_key].attrs["observation_id"]
-        self.nof_statistics_per_packet = file[group_key].attrs["nof_statistics_per_packet"]
+        self.nof_statistics_per_packet = file[group_key].attrs[
+            "nof_statistics_per_packet"
+        ]
         self.nof_signal_inputs = file[group_key].attrs["nof_signal_inputs"]
         self.nof_bytes_per_statistic = file[group_key].attrs["nof_bytes_per_statistic"]
         self.marker = file[group_key].attrs["marker"]
-        self.integration_interval_raw = file[group_key].attrs["integration_interval_raw"]
+        self.integration_interval_raw = file[group_key].attrs[
+            "integration_interval_raw"
+        ]
         self.integration_interval = file[group_key].attrs["integration_interval"]
         self.data_id__raw = file[group_key].attrs["data_id__raw"]
 
@@ -202,27 +264,35 @@ class StatisticsData:
 
         # get SST specific stuff
         if self.marker == "S":
-            self.data_id_signal_input_index = file[group_key].attrs["data_id_signal_input_index"]
+            self.data_id_signal_input_index = file[group_key].attrs[
+                "data_id_signal_input_index"
+            ]
             # check if the dataset is empty or not. if empty, set to None, if not get the value
             attribute_names = ["rcu_attenuator_dB", "rcu_band_select", "rcu_dth_on"]
             for a in attribute_names:
                 if file[group_key].attrs[a].shape is None:
-                    setattr(self, a, None) 
-                else :
+                    setattr(self, a, None)
+                else:
                     setattr(self, a, numpy.array(file[group_key].attrs[a]))
 
         # get XST specific stuff
         if self.marker == "X":
             self.data_id_subband_index = file[group_key].attrs["data_id_subband_index"]
-            self.data_id_first_baseline = file[group_key].attrs["data_id_first_baseline"]
+            self.data_id_first_baseline = file[group_key].attrs[
+                "data_id_first_baseline"
+            ]
 
         # get BST specific stuff
         if self.marker == "B":
             self.data_id_beamlet_index = file[group_key].attrs["data_id_beamlet_index"]
 
         # get the datasets
-        self.nof_valid_payloads = numpy.array(file.get(f"{group_key}/nof_valid_payloads"))
-        self.nof_payload_errors = numpy.array(file.get(f"{group_key}/nof_payload_errors"))
+        self.nof_valid_payloads = numpy.array(
+            file.get(f"{group_key}/nof_valid_payloads")
+        )
+        self.nof_payload_errors = numpy.array(
+            file.get(f"{group_key}/nof_payload_errors")
+        )
         self.values = numpy.array(file.get(f"{group_key}/values"))
 
 
@@ -231,18 +301,28 @@ def parse_arguments():
     This function parses the input arguments.
     """
 
-    parser = argparse.ArgumentParser(description='Select a file to explore')
+    parser = argparse.ArgumentParser(description="Select a file to explore")
     parser.add_argument(
-        '--files', type=str, nargs="+", required=True,
-        help='the name and path of the files, takes one or more files')
+        "--files",
+        type=str,
+        nargs="+",
+        required=True,
+        help="the name and path of the files, takes one or more files",
+    )
     parser.add_argument(
-        '--start_time', type=str, required=True,
-        help='lowest timestamp to process (uses isoformat, ex: 2021-10-04T07:50'
-             ':08.937+00:00)')
+        "--start_time",
+        type=str,
+        required=True,
+        help="lowest timestamp to process (uses isoformat, ex: 2021-10-04T07:50"
+        ":08.937+00:00)",
+    )
     parser.add_argument(
-        '--end_time', type=str,  required=True,
-        help='highest timestamp to process (usesisoformat, ex: 2021-10-04T07:50'
-             ':08.937+00:00)')
+        "--end_time",
+        type=str,
+        required=True,
+        help="highest timestamp to process (usesisoformat, ex: 2021-10-04T07:50"
+        ":08.937+00:00)",
+    )
 
     args = parser.parse_args()
     files = args.files
@@ -284,7 +364,9 @@ def main():
 
     # get a single numpy array of all the statistics stored.
     array = stat_parser.collect_values()
-    logger.debug(f"Collected the statistics values of {stat_parser.get_statistics_count()} statistics in to one gaint array of shape: {array.shape} and type: {array.dtype}")
+    logger.debug(
+        f"Collected the statistics values of {stat_parser.get_statistics_count()} statistics in to one gaint array of shape: {array.shape} and type: {array.dtype}"
+    )
 
     # Get a list of all the statistics timestamps we have
     statistics = stat_parser.list_statistics()
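StatisticsData declares __slots__ because tens of thousands of instances may be created: fixing the attribute set removes the per-instance __dict__, which saves memory and speeds up attribute access. A small sketch of the effect (class names are illustrative):

    import sys


    class WithDict:
        def __init__(self):
            self.a, self.b, self.c = 1, 2, 3


    class WithSlots:
        __slots__ = ("a", "b", "c")

        def __init__(self):
            self.a, self.b, self.c = 1, 2, 3


    d, s = WithDict(), WithSlots()
    print(hasattr(d, "__dict__"), hasattr(s, "__dict__"))  # True False
    # the slotted instance needs no separate __dict__, so its footprint is smaller
    print(sys.getsizeof(d) + sys.getsizeof(d.__dict__), sys.getsizeof(s))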
diff --git a/tangostationcontrol/tangostationcontrol/statistics/receiver.py b/tangostationcontrol/tangostationcontrol/statistics/receiver.py
index 943dbf5ef843a89843621b660078d16e7a845d9a..da06d7d8cb3b795e614213b1382a1ddf676b9bbb 100644
--- a/tangostationcontrol/tangostationcontrol/statistics/receiver.py
+++ b/tangostationcontrol/tangostationcontrol/statistics/receiver.py
@@ -1,11 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import os
 import socket
@@ -14,7 +8,7 @@ from lofar_station_client.statistics.packet import StatisticsPacket
 
 
 class Receiver:
-    """ Reads data from a file descriptor. """
+    """Reads data from a file descriptor."""
 
     HEADER_LENGTH = 32
 
@@ -22,7 +16,7 @@ class Receiver:
         self.fd = fd
 
     def get_packet(self) -> bytes:
-        """ Read exactly one statistics packet from the TCP connection. """
+        """Read exactly one statistics packet from the TCP connection."""
 
         # read only the header, to compute the size of the packet
         header = self.read_data(self.HEADER_LENGTH)
@@ -36,15 +30,15 @@ class Receiver:
         return header + payload
 
     def _read(self, length: int) -> bytes:
-        """ Low-level read function to fetch at most "length" (>1) bytes. Returns
-            nothing if there is no data left. """
+        """Low-level read function to fetch at most "length" (>1) bytes. Returns
+        nothing if there is no data left."""
 
         return os.read(self.fd, length)
 
     def read_data(self, data_length: int) -> bytes:
-        """ Read exactly data_length bytes from the TCP connection. """
+        """Read exactly data_length bytes from the TCP connection."""
 
-        data = b''
+        data = b""
         while len(data) < data_length:
             # try to read the remainder.
             # NOTE: recv() may return less data than requested, and returns 0
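Receiver.read_data loops because a single read()/recv() may return fewer bytes than requested; it keeps appending chunks until the exact packet length has been collected. The same exact-read idiom over an in-memory stream (the function name is illustrative):

    import io


    def read_exact(stream, length: int) -> bytes:
        """Read until exactly `length` bytes are collected or the stream ends."""
        data = b""
        while len(data) < length:
            chunk = stream.read(length - len(data))
            if not chunk:  # end of stream before enough data arrived
                raise EOFError(f"expected {length} bytes, got {len(data)}")
            data += chunk
        return data


    buf = io.BytesIO(b"0123456789abcdef")
    print(read_exact(buf, 4))   # b'0123'
    print(read_exact(buf, 12))  # b'456789abcdef'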
diff --git a/tangostationcontrol/tangostationcontrol/statistics/udp_dev/__init__.py b/tangostationcontrol/tangostationcontrol/statistics/udp_dev/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/statistics/udp_dev/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/statistics/udp_dev/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/statistics/udp_dev/udp_client.py b/tangostationcontrol/tangostationcontrol/statistics/udp_dev/udp_client.py
index 270b24e667d4cb043f2a402229d086156c4b5218..3fc0f449d1881f577eac56d103e5cd40122a5439 100644
--- a/tangostationcontrol/tangostationcontrol/statistics/udp_dev/udp_client.py
+++ b/tangostationcontrol/tangostationcontrol/statistics/udp_dev/udp_client.py
@@ -1,33 +1,38 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import socket
 import sys
-import netifaces as ni
-from datetime import datetime
 import time
+from datetime import datetime
 
+import netifaces as ni
 
-class UDPClient:
 
-    def __init__(self, server_ip:str, server_port:int):
+class UDPClient:
+    def __init__(self, server_ip: str, server_port: int):
         self.server_ip = server_ip
         self.server_port = server_port
         self.server_data = None
-        self.server_addr = None # tuple of address info
+        self.server_addr = None  # tuple of address info
 
     def run(self):
         # Create socket for server
         s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
         print("Do Ctrl+c to exit the program !!")
-        print('\n\n*** This Client keeps sending the same SST packet with an interval of 1s ***')
+        print(
+            "\n\n*** This Client keeps sending the same SST packet with an interval of 1s ***"
+        )
 
         # Let's send data through UDP protocol
         while True:
 
-            #Old interactive interface
-            #send_data = input("Type some text to send =>");
-            #s.sendto(send_data.encode('utf-8'), (self.server_ip, self.server_port))
-            #print("\n\n 1. Client Sent : ", send_data, "\n\n")
-            #self.server_data, self.server_addr = s.recvfrom(4096)
-            #print("\n\n 2. Client received : ", self.server_data.decode('utf-8'), "\n\n")
+            # Old interactive interface
+            # send_data = input("Type some text to send =>");
+            # s.sendto(send_data.encode('utf-8'), (self.server_ip, self.server_port))
+            # print("\n\n 1. Client Sent : ", send_data, "\n\n")
+            # self.server_data, self.server_addr = s.recvfrom(4096)
+            # print("\n\n 2. Client received : ", self.server_data.decode('utf-8'), "\n\n")
 
             time.sleep(1)
 
@@ -36,25 +41,27 @@ class UDPClient:
             s.sendto(send_data, (self.server_ip, self.server_port))
             print("\n\n 1. Client Sent SST Packet at: ", datetime.now())
             self.server_data, self.server_addr = s.recvfrom(4096)
-            print("\n\n 2. Client received : ", self.server_data.decode('utf-8'), "\n\n")
+            print(
+                "\n\n 2. Client received : ", self.server_data.decode("utf-8"), "\n\n"
+            )
 
         # close the socket
         s.close()
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
 
     if len(sys.argv) == 3:
-        if sys.argv[1]=='localhost':
-            server_ip = ni.ifaddresses('eth0')[ni.AF_INET][0]['addr']
-        else :
+        if sys.argv[1] == "localhost":
+            server_ip = ni.ifaddresses("eth0")[ni.AF_INET][0]["addr"]
+        else:
             server_ip = sys.argv[1]
         server_port = int(sys.argv[2])
-        #local_ip = local_ip = ni.ifaddresses('eth0')[ni.AF_INET][0]['addr']
-        #server_ip = local_ip
+        # local_ip = local_ip = ni.ifaddresses('eth0')[ni.AF_INET][0]['addr']
+        # server_ip = local_ip
     else:
         print("Run like : python3 udp_client.py <server_ip> <server_port>")
         exit(1)
 
-    client = UDPClient(server_ip,server_port)
+    client = UDPClient(server_ip, server_port)
     client.run()
diff --git a/tangostationcontrol/tangostationcontrol/statistics/udp_dev/udp_server.py b/tangostationcontrol/tangostationcontrol/statistics/udp_dev/udp_server.py
index 8e7968dacc0a0ab5d577798d573ee2b4ed498000..43503c13d59d6289a1ce93e12abfec4b02d074bc 100644
--- a/tangostationcontrol/tangostationcontrol/statistics/udp_dev/udp_server.py
+++ b/tangostationcontrol/tangostationcontrol/statistics/udp_dev/udp_server.py
@@ -1,11 +1,14 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import socket
-import netifaces as ni
 from datetime import datetime
 
+import netifaces as ni
 
-class UDPServer:
 
-    def __init__(self, ip:str, port:int, buffer_size:int = 8192):
+class UDPServer:
+    def __init__(self, ip: str, port: int, buffer_size: int = 8192):
         self.ip = ip
         self.port = port
         self.buffer_size = buffer_size
@@ -19,21 +22,23 @@ class UDPServer:
         server_address = (self.ip, self.port)
         s.bind(server_address)
         print("Do Ctrl+c to exit the program !!")
-        print(f"\n\n####### Server is listening on {self.ip} - port {self.port} #######")
+        print(
+            f"\n\n####### Server is listening on {self.ip} - port {self.port} #######"
+        )
 
         while True:
 
             self.recv_data, self.recv_addr = s.recvfrom(self.buffer_size)
             print("\n\n 2. Server received at: ", datetime.now(), "\n\n")
 
-            '''Server response'''
-            #send_data = input("Type some text to send => ")
-            send_data = 'Packet received. Waiting for the next one.'
-            s.sendto(send_data.encode('utf-8'), self.recv_addr)
-            print("\n\n 1. Server sent : ", send_data,"\n\n")
+            """Server response"""
+            # send_data = input("Type some text to send => ")
+            send_data = "Packet received. Waiting for the next one."
+            s.sendto(send_data.encode("utf-8"), self.recv_addr)
+            print("\n\n 1. Server sent : ", send_data, "\n\n")
 
-            #time.sleep(10)
-            #s.close()
+            # time.sleep(10)
+            # s.close()
 
             break
 
@@ -44,7 +49,7 @@ class UDPServer:
         return self.recv_data
 
 
-if __name__ == '__main__':
-    local_ip = ni.ifaddresses('eth0')[ni.AF_INET][0]['addr']
-    server = UDPServer(local_ip,5600)
+if __name__ == "__main__":
+    local_ip = ni.ifaddresses("eth0")[ni.AF_INET][0]["addr"]
+    server = UDPServer(local_ip, 5600)
     server.run()
diff --git a/tangostationcontrol/tangostationcontrol/statistics/udp_dev/udp_write_manager.py b/tangostationcontrol/tangostationcontrol/statistics/udp_dev/udp_write_manager.py
index a5a20513e30b77af08b8dae4ab74d6c95001ec88..5a6ebcc3d9a381a0d06a239f915b4beebe3837d5 100644
--- a/tangostationcontrol/tangostationcontrol/statistics/udp_dev/udp_write_manager.py
+++ b/tangostationcontrol/tangostationcontrol/statistics/udp_dev/udp_write_manager.py
@@ -1,33 +1,36 @@
-from datetime import datetime
-import time
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import os
+import time
+from datetime import datetime
+
 import h5py
-import numpy as np
-from statistics.udp_dev.udp_server import UDPServer
 import netifaces as ni
+import numpy as np
 from packet import SSTPacket
+from statistics.udp_dev.udp_server import UDPServer
 
 __all__ = ["StatisticsWriter"]
 
 
 class StatisticsWriter:
-
     def __init__(self, new_file_time_interval):
 
         self.new_file_time_interval = new_file_time_interval
         self.packet_cnt = 0
 
         # Define ip and port of the receiver
-        self.local_ip = ni.ifaddresses('eth0')[ni.AF_INET][0]['addr']
+        self.local_ip = ni.ifaddresses("eth0")[ni.AF_INET][0]["addr"]
         self.server = UDPServer(self.local_ip, 5600)
 
         # Create data directory if not exists
         try:
-            os.makedirs('../data')
+            os.makedirs("../data")
         except Exception:
             "B001 Do not use bare `except:`, it also catches unexpected events"
             "like memory errors, interrupts, system exit"
-            print('Data directory already created')
+            print("Data directory already created")
 
         # create initial file
         self.last_file_time = time.time()
@@ -53,7 +56,7 @@ class StatisticsWriter:
         timestamp = datetime.now()
         current_time = str(timestamp.strftime("%Y-%m-%d-%H-%M-%S"))
         print("creating new file: data/{}.h5".format(current_time))
-        self.file = h5py.File("data/{}.h5".format(current_time), 'w')
+        self.file = h5py.File("data/{}.h5".format(current_time), "w")
         self.last_file_time = time.time()
 
     def write_metadata(self, packet):
@@ -62,12 +65,14 @@ class StatisticsWriter:
         header = self.sst.header()
         header_bytes = bytes(str(header), "utf-8")
         header_bytes = np.frombuffer(header_bytes, dtype=np.uint8)
-        self.file.create_dataset('packet_{}_header'.format(self.packet_cnt), data=header_bytes)
+        self.file.create_dataset(
+            "packet_{}_header".format(self.packet_cnt), data=header_bytes
+        )
 
     def write_raw(self, packet):
         # create dataset with the raw data in it
         data = np.frombuffer(packet, dtype=np.uint8)
-        self.file.create_dataset('packet_{}_raw'.format(self.packet_cnt), data=data)
+        self.file.create_dataset("packet_{}_raw".format(self.packet_cnt), data=data)
 
 
 if __name__ == "__main__":
diff --git a/tangostationcontrol/tangostationcontrol/statistics/writer/__init__.py b/tangostationcontrol/tangostationcontrol/statistics/writer/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/statistics/writer/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/statistics/writer/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/statistics/writer/entry.py b/tangostationcontrol/tangostationcontrol/statistics/writer/entry.py
index 0159dda4aaa0f65b6bb06fca6ff7fd7b7b732d63..4aebed1bf219556c02b3e1bf0ad0be6c3c61b4f0 100644
--- a/tangostationcontrol/tangostationcontrol/statistics/writer/entry.py
+++ b/tangostationcontrol/tangostationcontrol/statistics/writer/entry.py
@@ -1,92 +1,123 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import argparse
-import time
+import logging
 import sys
-from tango import DeviceProxy
+import time
 
+from tango import DeviceProxy
 from tangostationcontrol.statistics.receiver import FileReceiver
 from tangostationcontrol.statistics.receiver import TCPReceiver
 from tangostationcontrol.statistics.writer.hdf5 import BstHdf5Writer
-from tangostationcontrol.statistics.writer.hdf5 import SstHdf5Writer
 from tangostationcontrol.statistics.writer.hdf5 import ParallelXstHdf5Writer
+from tangostationcontrol.statistics.writer.hdf5 import SstHdf5Writer
 
-import logging
-
-logging.basicConfig(
-    level=logging.INFO,
-    format='%(asctime)s:%(levelname)s: %(message)s'
-)
+logging.basicConfig(level=logging.INFO, format="%(asctime)s:%(levelname)s: %(message)s")
 logger = logging.getLogger("statistics_writer")
 
 
 def _create_parser():
     """Define the parser"""
     parser = argparse.ArgumentParser(
-        description='Converts a stream of statistics packets into HDF5 files.')
+        description="Converts a stream of statistics packets into HDF5 files."
+    )
     parser.add_argument(
-        '-a', '--host', type=str, required=False, help='the host to connect to')
+        "-a", "--host", type=str, required=False, help="the host to connect to"
+    )
     parser.add_argument(
-        '-p', '--port', type=int, default=0,
-        help='the port to connect to, or 0 to use default port for the '
-             'selected mode (default: %(default)s)')
+        "-p",
+        "--port",
+        type=int,
+        default=0,
+        help="the port to connect to, or 0 to use default port for the "
+        "selected mode (default: %(default)s)",
+    )
     parser.add_argument(
-        '-f', '--file', type=str, required=False, help='the file to read from')
+        "-f", "--file", type=str, required=False, help="the file to read from"
+    )
     parser.add_argument(
-        '-m', '--mode', type=str, choices=['SST', 'XST', 'BST'], default='SST',
-        help='sets the statistics type to be decoded options (default: '
-             '%(default)s)')
+        "-m",
+        "--mode",
+        type=str,
+        choices=["SST", "XST", "BST"],
+        default="SST",
+        help="sets the statistics type to be decoded options (default: " "%(default)s)",
+    )
     parser.add_argument(
-        '-i', '--interval', type=float, default=3600, nargs="?",
-        help='The time between creating new files in seconds (default: '
-             '%(default)s)')
+        "-i",
+        "--interval",
+        type=float,
+        default=3600,
+        nargs="?",
+        help="The time between creating new files in seconds (default: " "%(default)s)",
+    )
     parser.add_argument(
-        '-o', '--output_dir', type=str, default=".", nargs="?",
-        help='specifies the folder to write all the files (default: '
-             '%(default)s)')
+        "-o",
+        "--output_dir",
+        type=str,
+        default=".",
+        nargs="?",
+        help="specifies the folder to write all the files (default: " "%(default)s)",
+    )
     parser.add_argument(
-        '-v', '--debug', dest='debug', action='store_true', default=False,
-        help='increase log output')
+        "-v",
+        "--debug",
+        dest="debug",
+        action="store_true",
+        default=False,
+        help="increase log output",
+    )
     parser.add_argument(
-        '-d', '--decimation', type=int, default=1,
-        help='Configure the writer to only store one every n samples. Saves '
-             'storage space')
+        "-d",
+        "--decimation",
+        type=int,
+        default=1,
+        help="Configure the writer to only store one every n samples. Saves "
+        "storage space",
+    )
     parser.add_argument(
-        '-r', '--reconnect', dest='reconnect', action='store_true', default=False,
-        help='Set the writer to keep trying to reconnect whenever connection '
-             'is lost. (default: %(default)s)')
+        "-r",
+        "--reconnect",
+        dest="reconnect",
+        action="store_true",
+        default=False,
+        help="Set the writer to keep trying to reconnect whenever connection "
+        "is lost. (default: %(default)s)",
+    )
     parser.add_argument(
-        '-nt', '--no-tango', dest='no_tango', action='store_true', default=False,
-        help='Disable connection to Tango attribute values retrieval'
+        "-nt",
+        "--no-tango",
+        dest="no_tango",
+        action="store_true",
+        default=False,
+        help="Disable connection to Tango attribute values retrieval",
     )
     parser.add_argument(
-        '-dev', '--device', type=str, choices=['STAT/RECV/1'], default='STAT/RECV/1',
-        help='List the Tango device names needed'
+        "-dev",
+        "--device",
+        type=str,
+        choices=["STAT/RECV/1"],
+        default="STAT/RECV/1",
+        help="List the Tango device names needed",
     )
     return parser
 
 
 def _create_receiver(filename, host, port):
-    """ creates the TCP receiver that is given to the writer """
+    """creates the TCP receiver that is given to the writer"""
     if filename:
         return FileReceiver(filename)
     elif host and port:
         return TCPReceiver(host, port)
     else:
-        logger.fatal("Must provide either a host and port, or a file to receive input from")
+        logger.fatal(
+            "Must provide either a host and port, or a file to receive input from"
+        )
         sys.exit(1)
 
 
-def _create_writer(
-    mode, interval, output_dir, decimation, device: DeviceProxy = None
-):
+def _create_writer(mode, interval, output_dir, decimation, device: DeviceProxy = None):
     """Create the writer"""
     if mode == "XST":
         return ParallelXstHdf5Writer(
@@ -99,7 +130,7 @@ def _create_writer(
             new_file_time_interval=interval,
             file_location=output_dir,
             decimation_factor=decimation,
-            device=device
+            device=device,
         )
     elif mode == "BST":
         return BstHdf5Writer(
@@ -167,24 +198,30 @@ def main():
         raise ValueError("Supply either a filename (--file) or a hostname (--host)")
 
     if decimation < 1:
-        raise ValueError("Please use an integer --Decimation value 1 or higher to only store one every n statistics' ")
+        raise ValueError(
+            "Please use an integer --Decimation value 1 or higher to only store one every n statistics' "
+        )
 
     if port == 0:
-        default_ports = { "SST": 5101, "XST": 5102, "BST": 5103 }
+        default_ports = {"SST": 5101, "XST": 5102, "BST": 5103}
         port = default_ports[mode]
 
     if debug:
         logger.setLevel(logging.DEBUG)
         logger.debug("Setting loglevel to DEBUG")
 
-    # sets the Tango connection in order to retrieve attribute values 
+    # sets the Tango connection in order to retrieve attribute values
     if tango_disabled:
         logger.warning("Tango connection is DISABLED")
         device = None
     elif host:
-        device = DeviceProxy(f"tango://{host}:10000/{args.device}".lower()) if mode == 'SST' else None
-    else:     
-        device = DeviceProxy(args.device) if mode == 'SST' else None
+        device = (
+            DeviceProxy(f"tango://{host}:10000/{args.device}".lower())
+            if mode == "SST"
+            else None
+        )
+    else:
+        device = DeviceProxy(args.device) if mode == "SST" else None
 
     # creates the TCP receiver that is given to the writer
     receiver = _create_receiver(filename, host, port)
@@ -194,4 +231,3 @@ def main():
 
     # start looping
     _start_loop(receiver, writer, reconnect, filename)
-
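
As a side note on the --port 0 fallback in main() above, a small sketch that mirrors the default_ports mapping shown in the hunk; the helper resolve_port is hypothetical, the port numbers are the ones from the diff.

DEFAULT_PORTS = {"SST": 5101, "XST": 5102, "BST": 5103}


def resolve_port(mode: str, port: int = 0) -> int:
    # Port 0 selects the default port for the chosen statistics mode,
    # mirroring the fallback in entry.main().
    return DEFAULT_PORTS[mode] if port == 0 else port


assert resolve_port("SST") == 5101
assert resolve_port("XST", 5200) == 5200
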
diff --git a/tangostationcontrol/tangostationcontrol/statistics/writer/hdf5.py b/tangostationcontrol/tangostationcontrol/statistics/writer/hdf5.py
index 4d84e39c90b574d29119192d345a9be45957d86f..57e7672d05b3bee44d060f93e5e021cc7d0117a7 100644
--- a/tangostationcontrol/tangostationcontrol/statistics/writer/hdf5.py
+++ b/tangostationcontrol/tangostationcontrol/statistics/writer/hdf5.py
@@ -1,61 +1,62 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from abc import ABC, abstractmethod
-from datetime import datetime, timedelta
 import logging
 import os
+from abc import ABC, abstractmethod
+from datetime import datetime, timedelta
 
 # python hdf5
 import h5py
 import numpy
 import pytz
-
 from lofar_station_client.statistics.collector import BSTCollector
 from lofar_station_client.statistics.collector import XSTCollector
-from tangostationcontrol.statistics.collector import StationSSTCollector
-
 from lofar_station_client.statistics.packet import BSTPacket
 from lofar_station_client.statistics.packet import SSTPacket
 from lofar_station_client.statistics.packet import XSTPacket
-
 from tango import DeviceProxy
+from tangostationcontrol.statistics.collector import StationSSTCollector
 
 logger = logging.getLogger("statistics_writer")
 
 __all__ = [
-    "HDF5Writer", "ParallelXstHdf5Writer", "XstHdf5Writer",
-    "SstHdf5Writer", "BstHdf5Writer",
+    "HDF5Writer",
+    "ParallelXstHdf5Writer",
+    "XstHdf5Writer",
+    "SstHdf5Writer",
+    "BstHdf5Writer",
 ]
 
+
 def _get_lsc_version() -> str:
-    """ Retrieve the Lofar Station Control software version """
-    filepath = os.path.join(os.path.realpath('../'), 'tangostationcontrol/VERSION')
+    """Retrieve the Lofar Station Control software version"""
+    filepath = os.path.join(os.path.realpath("../"), "tangostationcontrol/VERSION")
     with open(filepath) as f:
         version = f.readline().strip()
     return version or ""
 
+
 def _get_writer_version() -> str:
-    """ Retrieve the Statistics Writer software version """
-    filepath = os.path.join(os.path.dirname(__file__), 'VERSION')
+    """Retrieve the Statistics Writer software version"""
+    filepath = os.path.join(os.path.dirname(__file__), "VERSION")
     with open(filepath) as f:
         version = f.readline().strip()
     return version or ""
 
+
 class HDF5Writer(ABC):
     SST_MODE = "SST"
     XST_MODE = "XST"
     BST_MODE = "BST"
 
     def __init__(
-        self, new_file_time_interval: int, file_location, statistics_mode,
-        decimation_factor, device: DeviceProxy = None
+        self,
+        new_file_time_interval: int,
+        file_location,
+        statistics_mode,
+        decimation_factor,
+        device: DeviceProxy = None,
     ):
 
         # all variables that deal with the matrix that's currently being decoded
@@ -71,7 +72,7 @@ class HDF5Writer(ABC):
         self.statistics_header = None
 
         # file handing
-        self.file_location = file_location or '.'
+        self.file_location = file_location or "."
         self.decimation_factor = decimation_factor
         self.new_file_time_interval = timedelta(seconds=new_file_time_interval)
         self.last_file_time = datetime.min.replace(tzinfo=pytz.UTC)
@@ -123,9 +124,7 @@ class HDF5Writer(ABC):
         # ignore packets with no timestamp, as they indicate FPGA processing was
         # disabled and are useless anyway.
         if statistics_packet.block_serial_number == 0:
-            logger.warning(
-                "Received statistics with no timestamp. Packet dropped."
-            )
+            logger.warning("Received statistics with no timestamp. Packet dropped.")
             return
 
         # check if te statistics timestamp is unexpectedly older than the
@@ -134,7 +133,7 @@ class HDF5Writer(ABC):
             logger.warning(
                 "Received statistics with earlier timestamp than is currently"
                 "being processed (%s). Packet dropped.",
-                statistics_timestamp
+                statistics_timestamp,
             )
             return
 
@@ -159,9 +158,9 @@ class HDF5Writer(ABC):
         # only write the specified fraction of statistics, skip the rest
         if self.statistics_counter % self.decimation_factor != 0:
             logger.debug(
-                "Skipping statistic with timestamp: %s. Only writing"
-                "1/%d statistics", timestamp,
-                self.decimation_factor
+                "Skipping statistic with timestamp: %s. Only writing" "1/%d statistics",
+                timestamp,
+                self.decimation_factor,
             )
             return
 
@@ -174,12 +173,11 @@ class HDF5Writer(ABC):
                 self.retrieve_attribute_values()
                 self.write_matrix()
             except Exception as e:
-                time = self.current_timestamp.strftime(
-                    "%Y-%m-%d-%H-%M-%S-%f"
-                )[:-3]
+                time = self.current_timestamp.strftime("%Y-%m-%d-%H-%M-%S-%f")[:-3]
                 logger.exception(
                     "Exception while attempting to write matrix to HDF5."
-                    "Matrix: %s dropped", time
+                    "Matrix: %s dropped",
+                    time,
                 )
 
         # only start a new file if its time AND we are done with the previous
@@ -199,9 +197,9 @@ class HDF5Writer(ABC):
 
         # create the new hdf5 group based on the timestamp of packets
         current_group = self.file.create_group(
-            "{}_{}".format(self.mode, self.current_timestamp.isoformat(
-                timespec="milliseconds"
-            ))
+            "{}_{}".format(
+                self.mode, self.current_timestamp.isoformat(timespec="milliseconds")
+            )
         )
 
         # store the statistics values for the current group
@@ -210,11 +208,11 @@ class HDF5Writer(ABC):
         # might be optional, but they're easy to add.
         current_group.create_dataset(
             name="nof_payload_errors",
-            data=self.current_matrix.parameters["nof_payload_errors"]
+            data=self.current_matrix.parameters["nof_payload_errors"],
         )
         current_group.create_dataset(
             name="nof_valid_payloads",
-            data=self.current_matrix.parameters["nof_valid_payloads"]
+            data=self.current_matrix.parameters["nof_valid_payloads"],
         )
 
         # get the statistics header
@@ -225,9 +223,7 @@ class HDF5Writer(ABC):
             return
 
         # can't store datetime objects, convert to string instead
-        header["timestamp"] = header["timestamp"].isoformat(
-            timespec="milliseconds"
-        )
+        header["timestamp"] = header["timestamp"].isoformat(timespec="milliseconds")
 
         # store software version entries
         header["station_version_id"] = self.station_version_id
@@ -280,7 +276,7 @@ class HDF5Writer(ABC):
         logger.info(f"creating new file: {filename}")
 
         try:
-            self.file = h5py.File(filename, 'w')
+            self.file = h5py.File(filename, "w")
         except Exception as e:
             logger.exception(f"Error while creating new file")
             raise e
@@ -305,7 +301,7 @@ class HDF5Writer(ABC):
                         "Received a total of %d statistics while running. With "
                         "%d written to disk",
                         self.statistics_counter,
-                        int(self.statistics_counter / self.decimation_factor)
+                        int(self.statistics_counter / self.decimation_factor),
                     )
 
 
@@ -322,7 +318,7 @@ class SstHdf5Writer(HDF5Writer):
             file_location,
             HDF5Writer.SST_MODE,
             decimation_factor,
-            device=device
+            device=device,
         )
 
     def decoder(self, packet):
@@ -333,14 +329,22 @@ class SstHdf5Writer(HDF5Writer):
 
     def retrieve_attribute_values(self):
         attribute_names = ["rcu_attenuator_dB", "rcu_band_select", "rcu_dth_on"]
-        attribute_types = {"rcu_attenuator_dB": numpy.int64, "rcu_band_select" : numpy.int64, "rcu_dth_on" : bool}
+        attribute_types = {
+            "rcu_attenuator_dB": numpy.int64,
+            "rcu_band_select": numpy.int64,
+            "rcu_dth_on": bool,
+        }
         # write the device attributes
         for a in attribute_names:
             try:
                 if self.current_matrix.parameters[a] is None:
                     self.device_attributes[a] = h5py.Empty("f")
                 else:
-                    self.device_attributes[a] = self.current_matrix.parameters[a].flatten().astype(attribute_types[a])                 
+                    self.device_attributes[a] = (
+                        self.current_matrix.parameters[a]
+                        .flatten()
+                        .astype(attribute_types[a])
+                    )
             except AttributeError:
                 self.device_attributes[a] = h5py.Empty("f")
 
@@ -348,19 +352,16 @@ class SstHdf5Writer(HDF5Writer):
         # store the SST values
         current_group.create_dataset(
             name="values",
-            data=self.current_matrix.parameters[
-                "sst_values"
-            ].astype(numpy.float32),
+            data=self.current_matrix.parameters["sst_values"].astype(numpy.float32),
             compression="gzip",
         )
 
-        for k,v in self.device_attributes.items():
+        for k, v in self.device_attributes.items():
             current_group.attrs[k] = v
 
+
 class BstHdf5Writer(HDF5Writer):
-    def __init__(
-        self, new_file_time_interval, file_location, decimation_factor
-    ):
+    def __init__(self, new_file_time_interval, file_location, decimation_factor):
         super().__init__(
             new_file_time_interval,
             file_location,
@@ -381,9 +382,7 @@ class BstHdf5Writer(HDF5Writer):
         # store the BST values
         current_group.create_dataset(
             name="values",
-            data=self.current_matrix.parameters["bst_values"].astype(
-                numpy.float32
-            ),
+            data=self.current_matrix.parameters["bst_values"].astype(numpy.float32),
             compression="gzip",
         )
 
@@ -428,9 +427,8 @@ class XstHdf5Writer(HDF5Writer):
         # slot 0 in their writer, so we only need to store the first set of
         # xst_values.
         current_group.create_dataset(
-            name="values", data=self.current_matrix.xst_values([0])[0].astype(
-                numpy.cfloat
-            ),
+            name="values",
+            data=self.current_matrix.xst_values([0])[0].astype(numpy.cfloat),
             compression="gzip",
         )
 
@@ -438,9 +436,7 @@ class XstHdf5Writer(HDF5Writer):
 class ParallelXstHdf5Writer:
     """Writes multiple subbands in parallel. Each subband to separate file."""
 
-    def __init__(
-            self, new_file_time_interval, file_location, decimation_factor
-    ):
+    def __init__(self, new_file_time_interval, file_location, decimation_factor):
         # maintain a dedicated HDF5Writer per subband
         self.writers = {}
 
diff --git a/tangostationcontrol/tangostationcontrol/test/__init__.py b/tangostationcontrol/tangostationcontrol/test/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/test/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/test/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/test/base.py b/tangostationcontrol/tangostationcontrol/test/base.py
index 7cf3af7f8becb1f92cde139290394ea540f5d8d6..97951ba5077d320b5517408a9e2a6bc62a6ac9c2 100644
--- a/tangostationcontrol/tangostationcontrol/test/base.py
+++ b/tangostationcontrol/tangostationcontrol/test/base.py
@@ -1,17 +1,11 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
-
-from tangostationcontrol.common.lofar_logging import configure_logger
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import unittest
-import testscenarios
+
 import asynctest
+import testscenarios
+from tangostationcontrol.common.lofar_logging import configure_logger
 
 """Setup logging for unit tests"""
 configure_logger(debug=True)
diff --git a/tangostationcontrol/tangostationcontrol/test/beam/__init__.py b/tangostationcontrol/tangostationcontrol/test/beam/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/test/beam/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/test/beam/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/test/beam/test_delays.py b/tangostationcontrol/tangostationcontrol/test/beam/test_delays.py
index 9c99a5ac776a61334bbade193c12c425e0d92d33..fb55297ca4ec4ac35a60b4692256f80401360b00 100644
--- a/tangostationcontrol/tangostationcontrol/test/beam/test_delays.py
+++ b/tangostationcontrol/tangostationcontrol/test/beam/test_delays.py
@@ -1,15 +1,16 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import datetime
-import time
 import logging
-import mock
+import time
 
+import casacore
+import mock
 import numpy
 import numpy.testing
-import casacore
-
-from tangostationcontrol.common.constants import MAX_ANTENNA, N_beamlets_ctrl
-
 from tangostationcontrol.beam.delays import Delays
+from tangostationcontrol.common.constants import MAX_ANTENNA, N_beamlets_ctrl
 from tangostationcontrol.test import base
 
 
@@ -17,7 +18,11 @@ class TestDelays(base.TestCase):
     def test_init(self):
         """Fail condition is simply the object creation failing"""
 
-        reference_itrf = [3826577.066, 461022.948, 5064892.786]  # CS002LBA, in ITRF2005 epoch 2012.5
+        reference_itrf = [
+            3826577.066,
+            461022.948,
+            5064892.786,
+        ]  # CS002LBA, in ITRF2005 epoch 2012.5
         d = Delays(reference_itrf)
 
         self.assertIsNotNone(d)
@@ -43,12 +48,21 @@ class TestDelays(base.TestCase):
         # i dont get these either, but casacore accepts them
         self.assertTrue(d.is_valid_direction([]))
         self.assertTrue(d.is_valid_direction(("J2000",)))
-        self.assertTrue(d.is_valid_direction(("J2000", "0deg",)))
+        self.assertTrue(
+            d.is_valid_direction(
+                (
+                    "J2000",
+                    "0deg",
+                )
+            )
+        )
         self.assertTrue(d.is_valid_direction(("J2000", "0deg", "0deg", "0deg")))
 
         # should not throw, and return False, on bad uses
         self.assertFalse(d.is_valid_direction(("", "", "")))
-        self.assertFalse(d.is_valid_direction(("J2000", "0deg", "0deg", "0deg", "0deg")))
+        self.assertFalse(
+            d.is_valid_direction(("J2000", "0deg", "0deg", "0deg", "0deg"))
+        )
         self.assertFalse(d.is_valid_direction((1, 2, 3)))
         self.assertFalse(d.is_valid_direction("foo"))
         self.assertFalse(d.is_valid_direction(None))
@@ -64,7 +78,6 @@ class TestDelays(base.TestCase):
             timestamp = datetime.datetime(2021, 12, 21, i, 58, 0)
             d.set_measure_time(timestamp)
 
-
             # point to the sun
             direction = "SUN", "0deg", "0deg"
 
@@ -73,11 +86,11 @@ class TestDelays(base.TestCase):
             direction = d.get_direction_vector(pointing)
 
             """
-            direction[2] is the z-coordinate of ITRF, which points to the north pole. 
-            This direction is constant when pointing to the sun, as the earth rotates around its axis, 
+            direction[2] is the z-coordinate of ITRF, which points to the north pole.
+            This direction is constant when pointing to the sun, as the earth rotates around its axis,
             but changes slowly due to the earths rotation around the sun.
             The summer and winter solstices are when these values are at their peaks and the changes are the smallest.
-            This test takes the value at the winter solstice and checks whether the measured values are near enough to that. 
+            This test takes the value at the winter solstice and checks whether the measured values are near enough to that.
             """
 
             # Measured manually at the winter solstice. Using datetime.datetime(2021, 12, 21, 16, 58, 0)
@@ -88,11 +101,17 @@ class TestDelays(base.TestCase):
 
     def test_identical_location(self):
         # # create a frame tied to the reference position
-        reference_itrf = [3826577.066, 461022.948, 5064892.786]  # CS002LBA, in ITRF2005 epoch 2012.5
+        reference_itrf = [
+            3826577.066,
+            461022.948,
+            5064892.786,
+        ]  # CS002LBA, in ITRF2005 epoch 2012.5
         d = Delays(reference_itrf)
 
         # set the antenna position identical to the reference position
-        antenna_itrf = [[reference_itrf[0], reference_itrf[1], reference_itrf[2]]]  # CS001LBA, in ITRF2005 epoch 2012.5
+        antenna_itrf = [
+            [reference_itrf[0], reference_itrf[1], reference_itrf[2]]
+        ]  # CS001LBA, in ITRF2005 epoch 2012.5
 
         # # set the timestamp to solve for
         timestamp = datetime.datetime(2000, 1, 1, 0, 0, 0)
@@ -109,11 +128,17 @@ class TestDelays(base.TestCase):
         self.assertListEqual(delays.tolist(), [0.0], msg=f"delays = {delays}")
 
     def test_regression(self):
-        reference_itrf = [3826577.066, 461022.948, 5064892.786]  # CS002LBA, in ITRF2005 epoch 2012.5
+        reference_itrf = [
+            3826577.066,
+            461022.948,
+            5064892.786,
+        ]  # CS002LBA, in ITRF2005 epoch 2012.5
         d = Delays(reference_itrf)
 
         # set the antenna position identical to the reference position
-        antenna_itrf = [[3826923.503, 460915.488, 5064643.517]]  # CS001LBA, in ITRF2005 epoch 2012.5
+        antenna_itrf = [
+            [3826923.503, 460915.488, 5064643.517]
+        ]  # CS001LBA, in ITRF2005 epoch 2012.5
 
         # # set the timestamp to solve for
         timestamp = datetime.datetime(2000, 1, 1, 0, 0, 0)
@@ -146,7 +171,9 @@ class TestDelays(base.TestCase):
         #
         # We point at the North Celestial Pole in J2000, which is always at 90 degrees declanation,
         # see https://gssc.esa.int/navipedia/index.php/Conventional_Celestial_Reference_System
-        timestamp = datetime.datetime(2022, 3, 1, 0, 0, 0) # timestamp does not actually matter, but casacore doesn't know that.
+        timestamp = datetime.datetime(
+            2022, 3, 1, 0, 0, 0
+        )  # timestamp does not actually matter, but casacore doesn't know that.
         d.set_measure_time(timestamp)
         direction = "J2000", "0deg", "90deg"
 
@@ -157,11 +184,13 @@ class TestDelays(base.TestCase):
 
     def test_delays_bulk(self):
         d = Delays([0, 0, 0])
-        timestamp = datetime.datetime(2022, 3, 1, 0, 0, 0) # timestamp does not actually matter, but casacore doesn't know that.
+        timestamp = datetime.datetime(
+            2022, 3, 1, 0, 0, 0
+        )  # timestamp does not actually matter, but casacore doesn't know that.
         d.set_measure_time(timestamp)
 
         # generate different positions and directions
-        positions = numpy.array([[i,2,3] for i in range(5)])
+        positions = numpy.array([[i, 2, 3] for i in range(5)])
         directions = numpy.array([["J2000", f"{i}deg", f"{i}deg"] for i in range(90)])
 
         bulk_result = d.delays_bulk(directions, positions)
@@ -169,19 +198,25 @@ class TestDelays(base.TestCase):
         # verify parallellisation along direction axis
         for i, single_dir in enumerate(directions):
             single_dir_result = d.delays_bulk([single_dir], positions)
-            numpy.testing.assert_almost_equal(single_dir_result[:, 0], bulk_result[:, i], 4)
+            numpy.testing.assert_almost_equal(
+                single_dir_result[:, 0], bulk_result[:, i], 4
+            )
 
         # verify parallellisation along position axis
         for i, single_pos in enumerate(positions):
             single_pos_result = d.delays_bulk(directions, [single_pos])
-            numpy.testing.assert_almost_equal(single_pos_result[0, :], bulk_result[i, :], 4)
+            numpy.testing.assert_almost_equal(
+                single_pos_result[0, :], bulk_result[i, :], 4
+            )
 
     def test_delays_bulk_speed(self):
         d = Delays([0, 0, 0])
-        timestamp = datetime.datetime(2022, 3, 1, 0, 0, 0) # timestamp does not actually matter, but casacore doesn't know that.
+        timestamp = datetime.datetime(
+            2022, 3, 1, 0, 0, 0
+        )  # timestamp does not actually matter, but casacore doesn't know that.
         d.set_measure_time(timestamp)
 
-        positions = numpy.array([[1,2,3]] * MAX_ANTENNA)
+        positions = numpy.array([[1, 2, 3]] * MAX_ANTENNA)
         directions = numpy.array([["J2000", "0deg", "0deg"]] * N_beamlets_ctrl)
 
         count = 10
@@ -189,4 +224,6 @@ class TestDelays(base.TestCase):
         for _ in range(count):
             _ = d.delays_bulk(directions, positions)
         after = time.monotonic_ns()
-        logging.error(f"delays bulk averaged {(after - before)/count/1e6} ms to convert 488 directions for 96 antennas.")
+        logging.error(
+            f"delays bulk averaged {(after - before) / count / 1e6} ms to convert 488 directions for 96 antennas."
+        )
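
The benchmark in test_delays_bulk_speed reduces to the following timing pattern; a sketch only, with average_ms as an illustrative name and the same nanoseconds-to-milliseconds arithmetic as the test above.

import time


def average_ms(fn, count: int = 10) -> float:
    # Time `count` calls with a monotonic clock and report the
    # average per call in milliseconds: ns / count / 1e6.
    before = time.monotonic_ns()
    for _ in range(count):
        fn()
    after = time.monotonic_ns()
    return (after - before) / count / 1e6


print(average_ms(lambda: sum(range(100_000))))
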
diff --git a/tangostationcontrol/tangostationcontrol/test/beam/test_geo.py b/tangostationcontrol/tangostationcontrol/test/beam/test_geo.py
index e3b8d726a67560e80ea73d963e4d80f8eddce848..7f6711a93a0490760a084bc7f7eba20f369ffffc 100644
--- a/tangostationcontrol/tangostationcontrol/test/beam/test_geo.py
+++ b/tangostationcontrol/tangostationcontrol/test/beam/test_geo.py
@@ -1,37 +1,30 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
+import numpy.testing
 from tangostationcontrol.beam.geo import ETRS_to_ITRF, ETRS_to_GEO, GEO_to_GEOHASH
-
 from tangostationcontrol.test import base
 
-import numpy.testing
 
 class TestETRSToITRF(base.TestCase):
     def test_convert_single_coordinate(self):
-        """ Convert a single coordinate. """
+        """Convert a single coordinate."""
         ETRS_coords = numpy.array([1.0, 1.0, 1.0])
         ITRF_coords = ETRS_to_ITRF(ETRS_coords, "ITRF2005", 2015.5)
 
         self.assertEqual(ETRS_coords.shape, ITRF_coords.shape)
 
     def test_convert_array(self):
-        """ Convert an array of coordinates. """
-        ETRS_coords = numpy.array([ [1.0, 1.0, 1.0], [2.0, 2.0, 2.0] ])
+        """Convert an array of coordinates."""
+        ETRS_coords = numpy.array([[1.0, 1.0, 1.0], [2.0, 2.0, 2.0]])
         ITRF_coords = ETRS_to_ITRF(ETRS_coords, "ITRF2005", 2015.5)
 
         self.assertEqual(ETRS_coords.shape, ITRF_coords.shape)
 
     def test_verify_CS001_LBA(self):
-        """ Verify if the calculated CS001LBA phase center matches those calculated in LOFAR1. """
+        """Verify if the calculated CS001LBA phase center matches those calculated in LOFAR1."""
 
-        # See CLBA in MAC/Deployment/data/Coordinates/ETRF_FILES/CS001/CS001-antenna-positions-ETRS.csv 
+        # See CLBA in MAC/Deployment/data/Coordinates/ETRF_FILES/CS001/CS001-antenna-positions-ETRS.csv
         CS001_LBA_ETRS = [3826923.942, 460915.117, 5064643.229]
 
         # Convert to ITRF
@@ -40,27 +33,30 @@ class TestETRSToITRF(base.TestCase):
         # verify against LOFAR1 (MAC/Deployment/data/StaticMetaData/AntennaFields/CS001-AntennaField.conf)
         LOFAR1_CS001_LBA_ITRF = [3826923.50275, 460915.488115, 5064643.517]
 
-        numpy.testing.assert_almost_equal(CS001_LBA_ITRF, LOFAR1_CS001_LBA_ITRF, decimal=1.5)
+        numpy.testing.assert_almost_equal(
+            CS001_LBA_ITRF, LOFAR1_CS001_LBA_ITRF, decimal=1.5
+        )
+
 
 class TestETRSToGEO(base.TestCase):
     def test_convert_single_coordinate(self):
-        """ Convert a single coordinate. """
+        """Convert a single coordinate."""
         ETRS_coords = numpy.array([1.0, 1.0, 1.0])
         GEO_coords = ETRS_to_GEO(ETRS_coords)
 
         self.assertEqual((2,), GEO_coords.shape)
 
     def test_convert_array(self):
-        """ Convert an array of coordinates. """
-        ETRS_coords = numpy.array([ [1.0, 1.0, 1.0], [2.0, 2.0, 2.0], [3.0, 3.0, 3.0] ])
+        """Convert an array of coordinates."""
+        ETRS_coords = numpy.array([[1.0, 1.0, 1.0], [2.0, 2.0, 2.0], [3.0, 3.0, 3.0]])
         GEO_coords = ETRS_to_GEO(ETRS_coords)
 
-        self.assertEqual((3,2), GEO_coords.shape)
+        self.assertEqual((3, 2), GEO_coords.shape)
 
     def test_verify_CS001_LBA(self):
-        """ Verify if the calculated CS001LBA phase center matches those calculated in LOFAR1. """
+        """Verify if the calculated CS001LBA phase center matches those calculated in LOFAR1."""
 
-        # See CLBA in MAC/Deployment/data/Coordinates/ETRF_FILES/CS001/CS001-antenna-positions-ETRS.csv 
+        # See CLBA in MAC/Deployment/data/Coordinates/ETRF_FILES/CS001/CS001-antenna-positions-ETRS.csv
         CS001_LBA_ETRS = [3826923.942, 460915.117, 5064643.229]
 
         # Convert to GEO
@@ -69,25 +65,28 @@ class TestETRSToGEO(base.TestCase):
         # verify against actual position
         LOFAR1_CS001_LBA_GEO = [52.911, 6.868]
 
-        numpy.testing.assert_almost_equal(CS001_LBA_GEO, LOFAR1_CS001_LBA_GEO, decimal=3)
+        numpy.testing.assert_almost_equal(
+            CS001_LBA_GEO, LOFAR1_CS001_LBA_GEO, decimal=3
+        )
+
 
 class TestGEOToGEOHASH(base.TestCase):
     def test_convert_single_coordinate(self):
-        """ Convert a single coordinate. """
+        """Convert a single coordinate."""
         GEO_coords = numpy.array([1.0, 1.0])
         GEOHASH_coords = GEO_to_GEOHASH(GEO_coords)
 
         self.assertEqual(str, type(GEOHASH_coords))
 
     def test_convert_array(self):
-        """ Convert an array of coordinates. """
-        GEO_coords = numpy.array([ [1.0, 1.0], [2.0, 2.0], [3.0, 3.0] ])
+        """Convert an array of coordinates."""
+        GEO_coords = numpy.array([[1.0, 1.0], [2.0, 2.0], [3.0, 3.0]])
         GEOHASH_coords = GEO_to_GEOHASH(GEO_coords)
 
         self.assertEqual((3,), GEOHASH_coords.shape)
 
     def test_CS001_LBA_regression(self):
-        """ Verify if the calculated CS001LBA phase center match fixed values, to detect changes in computation. """
+        """Verify if the calculated CS001LBA phase center match fixed values, to detect changes in computation."""
 
         CS001_LBA_GEO = [52.911, 6.868]
 
@@ -95,4 +94,4 @@ class TestGEOToGEOHASH(base.TestCase):
         CS001_LBA_GEOHASH = GEO_to_GEOHASH(numpy.array(CS001_LBA_GEO))
 
         # verify against precomputed value
-        self.assertEqual('u1kvh21hgvrcpm28', CS001_LBA_GEOHASH)
+        self.assertEqual("u1kvh21hgvrcpm28", CS001_LBA_GEOHASH)
diff --git a/tangostationcontrol/tangostationcontrol/test/beam/test_hba_tile.py b/tangostationcontrol/tangostationcontrol/test/beam/test_hba_tile.py
index d698264f845cde35c5af63612e040832120e2455..3063008a56cfc767b03df0b8aaf80b894dcaf8b4 100644
--- a/tangostationcontrol/tangostationcontrol/test/beam/test_hba_tile.py
+++ b/tangostationcontrol/tangostationcontrol/test/beam/test_hba_tile.py
@@ -1,52 +1,55 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from tangostationcontrol.beam.hba_tile import HBATAntennaOffsets
+from math import pi
 
+import numpy.testing
+from tangostationcontrol.beam.hba_tile import HBATAntennaOffsets
 from tangostationcontrol.test import base
 
-from math import pi
-import numpy.testing
 
 class TestHBATAntennaOffsets(base.TestCase):
     def test_verify_CS001_HBA0(self):
-        """ Verify if the calculated HBAT Antenna Offsets match those calculated in LOFAR1. """
+        """Verify if the calculated HBAT Antenna Offsets match those calculated in LOFAR1."""
 
         CS001_HBA0_rotation_angle_deg = 24
-        CS001_PQR_to_ETRS_rotation_matrix = numpy.array([
-                            [-0.1195951054, -0.7919544517, 0.5987530018],
-                            [ 0.9928227484, -0.0954186800, 0.0720990002],
-                            [ 0.0000330969,  0.6030782884, 0.7976820024]])
+        CS001_PQR_to_ETRS_rotation_matrix = numpy.array(
+            [
+                [-0.1195951054, -0.7919544517, 0.5987530018],
+                [0.9928227484, -0.0954186800, 0.0720990002],
+                [0.0000330969, 0.6030782884, 0.7976820024],
+            ]
+        )
 
         # recalculate the ITRF offsets
         ITRF_offsets = HBATAntennaOffsets.ITRF_offsets(
             HBATAntennaOffsets.HBAT1_BASE_ANTENNA_OFFSETS,
             CS001_HBA0_rotation_angle_deg * pi / 180,
-            CS001_PQR_to_ETRS_rotation_matrix)
+            CS001_PQR_to_ETRS_rotation_matrix,
+        )
 
         # verify against LOFAR1 (MAC/Deployment/data/StaticMetaData/iHBADeltas/CS001-iHBADeltas.conf)
-        LOFAR1_CS001_HBA0_ITRF_offsets = numpy.array([
-                               [-1.847, -1.180,  1.493],
-                               [-1.581,  0.003,  1.186],
-                               [-1.315,  1.185,  0.880],
-                               [-1.049,  2.367,  0.573],
-                               [-0.882, -1.575,  0.804],
-                               [-0.616, -0.393,  0.498],
-                               [-0.350,  0.789,  0.191],
-                               [-0.083,  1.971, -0.116],
-                               [ 0.083, -1.971,  0.116],
-                               [ 0.350, -0.789, -0.191],
-                               [ 0.616,  0.393, -0.498],
-                               [ 0.882,  1.575, -0.804],
-                               [ 1.049, -2.367, -0.573],
-                               [ 1.315, -1.185, -0.880],
-                               [ 1.581, -0.003, -1.186],
-                               [ 1.847,  1.180, -1.493]])
-
-        numpy.testing.assert_almost_equal(ITRF_offsets, LOFAR1_CS001_HBA0_ITRF_offsets, decimal=3)
+        LOFAR1_CS001_HBA0_ITRF_offsets = numpy.array(
+            [
+                [-1.847, -1.180, 1.493],
+                [-1.581, 0.003, 1.186],
+                [-1.315, 1.185, 0.880],
+                [-1.049, 2.367, 0.573],
+                [-0.882, -1.575, 0.804],
+                [-0.616, -0.393, 0.498],
+                [-0.350, 0.789, 0.191],
+                [-0.083, 1.971, -0.116],
+                [0.083, -1.971, 0.116],
+                [0.350, -0.789, -0.191],
+                [0.616, 0.393, -0.498],
+                [0.882, 1.575, -0.804],
+                [1.049, -2.367, -0.573],
+                [1.315, -1.185, -0.880],
+                [1.581, -0.003, -1.186],
+                [1.847, 1.180, -1.493],
+            ]
+        )
+
+        numpy.testing.assert_almost_equal(
+            ITRF_offsets, LOFAR1_CS001_HBA0_ITRF_offsets, decimal=3
+        )
diff --git a/tangostationcontrol/tangostationcontrol/test/clients/__init__.py b/tangostationcontrol/tangostationcontrol/test/clients/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/test/clients/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/test/clients/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/test/clients/test_attr_wrapper.py b/tangostationcontrol/tangostationcontrol/test/clients/test_attr_wrapper.py
index 8df95918abcb7cf6dd33d002ac5db60087008d57..040e8ba5bc607f01d16600ab68be850e5589cbf3 100644
--- a/tangostationcontrol/tangostationcontrol/test/clients/test_attr_wrapper.py
+++ b/tangostationcontrol/tangostationcontrol/test/clients/test_attr_wrapper.py
@@ -1,35 +1,34 @@
-# -*- coding: utf-8 -*-
-
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """ test Device Server
 """
 
-# External imports
-from tango import DevState, DevFailed, AttrWriteType
+import asyncio
 
-# Internal imports
-from tangostationcontrol.test.clients.test_client import TestClient
-from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
-from tangostationcontrol.devices.lofar_device import LOFARDevice
+import mock
+import numpy
 import tangostationcontrol.devices.lofar_device
 
+# External imports
+from tango import DevState, DevFailed, AttrWriteType
+
 # Test imports
 from tango.test_context import DeviceTestContext
+from tangostationcontrol.clients.attribute_wrapper import AttributeWrapper
+from tangostationcontrol.devices.lofar_device import LOFARDevice
 from tangostationcontrol.test import base
 
-import asyncio
-import mock
-import numpy
+# Internal imports
+from tangostationcontrol.test.clients.test_client import TestClient
 
 SCALAR_DIMS = (1,)
 SPECTRUM_DIMS = (4,)
-IMAGE_DIMS = (3,2)
+IMAGE_DIMS = (3, 2)
 
-STR_SCALAR_VAL = '1'
-STR_SPECTRUM_VAL = ['1','1', '1','1']
-STR_IMAGE_VAL = [['1','1'],['1','1'],['1','1']]
+STR_SCALAR_VAL = "1"
+STR_SPECTRUM_VAL = ["1", "1", "1", "1"]
+STR_IMAGE_VAL = [["1", "1"], ["1", "1"], ["1", "1"]]
 
 
 def dev_init(device):
@@ -39,270 +38,529 @@ def dev_init(device):
         asyncio.run(i.async_set_comm_client(device, device.test_client))
     device.test_client.start()
 
+
 class TestAttributeTypes(base.TestCase):
     def setUp(self):
         # Avoid the device trying to access itself as a client
-        self.deviceproxy_patch = mock.patch.object(tangostationcontrol.devices.lofar_device,'DeviceProxy')
+        self.deviceproxy_patch = mock.patch.object(
+            tangostationcontrol.devices.lofar_device, "DeviceProxy"
+        )
         self.deviceproxy_patch.start()
         self.addCleanup(self.deviceproxy_patch.stop)
 
     class StrScalarDevice(LOFARDevice):
         scalar_R = AttributeWrapper(comms_annotation="str_scalar_R", datatype=str)
-        scalar_RW = AttributeWrapper(comms_annotation="str_scalar_RW", datatype=str, access=AttrWriteType.READ_WRITE)
+        scalar_RW = AttributeWrapper(
+            comms_annotation="str_scalar_RW",
+            datatype=str,
+            access=AttrWriteType.READ_WRITE,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class BoolScalarDevice(LOFARDevice):
         scalar_R = AttributeWrapper(comms_annotation="bool_scalar_R", datatype=bool)
-        scalar_RW = AttributeWrapper(comms_annotation="bool_scalar_RW", datatype=bool, access=AttrWriteType.READ_WRITE)
+        scalar_RW = AttributeWrapper(
+            comms_annotation="bool_scalar_RW",
+            datatype=bool,
+            access=AttrWriteType.READ_WRITE,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Float32ScalarDevice(LOFARDevice):
-        scalar_R = AttributeWrapper(comms_annotation="float32_scalar_R", datatype=numpy.float32)
-        scalar_RW = AttributeWrapper(comms_annotation="float32_scalar_RW", datatype=numpy.float32, access=AttrWriteType.READ_WRITE)
+        scalar_R = AttributeWrapper(
+            comms_annotation="float32_scalar_R", datatype=numpy.float32
+        )
+        scalar_RW = AttributeWrapper(
+            comms_annotation="float32_scalar_RW",
+            datatype=numpy.float32,
+            access=AttrWriteType.READ_WRITE,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Float64ScalarDevice(LOFARDevice):
-        scalar_R = AttributeWrapper(comms_annotation="float64_scalar_R", datatype=numpy.float64)
-        scalar_RW = AttributeWrapper(comms_annotation="float64_scalar_RW", datatype=numpy.float64, access=AttrWriteType.READ_WRITE)
+        scalar_R = AttributeWrapper(
+            comms_annotation="float64_scalar_R", datatype=numpy.float64
+        )
+        scalar_RW = AttributeWrapper(
+            comms_annotation="float64_scalar_RW",
+            datatype=numpy.float64,
+            access=AttrWriteType.READ_WRITE,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class DoubleScalarDevice(LOFARDevice):
-        scalar_R = AttributeWrapper(comms_annotation="double_scalar_R", datatype=numpy.double)
-        scalar_RW = AttributeWrapper(comms_annotation="double_scalar_RW", datatype=numpy.double, access=AttrWriteType.READ_WRITE)
+        scalar_R = AttributeWrapper(
+            comms_annotation="double_scalar_R", datatype=numpy.double
+        )
+        scalar_RW = AttributeWrapper(
+            comms_annotation="double_scalar_RW",
+            datatype=numpy.double,
+            access=AttrWriteType.READ_WRITE,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Uint8ScalarDevice(LOFARDevice):
-        scalar_R = AttributeWrapper(comms_annotation="uint8_scalar_R", datatype=numpy.uint8)
-        scalar_RW = AttributeWrapper(comms_annotation="uint8_scalar_RW", datatype=numpy.uint8, access=AttrWriteType.READ_WRITE)
+        scalar_R = AttributeWrapper(
+            comms_annotation="uint8_scalar_R", datatype=numpy.uint8
+        )
+        scalar_RW = AttributeWrapper(
+            comms_annotation="uint8_scalar_RW",
+            datatype=numpy.uint8,
+            access=AttrWriteType.READ_WRITE,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Uint16ScalarDevice(LOFARDevice):
-        scalar_R = AttributeWrapper(comms_annotation="uint16_scalar_R", datatype=numpy.uint16)
-        scalar_RW = AttributeWrapper(comms_annotation="uint16_scalar_RW", datatype=numpy.uint16, access=AttrWriteType.READ_WRITE)
+        scalar_R = AttributeWrapper(
+            comms_annotation="uint16_scalar_R", datatype=numpy.uint16
+        )
+        scalar_RW = AttributeWrapper(
+            comms_annotation="uint16_scalar_RW",
+            datatype=numpy.uint16,
+            access=AttrWriteType.READ_WRITE,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Uint32ScalarDevice(LOFARDevice):
-        scalar_R = AttributeWrapper(comms_annotation="uint32_scalar_R", datatype=numpy.uint32)
-        scalar_RW = AttributeWrapper(comms_annotation="uint32_scalar_RW", datatype=numpy.uint32, access=AttrWriteType.READ_WRITE)
+        scalar_R = AttributeWrapper(
+            comms_annotation="uint32_scalar_R", datatype=numpy.uint32
+        )
+        scalar_RW = AttributeWrapper(
+            comms_annotation="uint32_scalar_RW",
+            datatype=numpy.uint32,
+            access=AttrWriteType.READ_WRITE,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Uint64ScalarDevice(LOFARDevice):
-        scalar_R = AttributeWrapper(comms_annotation="uint64_scalar_R", datatype=numpy.uint64)
-        scalar_RW = AttributeWrapper(comms_annotation="uint64_scalar_RW", datatype=numpy.uint64, access=AttrWriteType.READ_WRITE)
+        scalar_R = AttributeWrapper(
+            comms_annotation="uint64_scalar_R", datatype=numpy.uint64
+        )
+        scalar_RW = AttributeWrapper(
+            comms_annotation="uint64_scalar_RW",
+            datatype=numpy.uint64,
+            access=AttrWriteType.READ_WRITE,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Int16ScalarDevice(LOFARDevice):
-        scalar_R = AttributeWrapper(comms_annotation="int16_scalar_R", datatype=numpy.int16)
-        scalar_RW = AttributeWrapper(comms_annotation="int16_scalar_RW", datatype=numpy.int16, access=AttrWriteType.READ_WRITE)
+        scalar_R = AttributeWrapper(
+            comms_annotation="int16_scalar_R", datatype=numpy.int16
+        )
+        scalar_RW = AttributeWrapper(
+            comms_annotation="int16_scalar_RW",
+            datatype=numpy.int16,
+            access=AttrWriteType.READ_WRITE,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Int32ScalarDevice(LOFARDevice):
-        scalar_R = AttributeWrapper(comms_annotation="int32_scalar_R", datatype=numpy.int32)
-        scalar_RW = AttributeWrapper(comms_annotation="int32_scalar_RW", datatype=numpy.int32, access=AttrWriteType.READ_WRITE)
+        scalar_R = AttributeWrapper(
+            comms_annotation="int32_scalar_R", datatype=numpy.int32
+        )
+        scalar_RW = AttributeWrapper(
+            comms_annotation="int32_scalar_RW",
+            datatype=numpy.int32,
+            access=AttrWriteType.READ_WRITE,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Int64ScalarDevice(LOFARDevice):
-        scalar_R = AttributeWrapper(comms_annotation="int64_scalar_R", datatype=numpy.int64)
-        scalar_RW = AttributeWrapper(comms_annotation="int64_scalar_RW", datatype=numpy.int64, access=AttrWriteType.READ_WRITE)
+        scalar_R = AttributeWrapper(
+            comms_annotation="int64_scalar_R", datatype=numpy.int64
+        )
+        scalar_RW = AttributeWrapper(
+            comms_annotation="int64_scalar_RW",
+            datatype=numpy.int64,
+            access=AttrWriteType.READ_WRITE,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class StrSpectrumDevice(LOFARDevice):
-        spectrum_R = AttributeWrapper(comms_annotation="str_spectrum_R", datatype=str, dims=SPECTRUM_DIMS)
-        spectrum_RW = AttributeWrapper(comms_annotation="str_spectrum_RW", datatype=str, access=AttrWriteType.READ_WRITE, dims=SPECTRUM_DIMS)
+        spectrum_R = AttributeWrapper(
+            comms_annotation="str_spectrum_R", datatype=str, dims=SPECTRUM_DIMS
+        )
+        spectrum_RW = AttributeWrapper(
+            comms_annotation="str_spectrum_RW",
+            datatype=str,
+            access=AttrWriteType.READ_WRITE,
+            dims=SPECTRUM_DIMS,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class BoolSpectrumDevice(LOFARDevice):
-        spectrum_R = AttributeWrapper(comms_annotation="bool_spectrum_R", datatype=bool, dims=SPECTRUM_DIMS)
-        spectrum_RW = AttributeWrapper(comms_annotation="bool_spectrum_RW", datatype=bool, access=AttrWriteType.READ_WRITE, dims=SPECTRUM_DIMS)
+        spectrum_R = AttributeWrapper(
+            comms_annotation="bool_spectrum_R", datatype=bool, dims=SPECTRUM_DIMS
+        )
+        spectrum_RW = AttributeWrapper(
+            comms_annotation="bool_spectrum_RW",
+            datatype=bool,
+            access=AttrWriteType.READ_WRITE,
+            dims=SPECTRUM_DIMS,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Float32SpectrumDevice(LOFARDevice):
-        spectrum_R = AttributeWrapper(comms_annotation="float32_spectrum_R", datatype=numpy.float32, dims=SPECTRUM_DIMS)
-        spectrum_RW = AttributeWrapper(comms_annotation="float32_spectrum_RW", datatype=numpy.float32, access=AttrWriteType.READ_WRITE, dims=SPECTRUM_DIMS)
+        spectrum_R = AttributeWrapper(
+            comms_annotation="float32_spectrum_R",
+            datatype=numpy.float32,
+            dims=SPECTRUM_DIMS,
+        )
+        spectrum_RW = AttributeWrapper(
+            comms_annotation="float32_spectrum_RW",
+            datatype=numpy.float32,
+            access=AttrWriteType.READ_WRITE,
+            dims=SPECTRUM_DIMS,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Float64SpectrumDevice(LOFARDevice):
-        spectrum_R = AttributeWrapper(comms_annotation="float64_spectrum_R", datatype=numpy.float64, dims=SPECTRUM_DIMS)
-        spectrum_RW = AttributeWrapper(comms_annotation="float64_spectrum_RW", datatype=numpy.float64, access=AttrWriteType.READ_WRITE, dims=SPECTRUM_DIMS)
+        spectrum_R = AttributeWrapper(
+            comms_annotation="float64_spectrum_R",
+            datatype=numpy.float64,
+            dims=SPECTRUM_DIMS,
+        )
+        spectrum_RW = AttributeWrapper(
+            comms_annotation="float64_spectrum_RW",
+            datatype=numpy.float64,
+            access=AttrWriteType.READ_WRITE,
+            dims=SPECTRUM_DIMS,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class DoubleSpectrumDevice(LOFARDevice):
-        spectrum_R = AttributeWrapper(comms_annotation="double_spectrum_R", datatype=numpy.double, dims=SPECTRUM_DIMS)
-        spectrum_RW = AttributeWrapper(comms_annotation="double_spectrum_RW", datatype=numpy.double, access=AttrWriteType.READ_WRITE, dims=SPECTRUM_DIMS)
+        spectrum_R = AttributeWrapper(
+            comms_annotation="double_spectrum_R",
+            datatype=numpy.double,
+            dims=SPECTRUM_DIMS,
+        )
+        spectrum_RW = AttributeWrapper(
+            comms_annotation="double_spectrum_RW",
+            datatype=numpy.double,
+            access=AttrWriteType.READ_WRITE,
+            dims=SPECTRUM_DIMS,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Uint8SpectrumDevice(LOFARDevice):
-        spectrum_R = AttributeWrapper(comms_annotation="uint8_spectrum_R", datatype=numpy.uint8, dims=SPECTRUM_DIMS)
-        spectrum_RW = AttributeWrapper(comms_annotation="uint8_spectrum_RW", datatype=numpy.uint8, access=AttrWriteType.READ_WRITE, dims=SPECTRUM_DIMS)
+        spectrum_R = AttributeWrapper(
+            comms_annotation="uint8_spectrum_R",
+            datatype=numpy.uint8,
+            dims=SPECTRUM_DIMS,
+        )
+        spectrum_RW = AttributeWrapper(
+            comms_annotation="uint8_spectrum_RW",
+            datatype=numpy.uint8,
+            access=AttrWriteType.READ_WRITE,
+            dims=SPECTRUM_DIMS,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Uint16SpectrumDevice(LOFARDevice):
-        spectrum_R = AttributeWrapper(comms_annotation="uint16_spectrum_R", datatype=numpy.uint16, dims=SPECTRUM_DIMS)
-        spectrum_RW = AttributeWrapper(comms_annotation="uint16_spectrum_RW", datatype=numpy.uint16, access=AttrWriteType.READ_WRITE, dims=SPECTRUM_DIMS)
+        spectrum_R = AttributeWrapper(
+            comms_annotation="uint16_spectrum_R",
+            datatype=numpy.uint16,
+            dims=SPECTRUM_DIMS,
+        )
+        spectrum_RW = AttributeWrapper(
+            comms_annotation="uint16_spectrum_RW",
+            datatype=numpy.uint16,
+            access=AttrWriteType.READ_WRITE,
+            dims=SPECTRUM_DIMS,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Uint32SpectrumDevice(LOFARDevice):
-        spectrum_R = AttributeWrapper(comms_annotation="uint32_spectrum_R", datatype=numpy.uint32, dims=SPECTRUM_DIMS)
-        spectrum_RW = AttributeWrapper(comms_annotation="uint32_spectrum_RW", datatype=numpy.uint32, access=AttrWriteType.READ_WRITE, dims=SPECTRUM_DIMS)
+        spectrum_R = AttributeWrapper(
+            comms_annotation="uint32_spectrum_R",
+            datatype=numpy.uint32,
+            dims=SPECTRUM_DIMS,
+        )
+        spectrum_RW = AttributeWrapper(
+            comms_annotation="uint32_spectrum_RW",
+            datatype=numpy.uint32,
+            access=AttrWriteType.READ_WRITE,
+            dims=SPECTRUM_DIMS,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Uint64SpectrumDevice(LOFARDevice):
-        spectrum_R = AttributeWrapper(comms_annotation="uint64_spectrum_R", datatype=numpy.uint64, dims=SPECTRUM_DIMS)
-        spectrum_RW = AttributeWrapper(comms_annotation="uint64_spectrum_RW", datatype=numpy.uint64, access=AttrWriteType.READ_WRITE, dims=SPECTRUM_DIMS)
+        spectrum_R = AttributeWrapper(
+            comms_annotation="uint64_spectrum_R",
+            datatype=numpy.uint64,
+            dims=SPECTRUM_DIMS,
+        )
+        spectrum_RW = AttributeWrapper(
+            comms_annotation="uint64_spectrum_RW",
+            datatype=numpy.uint64,
+            access=AttrWriteType.READ_WRITE,
+            dims=SPECTRUM_DIMS,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Int16SpectrumDevice(LOFARDevice):
-        spectrum_R = AttributeWrapper(comms_annotation="int16_spectrum_R", datatype=numpy.int16, dims=SPECTRUM_DIMS)
-        spectrum_RW = AttributeWrapper(comms_annotation="int16_spectrum_RW", datatype=numpy.int16, access=AttrWriteType.READ_WRITE, dims=SPECTRUM_DIMS)
+        spectrum_R = AttributeWrapper(
+            comms_annotation="int16_spectrum_R",
+            datatype=numpy.int16,
+            dims=SPECTRUM_DIMS,
+        )
+        spectrum_RW = AttributeWrapper(
+            comms_annotation="int16_spectrum_RW",
+            datatype=numpy.int16,
+            access=AttrWriteType.READ_WRITE,
+            dims=SPECTRUM_DIMS,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Int32SpectrumDevice(LOFARDevice):
-        spectrum_R = AttributeWrapper(comms_annotation="int32_spectrum_R", datatype=numpy.int32, dims=SPECTRUM_DIMS)
-        spectrum_RW = AttributeWrapper(comms_annotation="int32_spectrum_RW", datatype=numpy.int32, access=AttrWriteType.READ_WRITE, dims=SPECTRUM_DIMS)
+        spectrum_R = AttributeWrapper(
+            comms_annotation="int32_spectrum_R",
+            datatype=numpy.int32,
+            dims=SPECTRUM_DIMS,
+        )
+        spectrum_RW = AttributeWrapper(
+            comms_annotation="int32_spectrum_RW",
+            datatype=numpy.int32,
+            access=AttrWriteType.READ_WRITE,
+            dims=SPECTRUM_DIMS,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Int64SpectrumDevice(LOFARDevice):
-        spectrum_R = AttributeWrapper(comms_annotation="int64_spectrum_R", datatype=numpy.int64, dims=SPECTRUM_DIMS)
-        spectrum_RW = AttributeWrapper(comms_annotation="int64_spectrum_RW", datatype=numpy.int64, access=AttrWriteType.READ_WRITE, dims=SPECTRUM_DIMS)
+        spectrum_R = AttributeWrapper(
+            comms_annotation="int64_spectrum_R",
+            datatype=numpy.int64,
+            dims=SPECTRUM_DIMS,
+        )
+        spectrum_RW = AttributeWrapper(
+            comms_annotation="int64_spectrum_RW",
+            datatype=numpy.int64,
+            access=AttrWriteType.READ_WRITE,
+            dims=SPECTRUM_DIMS,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class StrImageDevice(LOFARDevice):
-        image_R = AttributeWrapper(comms_annotation="str_image_R", datatype=str, dims=(3,2))
-        image_RW = AttributeWrapper(comms_annotation="str_image_RW", datatype=str, access=AttrWriteType.READ_WRITE, dims=(3,2))
+        image_R = AttributeWrapper(
+            comms_annotation="str_image_R", datatype=str, dims=(3, 2)
+        )
+        image_RW = AttributeWrapper(
+            comms_annotation="str_image_RW",
+            datatype=str,
+            access=AttrWriteType.READ_WRITE,
+            dims=(3, 2),
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class BoolImageDevice(LOFARDevice):
-        image_R = AttributeWrapper(comms_annotation="bool_image_R", datatype=bool, dims=(3,2))
-        image_RW = AttributeWrapper(comms_annotation="bool_image_RW", datatype=bool, access=AttrWriteType.READ_WRITE, dims=(3,2))
+        image_R = AttributeWrapper(
+            comms_annotation="bool_image_R", datatype=bool, dims=(3, 2)
+        )
+        image_RW = AttributeWrapper(
+            comms_annotation="bool_image_RW",
+            datatype=bool,
+            access=AttrWriteType.READ_WRITE,
+            dims=(3, 2),
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Float32ImageDevice(LOFARDevice):
-        image_R = AttributeWrapper(comms_annotation="float32_image_R", datatype=numpy.float32, dims=(3,2))
-        image_RW = AttributeWrapper(comms_annotation="float32_image_RW", datatype=numpy.float32, access=AttrWriteType.READ_WRITE, dims=(3,2))
+        image_R = AttributeWrapper(
+            comms_annotation="float32_image_R", datatype=numpy.float32, dims=(3, 2)
+        )
+        image_RW = AttributeWrapper(
+            comms_annotation="float32_image_RW",
+            datatype=numpy.float32,
+            access=AttrWriteType.READ_WRITE,
+            dims=(3, 2),
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Float64ImageDevice(LOFARDevice):
-        image_R = AttributeWrapper(comms_annotation="float64_image_R", datatype=numpy.float64, dims=(3,2))
-        image_RW = AttributeWrapper(comms_annotation="float64_image_RW", datatype=numpy.float64, access=AttrWriteType.READ_WRITE, dims=(3,2))
+        image_R = AttributeWrapper(
+            comms_annotation="float64_image_R", datatype=numpy.float64, dims=(3, 2)
+        )
+        image_RW = AttributeWrapper(
+            comms_annotation="float64_image_RW",
+            datatype=numpy.float64,
+            access=AttrWriteType.READ_WRITE,
+            dims=(3, 2),
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class DoubleImageDevice(LOFARDevice):
-        image_R = AttributeWrapper(comms_annotation="double_image_R", datatype=numpy.double, dims=(3,2))
-        image_RW = AttributeWrapper(comms_annotation="double_image_RW", datatype=numpy.double, access=AttrWriteType.READ_WRITE, dims=(3,2))
+        image_R = AttributeWrapper(
+            comms_annotation="double_image_R", datatype=numpy.double, dims=(3, 2)
+        )
+        image_RW = AttributeWrapper(
+            comms_annotation="double_image_RW",
+            datatype=numpy.double,
+            access=AttrWriteType.READ_WRITE,
+            dims=(3, 2),
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Uint8ImageDevice(LOFARDevice):
-        image_R = AttributeWrapper(comms_annotation="uint8_image_R", datatype=numpy.uint8, dims=(3,2))
-        image_RW = AttributeWrapper(comms_annotation="uint8_image_RW", datatype=numpy.uint8, access=AttrWriteType.READ_WRITE, dims=(3,2))
+        image_R = AttributeWrapper(
+            comms_annotation="uint8_image_R", datatype=numpy.uint8, dims=(3, 2)
+        )
+        image_RW = AttributeWrapper(
+            comms_annotation="uint8_image_RW",
+            datatype=numpy.uint8,
+            access=AttrWriteType.READ_WRITE,
+            dims=(3, 2),
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Uint16ImageDevice(LOFARDevice):
-        image_R = AttributeWrapper(comms_annotation="uint16_image_R", datatype=numpy.uint16, dims=(3,2))
-        image_RW = AttributeWrapper(comms_annotation="uint16_image_RW", datatype=numpy.uint16, access=AttrWriteType.READ_WRITE, dims=(3,2))
+        image_R = AttributeWrapper(
+            comms_annotation="uint16_image_R", datatype=numpy.uint16, dims=(3, 2)
+        )
+        image_RW = AttributeWrapper(
+            comms_annotation="uint16_image_RW",
+            datatype=numpy.uint16,
+            access=AttrWriteType.READ_WRITE,
+            dims=(3, 2),
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Uint32ImageDevice(LOFARDevice):
-        image_R = AttributeWrapper(comms_annotation="uint32_image_R", datatype=numpy.uint32, dims=(3,2))
-        image_RW = AttributeWrapper(comms_annotation="uint32_image_RW", datatype=numpy.uint32, access=AttrWriteType.READ_WRITE, dims=(3,2))
+        image_R = AttributeWrapper(
+            comms_annotation="uint32_image_R", datatype=numpy.uint32, dims=(3, 2)
+        )
+        image_RW = AttributeWrapper(
+            comms_annotation="uint32_image_RW",
+            datatype=numpy.uint32,
+            access=AttrWriteType.READ_WRITE,
+            dims=(3, 2),
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Uint64ImageDevice(LOFARDevice):
-        image_R = AttributeWrapper(comms_annotation="uint64_image_R", datatype=numpy.uint64, dims=(3,2))
-        image_RW = AttributeWrapper(comms_annotation="uint64_image_RW", datatype=numpy.uint64, access=AttrWriteType.READ_WRITE, dims=(3,2))
+        image_R = AttributeWrapper(
+            comms_annotation="uint64_image_R", datatype=numpy.uint64, dims=(3, 2)
+        )
+        image_RW = AttributeWrapper(
+            comms_annotation="uint64_image_RW",
+            datatype=numpy.uint64,
+            access=AttrWriteType.READ_WRITE,
+            dims=(3, 2),
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Int16ImageDevice(LOFARDevice):
-        image_R = AttributeWrapper(comms_annotation="int16_image_R", datatype=numpy.int16, dims=(3,2))
-        image_RW = AttributeWrapper(comms_annotation="int16_image_RW", datatype=numpy.int16, access=AttrWriteType.READ_WRITE, dims=(3,2))
+        image_R = AttributeWrapper(
+            comms_annotation="int16_image_R", datatype=numpy.int16, dims=(3, 2)
+        )
+        image_RW = AttributeWrapper(
+            comms_annotation="int16_image_RW",
+            datatype=numpy.int16,
+            access=AttrWriteType.READ_WRITE,
+            dims=(3, 2),
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Int32ImageDevice(LOFARDevice):
-        image_R = AttributeWrapper(comms_annotation="int32_image_R", datatype=numpy.int32, dims=(3,2))
-        image_RW = AttributeWrapper(comms_annotation="int32_image_RW", datatype=numpy.int32, access=AttrWriteType.READ_WRITE, dims=(3,2))
+        image_R = AttributeWrapper(
+            comms_annotation="int32_image_R", datatype=numpy.int32, dims=(3, 2)
+        )
+        image_RW = AttributeWrapper(
+            comms_annotation="int32_image_RW",
+            datatype=numpy.int32,
+            access=AttrWriteType.READ_WRITE,
+            dims=(3, 2),
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     class Int64ImageDevice(LOFARDevice):
-        image_R = AttributeWrapper(comms_annotation="int64_image_R", datatype=numpy.int64, dims=(3,2))
-        image_RW = AttributeWrapper(comms_annotation="int64_image_RW", datatype=numpy.int64, access=AttrWriteType.READ_WRITE, dims=(3,2))
+        image_R = AttributeWrapper(
+            comms_annotation="int64_image_R", datatype=numpy.int64, dims=(3, 2)
+        )
+        image_RW = AttributeWrapper(
+            comms_annotation="int64_image_RW",
+            datatype=numpy.int64,
+            access=AttrWriteType.READ_WRITE,
+            dims=(3, 2),
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
 
     def read_R_test(self, dev, dtype, test_type):
-        '''Test device'''
+        """Test device"""
         with DeviceTestContext(dev, process=True) as proxy:
 
-            #initialise
+            # initialise
             proxy.initialise()
             proxy.on()
 
@@ -314,29 +572,55 @@ class TestAttributeTypes(base.TestCase):
                 val = proxy.spectrum_R
             elif test_type == "image":
                 expected = numpy.zeros(IMAGE_DIMS, dtype=dtype)
-                val = numpy.array(proxy.image_R) #is needed for STR since they act differently
+                val = numpy.array(
+                    proxy.image_R
+                )  # numpy.array() is needed for str images, which behave differently
 
                 # cant use all() for 2d arrays so instead compare the dimensions and then flatten to 2d
-                self.assertEqual(val.shape, expected.shape, " image R array dimensions got mangled. Expected {}, got  {}".format(expected.shape, val.shape))
+                self.assertEqual(
+                    val.shape,
+                    expected.shape,
+                    " image R array dimensions got mangled. Expected {}, got  {}".format(
+                        expected.shape, val.shape
+                    ),
+                )
                 val.reshape(-1)
             else:
-                self.assertEqual(1,2, " {} is not a valid test_type. please use either scalar, spectrum or image".format(test_type))
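+                # always-failing assertion: signals an unsupported test_type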
+                self.assertEqual(
+                    1,
+                    2,
+                    " {} is not a valid test_type. please use either scalar, spectrum or image".format(
+                        test_type
+                    ),
+                )
 
             if test_type == "scalar":
                 comparison = expected == val
-                self.assertTrue(comparison, " Value could not be read or was not what was expected. Expected: {}, got {}".format(expected, val))
+                self.assertTrue(
+                    comparison,
+                    " Value could not be read or was not what was expected. Expected: {}, got {}".format(
+                        expected, val
+                    ),
+                )
             else:
                 comparison = expected == val
                 equal_arrays = comparison.all()
-                self.assertTrue(equal_arrays, " Value could not be read or was not what was expected. Expected: {}, got {}".format(expected, val))
+                self.assertTrue(
+                    equal_arrays,
+                    " Value could not be read or was not what was expected. Expected: {}, got {}".format(
+                        expected, val
+                    ),
+                )
 
-            print(" Test passed! Managed to read R attribute value. got: {}".format(val))
+            print(
+                " Test passed! Managed to read R attribute value. got: {}".format(val)
+            )
 
     def write_RW_test(self, dev, dtype, test_type):
-        '''Test device'''
+        """Test device"""
         with DeviceTestContext(dev, process=True) as proxy:
 
-            #initialise
+            # initialise
             proxy.initialise()
             proxy.on()
 
@@ -361,7 +645,13 @@ class TestAttributeTypes(base.TestCase):
                     val = numpy.full(IMAGE_DIMS, dtype=dtype, fill_value=1)
                 proxy.image_RW = val
             else:
-                self.assertEqual(1,2, " {} is not a valid test_type. please use either scalar, spectrum or image".format(test_type))
+                self.assertEqual(
+                    1,
+                    2,
+                    " {} is not a valid test_type. please use either scalar, spectrum or image".format(
+                        test_type
+                    ),
+                )
 
             # can't really test anything here except that the writing didnt cause an error.
             # reading back happens in readback_test
@@ -369,14 +659,14 @@ class TestAttributeTypes(base.TestCase):
             print(" Test passed! Managed to write: ".format(val))
 
     def read_RW_test(self, dev, dtype, test_type):
-        '''Test device'''
+        """Test device"""
         expected = None
         val = None
 
         try:
             with DeviceTestContext(dev, process=True) as proxy:
 
-                #initialise
+                # initialise
                 proxy.initialise()
                 proxy.on()
 
@@ -388,26 +678,52 @@ class TestAttributeTypes(base.TestCase):
                     val = proxy.spectrum_RW
                 elif test_type == "image":
                     expected = numpy.zeros(IMAGE_DIMS, dtype=dtype)
-                    val = numpy.array(proxy.image_RW) #is needed for STR since they act differently
+                    val = numpy.array(
+                        proxy.image_RW
+                    )  # numpy.array() is needed for str images, which behave differently
 
                     # cant use all() for 2d arrays so instead compare the dimensions and then flatten to 2d
-                    self.assertEqual(val.shape, expected.shape, " image R array dimensions got mangled. Expected {}, got  {}".format(expected.shape, val.shape))
+                    self.assertEqual(
+                        val.shape,
+                        expected.shape,
+                        " image R array dimensions got mangled. Expected {}, got  {}".format(
+                            expected.shape, val.shape
+                        ),
+                    )
                     val.reshape(-1)
                 else:
-                    self.assertEqual(1,2, " {} is not a valid test_type. please use either scalar, spectrum or image".format(test_type))
+                    self.assertEqual(
+                        1,
+                        2,
+                        " {} is not a valid test_type. please use either scalar, spectrum or image".format(
+                            test_type
+                        ),
+                    )
 
                 if test_type != "scalar":
                     # spectrums and the now flattened images can be compared with .all()
                     comparison = expected == val
                     equal_arrays = comparison.all()
-                    self.assertTrue(equal_arrays, " Value could not be handled by the atrribute_wrappers internal RW storer")
+                    self.assertTrue(
+                        equal_arrays,
+                        " Value could not be handled by the atrribute_wrappers internal RW storer",
+                    )
                 else:
                     comparison = expected == val
-                    self.assertTrue(comparison, " Value could not be handled by the atrribute_wrappers internal RW storer")
-
-                print(" Test passed! Managed to read internal RW value. got: {}".format(val))
+                    self.assertTrue(
+                        comparison,
+                        " Value could not be handled by the atrribute_wrappers internal RW storer",
+                    )
+
+                print(
+                    " Test passed! Managed to read internal RW value. got: {}".format(
+                        val
+                    )
+                )
         except Exception as e:
-            info = "Test failure in {} {} read RW test. Expected: {}, got {}".format(test_type, dtype, expected, val)
+            info = "Test failure in {} {} read RW test. Expected: {}, got {}".format(
+                test_type, dtype, expected, val
+            )
             raise Exception(info) from e
 
     def _get_result_type(self, dtype, test_type, proxy):
@@ -439,7 +755,9 @@ class TestAttributeTypes(base.TestCase):
             result_RW = proxy.image_RW
 
             if dtype != str:
-                self.assertEqual(result_R.shape, IMAGE_DIMS, "not the correct dimensions")
+                self.assertEqual(
+                    result_R.shape, IMAGE_DIMS, "not the correct dimensions"
+                )
 
                 result_R = result_R.reshape(-1)
                 result_RW = result_RW.reshape(-1)
@@ -447,47 +765,102 @@ class TestAttributeTypes(base.TestCase):
 
         else:
             # if the test isn't scalar/spectrum or image its wrong
-            self.assertEqual(1,2, " {} is not a valid test_type. please use either scalar, spectrum or image".format(test_type))
+            self.assertEqual(
+                1,
+                2,
+                " {} is not a valid test_type. please use either scalar, spectrum or image".format(
+                    test_type
+                ),
+            )
 
         return result_R, result_RW, val
 
-
     def readback_test(self, dev, dtype, test_type):
-        '''Test device'''
+        """Test device"""
         try:
             with DeviceTestContext(dev, process=True) as proxy:
 
-                #initialise
+                # initialise
                 proxy.initialise()
                 proxy.on()
 
-                #get result and val
-                result_R, result_RW, val = self._get_result_type(dtype, test_type, proxy)
+                # get result and val
+                result_R, result_RW, val = self._get_result_type(
+                    dtype, test_type, proxy
+                )
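+                # val is the value that was written; result_RW and result_R are read back from the device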
 
                 if test_type == "scalar":
                     comparison = result_RW == val
-                    self.assertTrue(comparison, " Value could not be handled by the atrribute_wrappers internal RW storer. attempted to write: {}".format(val))
+                    self.assertTrue(
+                        comparison,
+                        " Value could not be handled by the atrribute_wrappers internal RW storer. attempted to write: {}".format(
+                            val
+                        ),
+                    )
                     comparison = result_R == val
-                    self.assertTrue(comparison, " value in the clients R attribute not equal to what was written. read: {}, wrote {}".format(result_R, val))
+                    self.assertTrue(
+                        comparison,
+                        " value in the clients R attribute not equal to what was written. read: {}, wrote {}".format(
+                            result_R, val
+                        ),
+                    )
                 elif dtype != str:
                     comparison = result_RW == val
                     equal_arrays = comparison.all()
-                    self.assertTrue(equal_arrays, " Value could not be handled by the atrribute_wrappers internal RW storer. attempted to write: {}".format(val))
+                    self.assertTrue(
+                        equal_arrays,
+                        " Value could not be handled by the atrribute_wrappers internal RW storer. attempted to write: {}".format(
+                            val
+                        ),
+                    )
                     comparison = result_R == val
                     equal_arrays = comparison.all()
-                    self.assertTrue(equal_arrays, " value in the clients R attribute not equal to what was written. read: {}, wrote {}".format(result_R, val))
+                    self.assertTrue(
+                        equal_arrays,
+                        " value in the clients R attribute not equal to what was written. read: {}, wrote {}".format(
+                            result_R, val
+                        ),
+                    )
                 else:
                     if test_type == "image":
-                        self.assertEqual(len(result_RW)*len(result_RW[0]), 6, "array dimensions do not match the expected dimensions. expected {}, got: {}".format(val, len(result_RW) * len(result_RW[0])))
-                        self.assertEqual(len(result_RW) * len(result_RW[0]), 6,"array dimensions do not match the expected dimensions. expected {}, got: {}".format(val, len(result_R) * len([0])))
+                        self.assertEqual(
+                            len(result_RW) * len(result_RW[0]),
+                            6,
+                            "array dimensions do not match the expected dimensions. expected {}, got: {}".format(
+                                6, len(result_RW) * len(result_RW[0])
+                            ),
+                        )
+                        self.assertEqual(
+                            len(result_R) * len(result_R[0]),
+                            6,
+                            "array dimensions do not match the expected dimensions. expected {}, got: {}".format(
+                                6, len(result_R) * len(result_R[0])
+                            ),
+                        )
                     else:
-                        self.assertEqual(len(result_RW), 4,"array dimensions do not match the expected dimensions. expected {}, got: {}".format(4, len(result_RW)))
-                        self.assertEqual(len(result_R), 4, "array dimensions do not match the expected dimensions. expected {}, got: {}".format(4, len(result_R)))
-
-                print(" Test passed! Managed write and read back a value: {}".format(val))
+                        self.assertEqual(
+                            len(result_RW),
+                            4,
+                            "array dimensions do not match the expected dimensions. expected {}, got: {}".format(
+                                4, len(result_RW)
+                            ),
+                        )
+                        self.assertEqual(
+                            len(result_R),
+                            4,
+                            "array dimensions do not match the expected dimensions. expected {}, got: {}".format(
+                                4, len(result_R)
+                            ),
+                        )
+
+                print(
+                    " Test passed! Managed write and read back a value: {}".format(val)
+                )
 
         except Exception as e:
-            info = "Test failure in {} {} readback test  \n\tW: {} \n\tRW: {} \n\tR: {}".format(test_type, dtype, val, result_RW, result_R)
+            info = "Test failure in {} {} readback test  \n\tW: {} \n\tRW: {} \n\tR: {}".format(
+                test_type, dtype, val, result_RW, result_R
+            )
             raise Exception(info) from e
 
     """
@@ -497,138 +870,183 @@ class TestAttributeTypes(base.TestCase):
     """
     ATTRIBUTE_TYPE_TESTS = [
         {
-            'type': str, 'scalar': StrScalarDevice,
-            'spectrum': StrSpectrumDevice, "image": StrImageDevice
+            "type": str,
+            "scalar": StrScalarDevice,
+            "spectrum": StrSpectrumDevice,
+            "image": StrImageDevice,
         },
         {
-            'type': bool, 'scalar': BoolScalarDevice,
-            'spectrum': BoolSpectrumDevice, "image":  BoolImageDevice
+            "type": bool,
+            "scalar": BoolScalarDevice,
+            "spectrum": BoolSpectrumDevice,
+            "image": BoolImageDevice,
         },
         {
-            'type': numpy.float32, 'scalar': Float32ScalarDevice,
-            'spectrum': Float32SpectrumDevice, "image": Float32ImageDevice
+            "type": numpy.float32,
+            "scalar": Float32ScalarDevice,
+            "spectrum": Float32SpectrumDevice,
+            "image": Float32ImageDevice,
         },
         {
-            'type': numpy.float64, 'scalar': Float64ScalarDevice,
-            'spectrum': Float64SpectrumDevice, "image": Float64ImageDevice
+            "type": numpy.float64,
+            "scalar": Float64ScalarDevice,
+            "spectrum": Float64SpectrumDevice,
+            "image": Float64ImageDevice,
         },
         {
-            'type': numpy.double, 'scalar': DoubleScalarDevice,
-            'spectrum': DoubleSpectrumDevice, "image": DoubleImageDevice
+            "type": numpy.double,
+            "scalar": DoubleScalarDevice,
+            "spectrum": DoubleSpectrumDevice,
+            "image": DoubleImageDevice,
         },
         {
-            'type': numpy.uint8, 'scalar': Uint8ScalarDevice,
-            'spectrum': Uint8SpectrumDevice, "image": Uint8ImageDevice
+            "type": numpy.uint8,
+            "scalar": Uint8ScalarDevice,
+            "spectrum": Uint8SpectrumDevice,
+            "image": Uint8ImageDevice,
         },
         {
-            'type': numpy.uint16, 'scalar': Uint16ScalarDevice,
-            'spectrum': Uint16SpectrumDevice, "image": Uint16ImageDevice
+            "type": numpy.uint16,
+            "scalar": Uint16ScalarDevice,
+            "spectrum": Uint16SpectrumDevice,
+            "image": Uint16ImageDevice,
         },
         {
-            'type': numpy.uint32, 'scalar': Uint32ScalarDevice,
-            'spectrum': Uint32SpectrumDevice, "image": Uint32ImageDevice
+            "type": numpy.uint32,
+            "scalar": Uint32ScalarDevice,
+            "spectrum": Uint32SpectrumDevice,
+            "image": Uint32ImageDevice,
         },
         {
-            'type': numpy.uint64, 'scalar': Uint64ScalarDevice,
-            'spectrum': Uint64SpectrumDevice, "image": Uint64ImageDevice
+            "type": numpy.uint64,
+            "scalar": Uint64ScalarDevice,
+            "spectrum": Uint64SpectrumDevice,
+            "image": Uint64ImageDevice,
         },
         {
-            'type': numpy.int16, 'scalar': Int16ScalarDevice,
-            'spectrum': Int16SpectrumDevice, "image": Int16ImageDevice
+            "type": numpy.int16,
+            "scalar": Int16ScalarDevice,
+            "spectrum": Int16SpectrumDevice,
+            "image": Int16ImageDevice,
         },
         {
-            'type': numpy.int32, 'scalar': Int32ScalarDevice,
-            'spectrum': Int32SpectrumDevice, "image": Int32ImageDevice
+            "type": numpy.int32,
+            "scalar": Int32ScalarDevice,
+            "spectrum": Int32SpectrumDevice,
+            "image": Int32ImageDevice,
         },
         {
-            'type': numpy.int64, 'scalar': Int64ScalarDevice,
-            'spectrum': Int64SpectrumDevice, "image": Int64ImageDevice
-        }
+            "type": numpy.int64,
+            "scalar": Int64ScalarDevice,
+            "spectrum": Int64SpectrumDevice,
+            "image": Int64ImageDevice,
+        },
     ]
 
     def test_scalar_R(self):
         for attribute_type_test in self.ATTRIBUTE_TYPE_TESTS:
             self.read_R_test(
-                attribute_type_test['scalar'], attribute_type_test['type'],
-                'scalar')
+                attribute_type_test["scalar"], attribute_type_test["type"], "scalar"
+            )
 
     def test_scalar_RW(self):
         for attribute_type_test in self.ATTRIBUTE_TYPE_TESTS:
             self.read_RW_test(
-                attribute_type_test['scalar'], attribute_type_test['type'],
-                'scalar')
+                attribute_type_test["scalar"], attribute_type_test["type"], "scalar"
+            )
 
     def test_scalar_W(self):
         for attribute_type_test in self.ATTRIBUTE_TYPE_TESTS:
             self.write_RW_test(
-                attribute_type_test['scalar'], attribute_type_test['type'],
-                'scalar')
+                attribute_type_test["scalar"], attribute_type_test["type"], "scalar"
+            )
 
     def test_scalar_readback(self):
         for attribute_type_test in self.ATTRIBUTE_TYPE_TESTS:
             self.readback_test(
-                attribute_type_test['scalar'], attribute_type_test['type'],
-                'scalar')
+                attribute_type_test["scalar"], attribute_type_test["type"], "scalar"
+            )
 
     def test_spectrum_R(self):
         for attribute_type_test in self.ATTRIBUTE_TYPE_TESTS:
             self.read_R_test(
-                attribute_type_test['spectrum'], attribute_type_test['type'],
-                'spectrum')
+                attribute_type_test["spectrum"], attribute_type_test["type"], "spectrum"
+            )
 
     def test_spectrum_RW(self):
         for attribute_type_test in self.ATTRIBUTE_TYPE_TESTS:
             self.read_RW_test(
-                attribute_type_test['spectrum'], attribute_type_test['type'],
-                'spectrum')
+                attribute_type_test["spectrum"], attribute_type_test["type"], "spectrum"
+            )
 
     def test_spectrum_W(self):
         for attribute_type_test in self.ATTRIBUTE_TYPE_TESTS:
             self.write_RW_test(
-                attribute_type_test['spectrum'], attribute_type_test['type'],
-                'spectrum')
+                attribute_type_test["spectrum"], attribute_type_test["type"], "spectrum"
+            )
 
     def test_spectrum_readback(self):
         for attribute_type_test in self.ATTRIBUTE_TYPE_TESTS:
             self.readback_test(
-                attribute_type_test['spectrum'], attribute_type_test['type'],
-                'spectrum')
+                attribute_type_test["spectrum"], attribute_type_test["type"], "spectrum"
+            )
 
     def test_image_R(self):
         for attribute_type_test in self.ATTRIBUTE_TYPE_TESTS:
             self.read_R_test(
-                attribute_type_test['image'], attribute_type_test['type'],
-                'image')
+                attribute_type_test["image"], attribute_type_test["type"], "image"
+            )
 
     def test_image_RW(self):
         for attribute_type_test in self.ATTRIBUTE_TYPE_TESTS:
             self.read_RW_test(
-                attribute_type_test['image'], attribute_type_test['type'],
-                'image')
+                attribute_type_test["image"], attribute_type_test["type"], "image"
+            )
 
     def test_image_W(self):
         for attribute_type_test in self.ATTRIBUTE_TYPE_TESTS:
-            self.write_RW_test(attribute_type_test['image'], attribute_type_test['type'], 'image')
+            self.write_RW_test(
+                attribute_type_test["image"], attribute_type_test["type"], "image"
+            )
 
     def test_image_readback(self):
         for attribute_type_test in self.ATTRIBUTE_TYPE_TESTS:
             self.readback_test(
-                attribute_type_test['image'], attribute_type_test['type'],
-                'image')
+                attribute_type_test["image"], attribute_type_test["type"], "image"
+            )
+
 
 class TestAttributeAccess(base.TestCase):
     def setUp(self):
         # Avoid the device trying to access itself as a client
-        self.deviceproxy_patch = mock.patch.object(tangostationcontrol.devices.lofar_device,'DeviceProxy')
+        self.deviceproxy_patch = mock.patch.object(
+            tangostationcontrol.devices.lofar_device, "DeviceProxy"
+        )
         self.deviceproxy_patch.start()
         self.addCleanup(self.deviceproxy_patch.stop)
 
     class float32_scalar_device(LOFARDevice):
-        scalar_R = AttributeWrapper(comms_annotation="float32_scalar_R", datatype=numpy.float32)
-        scalar_RW = AttributeWrapper(comms_annotation="float32_scalar_RW", datatype=numpy.float32, access=AttrWriteType.READ_WRITE)
-
-        spectrum_RW = AttributeWrapper(comms_annotation="spectrum_RW", dims=(3,), datatype=numpy.float32, access=AttrWriteType.READ_WRITE)
-        image_RW = AttributeWrapper(comms_annotation="image_RW", dims=(3,2), datatype=numpy.float32, access=AttrWriteType.READ_WRITE)
+        scalar_R = AttributeWrapper(
+            comms_annotation="float32_scalar_R", datatype=numpy.float32
+        )
+        scalar_RW = AttributeWrapper(
+            comms_annotation="float32_scalar_RW",
+            datatype=numpy.float32,
+            access=AttrWriteType.READ_WRITE,
+        )
+
+        spectrum_RW = AttributeWrapper(
+            comms_annotation="spectrum_RW",
+            dims=(3,),
+            datatype=numpy.float32,
+            access=AttrWriteType.READ_WRITE,
+        )
+        image_RW = AttributeWrapper(
+            comms_annotation="image_RW",
+            dims=(3, 2),
+            datatype=numpy.float32,
+            access=AttrWriteType.READ_WRITE,
+        )
 
         def configure_for_initialise(self):
             dev_init(self)
@@ -670,4 +1088,3 @@ class TestAttributeAccess(base.TestCase):
 
             with self.assertRaises(DevFailed):
                 proxy.image_RW = [[1.0, 2.0], [2.0, 3.0]]
-
diff --git a/tangostationcontrol/tangostationcontrol/test/clients/test_client.py b/tangostationcontrol/tangostationcontrol/test/clients/test_client.py
index 98d3dd8f0b28e1db64f915f99caf781ec242a8fe..1b31de2c47c7ec2255d98cbaa8e3c40e82af47ee 100644
--- a/tangostationcontrol/tangostationcontrol/test/clients/test_client.py
+++ b/tangostationcontrol/tangostationcontrol/test/clients/test_client.py
@@ -1,13 +1,17 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
+import logging
+
 # External imports
 import numpy
 
-
 # Test imports
 from tangostationcontrol.clients.comms_client import CommClient
 
-import logging
 logger = logging.getLogger()
 
+
 class TestClient(CommClient):
     """
     this class provides an example implementation of a comms_client.
@@ -38,7 +42,9 @@ class TestClient(CommClient):
         self.connected = True  # set connected to true
 
     def disconnect(self):
-        self.connected = False  # always force a reconnect, regardless of a successful disconnect
+        self.connected = (
+            False  # always force a reconnect, regardless of a successful disconnect
+        )
         logger.debug("disconnected from the 'client' ")
 
     def _setup_annotation(self, annotation):
@@ -83,23 +89,32 @@ class TestClient(CommClient):
             annotation = annotation[:-1]
 
         if dtype == str and dims == (1,):
-            self.values[annotation] = ''
+            self.values[annotation] = ""
         elif dims == (1,):
             self.values[annotation] = dtype(0)
         else:
             self.values[annotation] = numpy.zeros(dims, dtype)
 
-
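+        # the generated read/write functions close over self.values[annotation] as their backing store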
         def read_function():
-            logger.debug("from read_function, reading {}: {} array of type {} == {}".format(annotation, dims, dtype, self.values[annotation]))
+            logger.debug(
+                "from read_function, reading {}: {} array of type {} == {}".format(
+                    annotation, dims, dtype, self.values[annotation]
+                )
+            )
             return self.values[annotation]
 
         def write_function(write_value):
-            logger.debug("from write_function, writing {}: {} array of type {}".format(annotation, dims, dtype))
+            logger.debug(
+                "from write_function, writing {}: {} array of type {}".format(
+                    annotation, dims, dtype
+                )
+            )
 
             self.values[annotation] = write_value
 
-        logger.debug("created and bound example_client read/write functions to AttributeWrapper object")
+        logger.debug(
+            "created and bound example_client read/write functions to AttributeWrapper object"
+        )
         return read_function, write_function
 
     async def setup_attribute(self, annotation=None, attribute=None):
diff --git a/tangostationcontrol/tangostationcontrol/test/clients/test_opcua_client.py b/tangostationcontrol/tangostationcontrol/test/clients/test_opcua_client.py
index a0579e8b9552cecdeb89b42e280c63c2154848a9..79ec67fccdc1d7c7ed5489b47eb84638c0530134 100644
--- a/tangostationcontrol/tangostationcontrol/test/clients/test_opcua_client.py
+++ b/tangostationcontrol/tangostationcontrol/test/clients/test_opcua_client.py
@@ -1,15 +1,15 @@
-import numpy
-import asyncua
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import asyncio
 import io
-
-import asynctest
 from unittest import mock
 
-
-from tangostationcontrol.clients.opcua_client import OPCUAConnection
+import asynctest
+import asyncua
+import numpy
 from tangostationcontrol.clients import opcua_client
-
+from tangostationcontrol.clients.opcua_client import OPCUAConnection
 from tangostationcontrol.test import base
 
 
@@ -30,7 +30,7 @@ ATTR_TEST_TYPES = [
     AttrProps(numpy_type=numpy.uint64),
     AttrProps(numpy_type=numpy.int16),
     AttrProps(numpy_type=numpy.int32),
-    AttrProps(numpy_type=numpy.int64)
+    AttrProps(numpy_type=numpy.int64),
 ]
 
 SCALAR_SHAPE = (1,)
@@ -49,28 +49,37 @@ class TestOPCua(base.AsyncTestCase):
         """
 
         m_opc_client_members = asynctest.asynctest.CoroutineMock()
-        m_opc_client_members.get_namespace_index = asynctest.asynctest.CoroutineMock(return_value=42)
+        m_opc_client_members.get_namespace_index = asynctest.asynctest.CoroutineMock(
+            return_value=42
+        )
         m_opc_client_members.connect = asynctest.asynctest.CoroutineMock()
         m_opc_client_members.disconnect = asynctest.asynctest.CoroutineMock()
         m_opc_client_members.send_hello = asynctest.asynctest.CoroutineMock()
         m_opc_client.return_value = m_opc_client_members
 
-        test_client = OPCUAConnection("opc.tcp://localhost:4874/freeopcua/server/", "http://lofar.eu", 5, mock.Mock(), self.loop)
+        test_client = OPCUAConnection(
+            "opc.tcp://localhost:4874/freeopcua/server/",
+            "http://lofar.eu",
+            5,
+            mock.Mock(),
+            self.loop,
+        )
         try:
             await test_client.start()
 
             m_opc_client.assert_called_once()  # makes sure the actual freeOPCua client object is created only once
 
             # this also implies test_client.connect() is called
-            m_opc_client_members.get_namespace_index.assert_called_once_with("http://lofar.eu")
+            m_opc_client_members.get_namespace_index.assert_called_once_with(
+                "http://lofar.eu"
+            )
             self.assertEqual(42, test_client.name_space_index)
         finally:
             await test_client.stop()
 
-
     @asynctest.patch.object(OPCUAConnection, "ping")
     @asynctest.patch.object(opcua_client, "Client")
-    @asynctest.patch.object(opcua_client, 'ProtocolAttribute')
+    @asynctest.patch.object(opcua_client, "ProtocolAttribute")
     async def test_opcua_attr_setup(self, m_protocol_attr, m_opc_client, m_ping):
         """
         This tests covers the correct creation of read/write functions.
@@ -81,17 +90,22 @@ class TestOPCua(base.AsyncTestCase):
         """
 
         m_opc_client_members = asynctest.asynctest.CoroutineMock()
-        m_opc_client_members.get_namespace_index = asynctest.asynctest.CoroutineMock(return_value=2)
+        m_opc_client_members.get_namespace_index = asynctest.asynctest.CoroutineMock(
+            return_value=2
+        )
         m_opc_client_members.connect = asynctest.asynctest.CoroutineMock()
         m_opc_client_members.disconnect = asynctest.asynctest.CoroutineMock()
         m_opc_client_members.send_hello = asynctest.asynctest.CoroutineMock()
         m_objects_node = asynctest.Mock()
         m_objects_node.get_child = asynctest.asynctest.CoroutineMock()
         m_objects_node.get_children_descriptions = asynctest.asynctest.CoroutineMock()
-        m_opc_client_members.get_objects_node = asynctest.Mock(return_value=m_objects_node)
+        m_opc_client_members.get_objects_node = asynctest.Mock(
+            return_value=m_objects_node
+        )
         m_opc_client.return_value = m_opc_client_members
 
         for i in ATTR_TEST_TYPES:
+
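+            # minimal stand-in for the attribute object handed to setup_attribute() below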
             class MockAttr:
                 def __init__(self, dtype, x, y):
                     self.datatype = dtype
@@ -110,9 +124,17 @@ class TestOPCua(base.AsyncTestCase):
                 m_attribute = MockAttr(i.numpy_type, dim_x, dim_y)
 
                 # pretend like there is a running OPCua server with a node that has this name
-                m_annotation = [f"2:testNode_{str(i.numpy_type)}_{str(dim_x)}_{str(dim_y)}"]
-
-                test_client = OPCUAConnection("opc.tcp://localhost:4874/freeopcua/server/", "http://lofar.eu", 5, mock.Mock(), self.loop)
+                m_annotation = [
+                    f"2:testNode_{str(i.numpy_type)}_{str(dim_x)}_{str(dim_y)}"
+                ]
+
+                test_client = OPCUAConnection(
+                    "opc.tcp://localhost:4874/freeopcua/server/",
+                    "http://lofar.eu",
+                    5,
+                    mock.Mock(),
+                    self.loop,
+                )
                 try:
                     await test_client.start()
                     await test_client.setup_attribute(m_annotation, m_attribute)
@@ -148,18 +170,31 @@ class TestOPCua(base.AsyncTestCase):
                 Part of the test already includes simply not throwing an exception, but for the sake coverage these asserts have also
                 been added.
                 """
-                self.assertTrue(test.dim_y == dims[1], f"Dimensionality error, ProtocolAttribute.dim_y got: {test.dim_y} expected: {dims[1]}")
-                self.assertTrue(test.dim_x == dims[0], f"Dimensionality error, ProtocolAttribute.dim_y got: {test.dim_x} expected: {dims[0]}")
-                self.assertTrue(test.ua_type == ua_type, f"type error. Got: {test.ua_type} expected: {ua_type}")
-                self.assertTrue(hasattr(test, "write_function"), f"No write function found")
-                self.assertTrue(hasattr(test, "read_function"), f"No read function found")
+                self.assertTrue(
+                    test.dim_y == dims[1],
+                    f"Dimensionality error, ProtocolAttribute.dim_y got: {test.dim_y} expected: {dims[1]}",
+                )
+                self.assertTrue(
+                    test.dim_x == dims[0],
+                    f"Dimensionality error, ProtocolAttribute.dim_x got: {test.dim_x} expected: {dims[0]}",
+                )
+                self.assertTrue(
+                    test.ua_type == ua_type,
+                    f"type error. Got: {test.ua_type} expected: {ua_type}",
+                )
+                self.assertTrue(
+                    hasattr(test, "write_function"), "No write function found"
+                )
+                self.assertTrue(
+                    hasattr(test, "read_function"), "No read function found"
+                )
 
     def _get_test_value(self, dims, n_type):
-        """ get numpy array of the test value """
+        """get numpy array of the test value"""
         return numpy.zeros(dims, n_type)
 
     def _wrap_dims(self, value, dims):
-        """ Wrap a value in the current number of dimensions """
+        """Wrap a value in the current number of dimensions"""
         if dims == 0:
             return value
         elif dims == 1:
@@ -168,8 +203,10 @@ class TestOPCua(base.AsyncTestCase):
             return [[value]]
 
     def _get_mock_value(self, value, n_type):
-        """ get opcua Varianttype array of the test value """
-        return asyncua.ua.uatypes.Variant(Value=value, VariantType=opcua_client.numpy_to_OPCua_dict[n_type])
+        """get opcua Varianttype array of the test value"""
+        return asyncua.ua.uatypes.Variant(
+            Value=value, VariantType=opcua_client.numpy_to_OPCua_dict[n_type]
+        )
 
     async def test_read(self):
         """
@@ -186,30 +223,45 @@ class TestOPCua(base.AsyncTestCase):
                 m_node = asynctest.asynctest.CoroutineMock()
 
                 if len(j) == 1:
-                    test = opcua_client.ProtocolAttribute(m_node, j[0], 0, opcua_client.numpy_to_OPCua_dict[i.numpy_type])
+                    test = opcua_client.ProtocolAttribute(
+                        m_node, j[0], 0, opcua_client.numpy_to_OPCua_dict[i.numpy_type]
+                    )
                 else:
-                    test = opcua_client.ProtocolAttribute(m_node, j[1], j[0], opcua_client.numpy_to_OPCua_dict[i.numpy_type])
+                    test = opcua_client.ProtocolAttribute(
+                        m_node,
+                        j[1],
+                        j[0],
+                        opcua_client.numpy_to_OPCua_dict[i.numpy_type],
+                    )
                 m_node.get_value.return_value = get_flat_value(j, i)
                 val = await test.read_function()
 
                 comp = val == self._get_test_value(j, i.numpy_type)
-                self.assertTrue(comp.all(), "Read value unequal to expected value: \n\t{} \n\t{}".format(val, self._get_test_value(j, i.numpy_type)))
+                self.assertTrue(
+                    comp.all(),
+                    "Read value unequal to expected value: \n\t{} \n\t{}".format(
+                        val, self._get_test_value(j, i.numpy_type)
+                    ),
+                )
 
     async def test_read_unicode(self):
         """
         Test whether unicode characters are replaced by '?'.
         """
+
         async def get_unicode_value(dims):
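+            # b"\xef\xbf\xbd" is the UTF-8 encoding of U+FFFD, the Unicode replacement character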
-            return self._wrap_dims(b'foo \xef\xbf\xbd bar'.decode('utf-8'), dims)
+            return self._wrap_dims(b"foo \xef\xbf\xbd bar".decode("utf-8"), dims)
 
         # test 0-2 dimensions of strings
-        for dims in range(0,2):
+        for dims in range(0, 2):
 
             m_node = asynctest.asynctest.CoroutineMock()
             m_node.get_value.return_value = get_unicode_value(dims)
 
             # create the ProtocolAttribute to test
-            test = opcua_client.ProtocolAttribute(m_node, 1, 0, opcua_client.numpy_to_OPCua_dict[str])
+            test = opcua_client.ProtocolAttribute(
+                m_node, 1, 0, opcua_client.numpy_to_OPCua_dict[str]
+            )
 
             # check if unicode is replaced by ?
             val = await test.read_function()
@@ -229,7 +281,9 @@ class TestOPCua(base.AsyncTestCase):
                 default_value = 42.25
 
             # apply our mapping
-            v = asyncua.ua.uatypes.Variant(Value=numpy_type(default_value), VariantType=opcua_type)
+            v = asyncua.ua.uatypes.Variant(
+                Value=numpy_type(default_value), VariantType=opcua_type
+            )
 
             try:
                 # try to convert it to binary to force opcua to parse the value as the type
@@ -239,14 +293,24 @@ class TestOPCua(base.AsyncTestCase):
                 binary_stream = io.BytesIO(binary)
                 reparsed_v = asyncua.ua.ua_binary.variant_from_binary(binary_stream)
             except Exception as e:
-                raise Exception(f"Conversion {numpy_type} -> {opcua_type} failed.") from e
+                raise Exception(
+                    f"Conversion {numpy_type} -> {opcua_type} failed."
+                ) from e
 
             # did the value get lost in translation?
-            self.assertEqual(v.Value, reparsed_v.Value, msg=f"Conversion {numpy_type} -> {opcua_type} failed.")
+            self.assertEqual(
+                v.Value,
+                reparsed_v.Value,
+                msg=f"Conversion {numpy_type} -> {opcua_type} failed.",
+            )
 
             # does the OPC-UA type have the same datasize (and thus, precision?)
             if numpy_type not in [str, bool]:
-                self.assertEqual(numpy_type().itemsize, getattr(asyncua.ua.ua_binary.Primitives, opcua_type.name).size, msg=f"Conversion {numpy_type} -> {opcua_type} failed: precision mismatch")
+                self.assertEqual(
+                    numpy_type().itemsize,
+                    getattr(asyncua.ua.ua_binary.Primitives, opcua_type.name).size,
+                    msg=f"Conversion {numpy_type} -> {opcua_type} failed: precision mismatch",
+                )
 
     async def test_write(self):
         """
@@ -256,16 +320,38 @@ class TestOPCua(base.AsyncTestCase):
         """
 
         async def compare_values(val, j, i):
-            """ comparison function that replaces `set_data_value` inside the attributes write function """
+            """comparison function that replaces `set_data_value` inside the attributes write function"""
             # test valuest
             val = val.tolist() if type(val) == numpy.ndarray else val
             if j != DIMENSION_TESTS[0]:
-                comp = val.Value == self._get_mock_value(self._get_test_value(j, i.numpy_type).flatten(), i.numpy_type).Value
-                self.assertTrue(comp.all(),
-                                "Array attempting to write unequal to expected array: \n\t got: {} \n\texpected: {}".format(val,self._get_mock_value(self._get_test_value(j, i.numpy_type), i.numpy_type)))
+                comp = (
+                    val.Value
+                    == self._get_mock_value(
+                        self._get_test_value(j, i.numpy_type).flatten(), i.numpy_type
+                    ).Value
+                )
+                self.assertTrue(
+                    comp.all(),
+                    "Array attempting to write unequal to expected array: \n\t got: {} \n\texpected: {}".format(
+                        val,
+                        self._get_mock_value(
+                            self._get_test_value(j, i.numpy_type), i.numpy_type
+                        ),
+                    ),
+                )
             else:
-                comp = val == self._get_mock_value(self._get_test_value(j, i.numpy_type), i.numpy_type)
-                self.assertTrue(comp, "value attempting to write unequal to expected value: \n\tgot: {} \n\texpected: {}".format(val, self._get_mock_value(self._get_test_value(j, i.numpy_type), i.numpy_type)))
+                comp = val == self._get_mock_value(
+                    self._get_test_value(j, i.numpy_type), i.numpy_type
+                )
+                self.assertTrue(
+                    comp,
+                    "value attempting to write unequal to expected value: \n\tgot: {} \n\texpected: {}".format(
+                        val,
+                        self._get_mock_value(
+                            self._get_test_value(j, i.numpy_type), i.numpy_type
+                        ),
+                    ),
+                )
 
         m_node = asynctest.asynctest.CoroutineMock()
         m_node.set_data_value.return_value = asyncio.Future()
@@ -274,14 +360,21 @@ class TestOPCua(base.AsyncTestCase):
         # for all dimensionalities
         for j in DIMENSION_TESTS:
 
-            #for all datatypes
+            # for all datatypes
             for i in ATTR_TEST_TYPES:
 
                 # create the protocolattribute
                 if len(j) == 1:
-                    test = opcua_client.ProtocolAttribute(m_node, j[0], 0, opcua_client.numpy_to_OPCua_dict[i.numpy_type])
+                    test = opcua_client.ProtocolAttribute(
+                        m_node, j[0], 0, opcua_client.numpy_to_OPCua_dict[i.numpy_type]
+                    )
                 else:
-                    test = opcua_client.ProtocolAttribute(m_node, j[1], j[0], opcua_client.numpy_to_OPCua_dict[i.numpy_type])
+                    test = opcua_client.ProtocolAttribute(
+                        m_node,
+                        j[1],
+                        j[0],
+                        opcua_client.numpy_to_OPCua_dict[i.numpy_type],
+                    )
 
                 # call the write function with the test values
                 await test.write_function(self._get_test_value(j, i.numpy_type))
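
A note on the precision check in the first hunk above: it asserts that every numpy scalar type in the conversion table occupies the same number of bytes as asyncua's wire-format primitive of the same name. A minimal stand-alone sketch of that idea in Python, using a hypothetical three-entry subset of opcua_client.numpy_to_OPCua_dict (the entries below are assumptions for illustration, not the project's full table):

import numpy
from asyncua.ua import VariantType, ua_binary

# hypothetical subset of the numpy -> OPC UA variant mapping under test
numpy_to_opcua = {
    numpy.uint16: VariantType.UInt16,
    numpy.int32: VariantType.Int32,
    numpy.float64: VariantType.Double,
}

for numpy_type, opcua_type in numpy_to_opcua.items():
    # asyncua exposes the serialised width of each primitive as Primitives.<Name>.size
    primitive = getattr(ua_binary.Primitives, opcua_type.name)
    assert numpy_type().itemsize == primitive.size, (numpy_type, opcua_type)

str and bool are excluded in the test itself, presumably because their widths are not fixed on the wire.
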
diff --git a/tangostationcontrol/tangostationcontrol/test/clients/test_snmp_client.py b/tangostationcontrol/tangostationcontrol/test/clients/test_snmp_client.py
index c08926ed109ec35bbf6c6ff896ce47e1ed97e093..859354978758f9895b99c3560ae74136be745b3e 100644
--- a/tangostationcontrol/tangostationcontrol/test/clients/test_snmp_client.py
+++ b/tangostationcontrol/tangostationcontrol/test/clients/test_snmp_client.py
@@ -1,16 +1,18 @@
-from pysnmp import hlapi
-from pysnmp.smi import view, error
-from pysnmp.smi.rfc1902 import ObjectIdentity
-from os import path
-import numpy
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
+from os import path
 from unittest import mock
-from tangostationcontrol.test import base
 
+import numpy
+from pysnmp import hlapi
+from pysnmp.smi import view, error
+from pysnmp.smi.rfc1902 import ObjectIdentity
+from tangostationcontrol.clients.snmp_client import MIBLoader
 from tangostationcontrol.clients.snmp_client import SNMPAttribute
-from tangostationcontrol.clients.snmp_client import SNMPComm
 from tangostationcontrol.clients.snmp_client import SNMPClient
-from tangostationcontrol.clients.snmp_client import MIBLoader
+from tangostationcontrol.clients.snmp_client import SNMPComm
+from tangostationcontrol.test import base
 
 
 class SNMPServerFixture:
@@ -30,7 +32,7 @@ class SNMPServerFixture:
         "spectrum": (4, 0),
     }
 
-    def get_return_val(self, snmp_type : type, dims : tuple):
+    def get_return_val(self, snmp_type: type, dims: tuple):
         """
         provides the return value that an actual server would return for the set/get functions.
         """
@@ -44,19 +46,34 @@ class SNMPServerFixture:
 
         return read_val
 
-    def _get_return_val_for_scalar(self, snmp_type : type):
+    def _get_return_val_for_scalar(self, snmp_type: type):
         if snmp_type is hlapi.ObjectIdentity:
             read_val = [(snmp_type("1.3.6.1.2.1.1.1.0"),)]
         elif snmp_type is hlapi.IpAddress:
-            read_val = [(None, snmp_type("1.1.1.1"),)]
+            read_val = [
+                (
+                    None,
+                    snmp_type("1.1.1.1"),
+                )
+            ]
         elif snmp_type is hlapi.OctetString:
-            read_val = [(None, snmp_type("1"),)]
+            read_val = [
+                (
+                    None,
+                    snmp_type("1"),
+                )
+            ]
         else:
-            read_val = [(None, snmp_type(1),)]
+            read_val = [
+                (
+                    None,
+                    snmp_type(1),
+                )
+            ]
 
         return read_val
 
-    def _get_return_val_for_spectrum(self, snmp_type : type, dims : tuple):
+    def _get_return_val_for_spectrum(self, snmp_type: type, dims: tuple):
         if snmp_type is hlapi.ObjectIdentity:
             read_val = []
             for _i in range(dims[0]):
@@ -76,25 +93,31 @@ class SNMPServerFixture:
 
         return read_val
 
-    def val_check(self,  snmp_type : type, dims : tuple):
+    def val_check(self, snmp_type: type, dims: tuple):
         """
         provides the values we expect and would provide to the attribute after converting the raw SNMP response.
         """
 
         if dims == self.DIM_LIST["scalar"]:
-            snmp_type_dict = {hlapi.ObjectIdentity:"1.3.6.1.2.1.1.1.0.1",
-                            hlapi.IpAddress: "1.1.1.1",
-                            hlapi.OctetString: "1"}
+            snmp_type_dict = {
+                hlapi.ObjectIdentity: "1.3.6.1.2.1.1.1.0.1",
+                hlapi.IpAddress: "1.1.1.1",
+                hlapi.OctetString: "1",
+            }
             check_val = 1
-            for k,v in snmp_type_dict.items():
-                if snmp_type is k:  check_val = v
+            for k, v in snmp_type_dict.items():
+                if snmp_type is k:
+                    check_val = v
         elif dims == self.DIM_LIST["spectrum"]:
-            snmp_type_dict = {hlapi.ObjectIdentity:["1.3.6.1.2.1.1.1.0.1"] * dims[0],
-                            hlapi.IpAddress: ["1.1.1.1"] * dims[0],
-                            hlapi.OctetString: ["1"] * dims[0]}
+            snmp_type_dict = {
+                hlapi.ObjectIdentity: ["1.3.6.1.2.1.1.1.0.1"] * dims[0],
+                hlapi.IpAddress: ["1.1.1.1"] * dims[0],
+                hlapi.OctetString: ["1"] * dims[0],
+            }
             check_val = [1] * dims[0]
-            for k,v in snmp_type_dict.items():
-                if snmp_type is k:  check_val = v
+            for k, v in snmp_type_dict.items():
+                if snmp_type is k:
+                    check_val = v
         else:
             raise Exception("Image not yet supported :(")
 
@@ -102,13 +125,19 @@ class SNMPServerFixture:
 
 
 class TestSNMP(base.TestCase):
-
     def test_annotation_fail(self):
         """
         unit test for the processing of annotations. Uses two lists: one with annotations that should succeed and one with annotations that should fail.
         """
 
-        client = SNMPClient(community='public', host='localhost', version=1, timeout=10, fault_func=None, try_interval=2)
+        client = SNMPClient(
+            community="public",
+            host="localhost",
+            version=1,
+            timeout=10,
+            fault_func=None,
+            try_interval=2,
+        )
 
         fail_list = [
             # no 'name'
@@ -121,9 +150,9 @@ class TestSNMP(base.TestCase):
             with self.assertRaises(ValueError):
                 client._process_annotation(annotation=i)
 
-    @mock.patch('pysnmp.hlapi.ObjectIdentity')
-    @mock.patch('pysnmp.hlapi.ObjectType')
-    @mock.patch('tangostationcontrol.clients.snmp_client.SNMPComm.getter')
+    @mock.patch("pysnmp.hlapi.ObjectIdentity")
+    @mock.patch("pysnmp.hlapi.ObjectType")
+    @mock.patch("tangostationcontrol.clients.snmp_client.SNMPComm.getter")
     def test_snmp_obj_get(self, m_next, m_obj_T, m_obj_i):
         """
         Attempts to read a fake SNMP variable and checks whether it got what it expected
@@ -133,25 +162,42 @@ class TestSNMP(base.TestCase):
 
         for j in server.DIM_LIST:
             for i in server.SNMP_TO_NUMPY_DICT:
-                m_next.return_value = (None, None, None, server.get_return_val(i, server.DIM_LIST[j]))
+                m_next.return_value = (
+                    None,
+                    None,
+                    None,
+                    server.get_return_val(i, server.DIM_LIST[j]),
+                )
 
                 def __fakeInit__(self):
                     pass
 
-                with mock.patch.object(SNMPComm, '__init__', __fakeInit__):
+                with mock.patch.object(SNMPComm, "__init__", __fakeInit__):
                     m_comms = SNMPComm()
 
-                    snmp_attr = SNMPAttribute(comm=m_comms, mib="test", name="test", idx=0, dtype=server.SNMP_TO_NUMPY_DICT[i], dim_x=server.DIM_LIST[j][0], dim_y=server.DIM_LIST[j][1])
+                    snmp_attr = SNMPAttribute(
+                        comm=m_comms,
+                        mib="test",
+                        name="test",
+                        idx=0,
+                        dtype=server.SNMP_TO_NUMPY_DICT[i],
+                        dim_x=server.DIM_LIST[j][0],
+                        dim_y=server.DIM_LIST[j][1],
+                    )
 
                     val = snmp_attr.read_function()
 
                     checkval = server.val_check(i, server.DIM_LIST[j])
-                    self.assertEqual(checkval, val, f"During test {j} {i}; Expected: {checkval} of type {i}, got: {val} of type {type(val)}")
-
-    @mock.patch('pysnmp.hlapi.ObjectIdentity')
-    @mock.patch('pysnmp.hlapi.ObjectType')
-    @mock.patch('pysnmp.hlapi.setCmd')
-    @mock.patch('tangostationcontrol.clients.snmp_client.SNMPComm.setter')
+                    self.assertEqual(
+                        checkval,
+                        val,
+                        f"During test {j} {i}; Expected: {checkval} of type {i}, got: {val} of type {type(val)}",
+                    )
+
+    @mock.patch("pysnmp.hlapi.ObjectIdentity")
+    @mock.patch("pysnmp.hlapi.ObjectType")
+    @mock.patch("pysnmp.hlapi.setCmd")
+    @mock.patch("tangostationcontrol.clients.snmp_client.SNMPComm.setter")
     def test_snmp_obj_set(self, m_next, m_nextCmd, m_obj_T, m_obj_ID):
         """
         Attempts to write a value to an SNMP server, but instead intercepts it and compares whether the value is as expected.
@@ -163,23 +209,41 @@ class TestSNMP(base.TestCase):
         for j in server.DIM_LIST:
             for i in server.SNMP_TO_NUMPY_DICT:
                 # mocks the return value of the next function in snmp_client.SNMPComm.setter
-                m_next.return_value = (None, None, None, server.get_return_val(i, server.DIM_LIST[j]))
+                m_next.return_value = (
+                    None,
+                    None,
+                    None,
+                    server.get_return_val(i, server.DIM_LIST[j]),
+                )
 
                 def __fakeInit__(self):
                     pass
 
-                with mock.patch.object(SNMPComm, '__init__', __fakeInit__):
+                with mock.patch.object(SNMPComm, "__init__", __fakeInit__):
                     m_comms = SNMPComm()
 
                     set_val = server.val_check(i, server.DIM_LIST[j])
 
                     # create an SNMP attribute object and temporarily
                     hlapi.ObjectType = obj_type
-                    snmp_attr = SNMPAttribute(comm=m_comms, mib="test", name="test", idx=0, dtype=server.SNMP_TO_NUMPY_DICT[i], dim_x=server.DIM_LIST[j][0], dim_y=server.DIM_LIST[j][1])
+                    snmp_attr = SNMPAttribute(
+                        comm=m_comms,
+                        mib="test",
+                        name="test",
+                        idx=0,
+                        dtype=server.SNMP_TO_NUMPY_DICT[i],
+                        dim_x=server.DIM_LIST[j][0],
+                        dim_y=server.DIM_LIST[j][1],
+                    )
 
                     hlapi.ObjectType = mock.MagicMock()
 
-                    hlapi.ObjectType.return_value = (None, None, None, server.get_return_val(i, server.DIM_LIST[j]))
+                    hlapi.ObjectType.return_value = (
+                        None,
+                        None,
+                        None,
+                        server.get_return_val(i, server.DIM_LIST[j]),
+                    )
 
                     # call the write function. This function should now call m_ObjectType itself.
                     snmp_attr.write_function(set_val)
@@ -187,7 +251,9 @@ class TestSNMP(base.TestCase):
                     # get a value to compare the value we got against
                     checkval = server.val_check(i, server.DIM_LIST[j])
 
-                    res_lst = [args[1] for args, _ in hlapi.ObjectType.call_args_list if args]
+                    res_lst = [
+                        args[1] for args, _ in hlapi.ObjectType.call_args_list if args
+                    ]
                     if len(res_lst) == 1:
                         res_lst = res_lst[0]
 
@@ -195,25 +261,47 @@ class TestSNMP(base.TestCase):
 
         hlapi.ObjectType = obj_type
 
-    @mock.patch('tangostationcontrol.clients.snmp_client.SNMPComm.getter')
+    @mock.patch("tangostationcontrol.clients.snmp_client.SNMPComm.getter")
     def test_named_value(self, m_next):
 
         m_comms = mock.Mock()
-        snmp_attr = SNMPAttribute(comm=m_comms, mib="test", name="test", idx=0, dtype=str, dim_x=1, dim_y=0)
+        snmp_attr = SNMPAttribute(
+            comm=m_comms, mib="test", name="test", idx=0, dtype=str, dim_x=1, dim_y=0
+        )
 
         # create a named integer with the values: 'enable' for 1 and 'disable' for 0
-        test_val = [[(None, hlapi.Integer.withNamedValues(enable=1, disable=0)(1),)]]
+        test_val = [
+            [
+                (
+                    None,
+                    hlapi.Integer.withNamedValues(enable=1, disable=0)(1),
+                )
+            ]
+        ]
         ret_val = snmp_attr.convert(test_val)
 
         # should return 'enable' since we supplied the value 1
-        self.assertEqual(ret_val, "enable", f"Expected: to get 'enable', got: {ret_val} of type {type(ret_val)}")
+        self.assertEqual(
+            ret_val,
+            "enable",
+            f"Expected: to get 'enable', got: {ret_val} of type {type(ret_val)}",
+        )
 
         # create an unnamed integer with a value of 2
-        test_val = [[(None, hlapi.Integer(2),)]]
+        test_val = [
+            [
+                (
+                    None,
+                    hlapi.Integer(2),
+                )
+            ]
+        ]
         ret_val = snmp_attr.convert(test_val)
 
         # check to make sure the value is indeed 2
-        self.assertEqual(ret_val, 2, f"Expected: to get {2}, got: {ret_val} of type {type(ret_val)}")
+        self.assertEqual(
+            ret_val, 2, f"Expected: to get {2}, got: {ret_val} of type {type(ret_val)}"
+        )
 
 
 class TestMibLoading(base.TestCase):
@@ -224,7 +312,6 @@ class TestMibLoading(base.TestCase):
     REL_DIR = "snmp_mib_loading"
 
     def test_retrieve_mib_content(self):
-
         """
         This file contains a single variable named testNamedValue with OID "1.3.99.1.99" and named values: ("test_name", 1), ("other_name", 2).
         In order to confirm that the MIB is indeed loaded correctly, this test has to get the OIDs, the values and the named values
@@ -246,7 +333,9 @@ class TestMibLoading(base.TestCase):
         testNamedValue_value = 1
 
         # get testValue and set a value of 1
-        obj_T = hlapi.ObjectType(ObjectIdentity(self.MIB, testNamedValue), hlapi.Integer32(1))
+        obj_T = hlapi.ObjectType(
+            ObjectIdentity(self.MIB, testNamedValue), hlapi.Integer32(1)
+        )
         obj_T.resolveWithMib(mibView)
 
         # get the oid
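
test_named_value above encodes the conversion rule for SNMP integers: if the integer type defines a symbolic name for the received number, that name is returned, otherwise the plain Python int is. A sketch of that rule relying only on pyasn1's prettyPrint() (an illustration, not the project's SNMPAttribute.convert):

from pysnmp import hlapi

def convert_integer_sketch(value):
    # prettyPrint() yields the symbolic name when one is defined for this
    # value, and its decimal representation otherwise
    printed = value.prettyPrint()
    return printed if printed != str(int(value)) else int(value)

assert convert_integer_sketch(hlapi.Integer.withNamedValues(enable=1, disable=0)(1)) == "enable"
assert convert_integer_sketch(hlapi.Integer(2)) == 2
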
diff --git a/tangostationcontrol/tangostationcontrol/test/clients/test_statistics_client_thread.py b/tangostationcontrol/tangostationcontrol/test/clients/test_statistics_client_thread.py
index 1513f605ec7ee937fe9cc51764488fe0fde4f44b..6d9f01ef76f122b126747fa6ebfe2e64a1f619d9 100644
--- a/tangostationcontrol/tangostationcontrol/test/clients/test_statistics_client_thread.py
+++ b/tangostationcontrol/tangostationcontrol/test/clients/test_statistics_client_thread.py
@@ -1,30 +1,20 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import logging
 from unittest import mock
 
-from tangostationcontrol.clients.statistics.client_thread import \
-    StatisticsClientThread
-
+from tangostationcontrol.clients.statistics.client_thread import StatisticsClientThread
 from tangostationcontrol.test import base
 
 logger = logging.getLogger()
 
 
 class TestStatisticsClientThread(base.TestCase):
-
     def setUp(self):
         super(TestStatisticsClientThread, self).setUp()
 
     class DummySCThread(StatisticsClientThread):
-
         def disconnect(self):
             pass
 
diff --git a/tangostationcontrol/tangostationcontrol/test/clients/test_tcp_replicator.py b/tangostationcontrol/tangostationcontrol/test/clients/test_tcp_replicator.py
index 53c96a662c95439fc7c7f0e43925f23a05ca68fc..a6e3a24f67728d90969c1c7fab0353a954670303 100644
--- a/tangostationcontrol/tangostationcontrol/test/clients/test_tcp_replicator.py
+++ b/tangostationcontrol/tangostationcontrol/test/clients/test_tcp_replicator.py
@@ -1,24 +1,18 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import asyncio
 import logging
 from unittest import mock
 
 import timeout_decorator
-
 from tangostationcontrol.clients.tcp_replicator import TCPReplicator
 from tangostationcontrol.test import base
 
 logger = logging.getLogger()
 
 
-class MockTcpServer():
+class MockTcpServer:
     def __init__(self, *args, **kwargs):
         logger.critical("################### __INIT__ ################")
         self.serving = True
@@ -40,9 +34,8 @@ class MockTcpServer():
         pass
 
 
-@mock.patch('asyncio.start_server', side_effect=MockTcpServer)
+@mock.patch("asyncio.start_server", side_effect=MockTcpServer)
 class TestTCPReplicator(base.TestCase):
-
     def setUp(self):
         super(TestTCPReplicator, self).setUp()
 
@@ -53,24 +46,24 @@ class TestTCPReplicator(base.TestCase):
         """Validate option parsing"""
 
         # Perform string copy of current tcp_bind value
-        t_tcp_bind = str(TCPReplicator._DEFAULT_OPTIONS['tcp_bind'])
+        t_tcp_bind = str(TCPReplicator._DEFAULT_OPTIONS["tcp_bind"])
 
         test_options = {
-            "random"  : 12346,  # I should be ignored
-            "tcp_bind": '1.1.1.1',  # I should get set
+            "random": 12346,  # I should be ignored
+            "tcp_bind": "1.1.1.1",  # I should get set
         }
 
         replicator = self.m_tcp_replicator(options=test_options)
         self.assertTrue(replicator.is_alive())
 
         # Ensure replicator initialization does not modify static variable
-        self.assertEqual(t_tcp_bind, TCPReplicator._DEFAULT_OPTIONS['tcp_bind'])
+        self.assertEqual(t_tcp_bind, TCPReplicator._DEFAULT_OPTIONS["tcp_bind"])
 
         # Ensure options are correctly updated upon initialization
-        self.assertEqual(test_options['tcp_bind'], replicator.options['tcp_bind'])
+        self.assertEqual(test_options["tcp_bind"], replicator.options["tcp_bind"])
 
         # Ensure non-existing keys don't propagate into options
-        self.assertFalse('random' in replicator.options)
+        self.assertFalse("random" in replicator.options)
 
     def test_start_stop(self, _):
         """Verify threading behavior, being able to start and stop the thread"""
@@ -85,7 +78,7 @@ class TestTCPReplicator(base.TestCase):
         self.assertFalse(replicator.is_alive())
 
     @timeout_decorator.timeout(5)
-    @mock.patch('asyncio.new_event_loop', side_effect=RuntimeError)
+    @mock.patch("asyncio.new_event_loop", side_effect=RuntimeError)
     def test_start_except_eventloop(self, *_):
         """Verify exception handling inside run() for eventloop creation"""
 
@@ -113,7 +106,7 @@ class TestTCPReplicator(base.TestCase):
     def test_transmit(self, _):
         """Test that clients are getting data written to their transport"""
 
-        m_data = "Hello World!".encode('utf-8')
+        m_data = "Hello World!".encode("utf-8")
 
         m_client_1 = mock.Mock(TCPReplicator.TcpReplicatorClient)
         m_client_2 = mock.Mock(TCPReplicator.TcpReplicatorClient)
@@ -130,7 +123,7 @@ class TestTCPReplicator(base.TestCase):
         m_client_2.put.assert_called_once_with(m_data)
 
     def test_transmit_queue(self, _):
-        m_data = "Hello World!".encode('utf-8')
+        m_data = "Hello World!".encode("utf-8")
 
         m_client_1 = mock.Mock(TCPReplicator.TcpReplicatorClient)
         m_client_2 = mock.Mock(TCPReplicator.TcpReplicatorClient)
diff --git a/tangostationcontrol/tangostationcontrol/test/common/__init__.py b/tangostationcontrol/tangostationcontrol/test/common/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/test/common/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/test/common/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/test/common/file_access/SST_2022-11-15-14-21-39.h5 b/tangostationcontrol/tangostationcontrol/test/common/file_access/SST_2022-11-15-14-21-39.h5
deleted file mode 100644
index ed56c2590699853637e660a4b5b950a7b5203f9a..0000000000000000000000000000000000000000
Binary files a/tangostationcontrol/tangostationcontrol/test/common/file_access/SST_2022-11-15-14-21-39.h5 and /dev/null differ
diff --git a/tangostationcontrol/tangostationcontrol/test/common/test_baselines.py b/tangostationcontrol/tangostationcontrol/test/common/test_baselines.py
index 0701dee8262042bca4039d66939bd2246bb76ed9..ca1486f27d4da8f83c3fca227947ed22972450c8 100644
--- a/tangostationcontrol/tangostationcontrol/test/common/test_baselines.py
+++ b/tangostationcontrol/tangostationcontrol/test/common/test_baselines.py
@@ -1,11 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tangostationcontrol.common import baselines
 from tangostationcontrol.common.constants import MAX_INPUTS
@@ -23,9 +17,13 @@ class TestBaselines(base.TestCase):
         self.assertEqual(3, baselines.nr_baselines(2))
 
     def test_baseline_indices(self):
-        """ Test whether baseline_from_index and baseline_index line up. """
+        """Test whether baseline_from_index and baseline_index line up."""
 
         for major in range(MAX_INPUTS):
             for minor in range(major + 1):
                 idx = baselines.baseline_index(major, minor)
-                self.assertEqual((major, minor), baselines.baseline_from_index(idx), msg=f'baseline_index({major},{minor}) resulted in {idx}, and should match baseline_from_index({idx})')
+                self.assertEqual(
+                    (major, minor),
+                    baselines.baseline_from_index(idx),
+                    msg=f"baseline_index({major},{minor}) resulted in {idx}, and should match baseline_from_index({idx})",
+                )
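
test_baseline_indices above only requires baseline_index and baseline_from_index to be inverses of each other, and test_nr_baselines fixes nr_baselines(2) == 3. A minimal sketch of a triangular numbering that satisfies both, assuming the conventional packed lower-triangle ordering with auto-correlations (the actual tangostationcontrol.common.baselines implementation may differ):

import math

def nr_baselines(nr_inputs: int) -> int:
    # auto-correlations included, so nr_baselines(2) == 3
    return nr_inputs * (nr_inputs + 1) // 2

def baseline_index(major: int, minor: int) -> int:
    # packed lower triangle; requires major >= minor
    return major * (major + 1) // 2 + minor

def baseline_from_index(idx: int) -> tuple:
    # invert the triangular numbering
    major = (math.isqrt(8 * idx + 1) - 1) // 2
    minor = idx - major * (major + 1) // 2
    return major, minor
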
diff --git a/tangostationcontrol/tangostationcontrol/test/common/test_cables.py b/tangostationcontrol/tangostationcontrol/test/common/test_cables.py
index 04fd3653a10229e8ab2cf89810516a535a23dab8..96a78dd89cb684b5e46e31a2edb635f830816afc 100644
--- a/tangostationcontrol/tangostationcontrol/test/common/test_cables.py
+++ b/tangostationcontrol/tangostationcontrol/test/common/test_cables.py
@@ -1,36 +1,35 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tangostationcontrol.common import cables
 
 from tangostationcontrol.test import base
 
+
 class TestCables(base.TestCase):
     def test_cable_names(self):
-        """ Test whether cable names match their key in the cable_types dictionary. """
+        """Test whether cable names match their key in the cable_types dictionary."""
 
         for name, cable in cables.cable_types.items():
             self.assertEqual(name, cable.name)
 
     def test_cable_speeds(self):
-        """ Test whether cables are transporting signals at 80% - 100% the speed of light,
-            which is a property of all our cables. """
+        """Test whether cables are transporting signals at 80% - 100% the speed of light,
+        which is a property of all our cables."""
 
         speed_of_light = 299_792_458
 
         for cable in cables.cable_types.values():
             if cable.length > 0:
-                self.assertLess(80, cable.speed() / speed_of_light * 100, msg=f"Cable {cable.name}")
-                self.assertGreater(100, cable.speed() / speed_of_light * 100, msg=f"Cable {cable.name}")
+                self.assertLess(
+                    80, cable.speed() / speed_of_light * 100, msg=f"Cable {cable.name}"
+                )
+                self.assertGreater(
+                    100, cable.speed() / speed_of_light * 100, msg=f"Cable {cable.name}"
+                )
 
     def test_cable_loss_increases_with_frequency(self):
-        """ Test whether cable losses increase with frequency for each cable. """
+        """Test whether cable losses increase with frequency for each cable."""
 
         for cable in cables.cable_types.values():
             if cable.length == 0:
@@ -39,7 +38,12 @@ class TestCables(base.TestCase):
                 self.assertEqual(0.0, cable.loss[200])
                 self.assertEqual(0.0, cable.loss[250])
             else:
-                self.assertLess(cable.loss[50], cable.loss[150], msg=f"Cable {cable.name}")
-                self.assertLess(cable.loss[150], cable.loss[200], msg=f"Cable {cable.name}")
-                self.assertLess(cable.loss[200], cable.loss[250], msg=f"Cable {cable.name}")
-
+                self.assertLess(
+                    cable.loss[50], cable.loss[150], msg=f"Cable {cable.name}"
+                )
+                self.assertLess(
+                    cable.loss[150], cable.loss[200], msg=f"Cable {cable.name}"
+                )
+                self.assertLess(
+                    cable.loss[200], cable.loss[250], msg=f"Cable {cable.name}"
+                )
diff --git a/tangostationcontrol/tangostationcontrol/test/common/test_calibration.py b/tangostationcontrol/tangostationcontrol/test/common/test_calibration.py
index 632a388f3c1daf1d6820e941e64f6d1788eb4c5a..6d13f8929dd25dbab41bf0b89ee204ff7bfa3e07 100644
--- a/tangostationcontrol/tangostationcontrol/test/common/test_calibration.py
+++ b/tangostationcontrol/tangostationcontrol/test/common/test_calibration.py
@@ -1,17 +1,14 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
-
-from tangostationcontrol.common.calibration import delay_compensation, loss_compensation, dB_to_factor
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
+import numpy
+from tangostationcontrol.common.calibration import (
+    delay_compensation,
+    loss_compensation,
+    dB_to_factor,
+)
 from tangostationcontrol.test import base
 
-import numpy
 
 class TestCalibration(base.TestCase):
     def test_dB_to_factor(self):
@@ -20,17 +17,23 @@ class TestCalibration(base.TestCase):
         self.assertAlmostEqual(2.0, dB_to_factor(3.0), places=2)
         self.assertAlmostEqual(10.0, dB_to_factor(10.0), places=7)
 
+
 class TestLossCompensation(base.TestCase):
     def test_integer_losses_no_remainder(self):
         losses = [1.0, 2.0, 3.0, 4.0]
 
-        attenuation_integer_dB, remainder_factor = loss_compensation(numpy.array(losses))
+        attenuation_integer_dB, remainder_factor = loss_compensation(
+            numpy.array(losses)
+        )
 
         # verify that there is no remainder
-        self.assertTrue(numpy.all(remainder_factor == 1.0), msg=f"attenuation_integer_dB = {attenuation_integer_dB}, remainder_factor = {remainder_factor}")
+        self.assertTrue(
+            numpy.all(remainder_factor == 1.0),
+            msg=f"attenuation_integer_dB = {attenuation_integer_dB}, remainder_factor = {remainder_factor}",
+        )
 
     def test_loss_compensation_lines_up(self):
-        """ Test whether signals line up after the computed delay compensation. """
+        """Test whether signals line up after the computed delay compensation."""
 
         losses = [1.0, 2.0, 3.0, 4.0]
 
@@ -40,15 +43,21 @@ class TestLossCompensation(base.TestCase):
         effective_attenuation = losses + attenuation_integer_dB
 
         # all values must be lined up equally
-        self.assertEqual(1, len(set(effective_attenuation)), msg=f"effective_attenuation = {effective_attenuation}, attenuation_integer_dB = {attenuation_integer_dB}, losses = {losses}")
+        self.assertEqual(
+            1,
+            len(set(effective_attenuation)),
+            msg=f"effective_attenuation = {effective_attenuation}, attenuation_integer_dB = {attenuation_integer_dB}, losses = {losses}",
+        )
 
     def test_loss_compensation_remainder(self):
-        """ Test correctness of the loss compensation remainders. """
+        """Test correctness of the loss compensation remainders."""
 
         # losses in dB we want to compensate for. they all round to the same integer value
         losses = [0.75, 1.0, 1.25]
 
-        attenuation_integer_dB, remainder_factor = loss_compensation(numpy.array(losses))
+        attenuation_integer_dB, remainder_factor = loss_compensation(
+            numpy.array(losses)
+        )
 
         # should not result in any integer attenuation steps
         self.assertEqual(0, attenuation_integer_dB[0])
@@ -58,13 +67,13 @@ class TestLossCompensation(base.TestCase):
         # remainder should correspond with differences.
         # NB: these are the factors to apply to line up the signals.
         self.assertAlmostEqual(dB_to_factor(+0.25), remainder_factor[0])
-        self.assertAlmostEqual(dB_to_factor( 0.0), remainder_factor[1])
+        self.assertAlmostEqual(dB_to_factor(0.0), remainder_factor[1])
         self.assertAlmostEqual(dB_to_factor(-0.25), remainder_factor[2])
 
 
 class TestDelayCompensation(base.TestCase):
     def _compute_delay_compensation(self, delays_samples: list):
-        # convert to seconds (200 MHz clock => 5 ns samples) 
+        # convert to seconds (200 MHz clock => 5 ns samples)
         clock = 200_000_000
         delays_seconds = numpy.array(delays_samples) / clock
 
@@ -72,7 +81,7 @@ class TestDelayCompensation(base.TestCase):
         return delay_compensation(delays_seconds, clock)
 
     def test_whole_sample_shifts_no_remainder(self):
-        """ Test whether delay compensation indeed has no remainder if we shift whole samples. """
+        """Test whether delay compensation indeed has no remainder if we shift whole samples."""
 
         # delay to compensate for, in samples
         delay_samples = [1, 2, 3, 4]
@@ -83,7 +92,7 @@ class TestDelayCompensation(base.TestCase):
         self.assertTrue(numpy.all(remainder_seconds == 0.0), msg=f"{remainder_seconds}")
 
     def test_sample_shifts_line_up(self):
-        """ Test whether signals line up after the computed delay compensation. """
+        """Test whether signals line up after the computed delay compensation."""
 
         # delay to compensate for, in samples
         delay_samples = [1, 2, 3, 4]
@@ -94,15 +103,21 @@ class TestDelayCompensation(base.TestCase):
         effective_signal_delay = delay_samples + sample_shift
 
         # all values must be lined up equally
-        self.assertEqual(1, len(set(effective_signal_delay)), msg=f"effective_signal_delay = {effective_signal_delay}, sample_shift = {sample_shift}, delay_samples = {delay_samples}")
+        self.assertEqual(
+            1,
+            len(set(effective_signal_delay)),
+            msg=f"effective_signal_delay = {effective_signal_delay}, sample_shift = {sample_shift}, delay_samples = {delay_samples}",
+        )
 
     def test_delay_compensation_remainder(self):
-        """ Test correctness of the delay compensation remainders. """
+        """Test correctness of the delay compensation remainders."""
 
         # delays in samples we want to compensate for. they all round to the same sample
         delay_samples = [0.75, 1.0, 1.25]
 
-        sample_shift, remainder_seconds = self._compute_delay_compensation(delay_samples)
+        sample_shift, remainder_seconds = self._compute_delay_compensation(
+            delay_samples
+        )
 
         # should not result in any sample shifts
         self.assertEqual(0, sample_shift[0])
@@ -112,5 +127,5 @@ class TestDelayCompensation(base.TestCase):
         # remainder should correspond with differences.
         # NB: these are the remainders to apply to line up the signals.
         self.assertAlmostEqual(+0.25, remainder_seconds[0] / 5e-9)
-        self.assertAlmostEqual( 0.00, remainder_seconds[1] / 5e-9)
+        self.assertAlmostEqual(0.00, remainder_seconds[1] / 5e-9)
         self.assertAlmostEqual(-0.25, remainder_seconds[2] / 5e-9)
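
The calibration tests above pin down the observable behaviour of dB_to_factor and loss_compensation without showing their implementation. The following sketch reproduces the asserted values, assuming a power-ratio dB convention and per-element rounding to whole dBs (an assumption for illustration, not necessarily the library's code):

import numpy

def dB_to_factor_sketch(dB):
    # power ratio for a gain of `dB` decibels: 3 dB ~= 2x, 10 dB == 10x
    return 10.0 ** (numpy.asarray(dB, dtype=float) / 10.0)

def loss_compensation_sketch(losses_dB):
    # Round each loss to whole dBs and add integer attenuation so every signal
    # ends up at the same (maximum) integer loss; the fractional part that the
    # integer attenuators cannot express is returned as a linear factor.
    rounded = numpy.round(losses_dB)
    attenuation_integer_dB = (rounded.max() - rounded).astype(int)
    remainder_factor = dB_to_factor_sketch(rounded - losses_dB)
    return attenuation_integer_dB, remainder_factor

With losses [1, 2, 3, 4] this yields integer attenuations [3, 2, 1, 0] and remainder factors of exactly 1.0; with [0.75, 1.0, 1.25] it yields no integer attenuation and remainders dB_to_factor(+0.25), dB_to_factor(0.0) and dB_to_factor(-0.25), matching the assertions above.
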
diff --git a/tangostationcontrol/tangostationcontrol/test/common/test_lofar_logging.py b/tangostationcontrol/tangostationcontrol/test/common/test_lofar_logging.py
index 60703eb8eec7a42965528ba334676028da5cf100..7b42863fc4abc17038236145e58e28d18ba436cf 100644
--- a/tangostationcontrol/tangostationcontrol/test/common/test_lofar_logging.py
+++ b/tangostationcontrol/tangostationcontrol/test/common/test_lofar_logging.py
@@ -1,32 +1,21 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from unittest import mock
 import logging
 import unittest
+from unittest import mock
 
-from tango.server import Device
 from tango import device_server
+from tango.server import Device
 from tango.test_context import DeviceTestContext
-
 from tangostationcontrol.common import lofar_logging
-
 from tangostationcontrol.test import base
 
 
 class TestLofarLogging(base.TestCase):
-
-
     def setUp(self):
         super(TestLofarLogging, self).setUp()
 
-
         # reset logging system
         rootLogger = logging.getLogger()
         rootLogger.filters = []
@@ -35,7 +24,7 @@ class TestLofarLogging(base.TestCase):
 
         # record everything we log in memory so we can inspect it
         class MemoryHandler(logging.Handler):
-            """ Handler that provides access to the records emitted. """
+            """Handler that provides access to the records emitted."""
 
             def __init__(self):
                 super().__init__()
@@ -47,9 +36,8 @@ class TestLofarLogging(base.TestCase):
         self.memory_handler = MemoryHandler()
         rootLogger.addHandler(self.memory_handler)
 
-
     def test_configure_logging_basic_usage(self):
-        """ Test whether configure_logger indeed runs smoothly. """
+        """Test whether configure_logger indeed runs smoothly."""
 
         logger = lofar_logging.configure_logger()
 
@@ -59,9 +47,8 @@ class TestLofarLogging(base.TestCase):
         logger.error("test error")
         logger.fatal("test fatal")
 
-
     def test_configure_logging_log_annotation(self):
-        """ Test whether log records get annotated after using configure_logger(). """
+        """Test whether log records get annotated after using configure_logger()."""
 
         logger = lofar_logging.configure_logger()
 
@@ -71,7 +58,7 @@ class TestLofarLogging(base.TestCase):
 
     @unittest.skip("Logs are not sent to Tango device currently, to reduce logspam")
     def test_configure_logging_uses_tango_device(self):
-        """ Test whether log records get annotated with the active Tango device after using configure_logger(), and whether logs get forwarded to it. """
+        """Test whether log records get annotated with the active Tango device after using configure_logger(), and whether logs get forwarded to it."""
 
         logger = lofar_logging.configure_logger()
 
@@ -86,18 +73,32 @@ class TestLofarLogging(base.TestCase):
         # create a Tango Device that logs something
         class MyDevice(Device):
             def init_device(self):
-                with mock.patch.object(device_server.DeviceImpl, '__info_stream') as m_info_stream:
+                with mock.patch.object(
+                    device_server.DeviceImpl, "__info_stream"
+                ) as m_info_stream:
                     self.log_deeper_in_stack()
 
                     # check if we actually routed the log to self.info_stream
-                    test_object.assertEqual(1, m_info_stream.call_count, msg="configure_logger did not send logs to active Tango device")
+                    test_object.assertEqual(
+                        1,
+                        m_info_stream.call_count,
+                        msg="configure_logger did not send logs to active Tango device",
+                    )
 
                     # Lookup our "test" logline among f.e. the debug messages output by Tango
-                    test_record = [record for record in test_object.memory_handler.records if record.msg == "test log_deeper_in_stack"]
+                    test_record = [
+                        record
+                        for record in test_object.memory_handler.records
+                        if record.msg == "test log_deeper_in_stack"
+                    ]
 
                     # Tango uses slightly different class representations of MyDevice, so
                     # we can't compare them directly. Just verify we're talking about the same thing.
-                    test_object.assertEqual(str(self), str(test_record[0].tango_device), msg="configure_logging did not detect active Tango device")
+                    test_object.assertEqual(
+                        str(self),
+                        str(test_record[0].tango_device),
+                        msg="configure_logging did not detect active Tango device",
+                    )
 
             def log_deeper_in_stack(self):
                 logger.info("test log_deeper_in_stack")
@@ -107,16 +108,21 @@ class TestLofarLogging(base.TestCase):
             pass
 
     def test_log_exceptions(self):
-        """ Test whether log_exceptions actually logs and reraises exceptions. """
+        """Test whether log_exceptions actually logs and reraises exceptions."""
 
         class Foo:
             @lofar_logging.log_exceptions()
             def exceptionalFunction(self):
                 raise RuntimeError("test")
 
-        with self.assertRaises(RuntimeError, msg="log_exceptions did not reraise exception"):
+        with self.assertRaises(
+            RuntimeError, msg="log_exceptions did not reraise exception"
+        ):
             f = Foo()
             f.exceptionalFunction()
 
-        self.assertEqual(1, len(self.memory_handler.records), msg="log_exceptions did not log exception")
-
+        self.assertEqual(
+            1,
+            len(self.memory_handler.records),
+            msg="log_exceptions did not log exception",
+        )
diff --git a/tangostationcontrol/tangostationcontrol/test/common/test_measures.py b/tangostationcontrol/tangostationcontrol/test/common/test_measures.py
index 858290947d6eed755f0656ebfbc49ac84fba3a61..ec8e0369af8451a8cb10f36a7a55433102cfc4ff 100644
--- a/tangostationcontrol/tangostationcontrol/test/common/test_measures.py
+++ b/tangostationcontrol/tangostationcontrol/test/common/test_measures.py
@@ -1,20 +1,13 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-import urllib.request
 import os.path
-from unittest import mock
 import shutil
 import tempfile
+import urllib.request
+from unittest import mock
 
 from tangostationcontrol.common import measures
-
 from tangostationcontrol.test import base
 
 # where our WSRT_Measures.ztar surrogate is located
@@ -22,17 +15,21 @@ from tangostationcontrol.test import base
 FAKE_MEASURES = os.path.dirname(__file__) + "/fake_measures.ztar"
 FAKE_MEASURES_NEWER = os.path.dirname(__file__) + "/fake_measures_newer.ztar"
 
+
 class TestMeasures(base.TestCase):
-    @mock.patch.object(urllib.request, 'urlretrieve')
+    @mock.patch.object(urllib.request, "urlretrieve")
     def test_download_and_use(self, m_urlretrieve):
-        """ Test downloading and using new measures tables. """
-
-        with tempfile.TemporaryDirectory() as tmpdirname, \
-             mock.patch('tangostationcontrol.common.measures.IERS_ROOTDIR', tmpdirname) as rootdir, \
-             mock.patch('tangostationcontrol.common.measures.DOWNLOAD_DIR', tmpdirname) as downloaddir:
+        """Test downloading and using new measures tables."""
 
+        with tempfile.TemporaryDirectory() as tmpdirname, mock.patch(
+            "tangostationcontrol.common.measures.IERS_ROOTDIR", tmpdirname
+        ) as rootdir, mock.patch(
+            "tangostationcontrol.common.measures.DOWNLOAD_DIR", tmpdirname
+        ) as downloaddir:
             # emulate the download
-            m_urlretrieve.side_effect = lambda *args, **kw: shutil.copyfile(FAKE_MEASURES, tmpdirname + "/WSRT_Measures.ztar")
+            m_urlretrieve.side_effect = lambda *args, **kw: shutil.copyfile(
+                FAKE_MEASURES, tmpdirname + "/WSRT_Measures.ztar"
+            )
 
             # 'download' and process our fake measures
             newdir = measures.download_measures()
@@ -44,18 +41,23 @@ class TestMeasures(base.TestCase):
             self.assertIn(newdir, measures.get_available_measures_directories())
             self.assertEqual(newdir, measures.get_measures_directory())
 
-    @mock.patch.object(urllib.request, 'urlretrieve')
+    @mock.patch.object(urllib.request, "urlretrieve")
     def test_switch_tables(self, m_urlretrieve):
-        """ Test switching between available sets of measures tables. """
-
-        with tempfile.TemporaryDirectory() as tmpdirname, \
-             mock.patch('tangostationcontrol.common.measures.IERS_ROOTDIR', tmpdirname) as rootdir, \
-             mock.patch('tangostationcontrol.common.measures.DOWNLOAD_DIR', tmpdirname) as downloaddir:
+        """Test switching between available sets of measures tables."""
 
+        with tempfile.TemporaryDirectory() as tmpdirname, mock.patch(
+            "tangostationcontrol.common.measures.IERS_ROOTDIR", tmpdirname
+        ) as rootdir, mock.patch(
+            "tangostationcontrol.common.measures.DOWNLOAD_DIR", tmpdirname
+        ) as downloaddir:
             # 'download' two measures with different timestamps
-            m_urlretrieve.side_effect = lambda *args, **kw: shutil.copyfile(FAKE_MEASURES, tmpdirname + "/WSRT_Measures.ztar")
+            m_urlretrieve.side_effect = lambda *args, **kw: shutil.copyfile(
+                FAKE_MEASURES, tmpdirname + "/WSRT_Measures.ztar"
+            )
             newdir1 = measures.download_measures()
-            m_urlretrieve.side_effect = lambda *args, **kw: shutil.copyfile(FAKE_MEASURES_NEWER, tmpdirname + "/WSRT_Measures.ztar")
+            m_urlretrieve.side_effect = lambda *args, **kw: shutil.copyfile(
+                FAKE_MEASURES_NEWER, tmpdirname + "/WSRT_Measures.ztar"
+            )
             newdir2 = measures.download_measures()
 
             # check if both are available
diff --git a/tangostationcontrol/tangostationcontrol/test/common/test_observation_controller.py b/tangostationcontrol/tangostationcontrol/test/common/test_observation_controller.py
index d9ddbbdcc0a69abd5565b516fef9574569ee230a..75908dacea942049132dd8d6e5c9af16fdd1b84c 100644
--- a/tangostationcontrol/tangostationcontrol/test/common/test_observation_controller.py
+++ b/tangostationcontrol/tangostationcontrol/test/common/test_observation_controller.py
@@ -1,11 +1,6 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import importlib
 import sys
 from datetime import datetime
@@ -13,14 +8,13 @@ from unittest import mock
 from unittest.mock import Mock
 
 from tango import DevState
-
 from tangostationcontrol.common import ObservationController
 from tangostationcontrol.common.observation_controller import RunningObservation
 from tangostationcontrol.configuration import ObservationSettings, Pointing, Sap
 from tangostationcontrol.test import base
 
 
-@mock.patch('tango.Util.instance')
+@mock.patch("tango.Util.instance")
 class TestObservationController(base.TestCase):
     def test_is_any_observation_running(self, _):
         sut = ObservationController("DMR")
@@ -39,12 +33,15 @@ class TestObservationController(base.TestCase):
         sut.stop_all_observations()
 
 
-@mock.patch('tango.Util.instance')
+@mock.patch("tango.Util.instance")
 class TestRunningObservation(base.TestCase):
-    SETTINGS = ObservationSettings(5, datetime.fromisoformat("2022-10-26T11:35:54.704150"), [3, 2, 1],
-                                   "filter settings",
-                                   [Sap([3, 2], Pointing(1.2, 2.1, "LMN")),
-                                    Sap([1], Pointing(3.3, 4.4, "MOON"))])
+    SETTINGS = ObservationSettings(
+        5,
+        datetime.fromisoformat("2022-10-26T11:35:54.704150"),
+        [3, 2, 1],
+        "filter settings",
+        [Sap([3, 2], Pointing(1.2, 2.1, "LMN")), Sap([1], Pointing(3.3, 4.4, "MOON"))],
+    )
 
     def test_properties(self, _):
         sut = RunningObservation("DMR", TestRunningObservation.SETTINGS)
@@ -57,13 +54,16 @@ class TestRunningObservation(base.TestCase):
         sut = RunningObservation("DMR", TestRunningObservation.SETTINGS)
         sut.create_tango_device()
 
-    @mock.patch('tango.DeviceProxy')
+    @mock.patch("tango.DeviceProxy")
     def test_create_device_proxy(self, dp_mock, tu_mock):
         importlib.reload(sys.modules[RunningObservation.__module__])
         sut = RunningObservation("DMR", TestRunningObservation.SETTINGS)
         sut.create_device_proxy()
 
-        self.assertEqual(dp_mock.return_value.observation_settings_RW, TestRunningObservation.SETTINGS.to_json())
+        self.assertEqual(
+            dp_mock.return_value.observation_settings_RW,
+            TestRunningObservation.SETTINGS.to_json(),
+        )
         dp_mock.return_value.Initialise.assert_called()
         dp_mock.return_value.On.assert_called()
 
diff --git a/tangostationcontrol/tangostationcontrol/test/common/test_type_checking.py b/tangostationcontrol/tangostationcontrol/test/common/test_type_checking.py
index 342e1ba9b7834981397bbaf79a97b06075ba4d96..f5b1816d39e93899f0e4c82b2eb8afac5e3564df 100644
--- a/tangostationcontrol/tangostationcontrol/test/common/test_type_checking.py
+++ b/tangostationcontrol/tangostationcontrol/test/common/test_type_checking.py
@@ -1,29 +1,20 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from tango.utils import is_seq
 import numpy
-
+from tango.utils import is_seq
 from tangostationcontrol.common import type_checking
-
 from tangostationcontrol.test import base
 
 
 class TestTypeChecking(base.TestCase):
-
     @staticmethod
     def subscriptable(obj):
-        return hasattr(obj, '__getitem__')
+        return hasattr(obj, "__getitem__")
 
     @staticmethod
     def iterable(obj):
-        return hasattr(obj, '__iter__')
+        return hasattr(obj, "__iter__")
 
     @staticmethod
     def positional_ordering(obj):
@@ -40,14 +31,10 @@ class TestTypeChecking(base.TestCase):
         """
 
         result = (
-            self.subscriptable(obj) & self.iterable(obj)
-            & self.positional_ordering(obj)
+            self.subscriptable(obj) & self.iterable(obj) & self.positional_ordering(obj)
         )
 
-        self.assertEqual(
-            result, is_seq(obj),
-            F"Test failed for type {type(obj)}"
-        )
+        self.assertEqual(result, is_seq(obj), f"Test failed for type {type(obj)}")
 
     def test_is_sequence_for_types(self):
         """Types to be tested by is_sequence
@@ -70,11 +57,7 @@ class TestTypeChecking(base.TestCase):
         """Types test for sequence_not_str, must be false"""
 
         t_bytearray = bytearray([0, 5, 255])
-        test_types = [
-            str(""),
-            bytes(t_bytearray),
-            t_bytearray
-        ]
+        test_types = [str(""), bytes(t_bytearray), t_bytearray]
 
         for test in test_types:
             self.assertFalse(type_checking.sequence_not_str(test))
diff --git a/tangostationcontrol/tangostationcontrol/test/configuration/__init__.py b/tangostationcontrol/tangostationcontrol/test/configuration/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/test/configuration/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/test/configuration/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/test/configuration/_mock_requests.py b/tangostationcontrol/tangostationcontrol/test/configuration/_mock_requests.py
index ae9720e1b85f54f8d357dfe2e2c8912e485049fc..38eb7412c7f296db0811f088ec97fe13e2ea52e4 100644
--- a/tangostationcontrol/tangostationcontrol/test/configuration/_mock_requests.py
+++ b/tangostationcontrol/tangostationcontrol/test/configuration/_mock_requests.py
@@ -1,14 +1,9 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import json
 
-POINTING_SCHEMA = '''
+POINTING_SCHEMA = """
 {
   "type": "object",
   "required": [
@@ -52,9 +47,9 @@ POINTING_SCHEMA = '''
     }
   }
 }
-'''
+"""
 
-SAP_SCHEMA = '''
+SAP_SCHEMA = """
 {
   "$schema": "http://json-schema.org/draft-07/schema",
   "type": "object",
@@ -75,9 +70,9 @@ SAP_SCHEMA = '''
     }
   }
 }
-'''
+"""
 
-OBSERVATION_SETTINGS_SCHEMA = '''
+OBSERVATION_SETTINGS_SCHEMA = """
 {
   "$schema": "http://json-schema.org/draft-07/schema",
   "type": "object",
@@ -123,7 +118,7 @@ OBSERVATION_SETTINGS_SCHEMA = '''
     }
   }
 }
-'''
+"""
 
 
 def mocked_requests_get(*args, **kwargs):
@@ -138,11 +133,11 @@ def mocked_requests_get(*args, **kwargs):
         def json(self):
             return json.loads(self.text)
 
-    if args[0] == 'http://schemas/pointing.json':
+    if args[0] == "http://schemas/pointing.json":
         return MockResponse(POINTING_SCHEMA, 200)
-    elif args[0] == 'http://schemas/sap.json':
+    elif args[0] == "http://schemas/sap.json":
         return MockResponse(SAP_SCHEMA, 200)
-    elif args[0] == 'http://schemas/observation-settings.json':
+    elif args[0] == "http://schemas/observation-settings.json":
         return MockResponse(OBSERVATION_SETTINGS_SCHEMA, 200)
 
     return MockResponse(None, 404)
diff --git a/tangostationcontrol/tangostationcontrol/test/configuration/test_observation_settings.py b/tangostationcontrol/tangostationcontrol/test/configuration/test_observation_settings.py
index 718595392a635433082011d894dce5ff806a6cc3..2e63d0c0b8ae36e781371cc8e41ea98e739f8fd7 100644
--- a/tangostationcontrol/tangostationcontrol/test/configuration/test_observation_settings.py
+++ b/tangostationcontrol/tangostationcontrol/test/configuration/test_observation_settings.py
@@ -1,28 +1,24 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 from datetime import datetime
 from unittest import mock
 
 import requests
 from jsonschema.exceptions import ValidationError, RefResolutionError
-
 from tangostationcontrol.configuration import Pointing, ObservationSettings, Sap
 from tangostationcontrol.test import base
 from tangostationcontrol.test.configuration._mock_requests import mocked_requests_get
 
 
-@mock.patch('requests.get', side_effect=mocked_requests_get)
+@mock.patch("requests.get", side_effect=mocked_requests_get)
 class TestObservationSettings(base.TestCase):
     def test_from_json(self, _):
-        sut = ObservationSettings.from_json('{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", '
-                                            '"antenna_mask": [3, 2, 1], "filter": "filter_settings",'
-                                            '"SAPs": [{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}]}')
+        sut = ObservationSettings.from_json(
+            '{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", '
+            '"antenna_mask": [3, 2, 1], "filter": "filter_settings",'
+            '"SAPs": [{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}]}'
+        )
 
         self.assertEqual(sut.observation_id, 3)
         self.assertEqual(sut.stop_time, datetime.fromisoformat("2012-04-23T18:25:43"))
@@ -30,19 +26,23 @@ class TestObservationSettings(base.TestCase):
         self.assertEqual(sut.filter, "filter_settings")
         self.assertEqual(len(sut.SAPs), 1)
 
-        sut = ObservationSettings.from_json('{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", '
-                                            '"antenna_mask": [3, 2, 1], "filter": "filter_settings",'
-                                            '"SAPs": [{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}],'
-                                            '"tile_beam": {"angle1":2.2, "angle2": 3.1, "direction_type":"MOON"} }')
+        sut = ObservationSettings.from_json(
+            '{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", '
+            '"antenna_mask": [3, 2, 1], "filter": "filter_settings",'
+            '"SAPs": [{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}],'
+            '"tile_beam": {"angle1":2.2, "angle2": 3.1, "direction_type":"MOON"} }'
+        )
 
         self.assertEqual(sut.tile_beam.angle1, 2.2)
         self.assertEqual(sut.tile_beam.angle2, 3.1)
         self.assertEqual(sut.tile_beam.direction_type, "MOON")
 
-        sut = ObservationSettings.from_json('{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", '
-                                            '"antenna_mask": [3, 2, 1], "filter": "filter_settings",'
-                                            '"SAPs": [{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}],'
-                                            '"tile_beam": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}, "first_beamlet": 2}')
+        sut = ObservationSettings.from_json(
+            '{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", '
+            '"antenna_mask": [3, 2, 1], "filter": "filter_settings",'
+            '"SAPs": [{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}],'
+            '"tile_beam": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}, "first_beamlet": 2}'
+        )
 
         self.assertEqual(sut.first_beamlet, 2)
 
@@ -56,7 +56,7 @@ class TestObservationSettings(base.TestCase):
             '{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", "antenna_mask": [3, 2, 1], "filter": "filter_settings","SAPs": {"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}},"tile_beam": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}, "first_beamlet": 2}',
             # '{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", "antenna_mask": [3, 2, 1], "filter": "filter_settings","SAPs": [1],"tile_beam": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}, "first_beamlet": 2}',
             # '{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", "antenna_mask": [3, 2, 1], "filter": "filter_settings","SAPs": [{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}],"tile_beam": 1, "first_beamlet": 2}',
-            '{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", "antenna_mask": [3, 2, 1], "filter": "filter_settings","SAPs": [{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}],"tile_beam": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}, "first_beamlet": "2"}'
+            '{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", "antenna_mask": [3, 2, 1], "filter": "filter_settings","SAPs": [{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}],"tile_beam": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}, "first_beamlet": "2"}',
         ]:
             with self.assertRaises((ValidationError, ValueError)):
                 ObservationSettings.from_json(json)
@@ -69,32 +69,46 @@ class TestObservationSettings(base.TestCase):
             '{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", "antenna_mask": [], "filter": "filter_settings","SAPs": [{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}],"tile_beam": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}, "first_beamlet": 2}',
             '{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", "antenna_mask": [3, 2, 1], "SAPs": [{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}],"tile_beam": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}, "first_beamlet": 2}',
             '{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", "antenna_mask": [3, 2, 1], "filter": "filter_settings","tile_beam": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}, "first_beamlet": 2}',
-            '{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", "antenna_mask": [3, 2, 1], "filter": "filter_settings","SAPs": [],"tile_beam": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}, "first_beamlet": 2}'
+            '{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", "antenna_mask": [3, 2, 1], "filter": "filter_settings","SAPs": [],"tile_beam": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}, "first_beamlet": 2}',
         ]:
             with self.assertRaises((ValidationError, ValueError)):
                 ObservationSettings.from_json(json)
 
     def test_to_json(self, _):
-        sut = ObservationSettings(5, datetime.fromisoformat("2022-10-26T11:35:54.704150"), [3, 2, 1], "filter settings",
-                                  [Sap([3, 2], Pointing(1.2, 2.1, "LMN")),
-                                   Sap([1], Pointing(3.3, 4.4, "MOON"))])
-        self.assertEqual(sut.to_json(), '{"observation_id": 5, "stop_time": "2022-10-26T11:35:54.704150", ' \
-                                        '"antenna_mask": [3, 2, 1], "filter": "filter settings", "SAPs": ' \
-                                        '[{"subbands": [3, 2], "pointing": {"angle1": 1.2, "angle2": 2.1, ' \
-                                        '"direction_type": "LMN"}}, {"subbands": [1], "pointing": {"angle1": 3.3, ' \
-                                        '"angle2": 4.4, "direction_type": "MOON"}}], "first_beamlet": 0}')
+        sut = ObservationSettings(
+            5,
+            datetime.fromisoformat("2022-10-26T11:35:54.704150"),
+            [3, 2, 1],
+            "filter settings",
+            [
+                Sap([3, 2], Pointing(1.2, 2.1, "LMN")),
+                Sap([1], Pointing(3.3, 4.4, "MOON")),
+            ],
+        )
+        self.assertEqual(
+            sut.to_json(),
+            '{"observation_id": 5, "stop_time": "2022-10-26T11:35:54.704150", '
+            '"antenna_mask": [3, 2, 1], "filter": "filter settings", "SAPs": '
+            '[{"subbands": [3, 2], "pointing": {"angle1": 1.2, "angle2": 2.1, '
+            '"direction_type": "LMN"}}, {"subbands": [1], "pointing": {"angle1": 3.3, '
+            '"angle2": 4.4, "direction_type": "MOON"}}], "first_beamlet": 0}',
+        )
 
     def test_throw_exception_if_schema_not_available(self, mock_get):
         ObservationSettings.VALIDATOR = None
         mock_get.side_effect = requests.exceptions.Timeout
         with self.assertRaises(RefResolutionError):
-            ObservationSettings.from_json('{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", '
-                                          '"antenna_mask": [3, 2, 1], "filter": "filter_settings",'
-                                          '"SAPs": [{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}]}')
+            ObservationSettings.from_json(
+                '{"observation_id": 3, "stop_time": "2012-04-23T18:25:43", '
+                '"antenna_mask": [3, 2, 1], "filter": "filter_settings",'
+                '"SAPs": [{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}]}'
+            )
         self.assertEqual(5, mock_get.call_count)
 
     def test_throw_wrong_instance(self, _):
-        for json in ['{"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}',
-                     '{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}']:
+        for json in [
+            '{"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}',
+            '{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}',
+        ]:
             with self.assertRaises(ValidationError):
                 ObservationSettings.from_json(json)
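
The tests above pin down both directions of the ObservationSettings (de)serialisation contract. As a compact illustration, the sketch below round-trips a minimal settings object; the import path for ObservationSettings is assumed by analogy with the Pointing and Sap imports used in the neighbouring test modules, and from_json() is assumed to be able to resolve its JSON-schema references (the tests side-step that by patching requests.get with mocked_requests_get).

from datetime import datetime

from tangostationcontrol.configuration import ObservationSettings, Pointing, Sap

# Build a settings object positionally, as in test_to_json above.
settings = ObservationSettings(
    5,
    datetime.fromisoformat("2022-10-26T11:35:54.704150"),
    [3, 2, 1],
    "filter settings",
    [Sap([3, 2], Pointing(1.2, 2.1, "LMN"))],
)

# Serialise; first_beamlet is not given and so appears as 0 in the JSON.
as_json = settings.to_json()

# Parse it back; from_json validates against the JSON schema, which is why the
# tests replace requests.get with mocked_requests_get.
restored = ObservationSettings.from_json(as_json)
assert restored.observation_id == 5
assert restored.first_beamlet == 0
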
diff --git a/tangostationcontrol/tangostationcontrol/test/configuration/test_pointing.py b/tangostationcontrol/tangostationcontrol/test/configuration/test_pointing.py
index 7f1e3edfcbfb4622260042f6e6c8cbaf1adc4dda..0161f0a35bfba492605cb179db7237858286d98d 100644
--- a/tangostationcontrol/tangostationcontrol/test/configuration/test_pointing.py
+++ b/tangostationcontrol/tangostationcontrol/test/configuration/test_pointing.py
@@ -1,49 +1,47 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 from unittest import mock
 
 import requests
 from jsonschema.exceptions import ValidationError, RefResolutionError
-
 from tangostationcontrol.configuration import Pointing
 from tangostationcontrol.test import base
 from tangostationcontrol.test.configuration._mock_requests import mocked_requests_get
 
 
-@mock.patch('requests.get', side_effect=mocked_requests_get)
+@mock.patch("requests.get", side_effect=mocked_requests_get)
 class TestPointing(base.TestCase):
     def test_from_json(self, _):
         ps = Pointing.from_json('{"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}')
 
         self.assertEqual(1.2, ps.angle1)
         self.assertEqual(2.1, ps.angle2)
-        self.assertEqual('LMN', ps.direction_type)
+        self.assertEqual("LMN", ps.direction_type)
 
     def test_from_json_type_missmatch(self, _):
-        for json in ['{"angle1":"1.2", "angle2": 2.1, "direction_type":"LMN"}',
-                     '{"angle1":1.2, "angle2": "2.1", "direction_type":"LMN"}',
-                     '{"angle1":1.2, "angle2": 2.1, "direction_type":"ABC"}'
-                     ]:
+        for json in [
+            '{"angle1":"1.2", "angle2": 2.1, "direction_type":"LMN"}',
+            '{"angle1":1.2, "angle2": "2.1", "direction_type":"LMN"}',
+            '{"angle1":1.2, "angle2": 2.1, "direction_type":"ABC"}',
+        ]:
             with self.assertRaises(ValidationError):
                 Pointing.from_json(json)
 
     def test_from_json_missing_fields(self, _):
-        for json in ['{"angle2": 2.1, "direction_type":"LMN"}',
-                     '{"angle1":1.2, "direction_type":"LMN"}',
-                     '{"angle1":1.2, "angle2": 2.1}'
-                     ]:
+        for json in [
+            '{"angle2": 2.1, "direction_type":"LMN"}',
+            '{"angle1":1.2, "direction_type":"LMN"}',
+            '{"angle1":1.2, "angle2": 2.1}',
+        ]:
             with self.assertRaises(ValidationError):
                 Pointing.from_json(json)
 
     def test_to_json(self, _):
         ps = Pointing(1.3, 2.3, "URANUS")
-        self.assertEqual(ps.to_json(), '{"angle1": 1.3, "angle2": 2.3, "direction_type": "URANUS"}')
+        self.assertEqual(
+            ps.to_json(), '{"angle1": 1.3, "angle2": 2.3, "direction_type": "URANUS"}'
+        )
 
     def test_throw_exception_if_schema_not_available(self, mock_get):
         Pointing.VALIDATOR = None
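
This module, like the Sap and ObservationSettings tests, applies one class-level patch so that schema lookups never leave the process. A minimal sketch of that pattern for a hypothetical new test class (the class name and the asserted round-trip value are illustrative only):

from unittest import mock

from tangostationcontrol.configuration import Pointing
from tangostationcontrol.test import base
from tangostationcontrol.test.configuration._mock_requests import mocked_requests_get


# Every test method receives the mock for requests.get as its last argument.
@mock.patch("requests.get", side_effect=mocked_requests_get)
class ExamplePointingTest(base.TestCase):
    def test_round_trip(self, _):
        # from_json validates against the (mocked) schema; to_json serialises back.
        ps = Pointing.from_json('{"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}')
        self.assertEqual(
            ps.to_json(), '{"angle1": 1.2, "angle2": 2.1, "direction_type": "LMN"}'
        )
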
diff --git a/tangostationcontrol/tangostationcontrol/test/configuration/test_sap_settings.py b/tangostationcontrol/tangostationcontrol/test/configuration/test_sap_settings.py
index f457e963bf67afb9d9dd2b2d3f5d2ca47e5b9c76..c625f4ebfcaaa2faa80b2736b3584e66fd874e3b 100644
--- a/tangostationcontrol/tangostationcontrol/test/configuration/test_sap_settings.py
+++ b/tangostationcontrol/tangostationcontrol/test/configuration/test_sap_settings.py
@@ -1,63 +1,63 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 from unittest import mock
 
 import requests
 from jsonschema.exceptions import ValidationError, RefResolutionError
-
 from tangostationcontrol.configuration import Pointing, Sap
 from tangostationcontrol.test import base
 from tangostationcontrol.test.configuration._mock_requests import mocked_requests_get
 
 
-@mock.patch('requests.get', side_effect=mocked_requests_get)
+@mock.patch("requests.get", side_effect=mocked_requests_get)
 class TestSapSettings(base.TestCase):
     def test_from_json(self, _):
         sap = Sap.from_json(
-                '{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}')
+            '{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}'
+        )
 
         self.assertEqual(sap.subbands, [3, 2, 1])
         self.assertEqual(sap.pointing.angle1, 1.2)
         self.assertEqual(sap.pointing.angle2, 2.1)
-        self.assertEqual(sap.pointing.direction_type, 'LMN')
+        self.assertEqual(sap.pointing.direction_type, "LMN")
 
     def test_from_json_type_missmatch(self, _):
-        for json in ['{"subbands": ["3", 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}',
-                     '{"subbands": "3", "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}',
-                     '{"subbands": 3, "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}',
-                     '{"subbands": 3, "pointing": {"angle1":"1.2", "angle2": 2.1, "direction_type":"LMN"}}',
-                     '{"subbands": "3", "pointing": "test"}',
-                     '{"angle1":"1.2", "angle2": 2.1, "direction_type":"LMN"}'
-                     ]:
+        for json in [
+            '{"subbands": ["3", 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}',
+            '{"subbands": "3", "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}',
+            '{"subbands": 3, "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}',
+            '{"subbands": 3, "pointing": {"angle1":"1.2", "angle2": 2.1, "direction_type":"LMN"}}',
+            '{"subbands": "3", "pointing": "test"}',
+            '{"angle1":"1.2", "angle2": 2.1, "direction_type":"LMN"}',
+        ]:
             with self.assertRaises(ValidationError):
                 Sap.from_json(json)
 
     def test_from_json_missing_fields(self, _):
-        for json in ['{"subbands": [], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}',
-                     '{"pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}',
-                     '{"subbands": [1], "pointing": {"angle2": 2.1, "direction_type":"LMN"}}',
-                     '{"subbands": [1]}'
-                     ]:
+        for json in [
+            '{"subbands": [], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}',
+            '{"pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}',
+            '{"subbands": [1], "pointing": {"angle2": 2.1, "direction_type":"LMN"}}',
+            '{"subbands": [1]}',
+        ]:
             with self.assertRaises(ValidationError):
                 Sap.from_json(json)
 
     def test_to_json(self, _):
         sut = Sap([3, 2, 1], Pointing(1.3, 2.3, "URANUS"))
-        self.assertEqual(sut.to_json(),
-                         '{"subbands": [3, 2, 1], "pointing": {"angle1": 1.3, "angle2": 2.3, "direction_type": "URANUS"}}')
+        self.assertEqual(
+            sut.to_json(),
+            '{"subbands": [3, 2, 1], "pointing": {"angle1": 1.3, "angle2": 2.3, "direction_type": "URANUS"}}',
+        )
 
     def test_throw_exception_if_schema_not_available(self, mock_get):
         Sap.VALIDATOR = None
         mock_get.side_effect = requests.exceptions.Timeout
         with self.assertRaises(RefResolutionError):
             Sap.from_json(
-                    '{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}')
+                '{"subbands": [3, 2, 1], "pointing": {"angle1":1.2, "angle2": 2.1, "direction_type":"LMN"}}'
+            )
         self.assertEqual(5, mock_get.call_count)
 
     def test_throw_wrong_instance(self, _):
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/__init__.py b/tangostationcontrol/tangostationcontrol/test/devices/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/automatic_polling_performance_test/monitoring_performance_test.py b/tangostationcontrol/tangostationcontrol/test/devices/automatic_polling_performance_test/monitoring_performance_test.py
index 820dc64cc42c8f47448be8a84153a16d95fbd731..7263bc3474393bb2cdd20ad716b410c4c66d877e 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/automatic_polling_performance_test/monitoring_performance_test.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/automatic_polling_performance_test/monitoring_performance_test.py
@@ -1,14 +1,10 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR2.0 project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 # TODO(Corne): Remove sys.path.append hack once packaging is in place!
-import os, sys
+import os
+import sys
+
 currentdir = os.path.dirname(os.path.realpath(__file__))
 parentdir = os.path.dirname(currentdir)
 parentdir = os.path.dirname(parentdir)
@@ -27,65 +23,74 @@ ARRAY_SIZE = 2000000
 
 class MonitoringPerformanceDevice(Device):
     global ARRAY_SIZE
+
     def read_array(self):
         print("{} {}".format(time.time(), self.get_name()))
         return self._array
 
     array1_r = attribute(
-        dtype = (numpy.double,),
-        max_dim_x = ARRAY_SIZE,
-        period = 1000,
-        rel_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        fget = read_array,
+        dtype=(numpy.double,),
+        max_dim_x=ARRAY_SIZE,
+        period=1000,
+        rel_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        fget=read_array,
     )
 
     array2_r = attribute(
-        dtype = (numpy.double,),
-        max_dim_x = ARRAY_SIZE,
-        period = 1000,
-        rel_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        fget = read_array,
+        dtype=(numpy.double,),
+        max_dim_x=ARRAY_SIZE,
+        period=1000,
+        rel_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        fget=read_array,
     )
 
     array3_r = attribute(
-        dtype = (numpy.double,),
-        max_dim_x = ARRAY_SIZE,
-        period = 1000,
-        rel_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        fget = read_array,
+        dtype=(numpy.double,),
+        max_dim_x=ARRAY_SIZE,
+        period=1000,
+        rel_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        fget=read_array,
     )
 
     array4_r = attribute(
-        dtype = (numpy.double,),
-        max_dim_x = ARRAY_SIZE,
-        period = 1000,
-        rel_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        fget = read_array,
+        dtype=(numpy.double,),
+        max_dim_x=ARRAY_SIZE,
+        period=1000,
+        rel_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        fget=read_array,
     )
 
     def init_device(self):
         Device.init_device(self)
 
         util = Util.instance()
-        print("Current polling thread pool size = {}".format(util.get_polling_threads_pool_size()))
+        print(
+            "Current polling thread pool size = {}".format(
+                util.get_polling_threads_pool_size()
+            )
+        )
         util.set_polling_threads_pool_size(POLLING_THREADS)
-        print("New polling thread pool size = {}".format(util.get_polling_threads_pool_size()))
+        print(
+            "New polling thread pool size = {}".format(
+                util.get_polling_threads_pool_size()
+            )
+        )
         print("Array size = {}".format(ARRAY_SIZE))
 
         self.set_state(DevState.OFF)
@@ -113,8 +118,10 @@ class MonitoringPerformanceDevice(Device):
     def delete_device(self):
         self.set_state(DevState.OFF)
 
-def main(args = None, **kwargs):
-    return run((MonitoringPerformanceDevice, ), args = args, **kwargs)
 
-if __name__ == '__main__':
+def main(args=None, **kwargs):
+    return run((MonitoringPerformanceDevice,), args=args, **kwargs)
+
+
+if __name__ == "__main__":
     main()
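
For scale: each of the four polled array attributes above can carry up to ARRAY_SIZE = 2000000 doubles, so a full read of all four attributes touches on the order of 64 MB. A quick back-of-the-envelope check in plain numpy:

import numpy

ARRAY_SIZE = 2000000  # matches the constant used by MonitoringPerformanceDevice

bytes_per_attribute = numpy.zeros(ARRAY_SIZE, dtype=numpy.double).nbytes
print(bytes_per_attribute)      # 16000000 bytes, roughly 15.3 MiB
print(4 * bytes_per_attribute)  # the four array attributes defined above
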
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/device_base.py b/tangostationcontrol/tangostationcontrol/test/devices/device_base.py
index 85c8c908ba03a93a137b30a19b07208cec99094e..22f8155e20bf3820f3a9ebcadf2e55c13a53fcef 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/device_base.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/device_base.py
@@ -1,18 +1,10 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
+import mock
 from tangostationcontrol.devices import lofar_device
-
 from tangostationcontrol.test import base
 
-import mock
-
 
 class DeviceTestCase(base.TestCase):
     """BaseClass for device test cases to perform common DeviceProxy patching
@@ -30,7 +22,6 @@ class DeviceTestCase(base.TestCase):
             self.device_proxy_patch(device)
 
     def device_proxy_patch(self, device):
-        proxy_patcher = mock.patch.object(
-            device, 'DeviceProxy')
+        proxy_patcher = mock.patch.object(device, "DeviceProxy")
         proxy_patcher.start()
         self.addCleanup(proxy_patcher.stop)
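
DeviceTestCase centralises one piece of plumbing: replacing the DeviceProxy symbol in each device module with a mock so that no Tango database is contacted. In isolation, and assuming the device module (here lofar_device, as imported above) exposes DeviceProxy at module level, the pattern amounts to the following sketch:

import unittest
from unittest import mock

from tangostationcontrol.devices import lofar_device


class ExampleDeviceTest(unittest.TestCase):
    def setUp(self):
        super().setUp()
        # Swap the module-level DeviceProxy for a MagicMock.
        proxy_patcher = mock.patch.object(lofar_device, "DeviceProxy")
        self.device_proxy_mock = proxy_patcher.start()
        # Ensure the patch is undone even when a test fails.
        self.addCleanup(proxy_patcher.stop)
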
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/random_data.py b/tangostationcontrol/tangostationcontrol/test/devices/random_data.py
index 73499090165a2a8a3564a619eaa9ca1f328090ed..96c56bb60f0c8d3b4cc828152502b71634581287 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/random_data.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/random_data.py
@@ -1,25 +1,21 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR2.0 project
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
+from numpy import random, double
 
 # PyTango imports
 from tango import DevState
 from tango.server import run, Device, attribute
-from numpy import random, double
 
 __all__ = ["RandomData", "main"]
 
+
 class RandomData(Device):
     """
     Random data monitor point device
     """
 
-    DIM_ARRAY = 1024 # x-axis dimension of a random values array
+    DIM_ARRAY = 1024  # x-axis dimension of a random values array
 
     def read(self):
         return random.random()
@@ -29,383 +25,383 @@ class RandomData(Device):
 
     # Attributes
     rnd1 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd2 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd3 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd4 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd5 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd6 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd7 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd8 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd9 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd10 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd11 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd12 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd13 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd14 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd15 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd16 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd17 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd18 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd19 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd20 = attribute(
-        dtype = 'DevDouble',
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read,
+        dtype="DevDouble",
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read,
     )
 
     rnd21 = attribute(
-        dtype = ('DevDouble',),
-        max_dim_x =  DIM_ARRAY,
-        max_dim_y = 1,
-        polling_period = 1000,
-        period = 1000,
-        rel_change = 0.1,
-        abs_change = 0.1,
-        archive_period = 1000,
-        archive_rel_change = 0.1,
-        archive_abs_change = 0.1,
-        max_value = 1.0,
-        min_value = 0.0,
-        max_alarm = 1.0,
-        min_alarm = 0.99,
-        max_warning = 0.99,
-        min_warning = 0.98,
-        fget = read_array,
+        dtype=("DevDouble",),
+        max_dim_x=DIM_ARRAY,
+        max_dim_y=1,
+        polling_period=1000,
+        period=1000,
+        rel_change=0.1,
+        abs_change=0.1,
+        archive_period=1000,
+        archive_rel_change=0.1,
+        archive_abs_change=0.1,
+        max_value=1.0,
+        min_value=0.0,
+        max_alarm=1.0,
+        min_alarm=0.99,
+        max_warning=0.99,
+        min_warning=0.98,
+        fget=read_array,
     )
 
     # General methods
@@ -485,8 +481,8 @@ class RandomData(Device):
         self.set_state(DevState.OFF)
 
 
-def main(args = None, **kwargs):
+def main(args=None, **kwargs):
     """
     Main function of the RandomData module.
     """
-    return run((RandomData,), args = args, **kwargs)
+    return run((RandomData,), args=args, **kwargs)
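
The twenty scalar attributes above (rnd1 through rnd20) share exactly the same keyword set and differ only in name. Purely for reference, that shared configuration can be written down once; this is an illustrative sketch (hypothetical helper, not part of the device above):

# The alarm/warning/archive configuration repeated for rnd1..rnd20; the periods
# are in milliseconds, following Tango's attribute configuration conventions.
RND_ATTR_KWARGS = dict(
    dtype="DevDouble",
    polling_period=1000,
    period=1000,
    rel_change=0.1,
    abs_change=0.1,
    archive_period=1000,
    archive_rel_change=0.1,
    archive_abs_change=0.1,
    max_value=1.0,
    min_value=0.0,
    max_alarm=1.0,
    min_alarm=0.99,
    max_warning=0.99,
    min_warning=0.98,
)

# Inside a Device subclass each attribute would then reduce to, e.g.:
#     rnd1 = attribute(fget=read, **RND_ATTR_KWARGS)
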
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_antennafield_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_antennafield_device.py
index 1b08a0d86453fecbd6d512ae5924680932291588..5fe8445b016a335ee90059379c7599652ef7561a 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_antennafield_device.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_antennafield_device.py
@@ -12,7 +12,8 @@ from tangostationcontrol.common.constants import MAX_ANTENNA, N_rcu, DEFAULT_N_H
 from tangostationcontrol.devices import antennafield
 from tangostationcontrol.devices.antennafield import (
     AntennaToRecvMapper,
-    AntennaQuality, AntennaUse
+    AntennaQuality,
+    AntennaUse,
 )
 from tangostationcontrol.test import base
 from tangostationcontrol.test.devices import device_base
@@ -30,29 +31,38 @@ class TestAntennaToRecvMapper(base.TestCase):
     # The first Antenna control line on RCU 1 and the second Antenna control line
     # on RCU 0.
     POWER_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1 = [[1, 1], [1, 0]] + [[-1, -1]] * (
-            DEFAULT_N_HBA_TILES - 2)
+        DEFAULT_N_HBA_TILES - 2
+    )
     CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1 = [[1, 1], [1, 0]] + [[-1, -1]] * (
-            DEFAULT_N_HBA_TILES - 2)
+        DEFAULT_N_HBA_TILES - 2
+    )
 
     def test_ant_read_mask_r_no_mapping(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
         )
 
-        receiver_values = [[False] * MAX_ANTENNA, [False] * MAX_ANTENNA,
-                           [False] * MAX_ANTENNA]
+        receiver_values = [
+            [False] * MAX_ANTENNA,
+            [False] * MAX_ANTENNA,
+            [False] * MAX_ANTENNA,
+        ]
         expected = [False] * DEFAULT_N_HBA_TILES
         actual = mapper.map_read("ANT_mask_RW", receiver_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_ant_read_mask_r_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
-                self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            self.POWER_NOT_CONNECTED,
+            3,
         )
 
-        receiver_values = [[False, True, False] + [False, False, False] * (N_rcu - 1),
-                           [False] * MAX_ANTENNA, [False] * MAX_ANTENNA]
+        receiver_values = [
+            [False, True, False] + [False, False, False] * (N_rcu - 1),
+            [False] * MAX_ANTENNA,
+            [False] * MAX_ANTENNA,
+        ]
         expected = [True, False] + [False] * (DEFAULT_N_HBA_TILES - 2)
         actual = mapper.map_read("ANT_mask_RW", receiver_values)
 
@@ -60,7 +70,7 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_rcu_band_select_no_mapping(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
         )
         receiver_values = [[0] * MAX_ANTENNA, [0] * MAX_ANTENNA, [0] * MAX_ANTENNA]
         expected = [0] * DEFAULT_N_HBA_TILES
@@ -69,221 +79,271 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_bf_read_delay_steps_r_no_mapping(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
         )
 
-        receiver_values = [[[0] * N_rcu] * MAX_ANTENNA, [[0] * N_rcu] * MAX_ANTENNA,
-                           [[0] * N_rcu] * MAX_ANTENNA]
+        receiver_values = [
+            [[0] * N_rcu] * MAX_ANTENNA,
+            [[0] * N_rcu] * MAX_ANTENNA,
+            [[0] * N_rcu] * MAX_ANTENNA,
+        ]
         expected = [[0] * N_rcu] * DEFAULT_N_HBA_TILES
         actual = mapper.map_read("HBAT_BF_delay_steps_R", receiver_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_bf_read_delay_steps_r_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
-                self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            self.POWER_NOT_CONNECTED,
+            3,
         )
 
         receiver_values = [
             [[2] * N_rcu, [1] * N_rcu] + [[0] * N_rcu] * (MAX_ANTENNA - 2),
-            [[0] * N_rcu] * MAX_ANTENNA, [[0] * N_rcu] * MAX_ANTENNA]
+            [[0] * N_rcu] * MAX_ANTENNA,
+            [[0] * N_rcu] * MAX_ANTENNA,
+        ]
         expected = [[1] * N_rcu, [2] * N_rcu] + [[0] * N_rcu] * (
-                DEFAULT_N_HBA_TILES - 2)
+            DEFAULT_N_HBA_TILES - 2
+        )
         actual = mapper.map_read("HBAT_BF_delay_steps_R", receiver_values)
 
         numpy.testing.assert_equal(expected, actual)
 
     def test_bf_read_delay_steps_rw_no_mapping(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
         )
 
-        receiver_values = [[[0] * N_rcu] * MAX_ANTENNA, [[0] * N_rcu] * MAX_ANTENNA,
-                           [[0] * N_rcu] * MAX_ANTENNA]
+        receiver_values = [
+            [[0] * N_rcu] * MAX_ANTENNA,
+            [[0] * N_rcu] * MAX_ANTENNA,
+            [[0] * N_rcu] * MAX_ANTENNA,
+        ]
         expected = [[0] * N_rcu] * DEFAULT_N_HBA_TILES
         actual = mapper.map_read("HBAT_BF_delay_steps_RW", receiver_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_bf_read_delay_steps_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
-                self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            self.POWER_NOT_CONNECTED,
+            3,
         )
 
         receiver_values = [
             [[2] * N_rcu, [1] * N_rcu] + [[0] * N_rcu] * (MAX_ANTENNA - 2),
-            [[0] * N_rcu] * MAX_ANTENNA, [[0] * N_rcu] * MAX_ANTENNA]
+            [[0] * N_rcu] * MAX_ANTENNA,
+            [[0] * N_rcu] * MAX_ANTENNA,
+        ]
         expected = [[1] * N_rcu, [2] * N_rcu] + [[0] * N_rcu] * (
-                DEFAULT_N_HBA_TILES - 2)
+            DEFAULT_N_HBA_TILES - 2
+        )
         actual = mapper.map_read("HBAT_BF_delay_steps_RW", receiver_values)
 
         numpy.testing.assert_equal(expected, actual)
 
     def test_map_read_led_on_r_unmapped(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
         )
 
-        receiver_values = [[[False] * N_rcu] * MAX_ANTENNA,
-                           [[False] * N_rcu] * MAX_ANTENNA,
-                           [[False] * N_rcu] * MAX_ANTENNA]
+        receiver_values = [
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+        ]
         expected = [[False] * N_rcu] * DEFAULT_N_HBA_TILES
         actual = mapper.map_read("HBAT_LED_on_R", receiver_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_map_read_led_on_r_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
-                self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            self.POWER_NOT_CONNECTED,
+            3,
         )
 
         receiver_values = [
-            [[False, True] * 16, [True, False] * 16] + [[False] * N_rcu] * (
-                    MAX_ANTENNA - 2), [[False] * N_rcu] * MAX_ANTENNA,
-            [[False] * N_rcu] * MAX_ANTENNA]
+            [[False, True] * 16, [True, False] * 16]
+            + [[False] * N_rcu] * (MAX_ANTENNA - 2),
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+        ]
 
         expected = [[True, False] * 16, [False, True] * 16] + [[False] * N_rcu] * (
-                DEFAULT_N_HBA_TILES - 2)
+            DEFAULT_N_HBA_TILES - 2
+        )
         actual = mapper.map_read("HBAT_LED_on_R", receiver_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_map_read_led_on_rw_unmapped(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
         )
 
-        receiver_values = [[[False] * N_rcu] * MAX_ANTENNA,
-                           [[False] * N_rcu] * MAX_ANTENNA,
-                           [[False] * N_rcu] * MAX_ANTENNA]
+        receiver_values = [
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+        ]
         expected = [[False] * N_rcu] * DEFAULT_N_HBA_TILES
         actual = mapper.map_read("HBAT_LED_on_RW", receiver_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_map_read_led_on_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
-                self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            self.POWER_NOT_CONNECTED,
+            3,
         )
 
         receiver_values = [
-            [[False, True] * 16, [True, False] * 16] + [[False] * N_rcu] * (
-                    MAX_ANTENNA - 2), [[False] * N_rcu] * MAX_ANTENNA,
-            [[False] * N_rcu] * MAX_ANTENNA]
+            [[False, True] * 16, [True, False] * 16]
+            + [[False] * N_rcu] * (MAX_ANTENNA - 2),
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+        ]
 
         expected = [[True, False] * 16, [False, True] * 16] + [[False] * N_rcu] * (
-                DEFAULT_N_HBA_TILES - 2)
+            DEFAULT_N_HBA_TILES - 2
+        )
         actual = mapper.map_read("HBAT_LED_on_RW", receiver_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_map_read_pwr_lna_on_r_unmapped(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
         )
 
-        receiver_values = [[[False] * N_rcu] * MAX_ANTENNA,
-                           [[False] * N_rcu] * MAX_ANTENNA,
-                           [[False] * N_rcu] * MAX_ANTENNA]
+        receiver_values = [
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+        ]
         expected = [[False] * N_rcu] * DEFAULT_N_HBA_TILES
         actual = mapper.map_read("HBAT_PWR_LNA_on_R", receiver_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_map_read_pwr_lna_on_r_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
-                self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            self.POWER_NOT_CONNECTED,
+            3,
         )
 
         receiver_values = [
-            [[False, True] * 16, [True, False] * 16] + [[False] * N_rcu] * (
-                    MAX_ANTENNA - 2), [[False] * N_rcu] * MAX_ANTENNA,
-            [[False] * N_rcu] * MAX_ANTENNA]
+            [[False, True] * 16, [True, False] * 16]
+            + [[False] * N_rcu] * (MAX_ANTENNA - 2),
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+        ]
 
         expected = [[True, False] * 16, [False, True] * 16] + [[False] * N_rcu] * (
-                DEFAULT_N_HBA_TILES - 2)
+            DEFAULT_N_HBA_TILES - 2
+        )
         actual = mapper.map_read("HBAT_PWR_LNA_on_R", receiver_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_map_read_pwr_lna_on_rw_unmapped(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
         )
 
-        receiver_values = [[[False] * N_rcu] * MAX_ANTENNA,
-                           [[False] * N_rcu] * MAX_ANTENNA,
-                           [[False] * N_rcu] * MAX_ANTENNA]
+        receiver_values = [
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+        ]
         expected = [[False] * N_rcu] * DEFAULT_N_HBA_TILES
         actual = mapper.map_read("HBAT_PWR_LNA_on_RW", receiver_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_map_read_pwr_lna_on_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
-                self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            self.POWER_NOT_CONNECTED,
+            3,
         )
 
         receiver_values = [
-            [[False, True] * 16, [True, False] * 16] + [[False] * N_rcu] * (
-                    MAX_ANTENNA - 2), [[False] * N_rcu] * MAX_ANTENNA,
-            [[False] * N_rcu] * MAX_ANTENNA]
+            [[False, True] * 16, [True, False] * 16]
+            + [[False] * N_rcu] * (MAX_ANTENNA - 2),
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+        ]
 
         expected = [[True, False] * 16, [False, True] * 16] + [[False] * N_rcu] * (
-                DEFAULT_N_HBA_TILES - 2)
+            DEFAULT_N_HBA_TILES - 2
+        )
         actual = mapper.map_read("HBAT_PWR_LNA_on_RW", receiver_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_map_read_pwr_on_r_unmapped(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
         )
 
-        receiver_values = [[[False] * N_rcu] * MAX_ANTENNA,
-                           [[False] * N_rcu] * MAX_ANTENNA,
-                           [[False] * N_rcu] * MAX_ANTENNA]
+        receiver_values = [
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+        ]
         expected = [[False] * N_rcu] * DEFAULT_N_HBA_TILES
         actual = mapper.map_read("HBAT_PWR_on_R", receiver_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_map_read_pwr_on_r_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
-                self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            self.POWER_NOT_CONNECTED,
+            3,
         )
 
         receiver_values = [
-            [[False, True] * 16, [True, False] * 16] + [[False] * N_rcu] * (
-                    MAX_ANTENNA - 2), [[False] * N_rcu] * MAX_ANTENNA,
-            [[False] * N_rcu] * MAX_ANTENNA]
+            [[False, True] * 16, [True, False] * 16]
+            + [[False] * N_rcu] * (MAX_ANTENNA - 2),
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+        ]
 
         expected = [[True, False] * 16, [False, True] * 16] + [[False] * N_rcu] * (
-                DEFAULT_N_HBA_TILES - 2)
+            DEFAULT_N_HBA_TILES - 2
+        )
         actual = mapper.map_read("HBAT_PWR_on_R", receiver_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_map_read_pwr_on_rw_unmapped(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 3
         )
 
-        receiver_values = [[[False] * N_rcu] * MAX_ANTENNA,
-                           [[False] * N_rcu] * MAX_ANTENNA,
-                           [[False] * N_rcu] * MAX_ANTENNA]
+        receiver_values = [
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+        ]
         expected = [[False] * N_rcu] * DEFAULT_N_HBA_TILES
         actual = mapper.map_read("HBAT_PWR_on_RW", receiver_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_map_read_pwr_on_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
-                self.POWER_NOT_CONNECTED, 3
+            self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            self.POWER_NOT_CONNECTED,
+            3,
         )
 
         receiver_values = [
-            [[False, True] * 16, [True, False] * 16] + [[False] * N_rcu] * (
-                    MAX_ANTENNA - 2), [[False] * N_rcu] * MAX_ANTENNA,
-            [[False] * N_rcu] * MAX_ANTENNA]
+            [[False, True] * 16, [True, False] * 16]
+            + [[False] * N_rcu] * (MAX_ANTENNA - 2),
+            [[False] * N_rcu] * MAX_ANTENNA,
+            [[False] * N_rcu] * MAX_ANTENNA,
+        ]
 
         expected = [[True, False] * 16, [False, True] * 16] + [[False] * N_rcu] * (
-                DEFAULT_N_HBA_TILES - 2)
+            DEFAULT_N_HBA_TILES - 2
+        )
         actual = mapper.map_read("HBAT_PWR_on_RW", receiver_values)
         numpy.testing.assert_equal(expected, actual)
 
@@ -293,7 +353,7 @@ class TestAntennaToRecvMapper(base.TestCase):
         """Verify results None without control and array sizes"""
 
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 1
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 1
         )
 
         set_values = [None] * DEFAULT_N_HBA_TILES
@@ -305,7 +365,7 @@ class TestAntennaToRecvMapper(base.TestCase):
         """Verify results None without control and array sizes"""
 
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 2
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 2
         )
 
         set_values = [None] * DEFAULT_N_HBA_TILES
@@ -315,8 +375,9 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_ant_mask_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
-                self.POWER_NOT_CONNECTED, 1
+            self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            self.POWER_NOT_CONNECTED,
+            1,
         )
 
         set_values = [True, False] + [None] * (DEFAULT_N_HBA_TILES - 2)
@@ -326,7 +387,7 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_rcu_pwr_ant_on_no_mapping_and_one_receiver(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 1
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 1
         )
 
         set_values = [None] * DEFAULT_N_HBA_TILES
@@ -336,7 +397,7 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_rcu_pwr_ant_on_no_mapping_and_two_receivers(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 2
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 2
         )
 
         set_values = [None] * DEFAULT_N_HBA_TILES
@@ -346,8 +407,9 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_rcu_pwr_ant_on_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED,
-                self.POWER_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1, 1
+            self.CONTROL_NOT_CONNECTED,
+            self.POWER_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            1,
         )
 
         set_values = [1, 0] + [None] * (DEFAULT_N_HBA_TILES - 2)
@@ -357,7 +419,7 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_rcu_band_select_no_mapping_and_one_receiver(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 1
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 1
         )
 
         set_values = [None] * DEFAULT_N_HBA_TILES
@@ -367,7 +429,7 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_rcu_band_select_no_mapping_and_two_receivers(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 2
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 2
         )
 
         set_values = [None] * DEFAULT_N_HBA_TILES
@@ -377,8 +439,9 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_rcu_band_select_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
-                self.POWER_NOT_CONNECTED, 1
+            self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            self.POWER_NOT_CONNECTED,
+            1,
         )
 
         set_values = [1, 0] + [None] * (DEFAULT_N_HBA_TILES - 2)
@@ -388,7 +451,7 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_bf_delay_steps_rw_no_mapping_and_one_receiver(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 1
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 1
         )
 
         set_values = [[1] * N_rcu] * DEFAULT_N_HBA_TILES
@@ -398,7 +461,7 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_bf_delay_steps_rw_no_mapping_and_two_receivers(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 2
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 2
         )
 
         set_values = [[1] * N_rcu] * DEFAULT_N_HBA_TILES
@@ -408,19 +471,21 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_bf_delay_steps_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
-                self.POWER_NOT_CONNECTED, 1
+            self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            self.POWER_NOT_CONNECTED,
+            1,
         )
 
         set_values = [[1] * N_rcu, [2] * N_rcu] + [[None] * N_rcu] * (
-                DEFAULT_N_HBA_TILES - 2)
+            DEFAULT_N_HBA_TILES - 2
+        )
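+        # Writes map back the other way: tile 0 targets RCU 1 and tile 1 targets
+        # RCU 0 of the single receiver, so the receiver rows below are swapped.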
         expected = [[[2] * N_rcu, [1] * N_rcu] + [[None] * N_rcu] * (MAX_ANTENNA - 2)]
         actual = mapper.map_write("HBAT_BF_delay_steps_RW", set_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_map_write_led_on_rw_no_mapping_and_one_receiver(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 1
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 1
         )
 
         set_values = [[None] * N_rcu] * DEFAULT_N_HBA_TILES
@@ -430,7 +495,7 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_led_on_rw_no_mapping_and_two_receivers(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 2
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 2
         )
 
         set_values = [[None] * N_rcu] * DEFAULT_N_HBA_TILES
@@ -440,20 +505,24 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_led_on_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
-                self.POWER_NOT_CONNECTED, 1
+            self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            self.POWER_NOT_CONNECTED,
+            1,
         )
 
         set_values = [[False, True] * 16, [True, False] * 16] + [[None] * N_rcu] * (
-                DEFAULT_N_HBA_TILES - 2)
-        expected = [[[True, False] * 16, [False, True] * 16] + [[None] * N_rcu] * (
-                MAX_ANTENNA - 2)]
+            DEFAULT_N_HBA_TILES - 2
+        )
+        expected = [
+            [[True, False] * 16, [False, True] * 16]
+            + [[None] * N_rcu] * (MAX_ANTENNA - 2)
+        ]
         actual = mapper.map_write("HBAT_LED_on_RW", set_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_map_write_pwr_lna_on_rw_no_mapping_and_one_receiver(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 1
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 1
         )
 
         set_values = [[None] * N_rcu] * DEFAULT_N_HBA_TILES
@@ -463,7 +532,7 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_pwr_lna_on_rw_no_mapping_and_two_receivers(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 2
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 2
         )
 
         set_values = [[None] * N_rcu] * DEFAULT_N_HBA_TILES
@@ -473,20 +542,24 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_pwr_lna_on_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
-                self.POWER_NOT_CONNECTED, 1
+            self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            self.POWER_NOT_CONNECTED,
+            1,
         )
 
         set_values = [[False, True] * 16, [True, False] * 16] + [[None] * N_rcu] * (
-                DEFAULT_N_HBA_TILES - 2)
-        expected = [[[True, False] * 16, [False, True] * 16] + [[None] * N_rcu] * (
-                MAX_ANTENNA - 2)]
+            DEFAULT_N_HBA_TILES - 2
+        )
+        expected = [
+            [[True, False] * 16, [False, True] * 16]
+            + [[None] * N_rcu] * (MAX_ANTENNA - 2)
+        ]
         actual = mapper.map_write("HBAT_PWR_LNA_on_RW", set_values)
         numpy.testing.assert_equal(expected, actual)
 
     def test_map_write_pwr_on_rw_no_mapping_and_one_receiver(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 1
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 1
         )
         set_values = [[None] * N_rcu] * DEFAULT_N_HBA_TILES
         expected = [[[None] * N_rcu] * MAX_ANTENNA]
@@ -495,7 +568,7 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_lna_on_rw_no_mapping_and_two_receivers(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 2
+            self.CONTROL_NOT_CONNECTED, self.POWER_NOT_CONNECTED, 2
         )
 
         set_values = [[None] * N_rcu] * DEFAULT_N_HBA_TILES
@@ -505,14 +578,18 @@ class TestAntennaToRecvMapper(base.TestCase):
 
     def test_map_write_pwr_on_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
         mapper = AntennaToRecvMapper(
-                self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
-                self.POWER_NOT_CONNECTED, 1
+            self.CONTROL_HBA_0_AND_1_ON_RCU_1_AND_0_OF_RECV_1,
+            self.POWER_NOT_CONNECTED,
+            1,
         )
 
         set_values = [[False, True] * 16, [True, False] * 16] + [[None] * N_rcu] * (
-                DEFAULT_N_HBA_TILES - 2)
-        expected = [[[True, False] * 16, [False, True] * 16] + [[None] * N_rcu] * (
-                MAX_ANTENNA - 2)]
+            DEFAULT_N_HBA_TILES - 2
+        )
+        expected = [
+            [[True, False] * 16, [False, True] * 16]
+            + [[None] * N_rcu] * (MAX_ANTENNA - 2)
+        ]
         actual = mapper.map_write("HBAT_PWR_on_RW", set_values)
         numpy.testing.assert_equal(expected, actual)
 
@@ -521,11 +598,11 @@ class TestAntennafieldDevice(device_base.DeviceTestCase):
 
     # some dummy values for mandatory properties
     AT_PROPERTIES = {
-        'OPC_Server_Name'             : 'example.com',
-        'OPC_Server_Port'             : 4840,
-        'OPC_Time_Out'                : 5.0,
-        'Antenna_Field_Reference_ITRF': [3.0, 3.0, 3.0],
-        'Antenna_Field_Reference_ETRS': [7.0, 7.0, 7.0],
+        "OPC_Server_Name": "example.com",
+        "OPC_Server_Port": 4840,
+        "OPC_Time_Out": 5.0,
+        "Antenna_Field_Reference_ITRF": [3.0, 3.0, 3.0],
+        "Antenna_Field_Reference_ETRS": [7.0, 7.0, 7.0],
     }
 
     def setUp(self):
@@ -536,81 +613,88 @@ class TestAntennafieldDevice(device_base.DeviceTestCase):
         """Verify if Antenna coordinates are correctly provided"""
         # Device uses ITRF coordinates by default
         with DeviceTestContext(
-                antennafield.AntennaField, properties=self.AT_PROPERTIES, process=True
+            antennafield.AntennaField, properties=self.AT_PROPERTIES, process=True
         ) as proxy:
             self.assertEqual(3.0, proxy.Antenna_Field_Reference_ITRF_R[0])
         # Device derives coordinates from ETRS if ITRF ones are not found
         at_properties_v2 = {
-            'OPC_Server_Name': 'example.com', 'OPC_Server_Port': 4840,
-            'OPC_Time_Out'   : 5.0, 'Antenna_Field_Reference_ETRS': [7.0, 7.0, 7.0]
+            "OPC_Server_Name": "example.com",
+            "OPC_Server_Port": 4840,
+            "OPC_Time_Out": 5.0,
+            "Antenna_Field_Reference_ETRS": [7.0, 7.0, 7.0],
         }
         with DeviceTestContext(
-                antennafield.AntennaField, properties=at_properties_v2, process=True
+            antennafield.AntennaField, properties=at_properties_v2, process=True
         ) as proxy:
             self.assertNotEqual(
-                    3.0, proxy.Antenna_Field_Reference_ITRF_R[0]
+                3.0, proxy.Antenna_Field_Reference_ITRF_R[0]
             )  # value = 6.948998835785814
 
     def test_read_Antenna_Quality(self):
-        """ Verify if Antenna_Quality_R is correctly retrieved """
+        """Verify if Antenna_Quality_R is correctly retrieved"""
         antenna_qualities = numpy.array([AntennaQuality.OK] * MAX_ANTENNA)
         with DeviceTestContext(
-                antennafield.AntennaField, properties=self.AT_PROPERTIES, process=True
+            antennafield.AntennaField, properties=self.AT_PROPERTIES, process=True
         ) as proxy:
             numpy.testing.assert_equal(antenna_qualities, proxy.Antenna_Quality_R)
 
     def test_read_Antenna_Use(self):
-        """ Verify if Antenna_Use_R is correctly retrieved """
+        """Verify if Antenna_Use_R is correctly retrieved"""
         antenna_use = numpy.array([AntennaUse.AUTO] * MAX_ANTENNA)
         with DeviceTestContext(
-                antennafield.AntennaField, properties=self.AT_PROPERTIES, process=True
+            antennafield.AntennaField, properties=self.AT_PROPERTIES, process=True
         ) as proxy:
             numpy.testing.assert_equal(antenna_use, proxy.Antenna_Use_R)
 
     def test_read_Antenna_Usage_Mask(self):
-        """ Verify if Antenna_Usage_Mask_R is correctly retrieved """
+        """Verify if Antenna_Usage_Mask_R is correctly retrieved"""
         antenna_qualities = numpy.array([AntennaQuality.OK] * MAX_ANTENNA)
         antenna_use = numpy.array(
-                [AntennaUse.ON] + [AntennaUse.AUTO] * (MAX_ANTENNA - 1)
+            [AntennaUse.ON] + [AntennaUse.AUTO] * (MAX_ANTENNA - 1)
         )
         antenna_properties = {
-            'Antenna_Quality': antenna_qualities, 'Antenna_Use': antenna_use
+            "Antenna_Quality": antenna_qualities,
+            "Antenna_Use": antenna_use,
         }
         with DeviceTestContext(
-                antennafield.AntennaField,
-                properties={**self.AT_PROPERTIES, **antenna_properties}, process=True
+            antennafield.AntennaField,
+            properties={**self.AT_PROPERTIES, **antenna_properties},
+            process=True,
         ) as proxy:
             numpy.testing.assert_equal(
-                    numpy.array([True] * MAX_ANTENNA), proxy.Antenna_Usage_Mask_R
+                numpy.array([True] * MAX_ANTENNA), proxy.Antenna_Usage_Mask_R
             )
 
     def test_read_Antenna_Usage_Mask_only_one_functioning_antenna(self):
-        """ Verify if Antenna_Usage_Mask_R (only first antenna is OK) is correctly retrieved """
+        """Verify if Antenna_Usage_Mask_R (only first antenna is OK) is correctly retrieved"""
         antenna_qualities = numpy.array(
-                [AntennaQuality.OK] + [AntennaQuality.BROKEN] * (MAX_ANTENNA - 1)
+            [AntennaQuality.OK] + [AntennaQuality.BROKEN] * (MAX_ANTENNA - 1)
         )
         antenna_use = numpy.array(
-                [AntennaUse.ON] + [AntennaUse.AUTO] * (MAX_ANTENNA - 1)
+            [AntennaUse.ON] + [AntennaUse.AUTO] * (MAX_ANTENNA - 1)
         )
         antenna_properties = {
-            'Antenna_Quality': antenna_qualities, 'Antenna_Use': antenna_use
+            "Antenna_Quality": antenna_qualities,
+            "Antenna_Use": antenna_use,
         }
         with DeviceTestContext(
-                antennafield.AntennaField,
-                properties={**self.AT_PROPERTIES, **antenna_properties}, process=True
+            antennafield.AntennaField,
+            properties={**self.AT_PROPERTIES, **antenna_properties},
+            process=True,
         ) as proxy:
             numpy.testing.assert_equal(
-                    numpy.array([True] + [False] * (MAX_ANTENNA - 1)),
-                    proxy.Antenna_Usage_Mask_R
+                numpy.array([True] + [False] * (MAX_ANTENNA - 1)),
+                proxy.Antenna_Usage_Mask_R,
             )
 
     def test_read_Antenna_Names(self):
-        """ Verify if Antenna_Names_R is correctly retrieved """
+        """Verify if Antenna_Names_R is correctly retrieved"""
         antenna_names = ["C0", "C1", "C2", "C3", "C4"]
-        antenna_properties = {'Antenna_Names': antenna_names}
+        antenna_properties = {"Antenna_Names": antenna_names}
         with DeviceTestContext(
-                antennafield.AntennaField,
-                properties={**self.AT_PROPERTIES, **antenna_properties}, process=True
+            antennafield.AntennaField,
+            properties={**self.AT_PROPERTIES, **antenna_properties},
+            process=True,
         ) as proxy:
             for i in range(len(antenna_names)):
                 self.assertTrue(proxy.Antenna_Names_R[i] == f"C{i}")
@@ -621,29 +705,26 @@ class TestAntennafieldDevice(device_base.DeviceTestCase):
         """Verify set_mapped_attribute only modifies controlled inputs"""
 
         antenna_properties = {
-            'RECV_devices': ['stat/RECV/1'],
+            "RECV_devices": ["stat/RECV/1"],
         }
 
         data = numpy.array([[False] * N_rcu] * MAX_ANTENNA)
 
         m_proxy.return_value = mock.Mock(
-                read_attribute=mock.Mock(
-                        return_value=mock.Mock(value=data)
-                )
+            read_attribute=mock.Mock(return_value=mock.Mock(value=data))
         )
 
         with DeviceTestContext(
-                antennafield.AntennaField, process=False,
-                properties={**self.AT_PROPERTIES, **antenna_properties}
+            antennafield.AntennaField,
+            process=False,
+            properties={**self.AT_PROPERTIES, **antenna_properties},
         ) as proxy:
             proxy.boot()
 
             proxy.write_attribute(
-                    "HBAT_PWR_on_RW",
-                    numpy.array([[False] * N_rcu] * DEFAULT_N_HBA_TILES)
+                "HBAT_PWR_on_RW", numpy.array([[False] * N_rcu] * DEFAULT_N_HBA_TILES)
             )
 
             numpy.testing.assert_equal(
-                    m_proxy.return_value.write_attribute.call_args[0][1],
-                    data
+                m_proxy.return_value.write_attribute.call_args[0][1], data
             )
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_beam_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_beam_device.py
index 7b84e6992d9dbe7f2c36db920ba18f53eb052e17..6e9c5b25656b739c09ce95ad8af3cab747a7fdd8 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_beam_device.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_beam_device.py
@@ -1,17 +1,10 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tangostationcontrol.test.devices import device_base
 
 
 class TestBeamDevice(device_base.DeviceTestCase):
-
     def setUp(self):
         # DeviceTestCase setUp patches lofar_device DeviceProxy
         super(TestBeamDevice, self).setUp()
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_beamlet_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_beamlet_device.py
index 1fa9c8b096a409d642e303e4cff0dca6ca6a54dd..f733dac95841aa5844e329c8fe334729bfbdf5ee 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_beamlet_device.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_beamlet_device.py
@@ -1,50 +1,60 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
+import numpy
+import numpy.testing
 from tangostationcontrol.common.constants import CLK_200_MHZ
 from tangostationcontrol.devices.sdp.beamlet import Beamlet
 from tangostationcontrol.devices.sdp.common import weight_to_complex
-
-import numpy
-import numpy.testing
-
 from tangostationcontrol.test import base
 
+
 # unpack into 16-bit complex
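+# (each uint32 weight presumably packs signed 16-bit real/imaginary parts;
+# weight_to_complex rescales them by BF_UNIT_WEIGHT into a Python complex)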
 def to_complex(uint32):
     return weight_to_complex(uint32, Beamlet.BF_UNIT_WEIGHT)
 
+
 class TestBeamletDevice(base.TestCase):
     def test_calculate_bf_weights_small_numbers(self):
         # 2 beamlets, 3 antennas. The antennas are 1 second apart.
-        delays = numpy.array([
-            [1.0, 2.0, 3.0],
-            [1.0, 2.0, 3.0],
-        ])
+        delays = numpy.array(
+            [
+                [1.0, 2.0, 3.0],
+                [1.0, 2.0, 3.0],
+            ]
+        )
 
         # the frequency of the signal is 1.0 Hz and 0.5 Hz respectively,
         # so the antennas will be either in phase or in opposite phase
-        beamlet_frequencies = numpy.array([
-            [1.0, 1.0, 1.0],
-            [0.5, 0.5, 0.5],
-        ])
+        beamlet_frequencies = numpy.array(
+            [
+                [1.0, 1.0, 1.0],
+                [0.5, 0.5, 0.5],
+            ]
+        )
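+        # Presumably each weight equals exp(+/-2j*pi*f*delay): at 1.0 Hz every
+        # delay is a whole period (in phase), while at 0.5 Hz the 1 s and 3 s
+        # delays are half a period off, matching the +1/-1 weights below.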
 
         bf_weights = Beamlet._calculate_bf_weights(delays, beamlet_frequencies)
 
         self.assertEqual(delays.shape, bf_weights.shape)
 
-        self.assertEqual(to_complex(bf_weights[0][0]),  1 + 0j, msg=f"bf_weights = {bf_weights}")
-        self.assertEqual(to_complex(bf_weights[0][1]),  1 + 0j, msg=f"bf_weights = {bf_weights}")
-        self.assertEqual(to_complex(bf_weights[0][2]),  1 + 0j, msg=f"bf_weights = {bf_weights}")
-        self.assertEqual(to_complex(bf_weights[1][0]), -1 + 0j, msg=f"bf_weights = {bf_weights}")
-        self.assertEqual(to_complex(bf_weights[1][1]),  1 + 0j, msg=f"bf_weights = {bf_weights}")
-        self.assertEqual(to_complex(bf_weights[1][2]), -1 + 0j, msg=f"bf_weights = {bf_weights}")
+        self.assertEqual(
+            to_complex(bf_weights[0][0]), 1 + 0j, msg=f"bf_weights = {bf_weights}"
+        )
+        self.assertEqual(
+            to_complex(bf_weights[0][1]), 1 + 0j, msg=f"bf_weights = {bf_weights}"
+        )
+        self.assertEqual(
+            to_complex(bf_weights[0][2]), 1 + 0j, msg=f"bf_weights = {bf_weights}"
+        )
+        self.assertEqual(
+            to_complex(bf_weights[1][0]), -1 + 0j, msg=f"bf_weights = {bf_weights}"
+        )
+        self.assertEqual(
+            to_complex(bf_weights[1][1]), 1 + 0j, msg=f"bf_weights = {bf_weights}"
+        )
+        self.assertEqual(
+            to_complex(bf_weights[1][2]), -1 + 0j, msg=f"bf_weights = {bf_weights}"
+        )
 
     def test_calculate_bf_weights_actual_numbers(self):
         # we test phase offsets with 90 degree increments to weed out:
@@ -56,20 +66,28 @@ class TestBeamletDevice(base.TestCase):
         #     offsets to detect 180 degree errors as well.
 
-        # 2 beamlets, 3 antennas. The antennas are 1 second apart.
+        # 1 beamlet, 5 antennas. The antenna delays step by 1.25 ns.
-        delays = numpy.array([
-            [0.0, 1.25e-9, 2.5e-9, 3.75e-9, 5.0e-9]
-        ])
+        delays = numpy.array([[0.0, 1.25e-9, 2.5e-9, 3.75e-9, 5.0e-9]])
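+        # At 200 MHz one period is 5 ns, so these 1.25 ns steps are 90 degrees apart.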
 
-        beamlet_frequencies = numpy.array([
-            [CLK_200_MHZ, CLK_200_MHZ, CLK_200_MHZ, CLK_200_MHZ, CLK_200_MHZ]
-        ])
+        beamlet_frequencies = numpy.array(
+            [[CLK_200_MHZ, CLK_200_MHZ, CLK_200_MHZ, CLK_200_MHZ, CLK_200_MHZ]]
+        )
 
         bf_weights = Beamlet._calculate_bf_weights(delays, beamlet_frequencies)
 
         self.assertEqual(delays.shape, bf_weights.shape)
 
-        self.assertEqual(to_complex(bf_weights[0][0]),  1 + 0j, msg=f"bf_weights = {bf_weights}")
-        self.assertEqual(to_complex(bf_weights[0][1]),  0 - 1j, msg=f"bf_weights = {bf_weights}")
-        self.assertEqual(to_complex(bf_weights[0][2]), -1 + 0j, msg=f"bf_weights = {bf_weights}")
-        self.assertEqual(to_complex(bf_weights[0][3]),  0 + 1j, msg=f"bf_weights = {bf_weights}")
-        self.assertEqual(to_complex(bf_weights[0][4]),  1 + 0j, msg=f"bf_weights = {bf_weights}")
+        self.assertEqual(
+            to_complex(bf_weights[0][0]), 1 + 0j, msg=f"bf_weights = {bf_weights}"
+        )
+        self.assertEqual(
+            to_complex(bf_weights[0][1]), 0 - 1j, msg=f"bf_weights = {bf_weights}"
+        )
+        self.assertEqual(
+            to_complex(bf_weights[0][2]), -1 + 0j, msg=f"bf_weights = {bf_weights}"
+        )
+        self.assertEqual(
+            to_complex(bf_weights[0][3]), 0 + 1j, msg=f"bf_weights = {bf_weights}"
+        )
+        self.assertEqual(
+            to_complex(bf_weights[0][4]), 1 + 0j, msg=f"bf_weights = {bf_weights}"
+        )
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_device_temperature_manager.py b/tangostationcontrol/tangostationcontrol/test/devices/test_device_temperature_manager.py
index ca54101ed14e8affee1dce064ea92bf5ba9065a9..dc0f004821cfbb9dfefe7ba8b1e59053c3ff52d4 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_device_temperature_manager.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_device_temperature_manager.py
@@ -1,23 +1,14 @@
-
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import time
 
 from tango.test_context import DeviceTestContext
 from tangostationcontrol.devices import temperature_manager
-
 from tangostationcontrol.test.devices import device_base
 
 
 class TestTemperatureManagerDevice(device_base.DeviceTestCase):
-
     def setUp(self):
         # DeviceTestCase setUp patches lofar_device DeviceProxy
         super(TestTemperatureManagerDevice, self).setUp()
@@ -27,7 +18,9 @@ class TestTemperatureManagerDevice(device_base.DeviceTestCase):
             self.device_proxy_patch(device)
 
     def test_alarm(self):
-        with DeviceTestContext(temperature_manager.TemperatureManager, process=True, timeout=10) as proxy:
+        with DeviceTestContext(
+            temperature_manager.TemperatureManager, process=True, timeout=10
+        ) as proxy:
             proxy.initialise()
             proxy.on()
 
@@ -36,9 +29,3 @@ class TestTemperatureManagerDevice(device_base.DeviceTestCase):
             time.sleep(1)
 
             self.assertFalse(proxy.is_alarming_R)
-
-
-
-
-
-
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_digitalbeam_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_digitalbeam_device.py
index d57f912218550b7e73181a332caa61b94e6c3914..d4df4051399073b003609baee1c903a3e4a0e903 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_digitalbeam_device.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_digitalbeam_device.py
@@ -1,35 +1,33 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 # Builtin regular libraries
 import copy
-
-# External regular libraries
-import numpy
-
-# Internal regular imports
-from tangostationcontrol.devices.sdp import digitalbeam
-from tangostationcontrol.common.constants import MAX_ANTENNA, N_beamlets_ctrl, N_xyz, N_pn
+import unittest
 
 # Builtin test libraries
 from unittest import mock
-import unittest
+
+# External regular libraries
+import numpy
 
 # External test libraries
 from tango.test_context import DeviceTestContext
+from tangostationcontrol.common.constants import (
+    MAX_ANTENNA,
+    N_beamlets_ctrl,
+    N_xyz,
+    N_pn,
+)
+
+# Internal regular imports
+from tangostationcontrol.devices.sdp import digitalbeam
 
 # Internal test imports
 from tangostationcontrol.test.devices import device_base
 
 
 class TestDigitalBeamDevice(device_base.DeviceTestCase):
-
     def setUp(self):
         # DeviceTestCase setUp patches lofar_device DeviceProxy
         super(TestDigitalBeamDevice, self).setUp()
@@ -41,7 +39,9 @@ class TestDigitalBeamDevice(device_base.DeviceTestCase):
     def test_apply_weights_disabled(self, m_proxy, m_compute, m_wait):
         """Verify won't overwrite digitalbeam data if no input_selected"""
 
-        input_data = numpy.array([["AZELGEO", "0deg", "90deg"]] * N_beamlets_ctrl).flatten()
+        input_data = numpy.array(
+            [["AZELGEO", "0deg", "90deg"]] * N_beamlets_ctrl
+        ).flatten()
         current_data = numpy.array([[16384] * 5856] * N_pn)
 
         m_proxy.return_value = mock.Mock(
@@ -50,26 +50,26 @@ class TestDigitalBeamDevice(device_base.DeviceTestCase):
             ),
             Antenna_Usage_Mask_R=numpy.array([0] * MAX_ANTENNA),
             Antenna_Field_Reference_ITRF_R=mock.MagicMock(),
-            HBAT_reference_ITRF_R=numpy.array([[0] * N_xyz] * MAX_ANTENNA)
+            HBAT_reference_ITRF_R=numpy.array([[0] * N_xyz] * MAX_ANTENNA),
         )
 
-        new_data = numpy.array(
-            [[16384] * 2928 + [0] * 2928] * N_pn
-        )
+        new_data = numpy.array([[16384] * 2928 + [0] * 2928] * N_pn)
         m_compute.return_value = copy.copy(new_data)
 
         with DeviceTestContext(
-            digitalbeam.DigitalBeam, process=False,
+            digitalbeam.DigitalBeam,
+            process=False,
         ) as proxy:
             proxy.initialise()
             proxy.Tracking_enabled_RW = False
-            proxy.input_select_RW = numpy.array([[False] * N_beamlets_ctrl] * MAX_ANTENNA)
+            proxy.input_select_RW = numpy.array(
+                [[False] * N_beamlets_ctrl] * MAX_ANTENNA
+            )
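+            # With no inputs selected, set_pointing must leave the beam weights
+            # untouched, so the device should write back current_data unchanged.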
 
             proxy.set_pointing(input_data)
 
             numpy.testing.assert_equal(
-                m_proxy.return_value.write_attribute.call_args[0][0],
-                current_data
+                m_proxy.return_value.write_attribute.call_args[0][0], current_data
             )
 
     @unittest.skip("Test for manual use, enable at most one (process=false)")
@@ -79,33 +79,33 @@ class TestDigitalBeamDevice(device_base.DeviceTestCase):
     def test_apply_weights_enabled(self, m_proxy, m_compute, m_wait):
         """Verify can overwrite digitalbeam data if input_selected"""
 
-        input_data = numpy.array([["AZELGEO", "0deg", "90deg"]] * N_beamlets_ctrl).flatten()
+        input_data = numpy.array(
+            [["AZELGEO", "0deg", "90deg"]] * N_beamlets_ctrl
+        ).flatten()
         current_data = numpy.array([[16384] * 5856] * N_pn)
 
         m_proxy.return_value = mock.Mock(
-            read_attribute=mock.Mock(
-                return_value=mock.Mock(value=current_data)
-            ),
+            read_attribute=mock.Mock(return_value=mock.Mock(value=current_data)),
             Antenna_Usage_Mask_R=numpy.array([0] * MAX_ANTENNA),
             Antenna_Field_Reference_ITRF_R=mock.MagicMock(),
-            HBAT_reference_ITRF_R=numpy.array([[0] * N_xyz] * MAX_ANTENNA)
+            HBAT_reference_ITRF_R=numpy.array([[0] * N_xyz] * MAX_ANTENNA),
         )
 
-        new_data = numpy.array(
-            [[16384] * 2928 + [0] * 2928] * N_pn
-        )
+        new_data = numpy.array([[16384] * 2928 + [0] * 2928] * N_pn)
         m_compute.return_value = copy.copy(new_data)
 
         with DeviceTestContext(
-            digitalbeam.DigitalBeam, process=False,
+            digitalbeam.DigitalBeam,
+            process=False,
         ) as proxy:
             proxy.initialise()
             proxy.Tracking_enabled_RW = False
-            proxy.input_select_RW = numpy.array([[True] * N_beamlets_ctrl] * MAX_ANTENNA)
+            proxy.input_select_RW = numpy.array(
+                [[True] * N_beamlets_ctrl] * MAX_ANTENNA
+            )
 
             proxy.set_pointing(input_data)
 
             numpy.testing.assert_equal(
-                m_proxy.return_value.write_attribute.call_args[0][0],
-                new_data
+                m_proxy.return_value.write_attribute.call_args[0][0], new_data
             )
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_lofar_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_lofar_device.py
index 7a0cf7b18455456eb0f46cf4e8742ca06e17af11..9621a930ff980f89d568176263ebfddc54a8c73e 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_lofar_device.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_lofar_device.py
@@ -1,37 +1,27 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-import numpy
+from unittest import mock
 
-from tango.test_context import DeviceTestContext
-from tango.server import attribute
-from tango.server import command
+import numpy
 from tango import AttrWriteType
 from tango import DevFailed
 from tango import DevState
 from tango import DevVarBooleanArray
-
+from tango.server import attribute
+from tango.server import command
+from tango.test_context import DeviceTestContext
 from tangostationcontrol.devices import lofar_device
-
-from unittest import mock
-
 from tangostationcontrol.test.devices import device_base
 
 
 class TestLofarDevice(device_base.DeviceTestCase):
-
     def setUp(self):
         # DeviceTestCase setUp patches lofar_device DeviceProxy
         super(TestLofarDevice, self).setUp()
 
     def test_read_attribute(self):
-        """ Test whether read_attribute really returns the attribute. """
+        """Test whether read_attribute really returns the attribute."""
 
         class MyLofarDevice(lofar_device.LOFARDevice):
             @attribute(dtype=float)
@@ -56,7 +46,9 @@ class TestLofarDevice(device_base.DeviceTestCase):
             self.assertListEqual([42.0, 43.0], proxy.read_attribute_B_array.tolist())
 
     def test_disable_state(self):
-        with DeviceTestContext(lofar_device.LOFARDevice, process=True, timeout=10) as proxy:
+        with DeviceTestContext(
+            lofar_device.LOFARDevice, process=True, timeout=10
+        ) as proxy:
             proxy.initialise()
             self.assertEqual(DevState.STANDBY, proxy.state())
             proxy.on()
@@ -65,7 +57,9 @@ class TestLofarDevice(device_base.DeviceTestCase):
             self.assertEqual(DevState.DISABLE, proxy.state())
 
     def test_disable_state_transitions(self):
-        with DeviceTestContext(lofar_device.LOFARDevice, process=True, timeout=10) as proxy:
+        with DeviceTestContext(
+            lofar_device.LOFARDevice, process=True, timeout=10
+        ) as proxy:
             proxy.off()
             with self.assertRaises(DevFailed):
                 proxy.disable_hardware()
@@ -78,15 +72,16 @@ class TestLofarDevice(device_base.DeviceTestCase):
         """Test atomic read modify write for attribute"""
 
         class AttributeLofarDevice(lofar_device.LOFARDevice):
-
             BOOL_ARRAY_DIM = 32
 
             # Just for demo, do not use class variables to store attribute state
             _bool_array = [False] * BOOL_ARRAY_DIM
             bool_array = attribute(
-                dtype=(bool,), max_dim_x=BOOL_ARRAY_DIM,
-                access=AttrWriteType.READ_WRITE, fget="get_bool_array",
-                fset="set_bool_array"
+                dtype=(bool,),
+                max_dim_x=BOOL_ARRAY_DIM,
+                access=AttrWriteType.READ_WRITE,
+                fget="get_bool_array",
+                fset="set_bool_array",
             )
 
             def get_bool_array(self):
@@ -104,33 +99,26 @@ class TestLofarDevice(device_base.DeviceTestCase):
                 t_write = mock.Mock()
                 t_proxy = mock.Mock(
                     read_attribute=mock.Mock(
-                        return_value=mock.Mock(
-                            value=numpy.array(self._bool_array)
-                        )
+                        return_value=mock.Mock(value=numpy.array(self._bool_array))
                     ),
-                    write_attribute=t_write
+                    write_attribute=t_write,
                 )
 
                 self.atomic_read_modify_write_attribute(
-                    values, t_proxy, "bool_array",
-                    numpy.array([True, False] * bool_array_half)
+                    values,
+                    t_proxy,
+                    "bool_array",
+                    numpy.array([True, False] * bool_array_half),
                 )
 
                 # Fake the write, extract the call argument from t_write mock
                 self._bool_array = t_write.call_args[0][1]
 
-        with DeviceTestContext(
-            AttributeLofarDevice, process=True
-        ) as proxy:
-
+        with DeviceTestContext(AttributeLofarDevice, process=True) as proxy:
             bool_array_half = int(AttributeLofarDevice.BOOL_ARRAY_DIM / 2)
-            excepted_result = [True, False] * bool_array_half
+            expected_result = [True, False] * bool_array_half
 
             proxy.initialise()
-            proxy.do_read_modify_write(
-                [True] * AttributeLofarDevice.BOOL_ARRAY_DIM
-            )
+            proxy.do_read_modify_write([True] * AttributeLofarDevice.BOOL_ARRAY_DIM)
 
-            numpy.testing.assert_array_equal(
-                excepted_result, proxy.bool_array
-            )
+            numpy.testing.assert_array_equal(expected_result, proxy.bool_array)
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_observation_base.py b/tangostationcontrol/tangostationcontrol/test/devices/test_observation_base.py
index f98a7c4754262e76cf3bb4d72630fcc3baf5f94a..cbe53eecffd46be8c773e8e0e4772d59447a7906 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_observation_base.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_observation_base.py
@@ -1,23 +1,17 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 
 class TestObservationBase:
 
     # TODO(Corne): Use this once working on L2SS-774
-    VALID_JSON = '''
+    VALID_JSON = """
             {
               "observation_id": 12345,
               "stop_time": "2106-02-07T00:00:00",
               "antenna_mask": [0,1,2,9],
               "filter": "HBA_110_190",
-              "SAPs": [{ 
+              "SAPs": [{
                     "subbands": [10, 20, 30],
                     "pointing": {
                         "angle1": 1.5, "angle2": 0, "direction_type": "J2000"
@@ -27,4 +21,4 @@ class TestObservationBase:
                 { "angle1": 1.5, "angle2": 0, "direction_type": "J2000" },
               "first_beamlet": 0
             }
-        '''
+        """
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_observation_control_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_observation_control_device.py
index 1d5e2af773252e2e28ad789e6a9ccccb75468a91..1b502783b03ed975e66af64508d4e6b10e48a6b6 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_observation_control_device.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_observation_control_device.py
@@ -1,18 +1,13 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tangostationcontrol.test import base
 from tangostationcontrol.test.devices import test_observation_base
 
 
-class TestObservationControlDevice(base.TestCase, test_observation_base.TestObservationBase):
-
+class TestObservationControlDevice(
+    base.TestCase, test_observation_base.TestObservationBase
+):
     def setUp(self):
         super(TestObservationControlDevice, self).setUp()
 
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_observation_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_observation_device.py
index 70ecc0cfbaab093aef6a4685e111ae35f3fbd327..5b98c9286d3e7f410bf01ff4b502c71677a3e868 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_observation_device.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_observation_device.py
@@ -1,19 +1,13 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
-
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tangostationcontrol.test.devices import device_base
 from tangostationcontrol.test.devices import test_observation_base
 
 
-class TestObservationDevice(device_base.DeviceTestCase, test_observation_base.TestObservationBase):
-
+class TestObservationDevice(
+    device_base.DeviceTestCase, test_observation_base.TestObservationBase
+):
     def setUp(self):
         # DeviceTestCase setUp patches lofar_device DeviceProxy
         super(TestObservationDevice, self).setUp()
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_psoc_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_psoc_device.py
index f6951904d8f09b9279bcb48d2fd3a744cf4d65fa..272d0fac083e6a31145abc7e626b229b0160783c 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_psoc_device.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_psoc_device.py
@@ -1,11 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tangostationcontrol.test.devices import device_base
 
@@ -15,15 +9,21 @@ class TestPSOCDevice(device_base.DeviceTestCase):
     # some dummy values for mandatory properties
 
     psoc_properties = {
-        'SNMP_community': 'public',
-        'SNMP_host': "10.87.2.145",
+        "SNMP_community": "public",
+        "SNMP_host": "10.87.2.145",
         "SNMP_version": 1,
-        'SNMP_mib_dir': "devices/psoc_mib/PowerNet-MIB.mib",
-        'SNMP_timeout': 5.0,
+        "SNMP_mib_dir": "devices/psoc_mib/PowerNet-MIB.mib",
+        "SNMP_timeout": 5.0,
         "PSOC_sockets": [
-            "socket_1", "socket_2", "socket_3", "socket_4", "socket_5",
-            "socket_6", "socket_7", "socket_8"
-        ]
+            "socket_1",
+            "socket_2",
+            "socket_3",
+            "socket_4",
+            "socket_5",
+            "socket_6",
+            "socket_7",
+            "socket_8",
+        ],
     }
 
     def setUp(self):
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_recv_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_recv_device.py
index 1ce23d972729bbac403313482db8f65e527cada9..0dddaa3d48708ff97ad8ba6da091551ae9ef526f 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_recv_device.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_recv_device.py
@@ -1,26 +1,21 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
+import numpy
 from tango.test_context import DeviceTestContext
-
-from tangostationcontrol.devices import recv
 from tangostationcontrol.common.constants import N_rcu, N_rcu_inp, N_elements
-
-import numpy
-
+from tangostationcontrol.devices import recv
 from tangostationcontrol.test.devices import device_base
 
 
 class TestRecvDevice(device_base.DeviceTestCase):
 
     # some dummy values for mandatory properties
-    RECV_PROPERTIES = {'OPC_Server_Name': 'example.com', 'OPC_Server_Port': 4840, 'OPC_Time_Out': 5.0}
+    RECV_PROPERTIES = {
+        "OPC_Server_Name": "example.com",
+        "OPC_Server_Port": 4840,
+        "OPC_Time_Out": 5.0,
+    }
 
     def setUp(self):
         # DeviceTestCase setUp patches lofar_device DeviceProxy
@@ -28,14 +23,18 @@ class TestRecvDevice(device_base.DeviceTestCase):
 
     def test_calculate_HBAT_bf_delay_steps(self):
         """Verify HBAT beamforming calculations are correctly executed"""
-        with DeviceTestContext(recv.RECV, properties=self.RECV_PROPERTIES, process=True) as proxy:
-            delays = numpy.random.rand(N_rcu * N_rcu_inp,N_elements).flatten()
+        with DeviceTestContext(
+            recv.RECV, properties=self.RECV_PROPERTIES, process=True
+        ) as proxy:
+            delays = numpy.random.rand(N_rcu * N_rcu_inp, N_elements).flatten()
             HBAT_bf_delay_steps = proxy.calculate_HBAT_bf_delay_steps(delays)
-            self.assertEqual(3072, len(HBAT_bf_delay_steps))                             # 96x32=3072
+            self.assertEqual(3072, len(HBAT_bf_delay_steps))  # 96x32=3072
 
     def test_get_rcu_band_from_filter(self):
         """Verify filter lookup table values are correctly retrieved"""
-        with DeviceTestContext(recv.RECV, properties=self.RECV_PROPERTIES, process=True) as proxy:
+        with DeviceTestContext(
+            recv.RECV, properties=self.RECV_PROPERTIES, process=True
+        ) as proxy:
             filter_name = "HBA_170_230"
             self.assertEqual(1, proxy.get_rcu_band_from_filter(filter_name))
-            self.assertEqual(-1, proxy.get_rcu_band_from_filter('MOCK_FILTER'))
+            self.assertEqual(-1, proxy.get_rcu_band_from_filter("MOCK_FILTER"))
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_sdp_common.py b/tangostationcontrol/tangostationcontrol/test/devices/test_sdp_common.py
index 84655351499ef7b117fedf28e5f119b4c0905523..0e0d752f9302d208d6565c5d455bcb74def176f6 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_sdp_common.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_sdp_common.py
@@ -1,24 +1,24 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
-
-from tangostationcontrol.common.constants import CLK_200_MHZ, CLK_160_MHZ
-from tangostationcontrol.devices.sdp.common import phases_to_weights, subband_frequencies, subband_frequency, weight_to_complex
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import numpy
-
+from tangostationcontrol.common.constants import CLK_200_MHZ, CLK_160_MHZ
+from tangostationcontrol.devices.sdp.common import (
+    phases_to_weights,
+    subband_frequencies,
+    subband_frequency,
+    weight_to_complex,
+)
 from tangostationcontrol.test import base
 
+
 class TestSDPCommon(base.TestCase):
     def test_subband_frequencies(self):
-        subbands = numpy.array([
-          [0, 1, 102],
-        ])
+        subbands = numpy.array(
+            [
+                [0, 1, 102],
+            ]
+        )
 
         nyquist_zones_0 = numpy.zeros(subbands.shape)
         nyquist_zones_1 = numpy.ones(subbands.shape)
@@ -27,28 +27,34 @@ class TestSDPCommon(base.TestCase):
         # for reference values, see https://proxy.lofar.eu/rtsm/tests/
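+        # (presumably frequency = nyquist_zone * clock / 2 + subband * clock / 1024,
+        # which reproduces the reference values asserted below)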
 
         lba_frequencies = subband_frequencies(subbands, CLK_160_MHZ, nyquist_zones_0)
-        self.assertAlmostEqual(lba_frequencies[0][0],  0.0000000e6)
-        self.assertAlmostEqual(lba_frequencies[0][1],  0.1562500e6)
+        self.assertAlmostEqual(lba_frequencies[0][0], 0.0000000e6)
+        self.assertAlmostEqual(lba_frequencies[0][1], 0.1562500e6)
         self.assertAlmostEqual(lba_frequencies[0][2], 15.9375000e6)
 
         lba_frequencies = subband_frequencies(subbands, CLK_200_MHZ, nyquist_zones_0)
-        self.assertAlmostEqual(lba_frequencies[0][0],  0.0000000e6)
-        self.assertAlmostEqual(lba_frequencies[0][1],  0.1953125e6)
+        self.assertAlmostEqual(lba_frequencies[0][0], 0.0000000e6)
+        self.assertAlmostEqual(lba_frequencies[0][1], 0.1953125e6)
         self.assertAlmostEqual(lba_frequencies[0][2], 19.9218750e6)
 
         # Nyquist zone 1 is not used in 160 MHz
 
-        hba_low_frequencies = subband_frequencies(subbands, CLK_200_MHZ, nyquist_zones_1)
+        hba_low_frequencies = subband_frequencies(
+            subbands, CLK_200_MHZ, nyquist_zones_1
+        )
         self.assertAlmostEqual(hba_low_frequencies[0][0], 100.0000000e6)
         self.assertAlmostEqual(hba_low_frequencies[0][1], 100.1953125e6)
         self.assertAlmostEqual(hba_low_frequencies[0][2], 119.9218750e6)
 
-        hba_high_frequencies = subband_frequencies(subbands, CLK_160_MHZ, nyquist_zones_2)
+        hba_high_frequencies = subband_frequencies(
+            subbands, CLK_160_MHZ, nyquist_zones_2
+        )
         self.assertAlmostEqual(hba_high_frequencies[0][0], 160.0000000e6)
         self.assertAlmostEqual(hba_high_frequencies[0][1], 160.1562500e6)
         self.assertAlmostEqual(hba_high_frequencies[0][2], 175.9375000e6)
 
-        hba_high_frequencies = subband_frequencies(subbands, CLK_200_MHZ, nyquist_zones_2)
+        hba_high_frequencies = subband_frequencies(
+            subbands, CLK_200_MHZ, nyquist_zones_2
+        )
         self.assertAlmostEqual(hba_high_frequencies[0][0], 200.0000000e6)
         self.assertAlmostEqual(hba_high_frequencies[0][1], 200.1953125e6)
         self.assertAlmostEqual(hba_high_frequencies[0][2], 219.9218750e6)
@@ -61,22 +67,46 @@ class TestSDPCommon(base.TestCase):
 
     def test_phases_to_weights(self):
         # offer nice 0, 90, 180, 270, 360 degrees
-        phases = numpy.array([0.0, numpy.pi / 2, numpy.pi, numpy.pi * 1.5, numpy.pi * 2])
+        phases = numpy.array(
+            [0.0, numpy.pi / 2, numpy.pi, numpy.pi * 1.5, numpy.pi * 2]
+        )
         unit = 2**14
 
         sdp_weights = phases_to_weights(phases, unit)
 
         # check whether the complex representation is also along the right axes and
         # has the right amplitude
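+        # e.g. a phase of 0 should yield the weight for 1+0j and pi/2 the weight
+        # for 0+1j, each at unit amplitude.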
-        self.assertEqual(weight_to_complex(sdp_weights[0], unit),  1 + 0j, msg=f"sdp_weights = {sdp_weights}")
-        self.assertEqual(weight_to_complex(sdp_weights[1], unit),  0 + 1j, msg=f"sdp_weights = {sdp_weights}")
-        self.assertEqual(weight_to_complex(sdp_weights[2], unit), -1 + 0j, msg=f"sdp_weights = {sdp_weights}")
-        self.assertEqual(weight_to_complex(sdp_weights[3], unit),  0 - 1j, msg=f"sdp_weights = {sdp_weights}")
-        self.assertEqual(weight_to_complex(sdp_weights[4], unit),  1 + 0j, msg=f"sdp_weights = {sdp_weights}")
+        self.assertEqual(
+            weight_to_complex(sdp_weights[0], unit),
+            1 + 0j,
+            msg=f"sdp_weights = {sdp_weights}",
+        )
+        self.assertEqual(
+            weight_to_complex(sdp_weights[1], unit),
+            0 + 1j,
+            msg=f"sdp_weights = {sdp_weights}",
+        )
+        self.assertEqual(
+            weight_to_complex(sdp_weights[2], unit),
+            -1 + 0j,
+            msg=f"sdp_weights = {sdp_weights}",
+        )
+        self.assertEqual(
+            weight_to_complex(sdp_weights[3], unit),
+            0 - 1j,
+            msg=f"sdp_weights = {sdp_weights}",
+        )
+        self.assertEqual(
+            weight_to_complex(sdp_weights[4], unit),
+            1 + 0j,
+            msg=f"sdp_weights = {sdp_weights}",
+        )
 
     def test_phases_to_weights_with_amplitude(self):
         # offer nice 0, 90, 180, 270, 360 degrees
-        phases = numpy.array([0.0, numpy.pi / 2, numpy.pi, numpy.pi * 1.5, numpy.pi * 2])
+        phases = numpy.array(
+            [0.0, numpy.pi / 2, numpy.pi, numpy.pi * 1.5, numpy.pi * 2]
+        )
         amplitudes = numpy.array([0.1, 0.2, 0.3, 0.4, 0.5])
         unit = 2**14
 
@@ -84,11 +114,36 @@ class TestSDPCommon(base.TestCase):
 
         # check whether the complex representation is also along the right axes and
         # has the right amplitude
-        self.assertAlmostEqual(weight_to_complex(sdp_weights[0], unit),  0.1 + 0j, places=3, msg=f"sdp_weights = {sdp_weights}")
-        self.assertAlmostEqual(weight_to_complex(sdp_weights[1], unit),  0 + 0.2j, places=3, msg=f"sdp_weights = {sdp_weights}")
-        self.assertAlmostEqual(weight_to_complex(sdp_weights[2], unit), -0.3 + 0j, places=3, msg=f"sdp_weights = {sdp_weights}")
-        self.assertAlmostEqual(weight_to_complex(sdp_weights[3], unit),  0 - 0.4j, places=3, msg=f"sdp_weights = {sdp_weights}")
-        self.assertAlmostEqual(weight_to_complex(sdp_weights[4], unit),  0.5 + 0j, places=3, msg=f"sdp_weights = {sdp_weights}")
+        self.assertAlmostEqual(
+            weight_to_complex(sdp_weights[0], unit),
+            0.1 + 0j,
+            places=3,
+            msg=f"sdp_weights = {sdp_weights}",
+        )
+        self.assertAlmostEqual(
+            weight_to_complex(sdp_weights[1], unit),
+            0 + 0.2j,
+            places=3,
+            msg=f"sdp_weights = {sdp_weights}",
+        )
+        self.assertAlmostEqual(
+            weight_to_complex(sdp_weights[2], unit),
+            -0.3 + 0j,
+            places=3,
+            msg=f"sdp_weights = {sdp_weights}",
+        )
+        self.assertAlmostEqual(
+            weight_to_complex(sdp_weights[3], unit),
+            0 - 0.4j,
+            places=3,
+            msg=f"sdp_weights = {sdp_weights}",
+        )
+        self.assertAlmostEqual(
+            weight_to_complex(sdp_weights[4], unit),
+            0.5 + 0j,
+            places=3,
+            msg=f"sdp_weights = {sdp_weights}",
+        )
 
     def test_weight_to_complex(self):
         unit = 8192
@@ -99,9 +154,15 @@ class TestSDPCommon(base.TestCase):
 
         # some constructed values
         def complex_to_weight(c: complex) -> numpy.uint32:
-            return numpy.array([c.real * unit, c.imag * unit],dtype=numpy.int16).view(numpy.uint32).item()
+            return (
+                numpy.array([c.real * unit, c.imag * unit], dtype=numpy.int16)
+                .view(numpy.uint32)
+                .item()
+            )
 
         self.assertEqual(-1, weight_to_complex(complex_to_weight(-1), unit))
         self.assertEqual(-1j, weight_to_complex(complex_to_weight(-1j), unit))
         self.assertEqual(2 - 3j, weight_to_complex(complex_to_weight(2 - 3j), unit))
-        self.assertEqual(0.5 - 0.25j, weight_to_complex(complex_to_weight(0.5 - 0.25j), unit))
+        self.assertEqual(
+            0.5 - 0.25j, weight_to_complex(complex_to_weight(0.5 - 0.25j), unit)
+        )
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_statistics_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_statistics_device.py
index 29c2462fe95047bc6364acd0d4c6a03c945801f2..25815b132504d0541229ac667749c975e4f31dbe 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_statistics_device.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_statistics_device.py
@@ -1,11 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import mock
 
@@ -16,7 +10,6 @@ from tangostationcontrol.test import base
 
 
 class TestStatisticsDevice(base.TestCase):
-
     def setUp(self):
         super(TestStatisticsDevice, self).setUp()
 
@@ -32,13 +25,15 @@ class TestStatisticsDevice(base.TestCase):
         import ctypes as ct
 
         try:
+
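+            # Subclassing a ctypes type may raise; if it does, the except clause checks it is a TypeError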
             class TestSubclass(ct.c_uint32):
                 def __repr__(self):
                     return super().__repr__()
+
         except Exception as e:
             self.assertIsInstance(e, TypeError)
 
-    @mock.patch.object(server, 'get_worker')
+    @mock.patch.object(server, "get_worker")
     def test_instance_statistics(self, m_worker):
         """Test that we can import and create a statistics device
 
diff --git a/tangostationcontrol/tangostationcontrol/test/prometheus/__init__.py b/tangostationcontrol/tangostationcontrol/test/prometheus/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/test/prometheus/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/test/prometheus/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/test/prometheus/test_archiver_policy.py b/tangostationcontrol/tangostationcontrol/test/prometheus/test_archiver_policy.py
index 9b8f53478227e5390eea98c4a9436b8363eaa7c3..8bd60a335af59e44faaa50f513e349e8c6e70658 100644
--- a/tangostationcontrol/tangostationcontrol/test/prometheus/test_archiver_policy.py
+++ b/tangostationcontrol/tangostationcontrol/test/prometheus/test_archiver_policy.py
@@ -1,34 +1,35 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-import sys, os
 import importlib.util
+import os
+import sys
 
 from tangostationcontrol.test import base
 
-module_name = 'ArchiverPolicy'
-file_path = os.path.join(os.path.realpath('..'), 'docker-compose/tango-prometheus-exporter/code/tango-prometheus-client.py')
+module_name = "ArchiverPolicy"
+file_path = os.path.join(
+    os.path.realpath(".."),
+    "docker-compose/tango-prometheus-exporter/code/tango-prometheus-client.py",
+)
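+# Load the exporter script as a module from its file path, since it lives outside the installed package tree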
 spec = importlib.util.spec_from_file_location(module_name, file_path)
 tpc = importlib.util.module_from_spec(spec)
 sys.modules[module_name] = tpc
 spec.loader.exec_module(tpc)
 
+
 class TestArchiverPolicy(base.TestCase):
 
-    config_path = os.path.join(os.path.realpath('..'), 'docker-compose/tango-prometheus-exporter/lofar2-policy.json')
+    config_path = os.path.join(
+        os.path.realpath(".."),
+        "docker-compose/tango-prometheus-exporter/lofar2-policy.json",
+    )
     CONFIG = tpc.ArchiverPolicy.load_config(config_path)
 
     def test_config_file(self):
-        """ Test if policy config file is correctly retrieved """
-        empty_policy = tpc.ArchiverPolicy()                         # empty config file
-        expected_config = {'default': {}, 'devices': {}}
+        """Test if policy config file is correctly retrieved"""
+        empty_policy = tpc.ArchiverPolicy()  # empty config file
+        expected_config = {"default": {}, "devices": {}}
         self.assertEqual(empty_policy.config, expected_config)
         policy = tpc.ArchiverPolicy(self.CONFIG)
-        self.assertEqual([*policy.config], ['default', 'devices'])  # dict keys
-
+        self.assertEqual([*policy.config], ["default", "devices"])  # dict keys
diff --git a/tangostationcontrol/tangostationcontrol/test/statistics/__init__.py b/tangostationcontrol/tangostationcontrol/test/statistics/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/test/statistics/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/test/statistics/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/test/statistics/test_writer.py b/tangostationcontrol/tangostationcontrol/test/statistics/test_writer.py
index 9b73607fafa546aa0cc0db63c1569aebdff99114..b58bc4a03062d272eb959d628a3cdfd195d13a19 100644
--- a/tangostationcontrol/tangostationcontrol/test/statistics/test_writer.py
+++ b/tangostationcontrol/tangostationcontrol/test/statistics/test_writer.py
@@ -1,19 +1,13 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import sys
 from os.path import dirname, isfile
 from tempfile import TemporaryDirectory
 from unittest import mock
 
-from tangostationcontrol.test import base
 from tangostationcontrol.statistics.writer import entry
+from tangostationcontrol.test import base
 
 
 class TestStatisticsWriter(base.TestCase):
@@ -24,11 +18,14 @@ class TestStatisticsWriter(base.TestCase):
         with TemporaryDirectory() as tmpdir:
             new_sys_argv = [
                 sys.argv[0],
-                "--mode", "XST",
-                "--file", dirname(__file__) + "/SDP_XST_statistics_packets.bin",
-                "--output_dir", tmpdir
+                "--mode",
+                "XST",
+                "--file",
+                dirname(__file__) + "/SDP_XST_statistics_packets.bin",
+                "--output_dir",
+                tmpdir,
             ]
-            with mock.patch.object(entry.sys, 'argv', new_sys_argv):
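+            # Run the writer with the patched argv; it is expected to terminate via SystemExit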
+            with mock.patch.object(entry.sys, "argv", new_sys_argv):
                 with self.assertRaises(SystemExit):
                     entry.main()
 
@@ -39,11 +36,14 @@ class TestStatisticsWriter(base.TestCase):
         with TemporaryDirectory() as tmpdir:
             new_sys_argv = [
                 sys.argv[0],
-                "--mode", "XST",
-                "--file", dirname(__file__) + "/SDP_XST_statistics_packets_multiple_subbands.bin",
-                "--output_dir", tmpdir
+                "--mode",
+                "XST",
+                "--file",
+                dirname(__file__) + "/SDP_XST_statistics_packets_multiple_subbands.bin",
+                "--output_dir",
+                tmpdir,
             ]
-            with mock.patch.object(entry.sys, 'argv', new_sys_argv):
+            with mock.patch.object(entry.sys, "argv", new_sys_argv):
                 with self.assertRaises(SystemExit):
                     entry.main()
 
@@ -55,11 +55,14 @@ class TestStatisticsWriter(base.TestCase):
         with TemporaryDirectory() as tmpdir:
             new_sys_argv = [
                 sys.argv[0],
-                "--mode", "BST",
-                "--file", dirname(__file__) + "/SDP_BST_statistics_packets.bin",
-                "--output_dir", tmpdir
+                "--mode",
+                "BST",
+                "--file",
+                dirname(__file__) + "/SDP_BST_statistics_packets.bin",
+                "--output_dir",
+                tmpdir,
             ]
-            with mock.patch.object(entry.sys, 'argv', new_sys_argv):
+            with mock.patch.object(entry.sys, "argv", new_sys_argv):
                 with self.assertRaises(SystemExit):
                     entry.main()
 
diff --git a/tangostationcontrol/tangostationcontrol/test/toolkit/__init__.py b/tangostationcontrol/tangostationcontrol/test/toolkit/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/test/toolkit/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/test/toolkit/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_config_file.py b/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_config_file.py
index 9dde30edebcf991fb335bf6643c071188f564acd..b174e6efc02779be6dfb8ba36ce6a29012191a38 100644
--- a/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_config_file.py
+++ b/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_config_file.py
@@ -1,22 +1,28 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from tangostationcontrol.test import base
 import json
+
 import pkg_resources
+from tangostationcontrol.test import base
+from tangostationcontrol.toolkit.archiver_configurator import (
+    get_global_env_parameters,
+    get_parameters_from_attribute,
+)
 
-from tangostationcontrol.toolkit.archiver_configurator import get_global_env_parameters, get_parameters_from_attribute
 
 class TestArchiverConfigFile(base.TestCase):
 
-    dev_config_dict = json.load(pkg_resources.resource_stream('tangostationcontrol.toolkit', f'archiver_config/lofar2_dev.json'))
-    prod_config_dict = json.load(pkg_resources.resource_stream('tangostationcontrol.toolkit', f'archiver_config/lofar2_prod.json'))
+    dev_config_dict = json.load(
+        pkg_resources.resource_stream(
+            "tangostationcontrol.toolkit", f"archiver_config/lofar2_dev.json"
+        )
+    )
+    prod_config_dict = json.load(
+        pkg_resources.resource_stream(
+            "tangostationcontrol.toolkit", f"archiver_config/lofar2_prod.json"
+        )
+    )
 
     def test_separate_config_files(self):
         self.assertIsNotNone(self.dev_config_dict)
@@ -25,28 +31,49 @@ class TestArchiverConfigFile(base.TestCase):
     def test_get_global_variables(self):
         config_dicts = [self.dev_config_dict, self.prod_config_dict]
         for d in config_dicts:
-            polling_time, archive_abs_change, archive_rel_change, archive_period, event_period, strategy = get_global_env_parameters(d)
-            self.assertEqual(type(polling_time),int)
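+            # The config file provides the global archiving defaults; check that each has the expected type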
+            (
+                polling_time,
+                archive_abs_change,
+                archive_rel_change,
+                archive_period,
+                event_period,
+                strategy,
+            ) = get_global_env_parameters(d)
+            self.assertEqual(type(polling_time), int)
             self.assertEqual(type(archive_abs_change), int)
             self.assertEqual(f"{type(archive_rel_change)}", f"<class 'NoneType'>")
-            self.assertEqual(type(archive_period),int)
-            self.assertEqual(type(event_period),int)
-            self.assertEqual(type(strategy),str)
+            self.assertEqual(type(archive_period), int)
+            self.assertEqual(type(event_period), int)
+            self.assertEqual(type(strategy), str)
 
     def test_get_parameters_from_infixes_list(self):
-        device_name = 'STAT/RECV/1'
-        attribute_name = 'RCU_TEMP_R'
-        archive_period, event_period, abs_change, rel_change = get_parameters_from_attribute(device_name, attribute_name, self.prod_config_dict)
-        self.assertEqual(archive_period,60000)
-        self.assertEqual(event_period,1000)
-        self.assertEqual(abs_change,0.5)
-        self.assertEqual(rel_change,5.0)
+        device_name = "STAT/RECV/1"
+        attribute_name = "RCU_TEMP_R"
+        (
+            archive_period,
+            event_period,
+            abs_change,
+            rel_change,
+        ) = get_parameters_from_attribute(
+            device_name, attribute_name, self.prod_config_dict
+        )
+        self.assertEqual(archive_period, 60000)
+        self.assertEqual(event_period, 1000)
+        self.assertEqual(abs_change, 0.5)
+        self.assertEqual(rel_change, 5.0)
 
     def test_get_parameters_from_suffixes_list(self):
-        device_name = 'STAT/RECV/1'
-        attribute_name = 'RECVTR_I2C_error_R'
-        archive_period, event_period, abs_change, rel_change = get_parameters_from_attribute(device_name, attribute_name, self.prod_config_dict)
-        self.assertEqual(archive_period,60000)
-        self.assertEqual(event_period,1000)
-        self.assertEqual(abs_change,1)
-        self.assertEqual(rel_change,None)
+        device_name = "STAT/RECV/1"
+        attribute_name = "RECVTR_I2C_error_R"
+        (
+            archive_period,
+            event_period,
+            abs_change,
+            rel_change,
+        ) = get_parameters_from_attribute(
+            device_name, attribute_name, self.prod_config_dict
+        )
+        self.assertEqual(archive_period, 60000)
+        self.assertEqual(event_period, 1000)
+        self.assertEqual(abs_change, 1)
+        self.assertEqual(rel_change, None)
diff --git a/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_configurator.py b/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_configurator.py
index 3b2227cc156ff2ee3f9eaad26af681c2bde2b5db..6630bcb9a3d73662a031e8350ed53384705d39f5 100644
--- a/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_configurator.py
+++ b/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_configurator.py
@@ -1,53 +1,87 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from tangostationcontrol.test import base
-from tangostationcontrol.toolkit.archiver_configurator import get_parameters_from_attribute, get_global_env_parameters
 import json
+
 import pkg_resources
+from tangostationcontrol.test import base
+from tangostationcontrol.toolkit.archiver_configurator import (
+    get_parameters_from_attribute,
+    get_global_env_parameters,
+)
+
 
 class TestArchiverConfigurator(base.TestCase):
 
-    DEVICE_NAME = 'STAT/RECV/1'
-    ATTRIBUTE_NAME = 'ant_mask_rw'
-    PROD_CONFIG_DICT = json.load(pkg_resources.resource_stream('tangostationcontrol.toolkit', f'archiver_config/lofar2_prod.json'))
-    DEV_CONFIG_DICT = json.load(pkg_resources.resource_stream('tangostationcontrol.toolkit', f'archiver_config/lofar2_dev.json'))
-    DEV_SUFFIXES = DEV_CONFIG_DICT['global']['suffixes']
-    PROD_SUFFIXES = PROD_CONFIG_DICT['global']['suffixes']
-    PROD_INFIXES =  PROD_CONFIG_DICT['global']['infixes']
+    DEVICE_NAME = "STAT/RECV/1"
+    ATTRIBUTE_NAME = "ant_mask_rw"
+    PROD_CONFIG_DICT = json.load(
+        pkg_resources.resource_stream(
+            "tangostationcontrol.toolkit", f"archiver_config/lofar2_prod.json"
+        )
+    )
+    DEV_CONFIG_DICT = json.load(
+        pkg_resources.resource_stream(
+            "tangostationcontrol.toolkit", f"archiver_config/lofar2_dev.json"
+        )
+    )
+    DEV_SUFFIXES = DEV_CONFIG_DICT["global"]["suffixes"]
+    PROD_SUFFIXES = PROD_CONFIG_DICT["global"]["suffixes"]
+    PROD_INFIXES = PROD_CONFIG_DICT["global"]["infixes"]
 
     def test_get_parameters_from_attribute(self):
         """Test if the attribute archiving parameters are correctly retrieved from the JSON config file"""
         self.assertIsNotNone(self.DEV_CONFIG_DICT)
-        archive_period, event_period, abs_change, rel_change = get_parameters_from_attribute(self.DEVICE_NAME, self.ATTRIBUTE_NAME, 
-                                                                    self.DEV_CONFIG_DICT)
-        self.assertEqual(archive_period,int(self.DEV_SUFFIXES[2]['archive_period']))
-        self.assertEqual(event_period,int(self.DEV_SUFFIXES[2]['event_period']))
-        self.assertEqual(abs_change,float(self.DEV_SUFFIXES[2]['abs_change']))
-        self.assertEqual(rel_change, self.DEV_SUFFIXES[2]['rel_change'] and int(self.DEV_SUFFIXES[2]['rel_change']))
+        (
+            archive_period,
+            event_period,
+            abs_change,
+            rel_change,
+        ) = get_parameters_from_attribute(
+            self.DEVICE_NAME, self.ATTRIBUTE_NAME, self.DEV_CONFIG_DICT
+        )
+        self.assertEqual(archive_period, int(self.DEV_SUFFIXES[2]["archive_period"]))
+        self.assertEqual(event_period, int(self.DEV_SUFFIXES[2]["event_period"]))
+        self.assertEqual(abs_change, float(self.DEV_SUFFIXES[2]["abs_change"]))
+        self.assertEqual(
+            rel_change,
+            self.DEV_SUFFIXES[2]["rel_change"]
+            and int(self.DEV_SUFFIXES[2]["rel_change"]),
+        )
 
         """Test if the attribute archiving parameters are correctly retrieved from the infixes list (production environment)"""
-        attribute_name = 'rcu_temp_r'   # 'TEMP' is in the infixes list
-        archive_period, event_period, abs_change, rel_change = get_parameters_from_attribute(self.DEVICE_NAME, attribute_name, 
-                                                                    self.PROD_CONFIG_DICT)
-        self.assertEqual(archive_period,int(self.PROD_INFIXES[2]['archive_period']))
-        self.assertEqual(event_period,int(self.PROD_INFIXES[2]['event_period']))
-        self.assertEqual(abs_change,float(self.PROD_INFIXES[2]['abs_change']))
-        self.assertEqual(rel_change,self.PROD_INFIXES[2]['rel_change'] and int(self.PROD_INFIXES[2]['rel_change']))
+        attribute_name = "rcu_temp_r"  # 'TEMP' is in the infixes list
+        (
+            archive_period,
+            event_period,
+            abs_change,
+            rel_change,
+        ) = get_parameters_from_attribute(
+            self.DEVICE_NAME, attribute_name, self.PROD_CONFIG_DICT
+        )
+        self.assertEqual(archive_period, int(self.PROD_INFIXES[2]["archive_period"]))
+        self.assertEqual(event_period, int(self.PROD_INFIXES[2]["event_period"]))
+        self.assertEqual(abs_change, float(self.PROD_INFIXES[2]["abs_change"]))
+        self.assertEqual(
+            rel_change,
+            self.PROD_INFIXES[2]["rel_change"]
+            and int(self.PROD_INFIXES[2]["rel_change"]),
+        )
 
     def test_get_global_env_parameters(self):
         """Test if the include attribute list is correctly retrieved from the JSON config file"""
         self.assertIsNotNone(self.PROD_CONFIG_DICT)
-        polling_time, archive_abs_change, archive_rel_change, archive_period, event_period, strategy = get_global_env_parameters(self.PROD_CONFIG_DICT)
-        self.assertEqual(type(polling_time),int)
+        (
+            polling_time,
+            archive_abs_change,
+            archive_rel_change,
+            archive_period,
+            event_period,
+            strategy,
+        ) = get_global_env_parameters(self.PROD_CONFIG_DICT)
+        self.assertEqual(type(polling_time), int)
         self.assertEqual(type(archive_abs_change), int)
         self.assertEqual(f"{type(archive_rel_change)}", f"<class 'NoneType'>")
-        self.assertEqual(type(archive_period),int)
-        self.assertEqual(type(event_period),int)
-        self.assertEqual(type(strategy),str)
+        self.assertEqual(type(archive_period), int)
+        self.assertEqual(type(event_period), int)
+        self.assertEqual(type(strategy), str)
diff --git a/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_util.py b/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_util.py
index 2fbe8c69fc09decdbc4849efcd9b581f0483eb36..8f51aba17ff28f68840e2738f5a9aec582e5767d 100644
--- a/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_util.py
+++ b/tangostationcontrol/tangostationcontrol/test/toolkit/test_archiver_util.py
@@ -1,44 +1,56 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 from tangostationcontrol.test import base
-from tangostationcontrol.toolkit.archiver_util import get_attribute_from_fqdn, split_tango_name, device_fqdn, attribute_fqdn, get_size_from_datatype
+from tangostationcontrol.toolkit.archiver_util import (
+    get_attribute_from_fqdn,
+    split_tango_name,
+    device_fqdn,
+    attribute_fqdn,
+    get_size_from_datatype,
+)
+
 
 class TestArchiverUtil(base.TestCase):
 
-    DEVICE_NAME = 'STAT/RECV/1'
-    ATTRIBUTE_NAME = 'ant_mask_rw'
+    DEVICE_NAME = "STAT/RECV/1"
+    ATTRIBUTE_NAME = "ant_mask_rw"
 
     def test_get_attribute_from_fqdn(self):
         """Test if a Tango attribute name is correctly retrieved from a Tango FQDN"""
         fqdn = f"tango://databaseds:10000/{self.DEVICE_NAME}/{self.ATTRIBUTE_NAME}"
-        self.assertEqual('STAT/RECV/1/ant_mask_rw', get_attribute_from_fqdn(fqdn))
+        self.assertEqual("STAT/RECV/1/ant_mask_rw", get_attribute_from_fqdn(fqdn))
 
     def test_device_fqdn(self):
         """Test if a device name is correctly converted in a Tango FQDN"""
-        self.assertEqual(f"tango://databaseds:10000/{self.DEVICE_NAME}".lower(), device_fqdn(self.DEVICE_NAME, "databaseds:10000"))
+        self.assertEqual(
+            f"tango://databaseds:10000/{self.DEVICE_NAME}".lower(),
+            device_fqdn(self.DEVICE_NAME, "databaseds:10000"),
+        )
 
     def test_attribute_fqdn(self):
         """Test if an attribute name is correctly converted in a Tango FQDN"""
-        self.assertEqual(f"tango://databaseds:10000/{self.DEVICE_NAME}/{self.ATTRIBUTE_NAME}".lower(), 
-                            attribute_fqdn(f"{self.DEVICE_NAME}/{self.ATTRIBUTE_NAME}", "databaseds:10000"))
+        self.assertEqual(
+            f"tango://databaseds:10000/{self.DEVICE_NAME}/{self.ATTRIBUTE_NAME}".lower(),
+            attribute_fqdn(
+                f"{self.DEVICE_NAME}/{self.ATTRIBUTE_NAME}", "databaseds:10000"
+            ),
+        )
         self.assertRaises(ValueError, lambda: attribute_fqdn(self.ATTRIBUTE_NAME))
 
     def test_split_tango_name(self):
         """Test if the Tango full qualified domain names are correctly splitted"""
-        self.assertEqual(('STAT','RECV','1'), split_tango_name(self.DEVICE_NAME, 'device'))
-        self.assertEqual(('STAT','RECV','1', 'ant_mask_rw'), split_tango_name(f"{self.DEVICE_NAME}/{self.ATTRIBUTE_NAME}", 'attribute'))
+        self.assertEqual(
+            ("STAT", "RECV", "1"), split_tango_name(self.DEVICE_NAME, "device")
+        )
+        self.assertEqual(
+            ("STAT", "RECV", "1", "ant_mask_rw"),
+            split_tango_name(f"{self.DEVICE_NAME}/{self.ATTRIBUTE_NAME}", "attribute"),
+        )
 
     def test_get_size_from_datatype(self):
         """Test if the bytesize of a certain datatype is correctly retrieved"""
-        datatype_boolean = 1    # 1 byte
+        datatype_boolean = 1  # 1 byte
         self.assertEqual(1, get_size_from_datatype(datatype_boolean))
-        datatype_double = 5     # 8 bytes
+        datatype_double = 5  # 8 bytes
         self.assertEqual(8, get_size_from_datatype(datatype_double))
-
diff --git a/tangostationcontrol/tangostationcontrol/test/toolkit/test_mib_compiler.py b/tangostationcontrol/tangostationcontrol/test/toolkit/test_mib_compiler.py
index 8641f6483f04ef9e21c27b5bcaaaa4aff4f6587d..36c723c0751d74c83003ef65a30febb231f38e77 100644
--- a/tangostationcontrol/tangostationcontrol/test/toolkit/test_mib_compiler.py
+++ b/tangostationcontrol/tangostationcontrol/test/toolkit/test_mib_compiler.py
@@ -1,29 +1,30 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
-
-from tangostationcontrol.test import base
-from tangostationcontrol.toolkit.mib_compiler import mib_compiler
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import sys
-from os.path import isfile
 from os import getcwd
+from os.path import isfile
 from tempfile import TemporaryDirectory
 from unittest import mock
 
+from tangostationcontrol.test import base
+from tangostationcontrol.toolkit.mib_compiler import mib_compiler
+
+
 class TestCompiler(base.TestCase):
     def test_compile(self):
-
         with TemporaryDirectory() as tmpdir:
-            new_sys_argv = [sys.argv[0], "--mibs", "TEST-MIB",
-                            "--source", f"{getcwd()}/tangostationcontrol/toolkit/mib_compiler/mibs",
-                            "--destination", f"{tmpdir}", "-v"]
-            with mock.patch.object(mib_compiler.sys, 'argv', new_sys_argv):
+            new_sys_argv = [
+                sys.argv[0],
+                "--mibs",
+                "TEST-MIB",
+                "--source",
+                f"{getcwd()}/tangostationcontrol/toolkit/mib_compiler/mibs",
+                "--destination",
+                f"{tmpdir}",
+                "-v",
+            ]
+            with mock.patch.object(mib_compiler.sys, "argv", new_sys_argv):
                 with self.assertRaises(SystemExit):
                     mib_compiler.main()
 
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/__init__.py b/tangostationcontrol/tangostationcontrol/toolkit/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver.py
index aaf013ce2df0f3f9f2d1e39bc949da4489acbd8c..7b66d194c37785a274bb0546e3f908d245440a8c 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/archiver.py
@@ -1,79 +1,117 @@
 #! /usr/bin/env python3
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
+import json
 import logging
-
-from tango import DeviceProxy, AttributeProxy, DevState, DevFailed
-from tangostationcontrol.toolkit.archiver_util import get_db_config, device_fqdn, attribute_fqdn, get_size_from_datatype, filter_attribute_list
-from tangostationcontrol.toolkit.archiver_configurator import get_parameters_from_attribute, get_include_attribute_list, get_exclude_attribute_list, get_global_env_parameters, get_multimember_devices
-
-import time
 import re
-import json
-import pkg_resources
+import time
 from functools import wraps
 
+import pkg_resources
+from tango import DeviceProxy, AttributeProxy, DevState, DevFailed
+from tangostationcontrol.toolkit.archiver_configurator import (
+    get_parameters_from_attribute,
+    get_include_attribute_list,
+    get_exclude_attribute_list,
+    get_global_env_parameters,
+    get_multimember_devices,
+)
+from tangostationcontrol.toolkit.archiver_util import (
+    get_db_config,
+    device_fqdn,
+    attribute_fqdn,
+    get_size_from_datatype,
+    filter_attribute_list,
+)
+
 logger = logging.getLogger()
 
+
 def warn_if_attribute_not_found():
     """
-      Log a warning if an exception is thrown indicating access to an non-existing attribute
-      was requested, and swallow the exception.
+    Log a warning if an exception is thrown indicating access to a non-existent attribute
+    was requested, and swallow the exception.
     """
+
     def inner(func):
         @wraps(func)
         def warn_wrapper(self, attribute_name, *args, **kwargs):
             try:
                 return func(self, attribute_name, *args, **kwargs)
             except DevFailed as e:
-                if e.args[0].reason in ['Attribute not found', 'BadSignalName', 'API_AttrNotFound']:
-                    logger.warning(f"Attribute {attribute_name} not found: {e.args[0].desc}")
+                if e.args[0].reason in [
+                    "Attribute not found",
+                    "BadSignalName",
+                    "API_AttrNotFound",
+                ]:
+                    logger.warning(
+                        f"Attribute {attribute_name} not found: {e.args[0].desc}"
+                    )
                 else:
                     raise
+
         return warn_wrapper
+
     return inner
 
+
 def warn_if_device_not_connected():
     """
-      Log a warning if an exception is thrown indicating access to an non-connected device
+    Log a warning if an exception is thrown indicating access to a non-connected device
     """
+
     def inner(func):
         @wraps(func)
         def warn_wrapper(self, attribute_name, *args, **kwargs):
             try:
                 return func(self, attribute_name, *args, **kwargs)
             except DevFailed as e:
-                if 'API_CantConnectToDevice' in str(e):
-                    logger.warning(f"Attribute {attribute_name} not reachable: {e.args[0].desc}")
+                if "API_CantConnectToDevice" in str(e):
+                    logger.warning(
+                        f"Attribute {attribute_name} not reachable: {e.args[0].desc}"
+                    )
                 else:
                     raise
+
         return warn_wrapper
+
     return inner
 
-class Archiver():
+
+class Archiver:
     """
     The Archiver class implements the basic operations to perform attributes archiving
     """
 
-    # Global environment variables set by configuration file 
+    # Global environment variables set by configuration file
     GLOBAL_POLLING_TIME = 1000
     GLOBAL_ARCHIVE_ABS_CHANGE = 1
     GLOBAL_ARCHIVE_REL_CHANGE = None
-    GLOBAL_ARCHIVE_PERIOD = 10000   # 3600000 (prod)
-    GLOBAL_EVENT_PERIOD = 1000      # 60000 (prod)
-    GLOBAL_STRATEGY = 'RUN'
+    GLOBAL_ARCHIVE_PERIOD = 10000  # 3600000 (prod)
+    GLOBAL_EVENT_PERIOD = 1000  # 60000 (prod)
+    GLOBAL_STRATEGY = "RUN"
 
-    def __init__(self, cm_name: str = 'archiving/hdbppts/confmanager01', context: str = 'RUN'):
+    def __init__(
+        self, cm_name: str = "archiving/hdbppts/confmanager01", context: str = "RUN"
+    ):
         self.cm_name = cm_name
         self.cm = DeviceProxy(cm_name)
-        try: 
+        try:
             if self.cm.state() == DevState.FAULT:
-                raise Exception(f"Configuration Manager {cm_name} is in FAULT state: {self.cm.status()}")
+                raise Exception(
+                    f"Configuration Manager {cm_name} is in FAULT state: {self.cm.status()}"
+                )
         except Exception as e:
-            raise Exception(f"Connection failed with Configuration Manager {cm_name}") from e
+            raise Exception(
+                f"Connection failed with Configuration Manager {cm_name}"
+            ) from e
         self.es_list = [es_name for es_name in self.get_subscribers(from_db=False)]
-        self.cm.write_attribute('Context',context)    # Set default Context Archiving for all the subscribers
+        self.cm.write_attribute(
+            "Context", context
+        )  # Set default Context Archiving for all the subscribers
 
-    def get_hdbpp_libname(self, device_name:str):
+    def get_hdbpp_libname(self, device_name: str):
         """
         Get the hdbpp library name used by the Configuration Manager or by the EventSubscribers
         Useful in the case of different DBMS architectures (e.g. MySQL, TimescaleDB)
@@ -81,14 +119,14 @@ class Archiver():
         config = get_db_config(device_name)
         return config["libname"]
 
-    def get_subscribers(self, from_db:bool=False):
+    def get_subscribers(self, from_db: bool = False):
         """
         Get the list of Event Subscribers managed by the Configuration Manager.
         It can be retrieved as a device property (stored in TangoDB) or as a device attribute.
         Choose from_db=True only if new subscribers are not added dynamically while ConfManager is running.
         """
         if from_db:
-            es_list = self.cm.get_property('ArchiverList')['ArchiverList'] or []
+            es_list = self.cm.get_property("ArchiverList")["ArchiverList"] or []
         else:
             es_list = self.cm.ArchiverList or []
         return es_list
@@ -102,19 +140,21 @@ class Archiver():
         # Only one subscriber in ConfManager list
         if len(es_list) == 1:
             return es_list[0]
-        else :
+        else:
             # Choose the best subscriber analysing their load
             load_dict = {}
             for es_name in es_list:
                 es = DeviceProxy(es_name)
-                load_dict[es_name]=float(es.AttributeRecordFreq or 0) 
+                load_dict[es_name] = float(es.AttributeRecordFreq or 0)
             # Return the subscriber's name with min load
-            min_es = min(load_dict,key=load_dict.get)
+            min_es = min(load_dict, key=load_dict.get)
             return min_es
 
-    def get_configuration(self, resource: str = 'lofar2_dev') -> dict:
-        """ Read an archiver configuration from one of the preinstalled resources in archiver_config. """
-        resource = pkg_resources.resource_stream(__name__, f'archiver_config/{resource}.json')
+    def get_configuration(self, resource: str = "lofar2_dev") -> dict:
+        """Read an archiver configuration from one of the preinstalled resources in archiver_config."""
+        resource = pkg_resources.resource_stream(
+            __name__, f"archiver_config/{resource}.json"
+        )
         return json.load(resource)
 
     def apply_configuration(self, config_dict: dict):
@@ -122,66 +162,88 @@ class Archiver():
         Apply the customized strategy defined by the given archiver configuration.
         """
         # Set global development env variables
-        self.GLOBAL_POLLING_TIME, self.GLOBAL_ARCHIVE_ABS_CHANGE, self.GLOBAL_ARCHIVE_REL_CHANGE, self.GLOBAL_ARCHIVE_PERIOD, self.GLOBAL_EVENT_PERIOD, self.GLOBAL_STRATEGY = get_global_env_parameters(config_dict)
+        (
+            self.GLOBAL_POLLING_TIME,
+            self.GLOBAL_ARCHIVE_ABS_CHANGE,
+            self.GLOBAL_ARCHIVE_REL_CHANGE,
+            self.GLOBAL_ARCHIVE_PERIOD,
+            self.GLOBAL_EVENT_PERIOD,
+            self.GLOBAL_STRATEGY,
+        ) = get_global_env_parameters(config_dict)
         # Set devices archiving
-        env_dict = config_dict['devices']
+        env_dict = config_dict["devices"]
         # Check if device has more than one member (domain/family/*)
         multimember_devices_dict = get_multimember_devices(env_dict)
         # Merge the two configuration dictionaries
         extended_env_dict = {**env_dict, **multimember_devices_dict}
-        extended_config_dict = config_dict.copy()   # Copy to preserve original dict
-        extended_config_dict['devices'] = extended_env_dict
+        extended_config_dict = config_dict.copy()  # Copy to preserve original dict
+        extended_config_dict["devices"] = extended_env_dict
         for device in extended_env_dict:
             try:
                 # DEV environment -> all attributes are excluded by default
                 # PROD environment -> all attributes are included by default
-                if not device.endswith('*'):
+                if not device.endswith("*"):
                     self.configure_device(extended_config_dict, device)
             except Exception as e:
-                if 'API_DeviceNotExported' in str(e):   # ignore if device is offline
+                if "API_DeviceNotExported" in str(e):  # ignore if device is offline
                     logger.warning(f"Device {device} offline")
-                elif 'API_CantConnectToDevice' in str(e):
+                elif "API_CantConnectToDevice" in str(e):
                     logger.warning(f"Device {device} not found")
-                elif 'DB_DeviceNotDefined' in str(e):
+                elif "DB_DeviceNotDefined" in str(e):
                     logger.warning(f"Device {device} not defined in TangoDB")
                 else:
                     raise Exception from e
 
-    def configure_device(self, config_dict:dict, device:str):
+    def configure_device(self, config_dict: dict, device: str):
         """
         Procedure that enables the Archiving configuration for a certain device as defined in the config file
         """
         # Cleanup the subscriber
-        #self.remove_attributes_by_device(device)
+        # self.remove_attributes_by_device(device)
         # Attributes to be included in archiving strategy with specific parameters
         include_att_list = get_include_attribute_list(device, config_dict)
         # Attributes to be excluded from archiving
         exclude_att_list = get_exclude_attribute_list(device, config_dict)
         exclude_att_list = [a for a in exclude_att_list if a not in include_att_list]
-        try:        
+        try:
             for att in include_att_list:
                 # Retrieve specific attribute parameters from config file
-                archive_period, event_period, abs_change, rel_change = get_parameters_from_attribute(device,att,config_dict)
+                (
+                    archive_period,
+                    event_period,
+                    abs_change,
+                    rel_change,
+                ) = get_parameters_from_attribute(device, att, config_dict)
                 att_fqname = attribute_fqdn(att)
                 # Add the attribute to the archiver setting either specific or global parameters
-                self.add_attribute_to_archiver(att_fqname, self.GLOBAL_POLLING_TIME, archive_period or self.GLOBAL_ARCHIVE_PERIOD, 
-                                                self.GLOBAL_STRATEGY, abs_change or self.GLOBAL_ARCHIVE_ABS_CHANGE, 
-                                                rel_change or self.GLOBAL_ARCHIVE_REL_CHANGE)
-            self.add_attributes_by_device(device, self.GLOBAL_ARCHIVE_PERIOD, self.GLOBAL_ARCHIVE_ABS_CHANGE, 
-                                                self.GLOBAL_ARCHIVE_REL_CHANGE, exclude=exclude_att_list)
+                self.add_attribute_to_archiver(
+                    att_fqname,
+                    self.GLOBAL_POLLING_TIME,
+                    archive_period or self.GLOBAL_ARCHIVE_PERIOD,
+                    self.GLOBAL_STRATEGY,
+                    abs_change or self.GLOBAL_ARCHIVE_ABS_CHANGE,
+                    rel_change or self.GLOBAL_ARCHIVE_REL_CHANGE,
+                )
+            self.add_attributes_by_device(
+                device,
+                self.GLOBAL_ARCHIVE_PERIOD,
+                self.GLOBAL_ARCHIVE_ABS_CHANGE,
+                self.GLOBAL_ARCHIVE_REL_CHANGE,
+                exclude=exclude_att_list,
+            )
             # Remove attributes by custom configuration if already present
             # The following cycle is a security check in the special case that an attribute is in the
-            # included list in DEV mode, and in the excluded list in PROD mode             
+            # included list in DEV mode, and in the excluded list in PROD mode
             for att in exclude_att_list:
                 att_fqname = attribute_fqdn(att)
                 self.remove_attribute_from_archiver(att_fqname)
         except DevFailed as e:
-            if 'already subscribed' in str(e):
+            if "already subscribed" in str(e):
                 logger.warning(f"Multiple entries of Attribute {att} in config file")
             else:
                 raise
 
-    def add_event_subscriber(self, es_name:str=None):
+    def add_event_subscriber(self, es_name: str = None):
         """
         Add an additional Event Subscriber to the Configuration Manager
         """
@@ -189,52 +251,73 @@ class Archiver():
         if es_name is None:
             last_es_name = self.get_subscribers()[-1]
             last_es_idx = int(last_es_name[-2:])
-            es_name = last_es_name[:-2]+'0'+str(last_es_idx+1)   
-        try: 
+            es_name = last_es_name[:-2] + "0" + str(last_es_idx + 1)
+        try:
             es = DeviceProxy(es_name)
             if es.state() == DevState.FAULT:
                 raise Exception(f"Event Subscriber {es_name} is in FAULT state")
             self.cm.ArchiverAdd(device_fqdn(es_name))
         except DevFailed as e:
             if e.args[0].reason == "Archiver already present":
-                logger.warning(f"Event Subscriber {es_name} already present in Configuration Manager")
+                logger.warning(
+                    f"Event Subscriber {es_name} already present in Configuration Manager"
+                )
             else:
                 raise
 
     @warn_if_attribute_not_found()
     @warn_if_device_not_connected()
-    def add_attribute_to_archiver(self, attribute_name: str, polling_period: int, archive_event_period: int, strategy: str = 'RUN', 
-                                        abs_change: int = 1, rel_change: int = None, es_name:str=None):
-        """
-        Takes as input the attribute name, polling period (ms), event period (ms) and archiving strategy, 
+    def add_attribute_to_archiver(
+        self,
+        attribute_name: str,
+        polling_period: int,
+        archive_event_period: int,
+        strategy: str = "RUN",
+        abs_change: int = 1,
+        rel_change: int = None,
+        es_name: str = None,
+    ):
+        """
+        Takes as input the attribute name, polling period (ms), event period (ms) and archiving strategy,
         and adds the selected attribute to the subscriber's list of archiving attributes.
         The ConfigurationManager and EventSubscriber devices must be already up and running.
         The archiving-DBMS must be already properly configured.
         """
         attribute_name = attribute_fqdn(attribute_name)
         try:
-            self.cm.write_attribute('SetAttributeName', attribute_name)
-            self.cm.write_attribute('SetArchiver', es_name or self.get_next_subscriber())
-            self.cm.write_attribute('SetStrategy', strategy)
-            self.cm.write_attribute('SetPollingPeriod', polling_period)
-            self.cm.write_attribute('SetPeriodEvent', archive_event_period)
-            if abs_change is not None: self.cm.write_attribute('SetAbsoluteEvent', abs_change)
-            if rel_change is not None: self.cm.write_attribute('SetRelativeEvent', rel_change)
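+            # Stage the archiving parameters on the ConfigurationManager, then commit them with AttributeAdd()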
+            self.cm.write_attribute("SetAttributeName", attribute_name)
+            self.cm.write_attribute(
+                "SetArchiver", es_name or self.get_next_subscriber()
+            )
+            self.cm.write_attribute("SetStrategy", strategy)
+            self.cm.write_attribute("SetPollingPeriod", polling_period)
+            self.cm.write_attribute("SetPeriodEvent", archive_event_period)
+            if abs_change is not None:
+                self.cm.write_attribute("SetAbsoluteEvent", abs_change)
+            if rel_change is not None:
+                self.cm.write_attribute("SetRelativeEvent", rel_change)
             self.cm.AttributeAdd()
             logger.info(f"Attribute {attribute_name} added to archiving list!")
         except DevFailed as e:
-            if e.args[0].reason == 'Already archived' or 'already subscribed' in str(e) :
+            if e.args[0].reason == "Already archived" or "already subscribed" in str(e):
                 logger.warning(f"Attribute {attribute_name} already in archiving list!")
             else:
                 raise
 
-    def add_attributes_by_device(self, device_name, global_archive_period:int = None, global_abs_change:int = 1,
-                                        global_rel_change:int = None, es_name:str=None, exclude:list = None):
+    def add_attributes_by_device(
+        self,
+        device_name,
+        global_archive_period: int = None,
+        global_abs_change: int = 1,
+        global_rel_change: int = None,
+        es_name: str = None,
+        exclude: list = None,
+    ):
         """
         Add sequentially all the attributes of the selected device in the event subscriber list, if not already present
         """
         if not exclude:
-            """ B006 Do not use mutable data structures for argument defaults.
+            """B006 Do not use mutable data structures for argument defaults.
             They are created during function definition time. All calls to the
             function reuse this one instance of that data structure,
             persisting changes between them"""
@@ -244,26 +327,44 @@ class Archiver():
         for a in attrs_list:
             attr_fullname = attribute_fqdn(f"{device_name}/{a}")
             attr_proxy = AttributeProxy(attr_fullname)
-            if attr_proxy.is_polled() and not self.is_attribute_archived(attr_fullname):   # if not polled, attribute is also not archived
+            if attr_proxy.is_polled() and not self.is_attribute_archived(
+                attr_fullname
+            ):  # if not polled, attribute is also not archived
                 try:
-                    es = DeviceProxy(es_name or self.get_next_subscriber()) # choose an e.s. or get the first one available
-                    polling_period = attr_proxy.get_poll_period() or self.prod_polling_time  
-                    archive_period = global_archive_period or int(attr_proxy.get_property('archive_period')['archive_period'][0])                
+                    es = DeviceProxy(
+                        es_name or self.get_next_subscriber()
+                    )  # choose an e.s. or get the first one available
+                    polling_period = (
+                        attr_proxy.get_poll_period() or self.prod_polling_time
+                    )
+                    archive_period = global_archive_period or int(
+                        attr_proxy.get_property("archive_period")["archive_period"][0]
+                    )
                     abs_change = global_abs_change
                     rel_change = global_rel_change
-                    self.add_attribute_to_archiver(attr_fullname,polling_period=polling_period,
-                        archive_event_period = archive_period, abs_change=abs_change, rel_change=rel_change, es_name = es.name())
+                    self.add_attribute_to_archiver(
+                        attr_fullname,
+                        polling_period=polling_period,
+                        archive_event_period=archive_period,
+                        abs_change=abs_change,
+                        rel_change=rel_change,
+                        es_name=es.name(),
+                    )
                 except IndexError as e:
-                    logger.warning(f"Attribute {attr_fullname} will not be archived because archive event period is not defined!")
+                    logger.warning(
+                        f"Attribute {attr_fullname} will not be archived because archive event period is not defined!"
+                    )
                 except Exception as e:
-                    raise Exception from e                        
+                    raise Exception from e
             else:
-                logger.warning(f"Attribute {attr_fullname} will not be archived because polling is set to FALSE!")
+                logger.warning(
+                    f"Attribute {attr_fullname} will not be archived because polling is set to FALSE!"
+                )
 
     @warn_if_attribute_not_found()
-    def remove_attribute_from_archiver(self, attribute_name:str):
+    def remove_attribute_from_archiver(self, attribute_name: str):
         """
-        Stops the data archiving of the attribute passed as input, and remove it from the subscriber's list. 
+        Stops the data archiving of the attribute passed as input, and removes it from the subscriber's list.
         """
         attribute_name = attribute_fqdn(attribute_name)
         self.cm.AttributeStop(attribute_name)
@@ -271,13 +372,13 @@ class Archiver():
         logger.warning(f"Attribute {attribute_name} removed!")
 
     @warn_if_attribute_not_found()
-    def remove_attributes_by_device(self, device_name:str, exclude:list = None):
+    def remove_attributes_by_device(self, device_name: str, exclude: list = None):
         """
         Stops the data archiving of all the attributes of the selected device, and remove them from the
         subscriber's list
         """
         if not exclude:
-            """ B006 Do not use mutable data structures for argument defaults.
+            """B006 Do not use mutable data structures for argument defaults.
             They are created during function definition time. All calls to the
             function reuse this one instance of that data structure,
             persisting changes between them"""
@@ -288,18 +389,22 @@ class Archiver():
             archived_attrs = es.AttributeList or []
             exclude_list = [attribute_fqdn(a.lower()) for a in exclude]
             # Search the attributes in the EventSubscriber list from their device name
-            match = re.compile(f'.*{device_name.lower()}.*').match
-            attrs_list = [a.lower() for a in list(filter(match, archived_attrs)) if a.lower() not in exclude_list]
+            match = re.compile(f".*{device_name.lower()}.*").match
+            attrs_list = [
+                a.lower()
+                for a in list(filter(match, archived_attrs))
+                if a.lower() not in exclude_list
+            ]
             for a in attrs_list:
                 self.remove_attribute_from_archiver(a)
 
-    def remove_attributes_in_error(self, exclude:list = None, es_name:str=None):
+    def remove_attributes_in_error(self, exclude: list = None, es_name: str = None):
         """
         Remove from the subscribers list all the attributes currently in error (not being archived)
         """
 
         if not exclude:
-            """ B006 Do not use mutable data structures for argument defaults.
+            """B006 Do not use mutable data structures for argument defaults.
             They are created during function definition time. All calls to the
             function reuse this one instance of that data structure,
             persisting changes between them"""
@@ -308,17 +413,19 @@ class Archiver():
         if es_name is not None:
             es_list = [es_name]
         else:
-            es_list = self.get_subscribers()      
+            es_list = self.get_subscribers()
         for es_name in es_list:
             es = DeviceProxy(es_name)
             attributes_nok = es.AttributeNokList or []
-            exclude_list = [attribute_fqdn(a.lower()) for  a in exclude]
-            attrs_list = [a.lower() for a in list(attributes_nok) if a.lower() not in exclude_list]
+            exclude_list = [attribute_fqdn(a.lower()) for a in exclude]
+            attrs_list = [
+                a.lower() for a in list(attributes_nok) if a.lower() not in exclude_list
+            ]
             for a in attrs_list:
                 self.remove_attribute_from_archiver(a)
 
     @warn_if_attribute_not_found()
-    def start_archiving_attribute(self, attribute_name:str):
+    def start_archiving_attribute(self, attribute_name: str):
         """
         Starts the archiving of the attribute passed as input.
         The attribute must be already present in the subscriber's list
@@ -327,7 +434,7 @@ class Archiver():
         self.cm.AttributeStart(attribute_name)
 
     @warn_if_attribute_not_found()
-    def stop_archiving_attribute(self, attribute_name:str):
+    def stop_archiving_attribute(self, attribute_name: str):
         """
         Stops the archiving of the attribute passed as input.
         The attribute must be already present in the subscriber's list
@@ -335,7 +442,7 @@ class Archiver():
         attribute_name = attribute_fqdn(attribute_name)
         self.cm.AttributeStop(attribute_name)
 
-    def is_attribute_archived(self, attribute_name:str):
+    def is_attribute_archived(self, attribute_name: str):
         """
         Check if an attribute is in the archiving list
         """
@@ -346,19 +453,27 @@ class Archiver():
         # so check whether an exact match is included.
         return any(attribute_name == a for a in attributes)
 
-    def update_archiving_attribute(self, attribute_name: str, polling_period: int, archive_period: int, strategy: str = 'RUN'):
+    def update_archiving_attribute(
+        self,
+        attribute_name: str,
+        polling_period: int,
+        archive_period: int,
+        strategy: str = "RUN",
+    ):
         """
         Update the archiving properties of an attribute already in a subscriber list
         """
         attribute_name = attribute_fqdn(attribute_name)
         self.remove_attribute_from_archiver(attribute_name)
-        time.sleep(3.)
-        self.add_attribute_to_archiver(attribute_name,polling_period,archive_period,strategy)
-        time.sleep(3.)
+        time.sleep(3.0)
+        self.add_attribute_to_archiver(
+            attribute_name, polling_period, archive_period, strategy
+        )
+        time.sleep(3.0)
         self.start_archiving_attribute(attribute_name)
         logger.info(f"Attribute {attribute_name} successfully updated!")
 
-    def get_subscriber_attributes(self, es_name:str = None):
+    def get_subscriber_attributes(self, es_name: str = None):
         """
         Return the list of attributes managed by the event subscribers
         """
@@ -366,13 +481,13 @@ class Archiver():
         if es_name is not None:
             es_list = [es_name]
         else:
-            es_list = self.get_subscribers()      
+            es_list = self.get_subscribers()
         for es_name in es_list:
             es = DeviceProxy(es_name)
             attrs.extend(list(es.AttributeList or []))
         return attrs
 
-    def get_subscriber_errors(self, es_name:str = None):
+    def get_subscriber_errors(self, es_name: str = None):
         """
         Return a dictionary of the attributes currently in error, defined as AttributeName -> AttributeError
         """
@@ -384,9 +499,9 @@ class Archiver():
             es = DeviceProxy(es_name)
             attrs.extend(list(es.AttributeList or []))
             errs.extend(list(es.AttributeErrorList or []))
-        return {a: e for a,e in zip(attrs,errs) if e}
+        return {a: e for a, e in zip(attrs, errs) if e}
 
-    def get_attribute_errors(self,attribute_name:str):
+    def get_attribute_errors(self, attribute_name: str):
         """
         Return the error related to the attribute
         """
@@ -397,18 +512,18 @@ class Archiver():
                 return errs_dict[e]
         return None
 
-    def get_subscriber_load(self,use_freq:bool=True, es_name:str = None):
+    def get_subscriber_load(self, use_freq: bool = True, es_name: str = None):
         """
         Return the estimated load of an archiver, in frequency of records or number
         of attributes
         """
         es = DeviceProxy(es_name or self.get_next_subscriber())
         if use_freq:
-            return str(es.AttributeRecordFreq)+(' events/period' )
+            return str(es.AttributeRecordFreq) + (" events/period")
         else:
             return len(es.AttributeList or [])
 
-    def get_started_attributes(self, regex:str = '/*', es_name:str = None):
+    def get_started_attributes(self, regex: str = "/*", es_name: str = None):
         """
         Return a list of the attributes that are being currently archived
         """
@@ -417,7 +532,7 @@ class Archiver():
         pattern = re.compile(regex)
         return [a for a in attribute_list if pattern.search(a)]
 
-    def get_attribute_subscriber(self, attribute_name:str):
+    def get_attribute_subscriber(self, attribute_name: str):
         """
         Given an attribute name, return the event subscriber associated with it
         """
@@ -425,75 +540,91 @@ class Archiver():
         # Check if attribute is archived
         if self.is_attribute_archived(attribute_name):
             # If the ConfManager manages more than one subscriber
-            if len(self.get_subscribers())>1:
+            if len(self.get_subscribers()) > 1:
                 for es_name in self.get_subscribers():
                     # Search the attribute in the subscriber list
                     for a in list(DeviceProxy(es_name).AttributeList or []):
                         if attribute_name.lower() == a:
-                            return es_name                    
+                            return es_name
             else:
                 return self.get_next_subscriber()
         else:
             logger.warning(f"Attribute {attribute_name} not found!")
 
-    def get_attribute_freq(self, attribute_name:str):
+    def get_attribute_freq(self, attribute_name: str):
         """
-        Return the attribute archiving frequency in events/minute 
+        Return the attribute archiving frequency in events/minute
         """
         attribute_name = attribute_fqdn(attribute_name)
         if self.is_attribute_archived(attribute_name):
             es = DeviceProxy(self.get_attribute_subscriber(attribute_name))
-            freq_dict = {a: r for a,r in zip(es.AttributeList,es.AttributeRecordFreqList)}
+            freq_dict = {
+                a: r for a, r in zip(es.AttributeList, es.AttributeRecordFreqList)
+            }
             for f in freq_dict:
                 if attribute_name.lower() == f:
                     return freq_dict[f]
         else:
             logger.warning(f"Attribute {attribute_name} not found!")
 
-    def get_attribute_failures(self, attribute_name:str):
+    def get_attribute_failures(self, attribute_name: str):
         """
-        Return the attribute failure archiving frequency in events/minute 
+        Return the frequency of archiving failures for the attribute, in events/minute
         """
         attribute_name = attribute_fqdn(attribute_name)
         if self.is_attribute_archived(attribute_name):
             es = DeviceProxy(self.get_attribute_subscriber(attribute_name))
-            fail_dict = {a: r for a,r in zip(es.AttributeList,es.AttributeFailureFreqList)}
+            fail_dict = {
+                a: r for a, r in zip(es.AttributeList, es.AttributeFailureFreqList)
+            }
             for f in fail_dict:
                 if attribute_name.lower() == f:
                     return fail_dict[f]
         else:
             logger.warning(f"Attribute {attribute_name} not found!")
 
-    def get_maximum_device_load(self, device_name:str):
-        """ Compute maximum archiving load (bytes/second) based on device configuration """
+    def get_maximum_device_load(self, device_name: str):
+        """Compute maximum archiving load (bytes/second) based on device configuration"""
         load_list = []
         # Get the list of started attributes (truncated in order to match AttributeInfo names)
-        attributes_started = [str(a).split('/')[-1] for a in self.get_started_attributes(regex=device_name.lower())]
+        attributes_started = [
+            str(a).split("/")[-1]
+            for a in self.get_started_attributes(regex=device_name.lower())
+        ]
         # Get the list of attributes info
         attributes_info = DeviceProxy(device_name).attribute_list_query()
         # Filter the archived attributes
         for attribute_info in attributes_info:
             if attribute_info.name.lower() in attributes_started:
-                attr_dict = {'attribute': attribute_info.name.lower(), 
-                            'polling_period': AttributeProxy(device_name+'/'+attribute_info.name).get_poll_period(),
-                            'data_type': attribute_info.data_type,
-                            'dim_x': attribute_info.max_dim_x, 
-                            'dim_y': attribute_info.max_dim_y}
+                attr_dict = {
+                    "attribute": attribute_info.name.lower(),
+                    "polling_period": AttributeProxy(
+                        device_name + "/" + attribute_info.name
+                    ).get_poll_period(),
+                    "data_type": attribute_info.data_type,
+                    "dim_x": attribute_info.max_dim_x,
+                    "dim_y": attribute_info.max_dim_y,
+                }
                 load_list.append(attr_dict)
         # Compute the total load
         polling_load = 0
         for a in load_list:
-            polling_period = a['polling_period']/1000        # in seconds
-            n_bytes = get_size_from_datatype(a['data_type'])
-            x = int(a['dim_x']) or 1
-            y = int(a['dim_y']) or 1
-            polling_load = polling_load + ( (n_bytes * (x * y) ) / polling_period )
+            polling_period = a["polling_period"] / 1000  # in seconds
+            n_bytes = get_size_from_datatype(a["data_type"])
+            x = int(a["dim_x"]) or 1
+            y = int(a["dim_y"]) or 1
+            polling_load = polling_load + ((n_bytes * (x * y)) / polling_period)
         return polling_load
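+    # Worked example of the load formula above (illustrative numbers, assuming
+    # get_size_from_datatype() reports 8 bytes for a double): a 192 x 1 double
+    # attribute polled every 1000 ms adds 8 * 192 / 1.0 = 1536 bytes/second.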
 
+
 class AttributeFormatException(Exception):
     """
     Exception that handles wrong attribute naming
     """
-    def __init__(self, message="Wrong Tango attribute format! Try: domain/family/member/attribute (e.g. STAT/RECV/1/temperature)"):
+
+    def __init__(
+        self,
+        message="Wrong Tango attribute format! Try: domain/family/member/attribute (e.g. STAT/RECV/1/temperature)",
+    ):
         self.message = message
         super().__init__(self.message)
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_ts.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_ts.py
index 4ff60af76115cc3128d9efe955d89356d97ae8ae..3586268aad634375381206a12e3b5b737e12d50c 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_ts.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_ts.py
@@ -1,25 +1,31 @@
 #! /usr/bin/env python3
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from sqlalchemy.dialects.postgresql import ARRAY,TIMESTAMP,FLOAT, JSON
+from typing import List
+
+import numpy
+from sqlalchemy import Column, Integer, String
+from sqlalchemy.dialects.postgresql import ARRAY, TIMESTAMP, FLOAT, JSON
 from sqlalchemy.dialects.postgresql.base import BYTEA
 from sqlalchemy.dialects.postgresql.ranges import INT4RANGE, INT8RANGE
-from sqlalchemy.sql.sqltypes import INTEGER, TEXT, Boolean
 from sqlalchemy.orm import declarative_base
-from sqlalchemy import Column, Integer, String
-from typing import List
-import numpy
+from sqlalchemy.sql.sqltypes import INTEGER, TEXT, Boolean
 
 # Declarative system used to define classes mapped to relational DB tables
 Base = declarative_base()
 
+
 # ----------------- LOFAR VIEWS ----------------- #
 
+
 class Lofar_Scalar_Attribute(Base):
     """
-    Abstract Class that represents a Lofar customized Tango Attribute view 
+    Abstract class that represents a LOFAR-customized scalar Tango attribute view
     """
-    __abstract__ = True 
-    __table_args__ = {'extend_existing': True}
+
+    __abstract__ = True
+    __table_args__ = {"extend_existing": True}
 
     data_time = Column(TIMESTAMP, primary_key=True)
     device = Column(String, primary_key=True)
@@ -28,68 +34,84 @@ class Lofar_Scalar_Attribute(Base):
     def __repr__(self):
         return f"<Attribute(device='{self.device}', name='{self.name}', data_time='{self.data_time}',value='{self.value}'>"
 
+
 class Lofar_Scalar_Boolean(Lofar_Scalar_Attribute):
-    __tablename__ = 'lofar_scalar_boolean'
+    __tablename__ = "lofar_scalar_boolean"
     value = Column(Boolean)
 
+
 class Lofar_Scalar_Double(Lofar_Scalar_Attribute):
-    __tablename__ = 'lofar_scalar_double'
+    __tablename__ = "lofar_scalar_double"
     value = Column(FLOAT)
 
+
 class Lofar_Scalar_Encoded(Lofar_Scalar_Attribute):
-    __tablename__ = 'lofar_scalar_encoded'
+    __tablename__ = "lofar_scalar_encoded"
     value = Column(BYTEA)
 
+
 class Lofar_Scalar_Enum(Lofar_Scalar_Attribute):
-    __tablename__ = 'lofar_scalar_enum'
+    __tablename__ = "lofar_scalar_enum"
     value = Column(INTEGER)
 
+
 class Lofar_Scalar_Float(Lofar_Scalar_Attribute):
-    __tablename__ = 'lofar_scalar_float'
+    __tablename__ = "lofar_scalar_float"
     value = Column(FLOAT)
 
+
 class Lofar_Scalar_Long(Lofar_Scalar_Attribute):
-    __tablename__ = 'lofar_scalar_long'
+    __tablename__ = "lofar_scalar_long"
     value = Column(INT4RANGE)
 
+
 class Lofar_Scalar_Long64(Lofar_Scalar_Attribute):
-    __tablename__ = 'lofar_scalar_long64'
+    __tablename__ = "lofar_scalar_long64"
     value = Column(INT8RANGE)
 
+
 class Lofar_Scalar_Short(Lofar_Scalar_Attribute):
-    __tablename__ = 'lofar_scalar_short'
+    __tablename__ = "lofar_scalar_short"
     value = Column(INTEGER)
 
+
 class Lofar_Scalar_State(Lofar_Scalar_Attribute):
-    __tablename__ = 'lofar_scalar_state'
+    __tablename__ = "lofar_scalar_state"
     value = Column(INTEGER)
 
+
 class Lofar_Scalar_String(Lofar_Scalar_Attribute):
-    __tablename__ = 'lofar_scalar_string'
+    __tablename__ = "lofar_scalar_string"
     value = Column(TEXT)
 
+
 class Lofar_Scalar_Uchar(Lofar_Scalar_Attribute):
-    __tablename__ = 'lofar_scalar_uchar'
+    __tablename__ = "lofar_scalar_uchar"
     value = Column(INTEGER)
 
+
 class Lofar_Scalar_Ulong(Lofar_Scalar_Attribute):
-    __tablename__ = 'lofar_scalar_ulong'
+    __tablename__ = "lofar_scalar_ulong"
     value = Column(INTEGER)
 
+
 class Lofar_Scalar_Ulong64(Lofar_Scalar_Attribute):
-    __tablename__ = 'lofar_scalar_ulong64'
+    __tablename__ = "lofar_scalar_ulong64"
     value = Column(INTEGER)
 
+
 class Lofar_Scalar_Ushort(Lofar_Scalar_Attribute):
-    __tablename__ = 'lofar_scalar_ushort'
+    __tablename__ = "lofar_scalar_ushort"
     value = Column(INTEGER)
 
+
 class Lofar_Array_Attribute(Base):
     """
-    Abstract Class that represents a Lofar customized Tango Attribute view 
+    Abstract class that represents a LOFAR-customized array Tango attribute view
     """
-    __abstract__ = True 
-    __table_args__ = {'extend_existing': True}
+
+    __abstract__ = True
+    __table_args__ = {"extend_existing": True}
 
     data_time = Column(TIMESTAMP, primary_key=True)
     device = Column(String, primary_key=True)
@@ -99,68 +121,84 @@ class Lofar_Array_Attribute(Base):
     def __repr__(self):
         return f"<Attribute(device='{self.device}', name='{self.name}', data_time='{self.data_time}',index='{self.x}',value='{self.value}'>"
 
+
 class Lofar_Array_Boolean(Lofar_Array_Attribute):
-    __tablename__ = 'lofar_array_boolean'
+    __tablename__ = "lofar_array_boolean"
     value = Column(Boolean)
 
+
 class Lofar_Array_Double(Lofar_Array_Attribute):
-    __tablename__ = 'lofar_array_double'
+    __tablename__ = "lofar_array_double"
     value = Column(FLOAT)
 
+
 class Lofar_Array_Encoded(Lofar_Array_Attribute):
-    __tablename__ = 'lofar_array_encoded'
+    __tablename__ = "lofar_array_encoded"
     value = Column(BYTEA)
 
+
 class Lofar_Array_Enum(Lofar_Array_Attribute):
-    __tablename__ = 'lofar_array_enum'
+    __tablename__ = "lofar_array_enum"
     value = Column(INTEGER)
 
+
 class Lofar_Array_Float(Lofar_Array_Attribute):
-    __tablename__ = 'lofar_array_float'
+    __tablename__ = "lofar_array_float"
     value = Column(FLOAT)
 
+
 class Lofar_Array_Long(Lofar_Array_Attribute):
-    __tablename__ = 'lofar_array_long'
+    __tablename__ = "lofar_array_long"
     value = Column(INT4RANGE)
 
+
 class Lofar_Array_Long64(Lofar_Array_Attribute):
-    __tablename__ = 'lofar_array_long64'
+    __tablename__ = "lofar_array_long64"
     value = Column(INT8RANGE)
 
+
 class Lofar_Array_Short(Lofar_Array_Attribute):
-    __tablename__ = 'lofar_array_short'
+    __tablename__ = "lofar_array_short"
     value = Column(INTEGER)
 
+
 class Lofar_Array_State(Lofar_Array_Attribute):
-    __tablename__ = 'lofar_array_state'
+    __tablename__ = "lofar_array_state"
     value = Column(INTEGER)
 
+
 class Lofar_Array_String(Lofar_Array_Attribute):
-    __tablename__ = 'lofar_array_string'
+    __tablename__ = "lofar_array_string"
     value = Column(TEXT)
 
+
 class Lofar_Array_Uchar(Lofar_Array_Attribute):
-    __tablename__ = 'lofar_array_uchar'
+    __tablename__ = "lofar_array_uchar"
     value = Column(INTEGER)
 
+
 class Lofar_Array_Ulong(Lofar_Array_Attribute):
-    __tablename__ = 'lofar_array_ulong'
+    __tablename__ = "lofar_array_ulong"
     value = Column(INTEGER)
 
+
 class Lofar_Array_Ulong64(Lofar_Array_Attribute):
-    __tablename__ = 'lofar_array_ulong64'
+    __tablename__ = "lofar_array_ulong64"
     value = Column(INTEGER)
 
+
 class Lofar_Array_Ushort(Lofar_Array_Attribute):
-    __tablename__ = 'lofar_array_ushort'
+    __tablename__ = "lofar_array_ushort"
     value = Column(INTEGER)
 
+
 class Lofar_Image_Attribute(Base):
     """
-    Abstract Class that represents a Lofar customized Tango Attribute view 
+    Abstract class that represents a LOFAR-customized image Tango attribute view
     """
-    __abstract__ = True 
-    __table_args__ = {'extend_existing': True}
+
+    __abstract__ = True
+    __table_args__ = {"extend_existing": True}
 
     data_time = Column(TIMESTAMP, primary_key=True)
     device = Column(String, primary_key=True)
@@ -171,70 +209,87 @@ class Lofar_Image_Attribute(Base):
     def __repr__(self):
         return f"<Attribute(device='{self.device}', name='{self.name}', data_time='{self.data_time}',index_x='{self.x}',index_y='{self.y}',value='{self.value}'>"
 
+
 class Lofar_Image_Boolean(Lofar_Image_Attribute):
-    __tablename__ = 'lofar_image_boolean'
+    __tablename__ = "lofar_image_boolean"
     value = Column(Boolean)
 
+
 class Lofar_Image_Double(Lofar_Image_Attribute):
-    __tablename__ = 'lofar_image_double'
+    __tablename__ = "lofar_image_double"
     value = Column(FLOAT)
 
+
 class Lofar_Image_Encoded(Lofar_Image_Attribute):
-    __tablename__ = 'lofar_image_encoded'
+    __tablename__ = "lofar_image_encoded"
     value = Column(BYTEA)
 
+
 class Lofar_Image_Enum(Lofar_Image_Attribute):
-    __tablename__ = 'lofar_image_enum'
+    __tablename__ = "lofar_image_enum"
     value = Column(INTEGER)
 
+
 class Lofar_Image_Float(Lofar_Image_Attribute):
-    __tablename__ = 'lofar_image_float'
+    __tablename__ = "lofar_image_float"
     value = Column(FLOAT)
 
+
 class Lofar_Image_Long(Lofar_Image_Attribute):
-    __tablename__ = 'lofar_image_long'
+    __tablename__ = "lofar_image_long"
     value = Column(INT4RANGE)
 
+
 class Lofar_Image_Long64(Lofar_Image_Attribute):
-    __tablename__ = 'lofar_image_long64'
+    __tablename__ = "lofar_image_long64"
     value = Column(INT8RANGE)
 
+
 class Lofar_Image_Short(Lofar_Image_Attribute):
-    __tablename__ = 'lofar_image_short'
+    __tablename__ = "lofar_image_short"
     value = Column(INTEGER)
 
+
 class Lofar_Image_State(Lofar_Image_Attribute):
-    __tablename__ = 'lofar_image_state'
+    __tablename__ = "lofar_image_state"
     value = Column(INTEGER)
 
+
 class Lofar_Image_String(Lofar_Image_Attribute):
-    __tablename__ = 'lofar_image_string'
+    __tablename__ = "lofar_image_string"
     value = Column(TEXT)
 
+
 class Lofar_Image_Uchar(Lofar_Image_Attribute):
-    __tablename__ = 'lofar_image_uchar'
+    __tablename__ = "lofar_image_uchar"
     value = Column(INTEGER)
 
+
 class Lofar_Image_Ulong(Lofar_Image_Attribute):
-    __tablename__ = 'lofar_image_ulong'
+    __tablename__ = "lofar_image_ulong"
     value = Column(INTEGER)
 
+
 class Lofar_Image_Ulong64(Lofar_Image_Attribute):
-    __tablename__ = 'lofar_image_ulong64'
+    __tablename__ = "lofar_image_ulong64"
     value = Column(INTEGER)
 
+
 class Lofar_Image_Ushort(Lofar_Image_Attribute):
-    __tablename__ = 'lofar_image_ushort'
+    __tablename__ = "lofar_image_ushort"
     value = Column(INTEGER)
 
+
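+# Illustrative query against one of the view classes above (sketch only; `session` is
+# assumed to be a SQLAlchemy Session bound to the archive database, and the attribute
+# name is an example):
+#   recent = (
+#       session.query(Lofar_Scalar_Double)
+#       .filter(Lofar_Scalar_Double.name == "temperature")
+#       .order_by(Lofar_Scalar_Double.data_time.desc())
+#       .limit(10)
+#       .all()
+#   )
+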
 # ----------------- ----------------- ----------------- #
 
+
 class Attribute(Base):
     """
     Class that represents a Tango Attribute mapped to table 'att_conf'
     """
-    __tablename__ = 'att_conf'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_conf"
+    __table_args__ = {"extend_existing": True}
 
     att_conf_id = Column(Integer, primary_key=True)
     att_name = Column(String)
@@ -251,12 +306,14 @@ class Attribute(Base):
     def __repr__(self):
         return f"<Attribute(fullname='{self.att_name}',data_type ='{self.att_conf_type_id}',format='{self.att_conf_format_id}',table_name='{self.table_name}',cs_name ='{self.cs_name}',domain ='{self.domain}',family ='{self.family}',member ='{self.member}',name ='{self.name}'),ttl='{self.ttl}'>"
 
+
 class DataType(Base):
     """
     Class that represents a Tango Data Type mapped to table 'att_conf_type'
     """
-    __tablename__ = 'att_conf_type'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_conf_type"
+    __table_args__ = {"extend_existing": True}
 
     att_conf_type_id = Column(Integer, primary_key=True)
     type = Column(String)
@@ -264,12 +321,14 @@ class DataType(Base):
     def __repr__(self):
         return f"<DataType(type='{self.type}')>"
 
+
 class Format(Base):
     """
     Class that represents a Tango Format mapped to table 'att_conf_format'
     """
-    __tablename__ = 'att_conf_format'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_conf_format"
+    __table_args__ = {"extend_existing": True}
 
     att_conf_format_id = Column(Integer, primary_key=True)
     format = Column(String)
@@ -278,15 +337,17 @@ class Format(Base):
     def __repr__(self):
         return f"<Format(format='{self.format}', format_num='{self.format_num}')>"
 
+
 class Scalar(Base):
     """
     Abstract class that represents Super-class of Scalar mapper classes
     """
-    # In the concrete inheritance use case, it is common that the base class is not represented 
+
+    # In the concrete inheritance use case, it is common that the base class is not represented
     # within the database, only the subclasses. In other words, the base class is abstract.
-    __abstract__ = True 
+    __abstract__ = True
 
-    # Primary key is not defined for tables which store values, but SQLAlchemy requires a mandatory 
+    # Primary key is not defined for tables which store values, but SQLAlchemy requires a mandatory
     # primary key definition. This definition exists only on the Python side and does not
     # affect the DBMS schema.
     att_conf_id = Column(Integer, primary_key=True)
@@ -295,48 +356,56 @@ class Scalar(Base):
     att_error_desc_id = Column(Integer)
     details = Column(JSON)
 
+
 class Scalar_Boolean(Scalar):
     """
     Class that represents a Tango Boolean mapped to table 'att_scalar_devboolean'
     """
-    __tablename__ = 'att_scalar_devboolean'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_scalar_devboolean"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(Boolean)
     value_w = Column(Boolean)
 
     def __repr__(self):
         return f"<Scalar_Boolean(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Scalar_Double(Scalar):
     """
     Class that represents a Tango Double mapped to table 'att_scalar_devdouble'
     """
-    __tablename__ = 'att_scalar_devdouble'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_scalar_devdouble"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(FLOAT)
     value_w = Column(FLOAT)
 
     def __repr__(self):
         return f"<Scalar_Double(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Scalar_Encoded(Scalar):
     """
     Class that represents a Tango Encoded mapped to table 'att_scalar_devencoded'
     """
-    __tablename__ = 'att_scalar_devencoded'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_scalar_devencoded"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(BYTEA)
     value_w = Column(BYTEA)
 
     def __repr__(self):
         return f"<Scalar_Encoded(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Scalar_Enum(Scalar):
     """
     Class that represents a Tango Enum mapped to table 'att_scalar_devenum'
     """
-    __tablename__ = 'att_scalar_devenum'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_scalar_devenum"
+    __table_args__ = {"extend_existing": True}
     value_r_label = Column(TEXT)
     value_r = Column(INTEGER)
     value_w_label = Column(TEXT)
@@ -345,132 +414,154 @@ class Scalar_Enum(Scalar):
     def __repr__(self):
         return f"<Scalar_Enum(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r_label='{self.value_r_label}',value_r='{self.value_r}',value_w_label='{self.value_w_label}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Scalar_Float(Scalar):
     """
     Class that represents a Tango Float mapped to table 'att_scalar_devfloat'
     """
-    __tablename__ = 'att_scalar_devfloat'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_scalar_devfloat"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(FLOAT)
     value_w = Column(FLOAT)
 
     def __repr__(self):
         return f"<Scalar_Float(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Scalar_Long(Scalar):
     """
     Class that represents a Tango Long mapped to table 'att_scalar_devlong'
     """
-    __tablename__ = 'att_scalar_devlong'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_scalar_devlong"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(INT4RANGE)
     value_w = Column(INT4RANGE)
 
     def __repr__(self):
         return f"<Scalar_Long(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Scalar_Long64(Scalar):
     """
     Class that represents a Tango Long64 mapped to table 'att_scalar_devlong64'
     """
-    __tablename__ = 'att_scalar_devlong64'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_scalar_devlong64"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(INT8RANGE)
     value_w = Column(INT8RANGE)
 
     def __repr__(self):
         return f"<Scalar_Long64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Scalar_Short(Scalar):
     """
     Class that represents a Tango Short mapped to table 'att_scalar_devshort'
     """
-    __tablename__ = 'att_scalar_devshort'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_scalar_devshort"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(INTEGER)
     value_w = Column(INTEGER)
 
     def __repr__(self):
         return f"<Scalar_Short(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Scalar_State(Scalar):
     """
     Class that represents a Tango State mapped to table 'att_scalar_devstate'
     """
-    __tablename__ = 'att_scalar_devstate'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_scalar_devstate"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(INTEGER)
     value_w = Column(INTEGER)
 
     def __repr__(self):
         return f"<Scalar_State(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Scalar_String(Scalar):
     """
     Class that represents a Tango String mapped to table 'att_scalar_devstring'
     """
-    __tablename__ = 'att_scalar_devstring'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_scalar_devstring"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(TEXT)
     value_w = Column(TEXT)
 
     def __repr__(self):
         return f"<Scalar_String(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Scalar_UChar(Scalar):
     """
     Class that represents a Tango UChar mapped to table 'att_scalar_devuchar'
     """
-    __tablename__ = 'att_scalar_devuchar'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_scalar_devuchar"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(INTEGER)
     value_w = Column(INTEGER)
 
     def __repr__(self):
         return f"<Scalar_UChar(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Scalar_ULong(Scalar):
     """
     Class that represents a Tango ULong mapped to table 'att_scalar_devulong'
     """
-    __tablename__ = 'att_scalar_devulong'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_scalar_devulong"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(INTEGER)
     value_w = Column(INTEGER)
 
     def __repr__(self):
         return f"<Scalar_ULong(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Scalar_ULong64(Scalar):
     """
     Class that represents a Tango ULong64 mapped to table 'att_scalar_devulong64'
     """
-    __tablename__ = 'att_scalar_devulong64'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_scalar_devulong64"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(INTEGER)
     value_w = Column(INTEGER)
 
     def __repr__(self):
         return f"<Scalar_ULong64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Scalar_UShort(Scalar):
     """
     Class that represents a Tango UShort mapped to table 'att_scalar_devushort'
     """
-    __tablename__ = 'att_scalar_devushort'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_scalar_devushort"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(INTEGER)
     value_w = Column(INTEGER)
 
     def __repr__(self):
         return f"<Scalar_UShort(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Array(Base):
     """
     Abstract class that represents Super-class of Array mapper classes
     """
+
     __abstract__ = True
-    # Primary key is not defined for tables which store values, but SQLAlchemy requires a mandatory 
+    # Primary key is not defined for tables which store values, but SQLAlchemy requires a mandatory
     # primary key definition. This definition exists only on the Python side and does not
     # affect the DBMS schema.
     att_conf_id = Column(Integer, primary_key=True)
@@ -479,24 +570,28 @@ class Array(Base):
     att_error_desc_id = Column(Integer)
     details = Column(JSON)
 
+
 class Array_Boolean(Array):
     """
     Class that represents a Tango Boolean Array mapped to table 'att_array_devboolean'
     """
-    __tablename__ = 'att_array_devboolean'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_array_devboolean"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(Boolean))
     value_w = Column(ARRAY(Boolean))
 
     def __repr__(self):
         return f"<Array_Boolean(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Image_Boolean(Array):
     """
     Class that represents a Tango Boolean Image mapped to table 'att_image_devboolean'
     """
-    __tablename__ = 'att_image_devboolean'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_image_devboolean"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(Boolean))
     value_w = Column(ARRAY(Boolean))
 
@@ -508,56 +603,65 @@ class Array_Double(Array):
     """
     Class that represents a Tango Double Array mapped to table 'att_array_devdouble'
     """
-    __tablename__ = 'att_array_devdouble'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_array_devdouble"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(FLOAT))
     value_w = Column(ARRAY(FLOAT))
 
     def __repr__(self):
         return f"<Array_Double(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Image_Double(Array):
     """
     Class that represents a Tango Double Image mapped to table 'att_image_devdouble'
     """
-    __tablename__ = 'att_image_devdouble'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_image_devdouble"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(FLOAT))
     value_w = Column(ARRAY(FLOAT))
 
     def __repr__(self):
         return f"<Image_Double(att_conf_id='{self.att_conf_id}', data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Array_Encoded(Array):
     """
     Class that represents a Tango Encoded Array mapped to table 'att_array_devencoded'
     """
-    __tablename__ = 'att_array_devencoded'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_array_devencoded"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(BYTEA))
     value_w = Column(ARRAY(BYTEA))
 
     def __repr__(self):
         return f"<Array_Encoded(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Image_Encoded(Array):
     """
     Class that represents a Tango Encoded Array mapped to table 'att_image_devencoded'
     """
-    __tablename__ = 'att_image_devencoded'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_image_devencoded"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(BYTEA))
     value_w = Column(ARRAY(BYTEA))
 
     def __repr__(self):
         return f"<Image_Encoded(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Array_Enum(Array):
     """
     Class that represents a Tango Enum Array mapped to table 'att_array_devenum'
     """
-    __tablename__ = 'att_array_devenum'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_array_devenum"
+    __table_args__ = {"extend_existing": True}
     value_r_label = Column(ARRAY(TEXT))
     value_r = Column(ARRAY(INTEGER))
     value_w_label = Column(ARRAY(TEXT))
@@ -566,12 +670,14 @@ class Array_Enum(Array):
     def __repr__(self):
         return f"<Array_Enum(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r_label='{self.value_r_label}',value_r='{self.value_r}',value_w_label='{self.value_w_label}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Image_Enum(Array):
     """
     Class that represents a Tango Enum Array mapped to table 'att_image_devenum'
     """
-    __tablename__ = 'att_image_devenum'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_image_devenum"
+    __table_args__ = {"extend_existing": True}
     value_r_label = Column(ARRAY(TEXT))
     value_r = Column(ARRAY(INTEGER))
     value_w_label = Column(ARRAY(TEXT))
@@ -580,302 +686,351 @@ class Image_Enum(Array):
     def __repr__(self):
         return f"<Image_Enum(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r_label='{self.value_r_label}',value_r='{self.value_r}',value_w_label='{self.value_w_label}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Array_Float(Array):
     """
     Class that represents a Tango Float Array mapped to table 'att_array_devfloat'
     """
-    __tablename__ = 'att_array_devfloat'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_array_devfloat"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(FLOAT))
     value_w = Column(ARRAY(FLOAT))
 
     def __repr__(self):
         return f"<Array_Float(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Image_Float(Array):
     """
     Class that represents a Tango Float Array mapped to table 'att_image_devfloat'
     """
-    __tablename__ = 'att_image_devfloat'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_image_devfloat"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(FLOAT))
     value_w = Column(ARRAY(FLOAT))
 
     def __repr__(self):
         return f"<Image_Float(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Array_Long(Array):
     """
     Class that represents a Tango Long Array mapped to table 'att_array_devlong'
     """
-    __tablename__ = 'att_array_devlong'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_array_devlong"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INT4RANGE))
     value_w = Column(ARRAY(INT4RANGE))
 
     def __repr__(self):
         return f"<Array_Long(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Image_Long(Array):
     """
     Class that represents a Tango Long Array mapped to table 'att_image_devlong'
     """
-    __tablename__ = 'att_image_devlong'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_image_devlong"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INT4RANGE))
     value_w = Column(ARRAY(INT4RANGE))
 
     def __repr__(self):
         return f"<Image_Long(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Array_Long64(Array):
     """
     Class that represents a Tango Long64 Array mapped to table 'att_array_devlong64'
     """
-    __tablename__ = 'att_array_devlong64'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_array_devlong64"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INT8RANGE))
     value_w = Column(ARRAY(INT8RANGE))
 
     def __repr__(self):
         return f"<Array_Long64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Image_Long64(Array):
     """
     Class that represents a Tango Long64 Array mapped to table 'att_image_devlong64'
     """
-    __tablename__ = 'att_image_devlong64'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_image_devlong64"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INT8RANGE))
     value_w = Column(ARRAY(INT8RANGE))
 
     def __repr__(self):
         return f"<Image_Long64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Array_Short(Array):
     """
     Class that represents a Tango Short Array mapped to table 'att_array_devshort'
     """
-    __tablename__ = 'att_array_devshort'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_array_devshort"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INTEGER))
     value_w = Column(ARRAY(INTEGER))
 
     def __repr__(self):
         return f"<Array_Short(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Image_Short(Array):
     """
     Class that represents a Tango Short Array mapped to table 'att_image_devshort'
     """
-    __tablename__ = 'att_image_devshort'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_image_devshort"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INTEGER))
     value_w = Column(ARRAY(INTEGER))
 
     def __repr__(self):
         return f"<Image_Short(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Array_State(Array):
     """
     Class that represents a Tango State Array mapped to table 'att_array_devstate'
     """
-    __tablename__ = 'att_array_devstate'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_array_devstate"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INT4RANGE))
     value_w = Column(ARRAY(INT4RANGE))
 
     def __repr__(self):
         return f"<Array_State(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Image_State(Array):
     """
     Class that represents a Tango State Array mapped to table 'att_image_devstate'
     """
-    __tablename__ = 'att_image_devstate'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_image_devstate"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INT4RANGE))
     value_w = Column(ARRAY(INT4RANGE))
 
     def __repr__(self):
         return f"<Image_State(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Array_String(Array):
     """
     Class that represents a Tango String Array mapped to table 'att_array_devstring'
     """
-    __tablename__ = 'att_array_devstring'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_array_devstring"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(TEXT))
     value_w = Column(ARRAY(TEXT))
 
     def __repr__(self):
         return f"<Array_String(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Image_String(Array):
     """
     Class that represents a Tango String Array mapped to table 'att_image_devstring'
     """
-    __tablename__ = 'att_image_devstring'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_image_devstring"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(TEXT))
     value_w = Column(ARRAY(TEXT))
 
     def __repr__(self):
         return f"<Image_String(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Array_UChar(Array):
     """
     Class that represents a Tango UChar Array mapped to table 'att_array_devuchar'
     """
-    __tablename__ = 'att_array_devuchar'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_array_devuchar"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INTEGER))
     value_w = Column(ARRAY(INTEGER))
 
     def __repr__(self):
         return f"<Array_UChar(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Image_UChar(Array):
     """
     Class that represents a Tango UChar Array mapped to table 'att_image_devuchar'
     """
-    __tablename__ = 'att_image_devuchar'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_image_devuchar"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INTEGER))
     value_w = Column(ARRAY(INTEGER))
 
     def __repr__(self):
         return f"<Image_UChar(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Array_ULong(Array):
     """
     Class that represents a Tango ULong Array mapped to table 'att_array_devulong'
     """
-    __tablename__ = 'att_array_devulong'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_array_devulong"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INTEGER))
     value_w = Column(ARRAY(INTEGER))
 
     def __repr__(self):
         return f"<Array_ULong(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Image_ULong(Array):
     """
     Class that represents a Tango ULong Array mapped to table 'att_image_devulong'
     """
-    __tablename__ = 'att_image_devulong'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_image_devulong"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INTEGER))
     value_w = Column(ARRAY(INTEGER))
 
     def __repr__(self):
         return f"<Image_ULong(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Array_ULong64(Array):
     """
     Class that represents a Tango ULong64 Array mapped to table 'att_array_devulong64'
     """
-    __tablename__ = 'att_array_devulong64'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_array_devulong64"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INTEGER))
     value_w = Column(ARRAY(INTEGER))
 
     def __repr__(self):
         return f"<Array_ULong64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Image_ULong64(Array):
     """
     Class that represents a Tango ULong64 Array mapped to table 'att_image_devulong64'
     """
-    __tablename__ = 'att_image_devulong64'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_image_devulong64"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INTEGER))
     value_w = Column(ARRAY(INTEGER))
 
     def __repr__(self):
         return f"<Image_ULong64(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Array_UShort(Array):
     """
     Class that represents a Tango UShort Array mapped to table 'att_array_devushort'
     """
-    __tablename__ = 'att_array_devushort'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_array_devushort"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INTEGER))
     value_w = Column(ARRAY(INTEGER))
 
     def __repr__(self):
         return f"<Array_UShort(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 class Image_UShort(Array):
     """
     Class that represents a Tango UShort Array mapped to table 'att_image_devushort'
     """
-    __tablename__ = 'att_image_devushort'
-    __table_args__ = {'extend_existing': True}
+
+    __tablename__ = "att_image_devushort"
+    __table_args__ = {"extend_existing": True}
     value_r = Column(ARRAY(INTEGER))
     value_w = Column(ARRAY(INTEGER))
 
     def __repr__(self):
         return f"<Image_UShort(att_conf_id='{self.att_conf_id}',data_time='{self.data_time}',value_r='{self.value_r}',value_w='{self.value_w}',quality='{self.quality}',att_error_desc_id='{self.att_error_desc_id}',details='{self.details}')>"
 
+
 def get_class_by_tablename(tablename: str):
     """
-    Returns class reference mapped to a table.    
+    Returns class reference mapped to a table.
     """
     for mapper in Base.registry.mappers:
         c = mapper.class_
         classname = c.__name__
-        if not classname.startswith('_'):
-            if hasattr(c, '__tablename__') and c.__tablename__ == tablename:
+        if not classname.startswith("_"):
+            if hasattr(c, "__tablename__") and c.__tablename__ == tablename:
                 return c
     return None
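+# For example (sketch): get_class_by_tablename("att_scalar_devdouble") is expected to
+# return the Scalar_Double mapper class defined above.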
 
+
 def get_viewclass_by_tablename(tablename: str):
     """
     Returns class reference mapped to a view table name.
     Example -> if parameter tablename is 'att_array_devdouble',
                the method retrieves from the string the format ('array') and datatype ('double')
-               and finds the class that matches the relative view table name ('lofar_array_double')     
+               and finds the class that matches the corresponding view table name ('lofar_array_double')
     """
-    format = tablename.split('_')[1].lower()
-    datatype = tablename.split('_')[2][3:].lower() # Remove 'dev' prefix
+    format = tablename.split("_")[1].lower()
+    datatype = tablename.split("_")[2][3:].lower()  # Remove 'dev' prefix
     for mapper in Base.registry.mappers:
         c = mapper.class_
         classname = c.__name__
-        if not classname.startswith('_'):
-            if hasattr(c, '__tablename__'): 
-                if format=='scalar' and c.__tablename__ == f"lofar_scalar_{datatype}":     
+        if not classname.startswith("_"):
+            if hasattr(c, "__tablename__"):
+                if format == "scalar" and c.__tablename__ == f"lofar_scalar_{datatype}":
                     return c
-                elif format=='array' and c.__tablename__ == f"lofar_array_{datatype}":              
+                elif format == "array" and c.__tablename__ == f"lofar_array_{datatype}":
                     return c
-                elif format=='image' and c.__tablename__ == f"lofar_image_{datatype}":
+                elif format == "image" and c.__tablename__ == f"lofar_image_{datatype}":
                     return c
     return None
 
+
 def build_array_from_record(rows: List[Array], dim_x: int):
     """
     Converts Array database items in Python lists
     """
     matrix = numpy.array([])
-    for i in range(0,dim_x):
-        x = numpy.array([item for item in rows if item.idx==i]) #group records by array index
-        if i==0:
-            matrix = numpy.append(matrix,x)    #append first row
+    for i in range(0, dim_x):
+        # group records by array index
+        x = numpy.array([item for item in rows if item.idx == i])
+        if i == 0:
+            matrix = numpy.append(matrix, x)  # append first row
         else:
-            matrix = numpy.vstack([matrix,x])  #stack vertically
-    result = numpy.transpose(matrix)   #transpose -> each row is a distinct array of value
+            matrix = numpy.vstack([matrix, x])  # stack vertically
+    # transpose -> each row is a distinct array of values
+    result = numpy.transpose(matrix)
     list_result = result.tolist()
     return list_result
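+# Illustrative shape (sketch, assuming every index has the same number of records):
+# with dim_x == 3 and two records per index, the stacked matrix is 3 x 2 and the
+# transposed result is a 2 x 3 list, i.e. one row per archived array value.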
 
+
 def get_values_from_record(data_matrix: List[Array]):
     """
     Returns a matrix of values from a matrix of Array records
     """
     array_matrix = numpy.matrix(data_matrix)
     value_matrix = numpy.empty(array_matrix.shape)
-    for index in range(array_matrix.size):    # for each object element
-        value_matrix.itemset(index,array_matrix.item(index).value_r) # extract the value from object and put in the matrix
+    for index in range(array_matrix.size):  # for each object element
+        # extract the value from the object and put it in the matrix
+        value_matrix.itemset(index, array_matrix.item(index).value_r)
     return value_matrix
-
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/__init__.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..68ddd5cdc3efaa38e853aef337c08beb99c50c4c 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/__init__.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/archiver_config/__init__.py
@@ -0,0 +1,2 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_configurator.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver_configurator.py
index 32528134d6f8435f960af9bb5233ad1615eb536a..87bcaf7a5c2f8c42302dd6407519562c443a74c2 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver_configurator.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/archiver_configurator.py
@@ -1,4 +1,6 @@
 #! /usr/bin/env python3
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """
 
@@ -7,101 +9,125 @@ Functions related to the managing of the archiver configuration JSON file
 """
 import logging
 import re
-from tango import Database
 
-from tangostationcontrol.toolkit.archiver_util import get_attributes_from_suffix, get_attributes_from_infix, retrieve_attributes_from_wildcards
+from tango import Database
+from tangostationcontrol.toolkit.archiver_util import (
+    get_attributes_from_suffix,
+    get_attributes_from_infix,
+    retrieve_attributes_from_wildcards,
+)
 
 logger = logging.getLogger()
 
-def _get_archiving_parameters(attribute:str):
+
+def _get_archiving_parameters(attribute: str):
     """Helper function that returns the following archiving parameters defined in a JSON file:
     archive period [ms], event period [ms], absolute change, relative change
     """
-    archive_period =    int(attribute['archive_period'])
-    event_period =      int(attribute['event_period']) 
-    abs_change =        attribute['abs_change'] and float(attribute['abs_change'])
-    rel_change =        attribute['rel_change'] and int(attribute['rel_change'])
+    archive_period = int(attribute["archive_period"])
+    event_period = int(attribute["event_period"])
+    abs_change = attribute["abs_change"] and float(attribute["abs_change"])
+    rel_change = attribute["rel_change"] and int(attribute["rel_change"])
     return archive_period, event_period, abs_change, rel_change
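+# Illustrative JSON entry consumed by the helper above (sketch only; the attribute
+# name and values are hypothetical):
+#   {"attribute": "temperature", "archive_period": "10000", "event_period": "1000",
+#    "abs_change": "0.5", "rel_change": null}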
 
-def get_parameters_from_attribute(device_name:str, attribute_name:str, config_dict:dict):
+
+def get_parameters_from_attribute(
+    device_name: str, attribute_name: str, config_dict: dict
+):
     """
     Return the archiving parameters (see '_get_archiving_parameters')
     defined in the configuration file for a given attribute
     """
     # Search if the attribute parameters are listed inside the device configuration
-    include_node = config_dict['devices'][device_name].get('include',[])
-    if include_node != ['/*']:
+    include_node = config_dict["devices"][device_name].get("include", [])
+    if include_node != ["/*"]:
         for a in include_node:
-            if attribute_name.lower() == a['attribute'].lower():
+            if attribute_name.lower() == a["attribute"].lower():
                 return _get_archiving_parameters(a)
     # Search if the archiving parameters are listed inside the global infixes attributes
-    infixes = config_dict['global']['infixes']
+    infixes = config_dict["global"]["infixes"]
     for a in infixes:
         # Match regular expression with attribute name
-        if re.compile(a['attribute'].lower()).search(attribute_name.lower()):
+        if re.compile(a["attribute"].lower()).search(attribute_name.lower()):
             return _get_archiving_parameters(a)
     # Search if the archiving parameters are listed inside the global suffixes attributes
-    suffixes = config_dict['global']['suffixes']
+    suffixes = config_dict["global"]["suffixes"]
     for a in suffixes:
-        if attribute_name.lower().endswith(a['attribute'].lower()):
+        if attribute_name.lower().endswith(a["attribute"].lower()):
             return _get_archiving_parameters(a)
     return None, None, None, None
 
-def get_include_attribute_list(device:str, config_dict:dict):
+
+def get_include_attribute_list(device: str, config_dict: dict):
     """
     Return the list of attributes that must be archived, as defined in the JSON configuration file
     """
-    suffixes = config_dict['global']['suffixes']
-    infixes = config_dict['global']['infixes']
+    suffixes = config_dict["global"]["suffixes"]
+    infixes = config_dict["global"]["infixes"]
     # Attributes to be included in the archiving strategy
     include_att_list = []
     # Add attributes with defined suffixes
-    include_att_list.extend(get_attributes_from_suffix(device,suffixes))
+    include_att_list.extend(get_attributes_from_suffix(device, suffixes))
     include_att_list.extend(get_attributes_from_infix(device, infixes))
     # Add attributes explicitly defined in JSON included list
-    include_node = config_dict['devices'][device].get('include',[])
-    if include_node != ['/*']:
+    include_node = config_dict["devices"][device].get("include", [])
+    if include_node != ["/*"]:
         for a in include_node:
-            include_att_list.append(a['attribute'])
+            include_att_list.append(a["attribute"])
     return [f"{device}/{a}".lower() for a in include_att_list]
 
-def get_exclude_attribute_list(device:str, config_dict:dict):
+
+def get_exclude_attribute_list(device: str, config_dict: dict):
     """
     Return the list of attributes that must not be archived from the JSON configuration file
     """
-    exclude_list = config_dict['devices'][device].get('exclude', [])  # may contain wildcards
-    exclude_att_list = retrieve_attributes_from_wildcards(device,exclude_list)
+    exclude_list = config_dict["devices"][device].get(
+        "exclude", []
+    )  # may contain wildcards
+    exclude_att_list = retrieve_attributes_from_wildcards(device, exclude_list)
     return exclude_att_list
 
-def get_global_env_parameters(config_dict:dict):
+
+def get_global_env_parameters(config_dict: dict):
     """Return the following archiving parameters defined in the 'global_variable' section of the JSON configuration file:
     polling time [ms], absolute change, relative change, archive period [ms], event period [ms] and strategy
     """
-    var_dict = config_dict['global']
+    var_dict = config_dict["global"]
     # Archiving parameters retrieved from JSON file
-    polling_time = int(var_dict['polling_time'])
-    archive_abs_change = var_dict['archive_abs_change'] and int(var_dict['archive_abs_change'])
-    archive_rel_change = var_dict['archive_rel_change'] and int(var_dict['archive_rel_change'])
-    archive_period = int(var_dict['archive_period'])
-    event_period = int(var_dict['event_period'])
-    strategy = var_dict['strategy']
-    return polling_time, archive_abs_change, archive_rel_change, archive_period, event_period, strategy
-
-def get_multimember_devices(env_dict:dict):
+    polling_time = int(var_dict["polling_time"])
+    archive_abs_change = var_dict["archive_abs_change"] and int(
+        var_dict["archive_abs_change"]
+    )
+    archive_rel_change = var_dict["archive_rel_change"] and int(
+        var_dict["archive_rel_change"]
+    )
+    archive_period = int(var_dict["archive_period"])
+    event_period = int(var_dict["event_period"])
+    strategy = var_dict["strategy"]
+    return (
+        polling_time,
+        archive_abs_change,
+        archive_rel_change,
+        archive_period,
+        event_period,
+        strategy,
+    )
+
+
+def get_multimember_devices(env_dict: dict):
     """Given a regular expression, return multi-member device configuration if they are stored in TangoDB"""
     # Get a Tango DB reference
     tangodb = Database()
     # Scan configuration dictionary for possible multi-member devices
     matched_devices_dict = {}
     for device in env_dict:
-        # Search for asterisk in device names 
-        if re.match('.*/.*/[*]', device):
+        # Search for asterisk in device names
+        if re.match(".*/.*/[*]", device):
             # Return a list of strings with the members of the matched device name (e.g. ['1','2'])
-            members = tangodb.get_device_member(device)         
+            members = tangodb.get_device_member(device)
             # Add to matched devices list the device name in the form 'domain/family/member'
             retrieved_devices = [f"{device[:-1]}{m}" for m in members]
             # Append device-names and relative configuration to dictionary
             for d in retrieved_devices:
                 matched_devices_dict[d] = env_dict[device]
     return matched_devices_dict
-
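For reference, a minimal sketch of the configuration dictionary these helpers traverse. All device names, attribute names, and parameter values below (including the "RUN" strategy) are illustrative assumptions rather than an actual station configuration; only the key layout follows the lookups in the code above.

    config_dict = {
        "devices": {
            "STAT/Device/1": {
                # per-attribute settings read by get_parameters_from_attribute()
                # and get_include_attribute_list()
                "include": [
                    {
                        "attribute": "Attribute",
                        "archive_period": 10000,  # [ms]
                        "event_period": 1000,     # [ms]
                        "abs_change": 0.5,
                        "rel_change": None,
                    }
                ],
                # may contain wildcards, expanded by get_exclude_attribute_list()
                "exclude": ["*_RW"],
            }
        },
        "global": {
            # defaults read by get_global_env_parameters()
            "polling_time": 1000,     # [ms]
            "archive_abs_change": 1,
            "archive_rel_change": None,
            "archive_period": 10000,  # [ms]
            "event_period": 1000,     # [ms]
            "strategy": "RUN",
            # regex matches applied to every device's attribute list
            "infixes": [
                {"attribute": "_error_", "archive_period": 10000,
                 "event_period": 1000, "abs_change": 1, "rel_change": None}
            ],
            # suffix matches applied to every device's attribute list
            "suffixes": [
                {"attribute": "_R", "archive_period": 10000,
                 "event_period": 1000, "abs_change": 1, "rel_change": None}
            ],
        },
    }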
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_util.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver_util.py
index c859fab9dbbcef55b7e9b2366c9de4ec7d692ab8..3209ed88c99ca5d81d5f8d31ba057647c63924b4 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver_util.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/archiver_util.py
@@ -1,12 +1,15 @@
 #! /usr/bin/env python3
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 """
    Utility functions for the Archiver functionality.
 """
 
-from tango import DeviceProxy, CmdArgType
-import re
 import os
+import re
+
+from tango import DeviceProxy, CmdArgType
 
 """
 A dictionary whose keys are the Tango datatypes mapping, and the values are the relative byte size
@@ -14,113 +17,166 @@ See reference https://tango-controls.readthedocs.io/en/latest/development/advanc
 and https://www.tutorialspoint.com/cplusplus/cpp_data_types.htm
 TODO: manage String attributes
 """
-DATATYPES_SIZE_DICT = {CmdArgType.DevBoolean:1, CmdArgType.DevShort:2, CmdArgType.DevLong:8, CmdArgType.DevFloat:4, CmdArgType.DevDouble:8, 
-    CmdArgType.DevUShort:2, CmdArgType.DevULong:8, CmdArgType.DevString:20, CmdArgType.DevVarCharArray:None, CmdArgType.DevVarShortArray:None, 
-    CmdArgType.DevVarLongArray: None,CmdArgType.DevVarFloatArray:None, CmdArgType.DevVarDoubleArray:None, CmdArgType.DevVarUShortArray:None,
-    CmdArgType.DevVarULongArray: None, CmdArgType.DevVarStringArray: None, CmdArgType.DevVarLongStringArray: None, CmdArgType.DevVarDoubleStringArray:None,
-    CmdArgType.DevState:3, CmdArgType.ConstDevString:None, CmdArgType.DevVarBooleanArray:None, CmdArgType.DevUChar:1, CmdArgType.DevLong64:8,
-    CmdArgType.DevULong64:8,CmdArgType.DevVarLong64Array:None,CmdArgType.DevVarULong64Array:None, CmdArgType.DevInt:4,CmdArgType.DevEncoded:None, 
-    CmdArgType.DevEnum:None, CmdArgType.DevPipeBlob:None}
+DATATYPES_SIZE_DICT = {
+    CmdArgType.DevBoolean: 1,
+    CmdArgType.DevShort: 2,
+    CmdArgType.DevLong: 8,
+    CmdArgType.DevFloat: 4,
+    CmdArgType.DevDouble: 8,
+    CmdArgType.DevUShort: 2,
+    CmdArgType.DevULong: 8,
+    CmdArgType.DevString: 20,
+    CmdArgType.DevVarCharArray: None,
+    CmdArgType.DevVarShortArray: None,
+    CmdArgType.DevVarLongArray: None,
+    CmdArgType.DevVarFloatArray: None,
+    CmdArgType.DevVarDoubleArray: None,
+    CmdArgType.DevVarUShortArray: None,
+    CmdArgType.DevVarULongArray: None,
+    CmdArgType.DevVarStringArray: None,
+    CmdArgType.DevVarLongStringArray: None,
+    CmdArgType.DevVarDoubleStringArray: None,
+    CmdArgType.DevState: 3,
+    CmdArgType.ConstDevString: None,
+    CmdArgType.DevVarBooleanArray: None,
+    CmdArgType.DevUChar: 1,
+    CmdArgType.DevLong64: 8,
+    CmdArgType.DevULong64: 8,
+    CmdArgType.DevVarLong64Array: None,
+    CmdArgType.DevVarULong64Array: None,
+    CmdArgType.DevInt: 4,
+    CmdArgType.DevEncoded: None,
+    CmdArgType.DevEnum: None,
+    CmdArgType.DevPipeBlob: None,
+}
 
 TANGO_HOST = os.environ.get("TANGO_HOST", None)
 
-def get_db_config(device_name:str) -> dict:
+
+def get_db_config(device_name: str) -> dict:
     """
     Retrieve the DB credentials from the Tango properties of Configuration Manager or EventSubscribers
     """
     device = DeviceProxy(device_name)
     # example LibConfiguration property value:
     # ['connect_string= user=postgres password=password host=archiver-timescale port=5432 dbname=hdb', 'host=archiver-timescale', 'libname=libhdb++timescale.so', 'dbname=hdb', 'port=5432', 'user=postgres', 'password=password']
-    config_strs = device.get_property('LibConfiguration')['LibConfiguration']
+    config_strs = device.get_property("LibConfiguration")["LibConfiguration"]
 
-    config = dict(config_str.split("=",1) for config_str in config_strs)
+    config = dict(config_str.split("=", 1) for config_str in config_strs)
     return config
 
-def get_attribute_from_fqdn(attribute_name:str):
+
+def get_attribute_from_fqdn(attribute_name: str):
     """
     For some operations Tango attribute must be transformed from the form 'tango://db:port/domain/family/name/attribute'
     to canonical 'domain/family/name/attribute'
     """
-    if attribute_name.startswith('tango://'):
-        return '/'.join(attribute_name.split('/')[3:])
+    if attribute_name.startswith("tango://"):
+        return "/".join(attribute_name.split("/")[3:])
 
-    if len(attribute_name.split('/')) != 4:
-        raise ValueError(f"Expected attribute of format 'domain/family/name/attribute', got {attribute_name}")
+    if len(attribute_name.split("/")) != 4:
+        raise ValueError(
+            f"Expected attribute of format 'domain/family/name/attribute', got {attribute_name}"
+        )
 
     return attribute_name
 
-def device_fqdn(device_name:str, tango_host:str = TANGO_HOST):
+
+def device_fqdn(device_name: str, tango_host: str = TANGO_HOST):
     """
     For some operations Tango devices must be transformed from the form 'domain/family/name'
     to 'tango://db:port/domain/family/name'
     """
-    if device_name.startswith('tango://'):
+    if device_name.startswith("tango://"):
         return device_name.lower()
 
-    if len(device_name.split('/')) != 3:
-        raise ValueError(f"Expected device name of format 'domain/family/name', got {device_name}")
+    if len(device_name.split("/")) != 3:
+        raise ValueError(
+            f"Expected device name of format 'domain/family/name', got {device_name}"
+        )
 
     return f"tango://{tango_host}/{device_name}".lower()
 
-def attribute_fqdn(attribute_name:str, tango_host:str = TANGO_HOST):
+
+def attribute_fqdn(attribute_name: str, tango_host: str = TANGO_HOST):
     """
     For some operations Tango devices must be transformed from the form 'domain/family/name/attribute'
     to 'tango://db:port/domain/family/name/attribute'
     """
-    if attribute_name.startswith('tango://'):
+    if attribute_name.startswith("tango://"):
         return attribute_name.lower()
 
-    if len(attribute_name.split('/')) != 4:
-        raise ValueError(f"Expected attribute name of format 'domain/family/name/attribute', got {attribute_name}")
+    if len(attribute_name.split("/")) != 4:
+        raise ValueError(
+            f"Expected attribute name of format 'domain/family/name/attribute', got {attribute_name}"
+        )
 
     return f"tango://{tango_host}/{attribute_name}".lower()
 
-def split_tango_name(tango_fqname:str, tango_type:str):
+
+def split_tango_name(tango_fqname: str, tango_type: str):
     """
     Helper function to split device or attribute Tango full qualified domain names
     into its components
     """
-    if tango_type.lower() == 'device':
+    if tango_type.lower() == "device":
         try:
-            domain, family, member = tango_fqname.split('/')
+            domain, family, member = tango_fqname.split("/")
             return domain, family, member
         except ValueError as e:
-            raise ValueError(f"Could not parse device name {tango_fqname}. Please provide FQDN, e.g. STAT/Device/1") from e
-    elif tango_type.lower() == 'attribute':
+            raise ValueError(
+                f"Could not parse device name {tango_fqname}. Please provide FQDN, e.g. STAT/Device/1"
+            ) from e
+    elif tango_type.lower() == "attribute":
         try:
-            domain, family, member, name = tango_fqname.split('/')
+            domain, family, member, name = tango_fqname.split("/")
             return domain, family, member, name
         except ValueError as e:
-            raise ValueError(f"Could not parse attribute name {tango_fqname}. Please provide FQDN, e.g. STAT/Device/1/Attribute") from e
+            raise ValueError(
+                f"Could not parse attribute name {tango_fqname}. Please provide FQDN, e.g. STAT/Device/1/Attribute"
+            ) from e
     else:
-        raise ValueError(f"Invalid value: {tango_type}. Please provide 'device' or 'attribute'.")
+        raise ValueError(
+            f"Invalid value: {tango_type}. Please provide 'device' or 'attribute'."
+        )
 
-def get_attributes_from_suffix(device_name:str, suffixes:list):
+
+def get_attributes_from_suffix(device_name: str, suffixes: list):
     """
-    Return a list of device attributes whose suffix is present in the input suffixes list 
+    Return a list of device attributes whose suffix is present in the input suffixes list
     """
     device = DeviceProxy(device_name)
     attribute_list = device.get_attribute_list()
     result = []
     for s in suffixes:
-        att_name = s['attribute']
+        att_name = s["attribute"]
         # Search suffix substring in the device attribute list
-        result.extend([a for a in attribute_list if a.lower().endswith(att_name.lower())])
+        result.extend(
+            [a for a in attribute_list if a.lower().endswith(att_name.lower())]
+        )
     return result
 
-def get_attributes_from_infix(device_name:str, infixes:list):
+
+def get_attributes_from_infix(device_name: str, infixes: list):
     """
-    Return a list of device attributes whose infix is present in the input infixes list 
+    Return a list of device attributes whose infix is present in the input infixes list
     """
     device = DeviceProxy(device_name)
     attribute_list = device.get_attribute_list()
     result = []
     for inf in infixes:
-        att_name = inf['attribute']
+        att_name = inf["attribute"]
         # Search infix substring in the device attribute list
-        result.extend([a for a in attribute_list if re.compile(att_name.lower()).search(a.lower())])
+        result.extend(
+            [
+                a
+                for a in attribute_list
+                if re.compile(att_name.lower()).search(a.lower())
+            ]
+        )
     return result
 
+
 def retrieve_attributes_from_wildcards(device_name: str, matching_list: list):
     """
     Return a list of device attributes based on given wildcards and/or attribute names
@@ -135,16 +191,18 @@ def retrieve_attributes_from_wildcards(device_name: str, matching_list: list):
                 matched_list.append(f"{device_name}/{a}".lower())
     return matched_list
 
-def get_size_from_datatype(datatype:int) -> int:
+
+def get_size_from_datatype(datatype: int) -> int:
     """
     Return the number of bytes for a given Tango datatype
     """
-    try :
+    try:
         return DATATYPES_SIZE_DICT[datatype]
     except KeyError:
         return 1
 
-def filter_attribute_list(device_name: str, exclude:list) -> list:
+
+def filter_attribute_list(device_name: str, exclude: list) -> list:
     """
     Filter out the attributes in exclude-list
     """
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/attribute_polling_stats.py b/tangostationcontrol/tangostationcontrol/toolkit/attribute_polling_stats.py
index c9ce452b647da579607bdf362fee00e735ee81de..d103a6329be314602b0d9e09f1ed016605af8863 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/attribute_polling_stats.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/attribute_polling_stats.py
@@ -1,13 +1,26 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
+from time import sleep
+
 import numpy
 import tango
 
-from time import sleep
 
-def attribute_polling_stats(dp: tango._tango.DeviceProxy = None, iterations: int = 10, polling_time: float = 1.0, quiet = False):
+def attribute_polling_stats(
+    dp: tango._tango.DeviceProxy = None,
+    iterations: int = 10,
+    polling_time: float = 1.0,
+    quiet=False,
+):
     if dp is not None:
-        print('Will sample the device server\'s polling time {} times with a pause of {}s between each sampling.'.format(iterations, polling_time))
+        print(
+            "Will sample the device server's polling time {} times with a pause of {}s between each sampling.".format(
+                iterations, polling_time
+            )
+        )
     else:
-        print('A DeviceProxy object is needed!')
+        print("A DeviceProxy object is needed!")
         return
     polling_durations = []
     polling_delays = []
@@ -15,23 +28,38 @@ def attribute_polling_stats(dp: tango._tango.DeviceProxy = None, iterations: int
     iterations_left = iterations
     while iterations_left > 0:
         iterations_left -= 1
-        string = dp.polling_status()[0].split('\n')
-        polling_duration = numpy.double(string[3].split('=')[-1].strip()) / 1e3
-        polling_delay = numpy.double(string[5].split('=')[-1].split(',')[0].strip()) / 1e3
+        string = dp.polling_status()[0].split("\n")
+        polling_duration = numpy.double(string[3].split("=")[-1].strip()) / 1e3
+        polling_delay = (
+            numpy.double(string[5].split("=")[-1].split(",")[0].strip()) / 1e3
+        )
         polling_durations.append(polling_duration)
         polling_delays.append(polling_delay)
         if not quiet:
-            print('Iteration #{}, {} iterations left, polling duration = {}s, polling delay = {}s.'.format(iterations - iterations_left, iterations_left, polling_duration, polling_delay))
+            print(
+                "Iteration #{}, {} iterations left, polling duration = {}s, polling delay = {}s.".format(
+                    iterations - iterations_left,
+                    iterations_left,
+                    polling_duration,
+                    polling_delay,
+                )
+            )
         sleep(polling_time)
     durations = numpy.array(polling_durations)
     delays = numpy.array(polling_delays)
+
     def compute_and_print(result):
         min = numpy.min(result)
         max = numpy.max(result)
         median = numpy.median(result)
         mean = numpy.mean(result)
         std = numpy.std(result)
-        print("\tmin = {}[s]\n\tmax = {}[s]\n\tmedian = {}[s]\n\tmean = {}[s]\n\tstddev = {}[s]".format(min, max, median, mean, std))
+        print(
+            "\tmin = {}[s]\n\tmax = {}[s]\n\tmedian = {}[s]\n\tmean = {}[s]\n\tstddev = {}[s]".format(
+                min, max, median, mean, std
+            )
+        )
+
     print("\n\titerations = {}\n\n\tPolling duration".format(iterations))
     compute_and_print(durations)
     print("\n\tPolling delay")
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/get_internal_attribute_history.py b/tangostationcontrol/tangostationcontrol/toolkit/get_internal_attribute_history.py
index 980f350703882f41f1ceec57de8bb7be681b7b26..234e4049f2ab5b8087bed4ca5679d2999f89f6bc 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/get_internal_attribute_history.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/get_internal_attribute_history.py
@@ -1,13 +1,22 @@
 #! /usr/bin/env python3
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from tango import DeviceProxy
 from numpy import array
+from tango import DeviceProxy
+
 
-def get_internal_attribute_history(device: DeviceProxy, attribute_name: str, depth: int = 10):
+def get_internal_attribute_history(
+    device: DeviceProxy, attribute_name: str, depth: int = 10
+):
     try:
-        history = array(device.attribute_history(attr_name = attribute_name, depth = depth))
+        history = array(device.attribute_history(attr_name=attribute_name, depth=depth))
     except Exception as e:
-        raise ValueError("Cannot access the internal history of the attribute '{}/{}'.".format(device.name(), attribute_name)) from e
+        raise ValueError(
+            "Cannot access the internal history of the attribute '{}/{}'.".format(
+                device.name(), attribute_name
+            )
+        ) from e
 
     history_values = array([entry.value for entry in history])
     values = history_values.transpose()
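A hypothetical usage sketch; the device and attribute names are examples, and the attribute must be polled so the device server keeps an internal history buffer.

    from tango import DeviceProxy
    from tangostationcontrol.toolkit.get_internal_attribute_history import (
        get_internal_attribute_history,
    )

    dp = DeviceProxy("STAT/Device/1")
    # last 10 buffered readings, transposed so each row follows one element over time
    history = get_internal_attribute_history(dp, "Attribute", depth=10)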
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/lofar2_config.py b/tangostationcontrol/tangostationcontrol/toolkit/lofar2_config.py
index 811f4f27abbbac832c6de6811a223a27229e9032..bb1b2322dfa7c3d8506143eb91510e7d22a95af7 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/lofar2_config.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/lofar2_config.py
@@ -1,12 +1,23 @@
 #! /usr/bin/env python3
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
 import logging
 
+
 def configure_logging():
     # Always also log the hostname because it makes the origin of the log clear.
     import socket
+
     hostname = socket.gethostname()
     # Set up logging in a way that it can be understood by a human reader, be
     # easily grep'ed, be parsed with a couple of shell commands and
     # easily fed into a Kibana/Elasticsearch system.
-    logging.basicConfig(format = '%(asctime)s.%(msecs)d %(levelname)s - HOST="{}" PID="%(process)d" TNAME="%(threadName)s" TID="%(thread)d" FILE="%(pathname)s" LINE="%(lineno)d" FUNC="%(funcName)s" MSG="%(message)s"'.format(hostname), datefmt = '%Y-%m-%dT%H:%M:%S', level = logging.INFO, Force = True)
+    logging.basicConfig(
+        format='%(asctime)s.%(msecs)d %(levelname)s - HOST="{}" PID="%(process)d" TNAME="%(threadName)s" TID="%(thread)d" FILE="%(pathname)s" LINE="%(lineno)d" FUNC="%(funcName)s" MSG="%(message)s"'.format(
+            hostname
+        ),
+        datefmt="%Y-%m-%dT%H:%M:%S",
+        level=logging.INFO,
+        force=True,
+    )
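A minimal usage sketch of the logging setup above; note that the force keyword of logging.basicConfig() requires Python 3.8 or newer.

    import logging

    from tangostationcontrol.toolkit.lofar2_config import configure_logging

    configure_logging()
    logging.getLogger(__name__).info("station control toolkit started")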
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mib_compiler.py b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mib_compiler.py
index a96f2c34670d900665dce697c048e1460866c89f..ab145a79934b64a75dc9a41030c87408e3248e83 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mib_compiler.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mib_compiler.py
@@ -1,17 +1,19 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import argparse
+import logging
 import sys
-
-from pysnmp.smi import builder, compiler
-
 from pathlib import Path
 
 from pysmi import debug
-import logging
+from pysnmp.smi import builder, compiler
+
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger("mib_compiler")
 
 
-def mib_compile(mib_list : list, src, dst):
+def mib_compile(mib_list: list, src, dst):
 
     mibBuilder = builder.MibBuilder()
 
@@ -24,7 +26,10 @@ def mib_compile(mib_list : list, src, dst):
             mibBuilder.loadModules(i)
             logger.debug(f"loaded {i}")
         except Exception as e:
-            raise Exception(f"Something went wrong, try checking whether all the mib fills imported by the provided mib files are present in the source locations ({src}) \r\n (To do this enable debug options and scroll up) ") from e
+            raise Exception(
+                f"Something went wrong, try checking whether all the mib fills imported by the provided mib files are present in the source locations ({src}) \r\n (To do this enable debug options and scroll up) "
+            ) from e
+
 
 def main():
     abs_path = str(Path().absolute()).replace("\\", "/")
@@ -32,20 +37,43 @@ def main():
     in_path = f"{abs_path}/mibs"
 
     parser = argparse.ArgumentParser(
-        description='Compiles .mib files in to the easy to load pysnmp format')
+        description="Compiles .mib files in to the easy to load pysnmp format"
+    )
     parser.add_argument(
-        '-m', '--mibs', type=str, required=True, nargs='+', help='list of mib names to compile')
+        "-m",
+        "--mibs",
+        type=str,
+        required=True,
+        nargs="+",
+        help="list of mib names to compile",
+    )
     parser.add_argument(
-        '-d', '--destination', type=str,  required=False, default=out_path,
-        help='sets the output directory for the compiled mibs. (default: '
-             '%(default)s)')
+        "-d",
+        "--destination",
+        type=str,
+        required=False,
+        default=out_path,
+        help="sets the output directory for the compiled mibs. (default: "
+        "%(default)s)",
+    )
     parser.add_argument(
-        '-s', '--source', type=str, required=False, nargs='+',  default=in_path,
-        help='sets the input paths or addresses to read the .mib files from  (default: '
-             '%(default)s)')
+        "-s",
+        "--source",
+        type=str,
+        required=False,
+        nargs="+",
+        default=in_path,
+        help="sets the input paths or addresses to read the .mib files from  (default: "
+        "%(default)s)",
+    )
     parser.add_argument(
-        '-v', '--debug', dest='debug', action='store_true', default=False,
-        help='increase log output')
+        "-v",
+        "--debug",
+        dest="debug",
+        action="store_true",
+        default=False,
+        help="increase log output",
+    )
 
     args = parser.parse_args()
 
@@ -56,11 +84,12 @@ def main():
     debug_option = args.debug
 
     if debug_option:
-        debug.setLogger(debug.Debug('compiler'))
+        debug.setLogger(debug.Debug("compiler"))
 
     mib_compile(mib_list=mibs, src=source, dst=destination)
 
     sys.exit(1)
 
+
 if __name__ == "__main__":
     main()
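A hypothetical sketch of calling the compiler directly instead of via the command-line interface; the MIB name and paths are examples only.

    from tangostationcontrol.toolkit.mib_compiler.mib_compiler import mib_compile

    mib_compile(
        mib_list=["SNMPv2-MIB"],   # MIB modules to compile
        src=["./mibs"],            # input paths (or URLs) holding the .mib sources
        dst="./output_pysnmp",     # output directory for the compiled .py modules
    )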
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/retriever.py b/tangostationcontrol/tangostationcontrol/toolkit/retriever.py
index bb1e2d4ef8a85820b0f327ac597e267defb1d718..16bf752fbc096a566c73323635257b233ae035f5 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/retriever.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/retriever.py
@@ -1,13 +1,16 @@
 #! /usr/bin/env python3
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
 
-from tangostationcontrol.toolkit.archiver_util import get_db_config, split_tango_name
-
+import importlib
 from abc import ABC, abstractmethod
 from datetime import datetime, timedelta
+
 from sqlalchemy import create_engine, and_
 from sqlalchemy.orm import sessionmaker
 from sqlalchemy.orm.exc import NoResultFound
-import importlib
+from tangostationcontrol.toolkit.archiver_util import get_db_config, split_tango_name
+
 
 class Retriever(ABC):
     """
@@ -46,63 +49,107 @@ class Retriever(ABC):
         """
         Returns a list of the archived attributes in the DB.
         """
-        attrs = self.session.query(self.ab.Attribute).order_by(self.ab.Attribute.att_conf_id).all()
+        attrs = (
+            self.session.query(self.ab.Attribute)
+            .order_by(self.ab.Attribute.att_conf_id)
+            .all()
+        )
         # Returns the representation as set in __repr__ method of the mapper class
         return attrs
 
-    def get_archived_attributes_by_device(self,device_fqname: str):
+    def get_archived_attributes_by_device(self, device_fqname: str):
         """
         Takes as input the fully-qualified name of a device and returns a list of its archived attributes
         """
-        domain, family, member = split_tango_name(device_fqname,"device")
-        attrs = self.session.query(self.ab.Attribute).filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, \
-                                self.ab.Attribute.member == member)).all()
+        domain, family, member = split_tango_name(device_fqname, "device")
+        attrs = (
+            self.session.query(self.ab.Attribute)
+            .filter(
+                and_(
+                    self.ab.Attribute.domain == domain,
+                    self.ab.Attribute.family == family,
+                    self.ab.Attribute.member == member,
+                )
+            )
+            .all()
+        )
         # Returns the representation as set in __repr__ method of the mapper class
         return attrs
 
-    def get_attribute_id(self,attribute_fqname: str):
+    def get_attribute_id(self, attribute_fqname: str):
         """
         Takes as input the fully-qualified name of an attribute and returns its id.
         """
-        domain, family, member, name = split_tango_name(attribute_fqname,"attribute")
+        domain, family, member, name = split_tango_name(attribute_fqname, "attribute")
         try:
-            result = self.session.query(self.ab.Attribute.att_conf_id).filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, \
-                                    self.ab.Attribute.member == member, self.ab.Attribute.name == name)).one()
+            result = (
+                self.session.query(self.ab.Attribute.att_conf_id)
+                .filter(
+                    and_(
+                        self.ab.Attribute.domain == domain,
+                        self.ab.Attribute.family == family,
+                        self.ab.Attribute.member == member,
+                        self.ab.Attribute.name == name,
+                    )
+                )
+                .one()
+            )
             return result[0]
         except (TypeError, NoResultFound) as e:
             raise ValueError(f"Attribute {attribute_fqname} not found!") from e
 
     @abstractmethod
-    def get_attribute_datatype(self,attribute_fqname: str):
+    def get_attribute_datatype(self, attribute_fqname: str):
         return
 
-    def get_attribute_value_by_hours(self, attribute_fqname: str, hours: float, tablename:str):
+    def get_attribute_value_by_hours(
+        self, attribute_fqname: str, hours: float, tablename: str
+    ):
         """
-        Takes as input the attribute fully-qualified name and the number of past hours since the actual time 
+        Takes as input the attribute fully-qualified name and the number of past hours since the current time
         (e.g. hours=1 retrieves values in the last hour, hours=8.5 retrieves values in the last eight and a half hours).
         Returns a list of timestamps and a list of values
         """
         attr_id = self.get_attribute_id(attribute_fqname)
         # Retrieves the class that maps the DB table given the tablename
-        base_class = self.ab.get_class_by_tablename(tablename)    
-        # Retrieves the timestamp 
+        base_class = self.ab.get_class_by_tablename(tablename)
+        # Retrieves the timestamp
         time_now = datetime.now()
         time_delta = time_now - timedelta(hours=hours)
         # Converts the timestamps in the right format for the query
         time_now_db = str(time_now.strftime("%Y-%m-%d %X"))
         time_delta_db = str(time_delta.strftime("%Y-%m-%d %X"))
         try:
-            result = self.session.query(base_class).\
-                    join(self.ab.Attribute,self.ab.Attribute.att_conf_id==base_class.att_conf_id).\
-                    filter(and_(self.ab.Attribute.att_conf_id == attr_id,base_class.quality == 0,base_class.data_time >= time_delta_db, \
-                            base_class.data_time <= time_now_db)).order_by(base_class.data_time).all()
+            result = (
+                self.session.query(base_class)
+                .join(
+                    self.ab.Attribute,
+                    self.ab.Attribute.att_conf_id == base_class.att_conf_id,
+                )
+                .filter(
+                    and_(
+                        self.ab.Attribute.att_conf_id == attr_id,
+                        base_class.quality == 0,
+                        base_class.data_time >= time_delta_db,
+                        base_class.data_time <= time_now_db,
+                    )
+                )
+                .order_by(base_class.data_time)
+                .all()
+            )
         except (AttributeError, TypeError, NoResultFound) as e:
             raise ValueError(f"Attribute {attribute_fqname} not found!") from e
         return result
 
-    def get_attribute_value_by_interval(self,attribute_fqname: str, start_time: datetime, stop_time: datetime, tablename:str):
+    def get_attribute_value_by_interval(
+        self,
+        attribute_fqname: str,
+        start_time: datetime,
+        stop_time: datetime,
+        tablename: str,
+    ):
         """
-        Takes as input the attribute name and a certain starting and ending point-time. 
+        Takes as input the attribute name and a start and a stop time.
         The datetime format is pretty flexible (e.g. "YYYY-MM-dd hh:mm:ss").
         Returns a list of timestamps and a list of values
         """
@@ -110,17 +157,30 @@ class Retriever(ABC):
         # Retrieves the class that maps the DB table given the tablename
         base_class = self.ab.get_class_by_tablename(tablename)
         try:
-            result = self.session.query(base_class).\
-                    join(self.ab.Attribute,self.ab.Attribute.att_conf_id==base_class.att_conf_id).\
-                        filter(and_(self.ab.Attribute.att_conf_id == attr_id, base_class.quality == 0,base_class.data_time >= str(start_time), \
-                                base_class.data_time <= str(stop_time))).order_by(base_class.data_time).all()
+            result = (
+                self.session.query(base_class)
+                .join(
+                    self.ab.Attribute,
+                    self.ab.Attribute.att_conf_id == base_class.att_conf_id,
+                )
+                .filter(
+                    and_(
+                        self.ab.Attribute.att_conf_id == attr_id,
+                        base_class.quality == 0,
+                        base_class.data_time >= str(start_time),
+                        base_class.data_time <= str(stop_time),
+                    )
+                )
+                .order_by(base_class.data_time)
+                .all()
+            )
         except (AttributeError, TypeError, NoResultFound) as e:
             raise ValueError(f"Attribute {attribute_fqname} not found!") from e
         return result
 
-class RetrieverTimescale(Retriever):
 
-    def __init__(self, cm_name: str = 'archiving/hdbppts/confmanager01'):
+class RetrieverTimescale(Retriever):
+    def __init__(self, cm_name: str = "archiving/hdbppts/confmanager01"):
         self.cm_name = cm_name
 
         super().__init__()
@@ -131,11 +191,13 @@ class RetrieverTimescale(Retriever):
         """
         creds = get_db_config(self.cm_name)
 
-        # Set sqlalchemy library connection        
-        if creds["host"] == 'archiver-timescale':
-            creds["libname"] = 'postgresql+psycopg2'
+        # Set sqlalchemy library connection
+        if creds["host"] == "archiver-timescale":
+            creds["libname"] = "postgresql+psycopg2"
         else:
-            raise ValueError(f"Invalid hostname: {creds['host']}, we only support 'archiver-timescale'")
+            raise ValueError(
+                f"Invalid hostname: {creds['host']}, we only support 'archiver-timescale'"
+            )
 
         Session = self.create_session(creds)
         return Session()
@@ -144,80 +206,125 @@ class RetrieverTimescale(Retriever):
         """
         Sets the right mapper class following the DBMS connection
         """
-        return importlib.import_module('.archiver_base_ts', package=__package__)
+        return importlib.import_module(".archiver_base_ts", package=__package__)
 
-    def get_attribute_datatype(self,attribute_fqname: str):
+    def get_attribute_datatype(self, attribute_fqname: str):
         """
         Takes as input the fully-qualified name of an attribute and returns its Data-Type.
         Data Type name indicates the type (e.g. string, int, ...) and the read/write property. The name is used
         as DB table name suffix in which values are stored.
         """
-        domain, family, member, name = split_tango_name(attribute_fqname,"attribute")
+        domain, family, member, name = split_tango_name(attribute_fqname, "attribute")
         try:
-            result = self.session.query(self.ab.DataType.type).join(self.ab.Attribute,self.ab.Attribute.att_conf_type_id==self.ab.DataType.att_conf_type_id).\
-                            filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, self.ab.Attribute.member == member, self.ab.Attribute.name == name)).one()
+            result = (
+                self.session.query(self.ab.DataType.type)
+                .join(
+                    self.ab.Attribute,
+                    self.ab.Attribute.att_conf_type_id
+                    == self.ab.DataType.att_conf_type_id,
+                )
+                .filter(
+                    and_(
+                        self.ab.Attribute.domain == domain,
+                        self.ab.Attribute.family == family,
+                        self.ab.Attribute.member == member,
+                        self.ab.Attribute.name == name,
+                    )
+                )
+                .one()
+            )
             return result[0]
         except (AttributeError, TypeError, NoResultFound) as e:
             raise ValueError(f"Attribute {attribute_fqname} not found!") from e
 
-    def get_attribute_format(self,attribute_fqname: str):
+    def get_attribute_format(self, attribute_fqname: str):
         """
         Takes as input the fully-qualified name of an attribute and returns its format.
         There are essentially three formats: Scalar, Spectrum and Image.
-        * Works only for POSTGRESQL * 
+        * Works only for POSTGRESQL *
         """
-        domain, family, member, name = split_tango_name(attribute_fqname,"attribute")
+        domain, family, member, name = split_tango_name(attribute_fqname, "attribute")
         try:
-            result = self.session.query(self.ab.Format.format).join(self.ab.Attribute,self.ab.Attribute.att_conf_format_id==self.ab.Format.att_conf_format_id).\
-                filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, self.ab.Attribute.member == member, self.ab.Attribute.name == name)).one()
+            result = (
+                self.session.query(self.ab.Format.format)
+                .join(
+                    self.ab.Attribute,
+                    self.ab.Attribute.att_conf_format_id
+                    == self.ab.Format.att_conf_format_id,
+                )
+                .filter(
+                    and_(
+                        self.ab.Attribute.domain == domain,
+                        self.ab.Attribute.family == family,
+                        self.ab.Attribute.member == member,
+                        self.ab.Attribute.name == name,
+                    )
+                )
+                .one()
+            )
             return result[0]
         except (AttributeError, TypeError, NoResultFound) as e:
             raise ValueError(f"Attribute {attribute_fqname} not found!") from e
 
-    def get_attribute_tablename(self,attribute_fqname: str):
+    def get_attribute_tablename(self, attribute_fqname: str):
         """
         Takes as input the fully-qualified name of an attribute and returns the tablename where it is stored.
-        * Works only for POSTGRESQL * 
+        * Works only for POSTGRESQL *
         """
-        domain, family, member, name = split_tango_name(attribute_fqname,"attribute")
+        domain, family, member, name = split_tango_name(attribute_fqname, "attribute")
         try:
-            result = self.session.query(self.ab.Attribute.table_name).filter(and_(self.ab.Attribute.domain == domain, self.ab.Attribute.family == family, \
-                                    self.ab.Attribute.member == member, self.ab.Attribute.name == name)).one()
+            result = (
+                self.session.query(self.ab.Attribute.table_name)
+                .filter(
+                    and_(
+                        self.ab.Attribute.domain == domain,
+                        self.ab.Attribute.family == family,
+                        self.ab.Attribute.member == member,
+                        self.ab.Attribute.name == name,
+                    )
+                )
+                .one()
+            )
             return result[0]
         except (AttributeError, TypeError, NoResultFound) as e:
             raise ValueError(f"Attribute {attribute_fqname} not found!") from e
 
     def get_attribute_value_by_hours(self, attribute_fqname: str, hours: float = 1.0):
         """
-        Takes as input the attribute fully-qualified name and the number of past hours since the actual time 
+        Takes as input the attribute fully-qualified name and the number of past hours since the current time
         (e.g. hours=1 retrieves values in the last hour, hours=8.5 retrieves values in the last eight and a half hours).
         Returns a list of timestamps and a list of values
         """
         tablename = self.get_attribute_tablename(attribute_fqname)
-        return super().get_attribute_value_by_hours(attribute_fqname,hours,tablename)
+        return super().get_attribute_value_by_hours(attribute_fqname, hours, tablename)
 
-    def get_attribute_value_by_interval(self,attribute_fqname: str, start_time: datetime, stop_time: datetime):
+    def get_attribute_value_by_interval(
+        self, attribute_fqname: str, start_time: datetime, stop_time: datetime
+    ):
         """
-        Takes as input the attribute name and a certain starting and ending point-time. 
+        Takes as input the attribute name and a start and a stop time.
         The datetime format is pretty flexible (e.g. "YYYY-MM-dd hh:mm:ss").
         Returns a list of timestamps and a list of values
         """
         tablename = self.get_attribute_tablename(attribute_fqname)
-        return super().get_attribute_value_by_interval(attribute_fqname,start_time,stop_time,tablename)
+        return super().get_attribute_value_by_interval(
+            attribute_fqname, start_time, stop_time, tablename
+        )
 
-    def get_lofar_attribute(self,attribute_fqname: str):
+    def get_lofar_attribute(self, attribute_fqname: str):
         """
         Takes as input the attribute fully-qualified name and queries the customized lofar attribute views
-        Returns a list of rows containing device name, attribute name, timestamp and value 
+        Returns a list of rows containing device name, attribute name, timestamp and value
         """
         # Retrieves the attribute tablename
         tablename = self.get_attribute_tablename(attribute_fqname)
         # Retrieves the class that maps the DB table given the tablename
         base_class = self.ab.get_viewclass_by_tablename(tablename)
-        domain,family,member,name = split_tango_name(attribute_fqname,'attribute')
+        domain, family, member, name = split_tango_name(attribute_fqname, "attribute")
         try:
-            result = self.session.query(base_class).\
-                    filter(base_class.name == name).all()
+            result = (
+                self.session.query(base_class).filter(base_class.name == name).all()
+            )
         except (AttributeError, TypeError, NoResultFound) as e:
             raise ValueError(f"Attribute {attribute_fqname} not found!") from e
         return result
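A hypothetical usage sketch of the TimescaleDB retriever; it assumes the archiver-timescale database and the Configuration Manager device are reachable, and the attribute name is an example.

    from tangostationcontrol.toolkit.retriever import RetrieverTimescale

    retriever = RetrieverTimescale()  # defaults to 'archiving/hdbppts/confmanager01'
    attr = "stat/device/1/attribute"

    # type string, also used as the suffix of the table holding the values
    datatype = retriever.get_attribute_datatype(attr)

    # rows archived during the last half hour, ordered by timestamp
    for row in retriever.get_attribute_value_by_hours(attr, hours=0.5):
        print(row)  # representation defined by the mapper class __repr__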
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/udp_simulator.py b/tangostationcontrol/tangostationcontrol/toolkit/udp_simulator.py
index 32dc156e27e7816a805d430f6fe5730f90bd0c3e..a7d41001cce7f21cccfae00e91d7b285a5de0f1b 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/udp_simulator.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/udp_simulator.py
@@ -1,3 +1,6 @@
+# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+# SPDX-License-Identifier: Apache-2.0
+
 import socket
 import time
 
@@ -10,13 +13,12 @@ MESSAGE = "{}".format(i)
 print(f"UDP target IP: {UDP_IP}")
 print(f"UDP target port: {UDP_PORT}")
 
-sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # create UDP socket
+sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)  # create UDP socket
 
 while True:
     sock.sendto(bytes(MESSAGE, "utf-8"), (UDP_IP, UDP_PORT))
     i += 1
     MESSAGE = "{}".format(i)
 
-    #sleep for an arbitrary amount of time. Currently 0.2 settings for visual testing.
+    # sleep for an arbitrary amount of time. Currently 0.2 s, chosen for visual testing.
     time.sleep(0.2)
-
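A hypothetical counterpart for testing the simulator above: a minimal receiver that prints the counter messages it sends. The bind address and port are examples and must match the UDP_IP/UDP_PORT configured at the top of the script.

    import socket

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)  # create UDP socket
    sock.bind(("0.0.0.0", 5001))  # example port, not taken from the script
    while True:
        data, addr = sock.recvfrom(1024)
        print(f"received counter {data.decode('utf-8')} from {addr}")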
diff --git a/tangostationcontrol/test-requirements.txt b/tangostationcontrol/test-requirements.txt
index 03aa93626787882f5abea3c037d1e324669bc9d8..8d6926cbab34c65c5cad61e6bdf8e0bbd30ec530 100644
--- a/tangostationcontrol/test-requirements.txt
+++ b/tangostationcontrol/test-requirements.txt
@@ -9,10 +9,12 @@ virtualenv>=20.16.0 # MIT
 build>=0.8.0 # MIT
 coverage>=5.2.0 # Apache-2.0
 doc8>=0.8.0 # Apache-2.0
+black>=22.0.0 # MIT
 flake8>=3.8.0 # MIT
 flake8-bugbear>=22.1.11 # MIT
 flake8-breakpoint>=1.1.0 # MIT
 flake8-debugger>=4.0.0 #MIT
+pylint>=2.15.0 # GPLv2
 autopep8>=1.7.0 # MIT
 python-subunit>=1.4.0 # Apache-2.0/BSD
 Pygments>=2.6.0 # BSD
diff --git a/tangostationcontrol/tox.ini b/tangostationcontrol/tox.ini
index d562136911bdfaf7a8970b3d0d578f3739475555..d88aa7e9a6b72832a76505dcac2d396ecd65f472 100644
--- a/tangostationcontrol/tox.ini
+++ b/tangostationcontrol/tox.ini
@@ -1,6 +1,6 @@
 [tox]
 minversion = 3.20
-envlist = py3{7,8,9,10},pep8
+envlist = black,pep8,pylint,py3{7,8,9,10},docs
 skipsdist = True
 
 [testenv]
@@ -10,8 +10,8 @@ usedevelop = True
 install_command = {envbindir}/pip3 install {opts} {packages}
 passenv = HOME
 setenv =
-   VIRTUAL_ENV={envdir}
-   PYTHONWARNINGS=default::DeprecationWarning
+    VIRTUAL_ENV={envdir}
+    PYTHONWARNINGS=default::DeprecationWarning
 ; Share the same envdir with as many jobs as possible due to extensive time it
 ; takes to compile the pytango wheel, in addition to its large install size.
 ; should the environment change (such as the Python version) the environment
@@ -59,7 +59,7 @@ commands =
     {envpython} -m coverage xml -o coverage.xml
     {envpython} -m coverage report --omit='*test*'
 
-[testenv:cover]
+[testenv:{cover,coverage}]
 ; stestr does not natively support generating coverage reports use
 ; `PYTHON=python -m coverage run....` to overcome this.
 setenv =
@@ -75,17 +75,23 @@ commands =
     {envpython} -m coverage xml -o coverage.xml
     {envpython} -m coverage report --omit='*test*'
 
-[testenv:pep8]
-commands =
-    {envpython} -m doc8 --version
-    {envpython} -m flake8 --version
-    {envpython} -m doc8 docs/source/ --ignore D001
-    {envpython} -m flake8
-
-[testenv:format]
+# Use generative name and command prefixes to reuse the same virtualenv
+# for all linting jobs.
+[testenv:{pep8,black,pylint,format}]
+usedevelop = False
+envdir = {toxworkdir}/linting
 commands =
-    {envpython} -m autopep8 --version
-    {envpython} -m autopep8 -v -aa --in-place --recursive tangostationcontrol/
+    pep8: {envpython} -m doc8 --version
+    pep8: {envpython} -m flake8 --version
+    pep8: {envpython} -m doc8 docs/source/ --ignore D001
+    pep8: {envpython} -m flake8
+    black: {envpython} -m black --version
+    black: {envpython} -m black --check --diff . --extend-exclude=libhdbpp-python|SNMP_mib_loading
+    pylint: {envpython} -m pylint --version
+    pylint: {envpython} -m pylint --ignore=test,integration_test --max-line-length=88 tangostationcontrol
+    format: {envpython} -m autopep8 --version
+;    format: {envpython} -m autopep8 -v -aa --in-place --recursive tangostationcontrol/
+    format: {envpython} -m black -v . --extend-exclude=libhdbpp-python|SNMP_mib_loading
 
 [testenv:bandit];
 ; B104: hardcoded_bind_all_interfaces
@@ -117,4 +123,4 @@ commands =
 [flake8]
 filename = *.py,.stestr.conf,.txt
 ignore = B014, B019, W291, W293, W391, E111, E114, E121, E122, E123, E124, E126, E127, E128, E131, E201, E201, E202, E203, E221, E222, E225, E226, E231, E241, E251, E252, E261, E262, E265, E271, E301, E302, E303, E305, E306, E401, E402, E501, E502, E701, E712, E721, E731, F403, F523, F541, F841, H301, H306, H401, H403, H404, H405, W503
-exclude=.tox,build,.egg-info,libhdbpp-python,SNMP_mib_loading
+exclude = .tox,build,.egg-info,libhdbpp-python,SNMP_mib_loading