diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 85d25455781d7f006412a478ac32d46a42eb2dc3..de8a85e3ba87d0e659d0ecb96e6457e6c48fab46 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -42,17 +42,6 @@ stages:
     - . bootstrap/etc/lofar20rc.sh || true
 ##    Allow docker image script to execute
 #    - chmod u+x $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh
-# This suffers from only refs changes not working as expected:
-# https://gitlab.com/gitlab-org/gitlab-foss/-/issues/55012
-# Therefore we have to add `only: refs: - merge_requests` to all jobs that are
-# only supposed to run on merge requests with file changes. However,
-# two pipelines will spawn instead of one of which one tagged with 'detached`.
-.base_docker_images_except:
-  extends: .base_docker_images
-  except:
-    refs:
-      - tags
-      - master
 .base_docker_store_images:
   extends: .base_docker_images
   script:
@@ -66,15 +55,17 @@ docker_store_images_master_tag:
       - master
 docker_store_images_changes:
   extends: .base_docker_store_images
-  only:
-    refs:
-      - merge_requests
-    changes:
+  rules:
+#   https://stackoverflow.com/questions/68955071/how-to-disable-detached-pipelines-in-gitlab
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/.env
-  except:
-    refs:
-      - tags
-      - master
+      when: always
 docker_build_image_all:
   extends: .base_docker_images
   only:
@@ -100,6 +91,7 @@ docker_build_image_all:
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-tilebeam latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-beamlet latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-digitalbeam latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-antennafield latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-boot latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-docker latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-observation_control latest
@@ -110,43 +102,61 @@ docker_build_image_all:
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-unb2 latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-xst latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-temperature-manager latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh archiver-timescale latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-cm latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-es latest
+
 docker_build_image_elk:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/elk.yml
       - docker-compose/elk/*
       - docker-compose/elk-configure-host/*
+      when: always
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh elk $tag
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh elk-configure-host $tag
 docker_build_image_lofar_device_base:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/lofar-device-base.yml
       - docker-compose/lofar-device-base/*
+      when: always
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh lofar-device-base $tag
 docker_build_image_prometheus:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/prometheus.yml
       - docker-compose/prometheus/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh prometheus $tag
 docker_build_image_itango:
-  extends: .base_docker_images_except
+  extends: .base_docker_images
   only:
     refs:
       - merge_requests
@@ -157,247 +167,395 @@ docker_build_image_itango:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh itango $tag
 docker_build_image_grafana:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/grafana.yml
       - docker-compose/grafana/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh grafana $tag
 docker_build_image_jupyter:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/jupyter.yml
       - docker-compose/jupyter/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh jupyter $tag
 docker_build_image_apsct_sim:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/aspct-sim.yml
       - docker-compose/pypcc-sim-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh apsct-sim $tag
 docker_build_image_apspu_sim:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/apspu-sim.yml
       - docker-compose/pypcc-sim-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh apspu-sim $tag
 docker_build_image_recv_sim:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/recv-sim.yml
       - docker-compose/pypcc-sim-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh recv-sim $tag
 docker_build_image_sdptr_sim:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/sdptr-sim.yml
       - docker-compose/sdptr-sim/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh sdptr-sim $tag
 docker_build_image_unb2_sim:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/unb2-sim.yml
       - docker-compose/pypcc-sim-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh unb2-sim $tag
 docker_build_image_device_apsct:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-aspct.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-aspct $tag
 docker_build_image_device_apspu:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-apspu.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-apspu $tag
 docker_build_image_device_pdu:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-pdu.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-pdu $tag
 docker_build_image_device_tilebeam:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-tilebeam.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-tilebeam $tag
 docker_build_image_device_beamlet:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-beamlet.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-beamlet $tag
 docker_build_image_device_digitalbeam:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-digitalbeam.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-digitalbeam $tag
 docker_build_image_device_boot:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-boot.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-boot $tag
 docker_build_image_device_docker:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-docker.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-docker $tag
 docker_build_image_device_ovservation_control:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-observation_control.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-observation_control $tag
+docker_build_image_device_antennafield:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
+      - docker-compose/device-antennafield.yml
+      - docker-compose/lofar-device-base/*
+  script:
+#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-antennafield $tag
 docker_build_image_device_recv:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-recv.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-recv $tag
 docker_build_image_device_sdp:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-sdp.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sdp $tag
 docker_build_image_device_sst:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-sst.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sst $tag
 docker_build_image_device_unb2:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-unb2.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-unb2 $tag
 docker_build_image_device_xst:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-xst.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-xst $tag
 docker_build_image_device_temperature_manager:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-temperature-manager.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-temperature-manager $tag
+docker_build_image_archiver_timescale:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
+      - docker-compose/archiver-timescale.yml
+      - docker-compose/timescaledb/*
+  script:
+#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh archiver-timescale $tag
+docker_build_image_hdbppts_cm:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
+      - docker-compose/archiver-timescale.yml
+      - docker-compose/tango-archiver-ts/*
+  script:
+#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-cm $tag
+docker_build_image_hdbppts_es:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
+      - docker-compose/archiver-timescale.yml
+      - docker-compose/tango-archiver-ts/*
+  script:
+#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-es $tag
 newline_at_eof:
   stage: linting
   before_script:
diff --git a/.gitmodules b/.gitmodules
index 3f1cb3e31f5ff75ccc1292eada306c6bcc551630..f1248450adb0a12584a247b8119bc9653e6498f0 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,7 +1,7 @@
-[submodule "docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter"]
-	path = docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter
-	url = https://git.astron.nl/lofar2.0/ska-tango-grafana-exporter.git
-	branch = station-control
 [submodule "tangostationcontrol/tangostationcontrol/toolkit/libhdbpp-python"]
 	path = tangostationcontrol/tangostationcontrol/toolkit/libhdbpp-python
 	url = https://gitlab.com/tango-controls/hdbpp/libhdbpp-python.git
+[submodule "docker-compose/alerta-web"]
+	path = docker-compose/alerta-web
+	url = https://github.com/jjdmol/alerta-webui
+	branch = add-isa-18-2-states
diff --git a/CDB/LOFAR_ConfigDb.json b/CDB/LOFAR_ConfigDb.json
index ab989d5d8a3c629771771a38d8b70ed2ebef3822..9c243aab268d9ca13f37bfe3d7af9cfbba8222de 100644
--- a/CDB/LOFAR_ConfigDb.json
+++ b/CDB/LOFAR_ConfigDb.json
@@ -1,4 +1,14 @@
 {
+    "objects": {
+        "station": {
+            "name": [
+                "DevStation"
+            ],
+            "number": [
+                "999"
+            ]
+        }
+    },
     "servers": {
         "Docker": {
             "STAT": {
@@ -14,6 +24,17 @@
                 }
             }
         },
+        "AntennaField": {
+            "STAT": {
+                "AntennaField": {
+                    "STAT/AntennaField/1": {
+                        "properties": {
+                            "RECV_devices": ["STAT/RECV/1"]
+                        }
+                    }
+                }
+            }
+        },
         "PDU": {
             "STAT": {
                 "PDU": {
@@ -21,18 +42,18 @@
                 }
             }
         },
-        },
-        "Temperature_manager": {
+        "TemperatureManager": {
             "STAT": {
-                "Temperature_manager": {
-                    "STAT/Temperature_manager/1": {}
+                "TemperatureManager": {
+                    "STAT/TemperatureManager/1": {}
                 }
             }
         },
         "TileBeam": {
             "STAT": {
                 "TileBeam": {
-                    "STAT/TileBeam/1": {}
+                    "STAT/TileBeam/1": {
+                    }
                 }
             }
         },
diff --git a/CDB/stations/DTS_ConfigDb.json b/CDB/stations/DTS_ConfigDb.json
index 398ef7d63577ce62f61c2374b9335a905ebce566..b0cd5d91ed6795c579680df10fe53a82b1e93ccb 100644
--- a/CDB/stations/DTS_ConfigDb.json
+++ b/CDB/stations/DTS_ConfigDb.json
@@ -1,4 +1,14 @@
 {
+    "objects": {
+        "station": {
+            "name": [
+                "DTS"
+            ],
+            "number": [
+                "902"
+            ]
+        }
+    },
     "servers": {
         "boot": {
             "STAT": {
diff --git a/CDB/stations/DTS_Outside_ConfigDb.json b/CDB/stations/DTS_Outside_ConfigDb.json
new file mode 100644
index 0000000000000000000000000000000000000000..e1b6e19079df728ebba70204fea9768249501224
--- /dev/null
+++ b/CDB/stations/DTS_Outside_ConfigDb.json
@@ -0,0 +1,524 @@
+{
+    "objects": {
+        "station": {
+            "name": [
+                "DTSOutside"
+            ],
+            "number": [
+                "903"
+            ]
+        }
+    },
+    "servers": {
+        "boot": {
+            "STAT": {
+                "Boot": {
+                    "STAT/Boot/1": {
+                        "properties": {
+                            "Initialise_Hardware": [
+                                "True"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "APSCT": {
+            "STAT": {
+                "APSCT": {
+                    "STAT/APSCT/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.87.6.80"
+                            ],
+                            "OPC_Server_Port": [
+                                "4843"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "APSPU": {
+            "STAT": {
+                "APSPU": {
+                    "STAT/APSPU/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.87.6.80"
+                            ],
+                            "OPC_Server_Port": [
+                                "4842"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "Beamlet": {
+            "STAT": {
+                "Beamlet": {
+                    "STAT/Beamlet/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.99.0.250"
+                            ],
+                            "OPC_Server_Port": [
+                                "4840"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ],
+                            "FPGA_beamlet_output_hdr_eth_destination_mac_RW_default": [
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7"
+                            ],
+                            "FPGA_beamlet_output_hdr_ip_destination_address_RW_default": [
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "RECV": {
+            "STAT": {
+                "RECV": {
+                    "STAT/RECV/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.87.6.80"
+                            ],
+                            "OPC_Server_Port": [
+                                "4840"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ],
+                            "HBAT_reference_ETRS": [
+                                "3839371.416", "430339.901", "5057958.886",
+                                "3839368.919", "430335.979", "5057961.1",
+                                "3839365.645", "430339.299", "5057963.288",
+                                "3839368.142", "430343.221", "5057961.074",
+                                "3839374.094", "430299.513", "5057960.017",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0"
+                            ],
+                            "HBAT_PQR_rotation_angle_deg": [
+                                "45.73",
+                                "45.73",
+                                "45.73",
+                                "45.73",
+                                "54.40",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0"
+                            ],
+                            "HBAT_PQR_to_ETRS_rotation_matrix": [
+                               "-0.11660087", "-0.79095632", "0.60065992",
+                               " 0.99317077", "-0.09529842", "0.06730545",
+                               " 0.00400627", " 0.60440575", "0.79666658"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "SDP": {
+            "STAT": {
+                "SDP": {
+                    "STAT/SDP/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.99.0.250"
+                            ],
+                            "OPC_Server_Port": [
+                                "4840"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ],
+                            "FPGA_sdp_info_station_id_RW_default": [
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903"
+                            ],
+                            "TR_fpga_mask_RW_default": [
+                                "True",
+                                "True",
+                                "True",
+                                "True",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "SST": {
+            "STAT": {
+                "SST": {
+                    "STAT/SST/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.99.0.250"
+                            ],
+                            "OPC_Server_Port": [
+                                "4840"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ],
+                            "FPGA_sst_offload_hdr_eth_destination_mac_RW_default": [
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7"
+                            ],
+                            "FPGA_sst_offload_hdr_ip_destination_address_RW_default": [
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "XST": {
+            "STAT": {
+                "XST": {
+                    "STAT/XST/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.99.0.250"
+                            ],
+                            "OPC_Server_Port": [
+                                "4840"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ],
+                            "FPGA_xst_offload_hdr_eth_destination_mac_RW_default": [
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7"
+                            ],
+                            "FPGA_xst_offload_hdr_ip_destination_address_RW_default": [
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "UNB2": {
+            "STAT": {
+                "UNB2": {
+                    "STAT/UNB2/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.87.6.80"
+                            ],
+                            "OPC_Server_Port": [
+                                "4841"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ]
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/CDB/stations/LTS_ConfigDb.json b/CDB/stations/LTS_ConfigDb.json
index 63b16a78b809ec51644094a41f6700f7f16ced22..f6cacc0d93adddef12a3868a3e1c93c70a083f6b 100644
--- a/CDB/stations/LTS_ConfigDb.json
+++ b/CDB/stations/LTS_ConfigDb.json
@@ -1,4 +1,14 @@
 {
+    "objects": {
+        "station": {
+            "name": [
+                "LTS"
+            ],
+            "number": [
+                "901"
+            ]
+        }
+    },
     "servers": {
         "boot": {
             "STAT": {
diff --git a/CDB/stations/simulators_ConfigDb.json b/CDB/stations/simulators_ConfigDb.json
index 9806a2ca407bcd41df3e744ef248d95831a28e39..6afb6b21adb00239edf53241dfe4b515cad0f35a 100644
--- a/CDB/stations/simulators_ConfigDb.json
+++ b/CDB/stations/simulators_ConfigDb.json
@@ -116,10 +116,10 @@
                 }
             }
         },
-        "Temperature_manager": {
+        "TemperatureManager": {
             "STAT": {
-                "Temperature_manager": {
-                    "STAT/Temperature_manager/1": {
+                "TemperatureManager": {
+                    "STAT/TemperatureManager/1": {
                         "properties": {
                         }
                     }
diff --git a/README.md b/README.md
index 09eedaf9a238c8a3407cc0ab2d0d267bc106af54..b8f08504fbcd221ac8a0ed3e4e6c98869b242f11 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,5 @@
 [![Pipeline Status](https://git.astron.nl/lofar2.0/tango/badges/master/pipeline.svg)](https://git.astron.nl/lofar2.0/tango/-/pipelines)
+[![Coverage Status](https://git.astron.nl/lofar2.0/tango/badges/master/coverage.svg)](https://git.astron.nl/lofar2.0/tango/-/jobs/artifacts/master/download?job=unit_test)
 [![Documentation Status](https://readthedocs.org/projects/lofar20-station-control/badge/?version=latest)](https://lofar20-station-control.readthedocs.io/en/latest/?badge=latest)
 
 # Tango Station Control
diff --git a/bin/dump_ConfigDb.sh b/bin/dump_ConfigDb.sh
index c1f6dc214e32458af1f1d555332ecb40c2b71601..2532b8e275a3c4a609dc9b618fb143f8815f94a6 100755
--- a/bin/dump_ConfigDb.sh
+++ b/bin/dump_ConfigDb.sh
@@ -1,4 +1,7 @@
 #!/bin/bash
 
 # writes the JSON dump to stdout, Do not change -i into -it incompatible with gitlab ci!
-docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig python -m dsconfig.dump
+docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig bash -c '
+  python -m dsconfig.dump > /tmp/dsconfig-configdb-dump.json
+  /manage_object_properties.py -r > /tmp/dsconfig-objectdb-dump.json
+  /merge_json.py /tmp/dsconfig-objectdb-dump.json /tmp/dsconfig-configdb-dump.json'
diff --git a/docker-compose/Makefile b/docker-compose/Makefile
index d329b9bbb5bd6cbaff6cd0d06f9bf1f93ed47ae5..64f51f70a39e443c998c0fb3ffbd3f08de0f9e48 100644
--- a/docker-compose/Makefile
+++ b/docker-compose/Makefile
@@ -150,12 +150,10 @@ pull: ## pull the images from the Docker hub
 
 build: ## rebuild images
 	# docker-compose does not support build dependencies, so manage those here
-	$(DOCKER_COMPOSE_ARGS) docker-compose -f lofar-device-base.yml -f networks.yml build --progress=plain
 	$(DOCKER_COMPOSE_ARGS) docker-compose $(COMPOSE_FILE_ARGS) build --parallel --progress=plain $(SERVICE)
 
 build-nocache: ## rebuild images from scratch
 	# docker-compose does not support build dependencies, so manage those here
-	$(DOCKER_COMPOSE_ARGS) docker-compose -f lofar-device-base.yml -f networks.yml build --progress=plain
 	$(DOCKER_COMPOSE_ARGS) docker-compose $(COMPOSE_FILE_ARGS) build --no-cache --progress=plain $(SERVICE)
 
 up: minimal  ## start the base TANGO system and prepare requested services
diff --git a/docker-compose/alerta-web/Dockerfile b/docker-compose/alerta-server/Dockerfile
similarity index 82%
rename from docker-compose/alerta-web/Dockerfile
rename to docker-compose/alerta-server/Dockerfile
index 80431da39da9ddb7ff0c28997660163234eb6d57..04f9bce1233a38a09cea6814b2ce8ac54f30fd84 100644
--- a/docker-compose/alerta-web/Dockerfile
+++ b/docker-compose/alerta-server/Dockerfile
@@ -9,6 +9,9 @@ RUN bash -c 'source /venv/bin/activate; pip install /tmp/grafana-plugin'
 COPY lofar-plugin /tmp/lofar-plugin
 RUN bash -c 'source /venv/bin/activate; pip install /tmp/lofar-plugin'
 
+COPY lofar-routing-plugin /tmp/lofar-routing-plugin
+RUN bash -c 'source /venv/bin/activate; pip install /tmp/lofar-routing-plugin'
+
 COPY alertad.conf /app/alertad.conf
 COPY alerta.conf /app/alerta.conf
 COPY config.json /web/config.json
diff --git a/docker-compose/alerta-web/README.md b/docker-compose/alerta-server/README.md
similarity index 100%
rename from docker-compose/alerta-web/README.md
rename to docker-compose/alerta-server/README.md
diff --git a/docker-compose/alerta-web/alerta-secrets.json b/docker-compose/alerta-server/alerta-secrets.json
similarity index 100%
rename from docker-compose/alerta-web/alerta-secrets.json
rename to docker-compose/alerta-server/alerta-secrets.json
diff --git a/docker-compose/alerta-web/alerta.conf b/docker-compose/alerta-server/alerta.conf
similarity index 100%
rename from docker-compose/alerta-web/alerta.conf
rename to docker-compose/alerta-server/alerta.conf
diff --git a/docker-compose/alerta-web/alertad.conf b/docker-compose/alerta-server/alertad.conf
similarity index 75%
rename from docker-compose/alerta-web/alertad.conf
rename to docker-compose/alerta-server/alertad.conf
index dc7b6c2e295ae4230a9373ed26f148d6aad59cd0..b0088c6c2bf8f26fd9cec59a3e12680dcbb1029e 100644
--- a/docker-compose/alerta-web/alertad.conf
+++ b/docker-compose/alerta-server/alertad.conf
@@ -1,15 +1,22 @@
+import os
+
 DEBUG = True
 SECRET = "T=&7xvF2S&x7w_JAcq$h1x5ocfA)8H2i"
 
 # Allow non-admin views
 CUSTOMER_VIEWS = True
 
+# Use the more advanced ANSI/ISA 18.2 alarm model,
+# which does not auto-close alarms and thus
+# allows for tracking alarms that came and went.
+ALARM_MODEL = "ISA_18_2"
+
 # Never timeout alerts
 ALERT_TIMEOUT = 0
 # Auto unack after a day
 ACK_TIMEOUT = 24 * 3600
-# Auto unshelve after 2 hours
-SHELVE_TIMEOUT = 2 * 3600
+# Auto unshelve after a week
+SHELVE_TIMEOUT = 7 * 24 * 3600
 
 # Use custom date formats
 DATE_FORMAT_MEDIUM_DATE = "dd DD/MM HH:mm"
@@ -17,10 +24,31 @@ DATE_FORMAT_LONG_DATE   = "yyyy-MM-DD HH:mm:ss.sss"
 
 # Default overview settings
 COLUMNS = ['severity', 'status', 'createTime', 'lastReceiveTime', 'resource', 'grafanaDashboardHtml', 'grafanaPanelHtml', 'event', 'text']
-DEFAULT_FILTER = {'status': ['open']}
+DEFAULT_FILTER = {'status': ['UNACK', 'RTNUN']}
 SORT_LIST_BY = "createTime"
 AUTO_REFRESH_INTERVAL = 5000 # ms
 
+COLOR_MAP = {
+    'severity': {
+        'Critical': 'red',
+        'High': 'orange',
+        'Medium': '#FFF380', # corn yellow
+        'Low': 'dodgerblue',
+        'Advisory': 'lightblue',
+        'OK': '#00CC00',  # lime green
+        'Unknown': 'silver'
+    },
+    'text': 'black'
+}
+
+# Allow the alerta-web client to reach alerta-server from these origins
+CORS_ORIGINS = [
+    'http://localhost:8081',
+    'http://localhost:8082',
+    os.environ.get("BASE_URL", ""),
+    os.environ.get("DASHBOARD_URL", ""),
+]
+
 # ------------------------------------
 #    Plugin configuration
 # ------------------------------------
@@ -28,7 +56,7 @@ AUTO_REFRESH_INTERVAL = 5000 # ms
 PLUGINS = ['reject', 'blackout', 'acked_by', 'enhance', 'grafana', 'lofar', 'slack']
 
 # Slack plugin settings, see https://github.com/alerta/alerta-contrib/tree/master/plugins/slack
-import os, json
+import json
 
 with open("/run/secrets/alerta-secrets") as secrets_file:
     secrets = json.load(secrets_file)
diff --git a/docker-compose/alerta-web/config.json b/docker-compose/alerta-server/config.json
similarity index 100%
rename from docker-compose/alerta-web/config.json
rename to docker-compose/alerta-server/config.json
diff --git a/docker-compose/alerta-web/grafana-plugin/alerta_grafana.py b/docker-compose/alerta-server/grafana-plugin/alerta_grafana.py
similarity index 100%
rename from docker-compose/alerta-web/grafana-plugin/alerta_grafana.py
rename to docker-compose/alerta-server/grafana-plugin/alerta_grafana.py
diff --git a/docker-compose/alerta-web/grafana-plugin/setup.py b/docker-compose/alerta-server/grafana-plugin/setup.py
similarity index 100%
rename from docker-compose/alerta-web/grafana-plugin/setup.py
rename to docker-compose/alerta-server/grafana-plugin/setup.py
diff --git a/docker-compose/alerta-server/lofar-plugin/alerta_lofar.py b/docker-compose/alerta-server/lofar-plugin/alerta_lofar.py
new file mode 100644
index 0000000000000000000000000000000000000000..b227069c8805b0f71aa8438c474d5a9afe5129ac
--- /dev/null
+++ b/docker-compose/alerta-server/lofar-plugin/alerta_lofar.py
@@ -0,0 +1,69 @@
+import os
+import json
+import logging
+
+from alerta.plugins import PluginBase
+import alerta.models.alarms.isa_18_2 as isa_18_2
+
+LOG = logging.getLogger()
+
+
+class EnhanceLOFAR(PluginBase):
+    """
+    Plugin for enhancing alerts with LOFAR-specific information
+    """
+
+    @staticmethod
+    def _fix_severity(alert):
+        """
+          Force conversion of severity to ISA 18.2 model, to allow Alerta to parse the alert.
+
+          For example, the 'prometheus' webhook by default uses the 'warning' severity,
+          but users might also specify a non-existent severity level.
+        """
+
+        if alert.severity not in isa_18_2.SEVERITY_MAP:
+            # Save original severity
+            alert.attributes['unparsableSeverity'] = alert.severity
+
+            translation = {
+                "normal":   isa_18_2.OK,
+                "ok":       isa_18_2.OK,
+                "cleared":  isa_18_2.OK,
+                "warning":  isa_18_2.LOW,
+                "minor":    isa_18_2.MEDIUM,
+                "major":    isa_18_2.HIGH,
+                "critical": isa_18_2.CRITICAL,
+            }
+
+            alert.severity = translation.get(alert.severity.lower(), isa_18_2.MEDIUM)
+
+    def pre_receive(self, alert, **kwargs):
+        self._fix_severity(alert)
+
+        # Parse LOFAR-specific fields
+        for tag in alert.tags:
+            try:
+                key, value = tag.split("=", 1)
+            except ValueError:
+                continue
+
+            if key == "device":
+                alert.attributes['lofarDevice'] = value
+
+            if key == "name":
+                alert.attributes['lofarAttribute'] = value
+
+            if key == "station":
+                alert.resource = value
+
+        return alert
+
+    def post_receive(self, alert, **kwargs):
+        return
+
+    def status_change(self, alert, status, text, **kwargs):
+        return
+
+    def take_action(self, alert, action, text, **kwargs):
+        raise NotImplementedError
diff --git a/docker-compose/alerta-web/lofar-plugin/setup.py b/docker-compose/alerta-server/lofar-plugin/setup.py
similarity index 100%
rename from docker-compose/alerta-web/lofar-plugin/setup.py
rename to docker-compose/alerta-server/lofar-plugin/setup.py
diff --git a/docker-compose/alerta-server/lofar-routing-plugin/routing.py b/docker-compose/alerta-server/lofar-routing-plugin/routing.py
new file mode 100644
index 0000000000000000000000000000000000000000..bcd9f9e159c5f44bf12cacf17fb926b5db7bdb5a
--- /dev/null
+++ b/docker-compose/alerta-server/lofar-routing-plugin/routing.py
@@ -0,0 +1,72 @@
+import logging
+
+from alerta.app import alarm_model
+from alerta.models.enums import ChangeType
+
+LOG = logging.getLogger('alerta.plugins.routing')
+
+# For a description of this interface,
+# see https://docs.alerta.io/gettingstarted/tutorial-3-plugins.html?highlight=rules#step-3-route-alerts-to-plugins
+def rules(alert, plugins, config):
+    if alert.previous_severity is None:
+        # The alert still has to be parsed, and enriched, before it is
+        # merged into existing alerts.
+        return rules_prereceive(alert, plugins, config)
+    else:
+        # The alert has been processed. Check to which plugins we
+        # want to send it.
+        return rules_postreceive(alert, plugins, config)
+
+def rules_prereceive(alert, plugins, config):
+    """ Rules to determine which processing filters to use. """
+
+    # no filtering
+    return (plugins.values(), {})
+
+def _is_new_problem(alert) -> bool:
+    """ Return whether the state change denotes a newly identified issue
+        on a system that (as far as the operator knew) was fine before.
+        
+        Returns True when detecting NORM -> UNACK transitions, and False
+        on any duplicates of this transition.
+        
+        Note that RTNUN -> UNACK is thus not triggered on. """
+
+    if alert.status != 'UNACK':
+        # Only report problems (not ACKing, SHELVing, etc)
+        return False
+    elif alert.last_receive_time != alert.update_time:
+        # Ignore anything that didn't update the alert,
+        # to avoid triggering on alerts that repeat
+        # the current situation
+        return False
+    else:
+        # Only report if the previous status was NORM, to avoid
+            # triggering on (e.g.) RTNUN -> UNACK transitions.
+        for h in alert.history: # is sorted new -> old
+            if h.status == alert.status:
+                # ignore any update that didn't change the status
+                continue
+
+            return h.status == "NORM"
+
+        # ... or if there was no previous status (a brand new alert)
+        return True
+
+def rules_postreceive(alert, plugins, config):
+    """ Rules to determine which emission methods to use. """
+
+    # decide whether to notify the user on slack
+    send_to_slack = _is_new_problem(alert)
+
+    LOG.debug(f"Sending alert {alert.event} with status {alert.status} and severity {alert.previous_severity} => {alert.severity} to slack? {send_to_slack}")
+
+    # filter the plugin list based on these decisions
+    use_plugins = []
+    for name, plugin in plugins.items():
+        if name == 'slack' and not send_to_slack:
+            pass
+        else:
+            use_plugins.append(plugin)
+
+    return (use_plugins, {})
diff --git a/docker-compose/alerta-server/lofar-routing-plugin/setup.py b/docker-compose/alerta-server/lofar-routing-plugin/setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..038881e14b12d0f0c0ca941fd629a53ac730df75
--- /dev/null
+++ b/docker-compose/alerta-server/lofar-routing-plugin/setup.py
@@ -0,0 +1,24 @@
+
+from setuptools import setup, find_packages
+
+version = '1.0.0'
+
+setup(
+    name="alerta-routing",
+    version=version,
+    description='Alerta plugin to configure LOFAR custom alert routing',
+    url='https://git.astron.nl/lofar2.0/tango',
+    license='Apache License 2.0',
+    author='Jan David Mol',
+    author_email='mol@astron.nl',
+    packages=find_packages(),
+    py_modules=['routing'],
+    include_package_data=True,
+    zip_safe=True,
+    entry_points={
+        'alerta.routing': [
+            'rules = routing:rules'
+        ]
+    },
+    python_requires='>=3.5'
+)
diff --git a/docker-compose/alerta-web b/docker-compose/alerta-web
new file mode 160000
index 0000000000000000000000000000000000000000..9ee69dfbd0e33604169604b5a5cc506d560cb60b
--- /dev/null
+++ b/docker-compose/alerta-web
@@ -0,0 +1 @@
+Subproject commit 9ee69dfbd0e33604169604b5a5cc506d560cb60b
diff --git a/docker-compose/alerta-web/lofar-plugin/alerta_lofar.py b/docker-compose/alerta-web/lofar-plugin/alerta_lofar.py
deleted file mode 100644
index c4f618d2d6675feab78fce49cedc9f8030766c97..0000000000000000000000000000000000000000
--- a/docker-compose/alerta-web/lofar-plugin/alerta_lofar.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import os
-import json
-import logging
-
-from alerta.plugins import PluginBase
-
-LOG = logging.getLogger()
-
-
-class EnhanceLOFAR(PluginBase):
-    """
-    Plugin for enhancing alerts with LOFAR-specific information
-    """
-
-    def pre_receive(self, alert, **kwargs):
-        # Parse LOFAR-specific fields
-        for tag in alert.tags:
-            try:
-                key, value = tag.split("=", 1)
-            except ValueError:
-                continue
-
-            if key == "device":
-                alert.attributes['lofarDevice'] = value
-
-            if key == "name":
-                alert.attributes['lofarAttribute'] = value
-
-            if key == "station":
-                alert.resource = value
-
-        return alert
-
-    def post_receive(self, alert, **kwargs):
-        return
-
-    def status_change(self, alert, status, text, **kwargs):
-        return
-
-    def take_action(self, alert, action, text, **kwargs):
-        raise NotImplementedError
diff --git a/docker-compose/alerta-web/rules.json b/docker-compose/alerta-web/rules.json
deleted file mode 100644
index ca8df8cf7b01a4bd014387e045a2492d35292300..0000000000000000000000000000000000000000
--- a/docker-compose/alerta-web/rules.json
+++ /dev/null
@@ -1 +0,0 @@
-{"test":[{"name":"test2","interval":"10s","rules":[{"expr":"","for":"20s","labels":{"severity":"major"},"annotations":{"__dashboardUid__":"nC8N_kO7k","__panelId__":"9","summary":"My test alert"},"grafana_alert":{"id":3,"orgId":1,"title":"FPGA processing error 2","condition":"B","data":[{"refId":"A","queryType":"","relativeTimeRange":{"from":600,"to":0},"datasourceUid":"ZqArtG97z","model":{"exemplar":false,"expr":"device_attribute{device=\"stat/sdp/1\",name=\"FPGA_error_R\"}","format":"time_series","group":[],"hide":false,"interval":"","intervalMs":1000,"legendFormat":"","maxDataPoints":43200,"metricColumn":"name","rawQuery":true,"rawSql":"SELECT\n  data_time AS \"time\",\n  x::text,\n  device,\n  name,\n  case when value then 1 else 0 end AS value\nFROM lofar_array_boolean\nWHERE\n  $__timeFilter(data_time) AND\n  name = 'fpga_error_r'\nORDER BY 1,2","refId":"A","select":[[{"params":["x"],"type":"column"}],[{"params":["value"],"type":"column"}]],"table":"lofar_array_boolean","timeColumn":"data_time","timeColumnType":"timestamptz","where":[{"name":"$__timeFilter","params":[],"type":"macro"},{"datatype":"text","name":"","params":["name","=","'fpga_error_r'"],"type":"expression"}]}},{"refId":"B","queryType":"","relativeTimeRange":{"from":0,"to":0},"datasourceUid":"-100","model":{"conditions":[{"evaluator":{"params":[0,0],"type":"gt"},"operator":{"type":"and"},"query":{"params":[]},"reducer":{"params":[],"type":"avg"},"type":"query"}],"datasource":{"type":"__expr__","uid":"__expr__"},"expression":"A","hide":false,"intervalMs":1000,"maxDataPoints":43200,"reducer":"last","refId":"B","settings":{"mode":"dropNN"},"type":"reduce"}}],"updated":"2022-04-04T14:18:48Z","intervalSeconds":10,"version":1,"uid":"waXdSCynk","namespace_uid":"9DkbdYy7z","namespace_id":6,"rule_group":"test2","no_data_state":"OK","exec_err_state":"Error"}}]},{"name":"test","interval":"10s","rules":[{"expr":"","for":"20s","labels":{"severity":"major"},"annotations":{"__dashboardUid__":"nC8N_kO7k","__panelId__":"9","summary":"My test alert"},"grafana_alert":{"id":2,"orgId":1,"title":"FPGA processing error","condition":"B","data":[{"refId":"A","queryType":"","relativeTimeRange":{"from":600,"to":0},"datasourceUid":"ZqArtG97z","model":{"exemplar":false,"expr":"device_attribute{device=\"stat/sdp/1\",name=\"FPGA_error_R\"}","format":"time_series","group":[],"hide":false,"interval":"","intervalMs":1000,"legendFormat":"","maxDataPoints":43200,"metricColumn":"name","rawQuery":true,"rawSql":"SELECT\n  data_time AS \"time\",\n  x::text,\n  device,\n  name,\n  case when value then 1 else 0 end AS value\nFROM lofar_array_boolean\nWHERE\n  $__timeFilter(data_time) AND\n  name = 'fpga_error_r'\nORDER BY 
1,2","refId":"A","select":[[{"params":["x"],"type":"column"}],[{"params":["value"],"type":"column"}]],"table":"lofar_array_boolean","timeColumn":"data_time","timeColumnType":"timestamptz","where":[{"name":"$__timeFilter","params":[],"type":"macro"},{"datatype":"text","name":"","params":["name","=","'fpga_error_r'"],"type":"expression"}]}},{"refId":"B","queryType":"","relativeTimeRange":{"from":0,"to":0},"datasourceUid":"-100","model":{"conditions":[{"evaluator":{"params":[0,0],"type":"gt"},"operator":{"type":"and"},"query":{"params":[]},"reducer":{"params":[],"type":"avg"},"type":"query"}],"datasource":{"type":"__expr__","uid":"__expr__"},"expression":"A","hide":false,"intervalMs":1000,"maxDataPoints":43200,"reducer":"last","refId":"B","settings":{"mode":"dropNN"},"type":"reduce"}}],"updated":"2022-04-04T14:16:22Z","intervalSeconds":10,"version":1,"uid":"MIt4Ijs7k","namespace_uid":"9DkbdYy7z","namespace_id":6,"rule_group":"test","no_data_state":"OK","exec_err_state":"Error"}}]}]}
\ No newline at end of file
diff --git a/docker-compose/alerta.yml b/docker-compose/alerta.yml
index 2ae3be42c17e450007914facd2a686c7cce1d63e..f828f1413d034e93b8c855876d647439696c69f3 100644
--- a/docker-compose/alerta.yml
+++ b/docker-compose/alerta.yml
@@ -5,7 +5,7 @@ volumes:
 
 secrets:
   alerta-secrets:
-    file: alerta-web/alerta-secrets.json
+    file: alerta-server/alerta-secrets.json
 
 services:
   alerta-web:
@@ -14,7 +14,21 @@ services:
     networks:
       - control
     ports:
-      - "8081:8080"
+      - 8081:80
+    depends_on:
+      - alerta-server
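+    # write a config.json that points the web UI at the alerta-server API, then start nginx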
+    command: >
+      sh -c 'echo {\"endpoint\": \"http://\${HOSTNAME}:8082/api\"} > /usr/share/nginx/html/config.json &&
+             nginx -g "daemon off;"'
+    restart: always
+
+  alerta-server:
+    build: alerta-server
+    container_name: alerta-server
+    networks:
+      - control
+    ports:
+      - 8082:8080 # NOTE: This exposes an API and a web UI. Ignore the web UI as we replaced it with alerta-web
     depends_on:
       - alerta-db
     secrets:
diff --git a/docker-compose/device-antennafield.yml b/docker-compose/device-antennafield.yml
new file mode 100644
index 0000000000000000000000000000000000000000..aadc41e0853909fe2bad7a3417209ab88537bef6
--- /dev/null
+++ b/docker-compose/device-antennafield.yml
@@ -0,0 +1,47 @@
+#
+# Docker compose file that launches the AntennaField device server.
+#
+# The device server is started through bin/start-ds.sh and exposes its CORBA
+# endpoint on port 5715.
+#
+# Defines:
+#   - device-antennafield: the AntennaField device server
+#
+# Requires:
+#   - lofar-device-base.yml
+#
+version: '2'
+
+services:
+  device-antennafield:
+    image: device-antennafield
+    # build explicitly, as docker-compose does not understand a local image
+    # being shared among services.
+    build:
+        context: ..
+        dockerfile: docker-compose/lofar-device-base/Dockerfile
+        args:
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-itango:${TANGO_ITANGO_VERSION}
+    container_name: ${CONTAINER_NAME_PREFIX}device-antennafield
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
+    networks:
+      - control
+    ports:
+      - "5715:5715" # unique port for this DS
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
+    volumes:
+        - ..:/opt/lofar/tango:rw
+    environment:
+      - TANGO_HOST=${TANGO_HOST}
+    working_dir: /opt/lofar/tango
+    entrypoint:
+      - bin/start-ds.sh
+      # configure CORBA to _listen_ on 0:port, but tell others we're _reachable_ through ${HOSTNAME}:port, since CORBA
+      # can't know about our Docker port forwarding
+      - l2ss-antennafield AntennaField STAT -v -ORBendPoint giop:tcp:0:5715 -ORBendPointPublish giop:tcp:${HOSTNAME}:5715
+    restart: unless-stopped
diff --git a/docker-compose/device-temperature-manager.yml b/docker-compose/device-temperature-manager.yml
index b50b5141260627e9a3fb462a3807f1852acbb1b5..cf1ac02fd36bbf0a7ef63ba05979e8702fad8985 100644
--- a/docker-compose/device-temperature-manager.yml
+++ b/docker-compose/device-temperature-manager.yml
@@ -26,7 +26,7 @@ services:
     networks:
       - control
     ports:
-      - "5717:5717" # unique port for this DS
+      - "5716:5716" # unique port for this DS
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
@@ -38,5 +38,5 @@ services:
       - bin/start-ds.sh
       # configure CORBA to _listen_ on 0:port, but tell others we're _reachable_ through ${HOSTNAME}:port, since CORBA
       # can't know about our Docker port forwarding
-      - l2ss-temperature-manager Temperature_manager STAT -v -ORBendPoint giop:tcp:0:5717 -ORBendPointPublish giop:tcp:${HOSTNAME}:5717
+      - l2ss-temperature-manager TemperatureManager STAT -v -ORBendPoint giop:tcp:0:5716 -ORBendPointPublish giop:tcp:${HOSTNAME}:5716
     restart: unless-stopped
diff --git a/docker-compose/dsconfig/Dockerfile b/docker-compose/dsconfig/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..9783411ff933a880dca5003b8d5bceab703ff54a
--- /dev/null
+++ b/docker-compose/dsconfig/Dockerfile
@@ -0,0 +1,5 @@
+ARG SOURCE_IMAGE
+FROM ${SOURCE_IMAGE}
+
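+# helper scripts: import/export Tango object properties and merge JSON files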
+COPY manage_object_properties.py /
+COPY merge_json.py /
diff --git a/docker-compose/dsconfig/manage_object_properties.py b/docker-compose/dsconfig/manage_object_properties.py
new file mode 100755
index 0000000000000000000000000000000000000000..7c4a75bb7d97293fa9df3b94af81486393350ee8
--- /dev/null
+++ b/docker-compose/dsconfig/manage_object_properties.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python3
+"""
+
+   Import/export the object properties of the Tango Controls Database.
+
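+   The JSON used on stdin/stdout has, for example (illustrative values only),
+   the shape:
+
+       {"objects": {"station": {"name": ["CS001"]}}}
+
+   Example usage:
+
+       manage_object_properties.py -r > objects.json   # export
+       manage_object_properties.py -w < objects.json   # import
+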
+"""
+
+from tango import Database
+
+def read_objects(db = None) -> dict:
+    """ Read and return all object properties. """
+
+    db = db or Database()
+
+    result = {}
+
+    objects = db.get_object_list("*").value_string
+
+    for obj in objects:
+        result[obj] = {}
+        properties = db.get_object_property_list(obj, "*").value_string
+
+        for prop in properties:
+            value = db.get_property(obj, prop)[prop]
+
+            result[obj][prop] = list(value)
+
+    return result
+
+def write_objects(objects: dict, db = None) -> None:
+    """ Write the given object properties. """
+
+    db = db or Database()
+
+    for obj, properties in objects.items():
+        db.put_property(obj, properties)
+
+if __name__ == "__main__":
+    import sys
+    import argparse
+    import json
+
+    parser = argparse.ArgumentParser("Import/export object properties of the Tango Database using the JSON file format")
+    parser.add_argument('-w', '--write', default=False, required=False, action='store_true', help='import objects from stdin')
+    parser.add_argument('-r', '--read', default=False, required=False, action='store_true', help='export all objects to stdout in JSON')
+    args = parser.parse_args()
+
+    if not args.read and not args.write:
+        parser.print_help()
+        sys.exit(1)
+
+    # import
+    if args.write:
+        objects = json.load(sys.stdin)
+        write_objects(objects["objects"])
+
+    # export
+    if args.read:
+        objects = read_objects()
+        print(json.dumps({"objects": objects}, indent=4))
diff --git a/docker-compose/dsconfig/merge_json.py b/docker-compose/dsconfig/merge_json.py
new file mode 100755
index 0000000000000000000000000000000000000000..c0b04d8466273862950f1a7060541d961d937d7d
--- /dev/null
+++ b/docker-compose/dsconfig/merge_json.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+
+""" Merge all JSON files given on the command line at top level. """
+
+import json
+
+if __name__ == "__main__":
+    import argparse
+
+    parser = argparse.ArgumentParser("Merge input JSON files at top level. Keys from later files override those from earlier files.")
+    parser.add_argument('files', metavar='FILE', type=str, nargs='+', help='JSON input files')
+    args = parser.parse_args()
+
+    result = {}
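+    # NB: the merge is shallow: a top-level key from a later file completely
+    # replaces the same key from an earlier file (nested values are not merged)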
+
+    # read all provided files
+    for filename in args.files:
+        with open(filename) as f:
+            file_dict = json.load(f)
+
+            # add them to the result
+            result.update(file_dict)
+
+
+    print(json.dumps(result, indent=4))
diff --git a/docker-compose/grafana/alerting.json b/docker-compose/grafana/alerting.json
index d5193964ae1127c0f76cc60a05dfc8f0dd4e1bf4..bc5c76e7f8870efa52e60e21bf621ae0f1cd8418 100644
--- a/docker-compose/grafana/alerting.json
+++ b/docker-compose/grafana/alerting.json
@@ -15,7 +15,7 @@
             "type": "webhook",
             "disableResolveMessage": false,
             "settings": {
-              "url": "http://alerta-web:8080/api/webhooks/prometheus?api-key=demo-key"
+              "url": "http://alerta-server:8080/api/webhooks/prometheus?api-key=demo-key"
             },
             "secureFields": {}
           }
diff --git a/docker-compose/grafana/dashboards/svg.json b/docker-compose/grafana/dashboards/svg.json
new file mode 100755
index 0000000000000000000000000000000000000000..8c762495cf5a5124fed3ed5ab3da141c5cdfd390
--- /dev/null
+++ b/docker-compose/grafana/dashboards/svg.json
@@ -0,0 +1,165 @@
+{
+  "annotations": {
+    "list": [
+      {
+        "builtIn": 1,
+        "datasource": {
+          "type": "grafana",
+          "uid": "-- Grafana --"
+        },
+        "enable": true,
+        "hide": true,
+        "iconColor": "rgba(0, 211, 255, 1)",
+        "name": "Annotations & Alerts",
+        "target": {
+          "limit": 100,
+          "matchAny": false,
+          "tags": [],
+          "type": "dashboard"
+        },
+        "type": "dashboard"
+      }
+    ]
+  },
+  "editable": true,
+  "fiscalYearStartMonth": 0,
+  "graphTooltip": 0,
+  "id": 6,
+  "links": [],
+  "liveNow": false,
+  "panels": [
+    {
+      "datasource": {
+        "type": "datasource",
+        "uid": "-- Mixed --"
+      },
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "light-green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 1
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 21,
+        "w": 12,
+        "x": 0,
+        "y": 0
+      },
+      "id": 4,
+      "options": {
+        "addAllIDs": false,
+        "captureMappings": false,
+        "eventAutoComplete": false,
+        "eventSource": "// find the right data series\nlet series = data.series.find(\n  x => x.refId == \"B\"\n    && x.fields[1].labels.device == \"total\"\n)\n\n// use the last value\nlet buffer = series.fields[1].values.buffer\nlet lastValue = buffer[buffer.length-1]\n\n// colour Drenthe accordingly\nsvgmap.Drenthe.css('fill', lastValue > 1 ? '#f00' : '#0f0')\nsvgmap.Drenthe.linkTo(function(link) {\n  link.to('http://www.drenthe.nl').target('_blank')\n})\n\n// lookup an alert\nalert = get_alert(data, \"test\")\n\n// colour Groningen accordingly\nsvgmap.Groningen.css('fill', alert.colour)\nif (alert.href) {\n  svgmap.Groningen.linkTo(function(link) {\n    link.to(alert.href).target('_blank')\n  })\n}\n\nconsole.log(\"refreshed\")",
+        "initAutoComplete": false,
+        "initSource": "// Lookup an alert in Grafana\r\nget_grafana_alert = (data, name) => {\r\n  series = data.series.find(\r\n    x => x.refId == \"GrafanaAlerts\"\r\n  )\r\n\r\n  return series.meta.custom.data.find(\r\n    x => x.labels.alertname == name\r\n  )\r\n}\r\n\r\n// Lookup an alert in Alerta\r\nget_alerta_alert = (data, name) => {\r\n  series = data.series.find(\r\n    x => x.refId == \"AlertaAlerts\"\r\n  )\r\n\r\n  return series.meta.custom.data.alerts.find(\r\n    x => x.event == name\r\n  )\r\n}\r\n\r\n// Return everything about an alert\r\nget_alert = (data, name) => {\r\n  let grafana_alert = get_grafana_alert(data, name)\r\n  let alerta_alert = get_alerta_alert(data, name)\r\n\r\n  if (alerta_alert) {\r\n    href = alerta_alert.href\r\n\r\n    if (grafana_alert)\r\n      colour = 'red'\r\n    else\r\n      colour = 'orange'\r\n  } else if (grafana_alert) {\r\n    // firing\r\n    colour = 'red'\r\n\r\n    href = '/alerting/grafana/'+ grafana_alert.labels.__alert_rule_uid__ +'/view'\r\n  } else {\r\n    colour = 'green'\r\n    href = undefined\r\n  }\r\n\r\n  return {\r\n    name: name,\r\n    alerta_alert: alerta_alert,\r\n    grafana_alert: grafana_alert,\r\n    colour: colour,\r\n    href: href\r\n  }\r\n}",
+        "svgMappings": [
+          {
+            "mappedName": "Drenthe",
+            "svgId": "NL-DR"
+          },
+          {
+            "mappedName": "Groningen",
+            "svgId": "NL-GR"
+          },
+          {
+            "mappedName": "Friesland",
+            "svgId": "NL-FR"
+          }
+        ],
+        "svgSource": "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<!-- (c) ammap.com | SVG map of Netherlands - Low -->\n<svg xmlns=\"http://www.w3.org/2000/svg\" xmlns:amcharts=\"http://amcharts.com/ammap\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" version=\"1.1\">\n\t<defs>\n\t\t<style type=\"text/css\">\n\t\t\t.land\n\t\t\t{\n\t\t\t\tfill: #CCCCCC;\n\t\t\t\tfill-opacity: 1;\n\t\t\t\tstroke:white;\n\t\t\t\tstroke-opacity: 1;\n\t\t\t\tstroke-width:0.5;\n\t\t\t}\n\t\t</style>\n\n\t\t<amcharts:ammap projection=\"mercator\" leftLongitude=\"3.359304\" topLatitude=\"53.560347\" rightLongitude=\"7.227792\" bottomLatitude=\"50.750938\"></amcharts:ammap>\n\n\t\t<!-- All areas are listed in the line below. You can use this list in your script. -->\n\t\t<!--{id:\"NL-DR\"},{id:\"NL-FL\"},{id:\"NL-FR\"},{id:\"NL-GE\"},{id:\"NL-GR\"},{id:\"NL-LI\"},{id:\"NL-NB\"},{id:\"NL-NH\"},{id:\"NL-OV\"},{id:\"NL-UT\"},{id:\"NL-ZE\"},{id:\"NL-ZH\"}-->\n\n\t</defs>\n\t<g>\n\t\t<path id=\"NL-DR\" title=\"Drenthe\" class=\"land\" d=\"M467.85,123.65L472.18,125.25L474.19,119.96L475.36,119.6L475.73,116.23L478.42,108.02L481.8,104.33L482.29,101.78L487.32,99.04L488.72,97.7L488.81,96.77L494.16,95.38L496.91,95.06L502.31,96.99L503.31,99.57L503.91,99.59L506.25,103.68L507.39,106.86L509.78,106.05L510.96,110.15L512.28,110.15L515.82,113.26L516.88,116.18L518.62,118.31L518.66,120.12L527.91,116.41L534.48,116.76L535.73,117.6L565.51,149.53L578.22,168.14L579.75,169.47L583.09,170.36L578.6,181.27L582.36,181.67L582.85,183.52L583.44,183.47L585.78,186.9L586.81,187.94L588.06,187.81L587.52,190.53L590.7,190.37L590.7,190.37L587.39,197.87L584.74,241.37L582.54,243.97L577.84,242.8L576.62,241.63L574.7,241.76L572,240.38L568.61,242.46L565.94,242.72L562.99,241.84L562.54,240.25L559.61,239.24L557.11,239.5L555.98,238.56L554.29,239.6L552.49,239.78L550.14,238.9L547.87,240.04L541.98,238.74L539.89,239.47L539.37,240.3L537.93,239.78L535.33,240.56L530.46,245.61L530.46,245.61L528.64,245.5L530.16,239.76L515.13,233.3L505.03,235.57L499,240.2L499.94,249.98L497.72,247.9L493.23,247.58L491.72,245.89L490.6,248.36L487.93,248.73L486.45,247.45L485.9,245.43L483.94,247.9L482.51,247.17L482.01,248.08L480.5,248.03L478.68,249.46L477.34,247.9L476.94,245.19L475.16,241.37L469.32,237.15L469.7,235.46L469.16,234.53L465.04,235.59L463.94,236.5L462.51,234.92L460.8,235.8L459.46,234.55L458.33,234.47L456.56,232.36L453.87,231.64L452.28,230.15L450.17,230.65L449.32,231.82L446.91,231.14L445.69,231.64L442.69,228.07L436.89,213.11L437.95,211.62L444.66,209.2L444.85,207.94L450.01,202.23L439.24,189.9L436.89,186.27L436.89,186.27L450.2,176.85L454.41,170.99L458.3,167.25L465.8,167.8L470.77,172.9L480.14,166.2L485.66,155.75L475.31,139.67L476,130.74L466.56,127.17z\"/>\n\t\t<path id=\"NL-FL\" title=\"Flevoland\" class=\"land\" 
d=\"M327.09,271.35l-0.68,-0.75l2.77,-7.32l3.85,-2.49l0.3,-1.12l-1.82,-5.61l-8.78,-14.14l0,0l0.36,-0.26l0,0l8.85,14.14l1.87,6.05l-0.4,1.27l-3.91,2.44l-2.1,5.11L327.09,271.35zM372.13,190.84l1.58,0.31l1.25,1.23l1.08,-0.44l-0.98,-2.95l3.17,0.97l5.73,5.8l0.98,3.79l0.59,-0.34l0,0l3.58,4.88h2.9l3.59,1.25l10.22,8.35l3.29,9.31l1.11,0.57l2.06,3.75l-0.38,1.59l-2.88,2.16l-1.79,2.03l0.06,0.89l5.81,1.59l2.52,2.21l2.58,0.7l2.28,2.52l-0.79,1.35l-3.56,2.37l-5.24,1.14l-3.88,2.63l-2.34,0.29l-2.06,-1.35l-9.32,2.05l-1.25,1.04l-9.38,-0.03l-0.06,6.86l5.19,0.21l6.74,7.19l1.74,8.62l0,0l-3.47,12l-1.85,0.16l-2.21,4.22l0.85,1.61l-2.07,3.55l-3.08,3.03l-7.85,6.37l-1.35,-0.49l0.17,0.85l-2.33,2.12l-3.07,1.99l-2.96,0.41l-4.32,3.7l-1.33,-0.98l-5,2.74l-1.27,-0.98l-2.69,0.88l-1.85,2.87l-6.88,19.91l-19.92,5.93l0,0l-11.11,-10.58l0,0l-3.81,-3.77l-4.65,-3.07l-9.73,-2.45l-11,1.88l-2.23,-2.07l0,0l1.03,-1.68l-2.12,-1.4l0.66,-1.24l-0.27,-5.01l-1.63,-3.1l0.03,-1.53l5.41,-3.26l0.81,-1.86l7.18,-4.24l2.59,-0.44l1.46,-2.04l23.64,-16.37l2.58,-0.8l6.12,-5.5l0.3,-0.99l-0.6,-2.1l-0.98,0.21l0.14,-1.38l0.74,0.23l1.66,-1.3l2.44,-4.62l5.48,-0.57l5.36,-5.12l-0.48,-0.96l0.55,-0.55l0.52,0.86l4.48,-4.6l10.59,-3.07l3.51,-2.89l-5,-8.06l-2.83,-2.08l0.52,-0.44l-0.74,-0.96l-1,0.18l0.27,-26.71l10.57,-17.2L372.13,190.84z\"/>\n\t\t<path id=\"NL-FR\" title=\"Friesland\" class=\"land\" d=\"M317.98,128.19l2.55,1.68l1.38,3.55l-3.05,-0.21l-1.38,-2.44l-0.35,-2.34L317.98,128.19zM312.42,128.35l0.06,1.1l-5.74,1.26l-12.99,11.45l-0.08,0.68l-1.66,0.79l1.41,-0.03l-1.58,0.21l-5.62,4.88l0,0l-0.65,-0.87l0,0l5.51,-4.83l-0.09,-0.58l0.73,-0.39l-0.49,0.84l14.79,-13.05l4.78,-1.66L312.42,128.35zM270.93,66.85l4.19,0.79l0.41,1.32l-1.22,1.14l-0.71,-0.32l0.3,0.55l-1.91,-0.32l-2.36,0.66l-3.17,2.59l-11.3,4.67l0.74,3.11l-1.47,0.87l0.05,2.56l-3.86,1.58l-2.15,2.32l-4.45,0.53l-2.9,1.58l-1.31,-0.37l-0.76,-2.24l8.96,-5.67l7.12,-7.1l9.3,-6.34L270.93,66.85zM282.06,71.45l-2.01,1.93l-1.96,-0.05l0.92,-0.98l-1.09,-0.95l4.04,-4.12l1.49,-0.4l0.36,1.74L282.06,71.45zM448.11,39.72l-3.69,5.38l0.93,5.74l1.68,1.8l5.9,1.8l1.72,3.41l2.96,-1.43l5.52,1.9l-0.84,7.32l1.96,1.22l-2.31,1.58l-1.23,3.38l-1.47,-0.19l-1.17,0.69l0.17,5.52l-0.82,0.4l-0.71,-0.5l-1.28,2.74l-0.96,0.37l0.46,0.66l-0.81,0.98l0.16,2.03l-2.74,0.45l2.67,4.88L454,91.03l-1.49,2.79l-1.6,1.74l-1.42,0.42l-0.22,1.4l-3.32,5.58l-0.7,9.42l4.83,5.29l5.21,0.79l4.37,-0.18l2.31,1.24l2.09,2.6l3.8,1.52l0,0l-1.28,3.52l9.43,3.57l-0.68,8.93l10.35,16.08l-5.52,10.45l-9.37,6.7l-4.97,-5.1l-7.5,-0.55l-3.89,3.74l-4.21,5.86l-13.31,9.41l0,0l-6.11,4.02l-2.77,-0.1l-1.79,0.73l0.21,3.19l-3.73,2.01l-0.71,-1.44l-4.13,1.12l-2.09,-0.99l-2.15,-5.38l-2.83,0.94l-0.02,1.38l-1.47,-0.6l-2.41,0.52l-0.71,0.86l0.02,2.04l-4.21,3.97l-3.31,1.57l-5.87,-1.57l-1,0.57l-0.35,-1.07l-2.17,-1.49l-3.78,2.64l0,0l-0.59,0.34l-0.98,-3.79l-5.73,-5.8l-3.17,-0.97l0.98,2.95l-1.08,0.44l-1.25,-1.23l-1.58,-0.31l0,0l0.11,-1.12l-1.46,0.57l-0.7,-1.49l-3.99,-0.21l0.11,-0.99l-0.92,1.7l-1.68,-0.29l-0.73,-0.84l0.3,-2.09l-1.77,-1.31l-4.23,2.46l-2.21,-0.31l-2.83,0.65l-1.06,2.77l-4.15,1.57l-14.86,-5.7l-5.43,1.65l-4.35,-2.2l-4.46,-4.1l-2.5,-3.64l2.79,-3.4l4.53,-2.12l0.87,-1.23l0.16,-2.67l-0.84,-4.14l-0.87,-1.05l1.79,-0.86l-0.6,-0.42l1.08,0.03l0.43,-1.96l-0.55,-1.39l-0.76,0.03l1.17,-0.29l0.16,-1.21l-1.72,-5.4l-0.57,-4.43l1.38,-2.33l0.05,-2.7l-1.39,-0.39l-0.25,-1.44l0.73,-0.29l0.11,-1.1l0.43,0.31l-0.21,-3.94l-0.55,3.81l-0.6,0.42L321,137.2l-1.55,-1.71l-0.25,-1.55l1.22,-0.21l0.82,0.71v-0.68l0.67,-0.05l0.49,0.79l0.73,-0.1l-1.77,-3.44l-0.3,-3.13l-1.71,-3.26l-5.93,4.89l-0.92,-1.52l2.07,-0.74l4.42,-4.28l0.51,-3.44l2.56,-1.55l1.3
5,-1.76l1.83,-11.34l-0.05,-1.11l-1.06,-1.05l1.9,-1.76l2.6,-7.96l1.61,-2.58l5.49,-5.64l7.14,-4.46l4.42,-4.54l0.96,-2.69l4.4,-4.44l4.46,-2.3l11.87,-3.73l11.6,-7.03l16.19,-7.46l3.31,-1.91l-1.6,-2.7l0.44,0.61l0.35,-0.37l0.98,2.28l4.56,-0.03l8.61,-2.97l0.43,0.45l2.36,-0.42l4.21,-1.35l8.36,-1.09l2.55,0.69l0.28,-0.69l0.32,0.69l0.67,-1.03l6.95,1.46l6.28,-2.97l1.06,-0.08L448.11,39.72zM342.85,30.28l-2.01,0.48l5.13,-0.19l-0.11,2.54l-3.18,1.62l-2.07,-0.13l-4.62,2.86l-2.31,-0.98l0.29,2.25l-3.67,-1.46l2.03,1.93l2.45,0.93l-1.2,0.16l-0.49,0.95l-10.11,0.26l-2.47,0.64l-6.12,4.87l-3.29,1.51l-1.87,-0.69l-0.38,0.74L307,49.06l-0.6,1.27l-5.85,-0.58l-2.47,2.17l-0.74,-0.56l-2.61,0.32l-0.66,0.9l0.09,1.9l-1.8,-0.4l-3.45,3.31l-1.87,-0.45l-2.66,-1.93l-0.44,-3.28l3.4,-3.33l2.18,-3.89l0.28,0.77l0.66,-2.22l19.88,-3.89l1.25,0.27l15.4,-5.01l13.39,-5.46l3.12,-0.58l2.71,0.53l-0.33,0.95L342.85,30.28zM427.56,26.92l0.27,2.12l-0.4,-0.69l-2.17,1.86l-0.87,-0.42l-0.33,-1.48l0.92,-1.35L427.56,26.92zM374.01,26.86l19.15,-1.56l19.37,0.05l1.74,0.85l-0.9,0.95l-1.6,0.34l-8.45,-0.21l-2.96,1.64l-13.34,3.66l-4.64,-0.03l-0.41,1.27l-0.02,-1.7l-7.67,-0.82l-3.31,3.31l-5.71,1.54l-0.49,1.25l-5.44,-2.83l-2.59,-3.6l0.63,-4.93l2.39,-2.09l10.44,2.63L374.01,26.86zM441.64,13.81l1.42,0.27l1.55,1.46l4.51,0.48l18.02,-2.2l5.78,-0.24l0.29,0.45l-2.77,1.96l-8.67,3l-1.06,1.33l-3.96,1.86l-7.82,1.14l0.74,1.25l-1.12,-1.11h-4.23l-0.32,1.06l-0.16,-1.01l-1.08,0.21l-0.71,0.88l0.41,0.56l-1.77,0.16l0.57,2.02l-3.21,-0.27l-1.65,1.59l-0.68,-0.48l0.66,-3.34l1.66,-4.03l1.42,-2.07l3.88,-3.08l-6.19,-1.75l-1.17,2.97l-1.03,-0.16l0.11,-0.96l1.09,-2.52L441.64,13.81z\"/>\n\t\t<path id=\"NL-GE\" title=\"Gelderland\" class=\"land\" d=\"M396.24,273.37L398.39,272.57L400.85,274.75L404.06,279.26L406.21,284.37L407.41,283.82L408.03,284.39L410.97,282.66L412.34,284.63L419.23,277.5L420.94,276.9L422.05,275.58L426.69,278.59L428.51,282.27L432.16,284.37L433.51,286L433.97,292.53L438.18,297.73L439.17,301.28L437.2,309.24L433.89,310.82L430.75,311.42L430.12,312.29L430.38,313.82L432.72,315.37L429.85,317.26L429.84,322.4L428.93,325.52L430.5,325.73L431.13,329.73L433.84,330.09L434.17,330.69L435.44,333.29L435.57,335.79L438.41,340.39L437.5,342.71L440.19,343.76L442.15,343.58L441.96,345L440.52,346.57L440.95,348.22L443.37,349.33L444.09,351.21L444.41,350.44L443.54,348.92L444.16,349.18L444.32,348.02L445.04,348.79L445.63,346.26L449.76,346.29L452.07,348.02L457.12,349.07L460.74,349.33L460.82,348.58L461.86,348.94L464.06,347.94L465.27,349.33L468.54,349.36L471.2,348.94L471.29,346.75L473.29,345.26L478.04,344.33L483.56,344.95L483.51,346.24L485.27,349.67L488.46,353.01L489.76,355.64L492.06,356.36L493,359.74L495.58,361.69L499.14,360.79L505.43,361.82L511.23,360.4L513.75,362.62L513.35,364.09L514.68,365.32L517.33,362.95L518.94,364.29L519.72,362.93L520.38,362.98L523.64,364.7L524.37,365.96L523.83,367.33L524.24,371.34L522.56,374.04L531.05,377.02L538.18,376.97L538.18,376.97L537.5,382.29L536.95,383.16L536.41,382.65L535.49,383.52L536.41,385.63L534.45,388.07L527.72,389.2L527.98,390.64L526.38,393.95L526.6,397.23L530.92,397.23L537.07,400.18L539.13,402.6L545.59,407.44L545.89,408.72L548.75,409.16L549.32,415.75L548.86,416.56L546.76,416.64L544.32,417.97L543.91,423.43L540.1,428.6L537.2,429.8L536.85,431.03L534.51,431.85L533.75,433.25L532.26,433.97L529.64,431.9L527.75,429.03L525.43,428.78L519.13,431.77L518.12,432.82L516.77,432.59L509.45,435.04L506.38,437.47L505.22,436.55L504.02,437.04L497.99,441.1L496.83,443.23L492.26,444.89L489.11,441.84L486.31,443.51L485.52,441.67L479.98,439.52L479.25,442.43L482.93,445.09L481.72,446.78L482.42,449.23L481.74,4
49.25L482.26,451.02L481.61,451.6L478.95,449.84L475.49,449.31L475.11,447.11L473.97,446.09L466.33,445.75L465.3,441.08L462.26,439.62L461.06,439.44L458.4,441.03L451.48,440.92L447.31,437.5L447.2,436.48L448.41,436.25L448.07,435.25L444.68,432.67L444.47,433.15L442.59,431.97L438.98,432.59L437.91,433.41L436.74,432.49L434.62,434.92L437.07,434.97L439.85,436.68L440.82,440.52L443.9,443L444.19,448.05L438.43,446.32L433.67,445.96L428.08,441.97L427.21,443.58L426.18,444.02L426.4,445.42L423.19,447.7L422.67,447.16L420.8,447.85L419.99,449.46L418.13,450.4L415.83,450.66L411.83,449.18L409.27,452.35L411.37,453.98L410.86,454.64L409.6,454.34L409.41,455.18L412.1,456.63L414.31,458.93L413.57,461.2L414.08,462.25L416.43,462.81L415.39,463.86L415.22,465.21L416.31,466.46L416.23,467.2L410.86,470.95L410.52,472.65L410.52,472.65L407.59,473.44L406.24,471.71L404.44,471.15L403.92,468.29L400.93,464.47L396.65,464.72L397.45,468.45L396.56,469.39L396.56,469.39L394.19,468.96L383.22,470.74L379.82,470.49L377.59,469.06L375.31,465.62L371.56,464.7L368.47,460.87L365.51,460.36L363.82,459.29L360.75,453.75L357.96,453.57L352.4,450.99L347.68,451.58L345.8,454.13L344.78,454.44L341.49,453.62L339.06,451.2L337.49,450.69L335.95,451.35L334.47,454.87L331.62,455.97L328.39,455.92L326.81,455.26L324.85,452.86L323.55,452.88L317.65,461.79L315.91,469.47L306.93,474.54L302.31,473.67L298.96,475.41L294.12,472.91L290.4,473.72L285.21,473.04L279.62,474.46L279.56,472.6L282.09,470.03L281.06,468.73L281.49,465.62L277.88,463.86L275.49,461.79L273.61,462.04L272.07,463.68L270.38,463.6L267.15,458.96L262.91,457.02L259.91,453.21L259.91,453.21L262.28,453.26L263.31,448.28L259.91,447.82L258.96,446.24L259.3,444.83L258.72,442.97L259.13,442.54L260.93,444.12L264.94,444.02L266.47,444.63L269.37,442.77L268.28,442.1L268.13,440.36L272.45,439.08L274.12,435.91L277.79,436.35L278.44,433.48L280.63,429.42L281.38,429.77L281.28,428.16L288.21,415.77L288.21,415.77L290.38,417.18L293.26,417.41L296.46,412.88L297.66,412.57L302.52,416.18L307.09,417.05L309.54,418.69L310.89,418.77L312.82,417.79L315.64,414.9L320.79,415.08L325.76,413.18L328.88,410.85L333.2,411.7L336.9,411.62L340.1,413.88L345.21,415.77L347.49,418.49L352.82,420.07L355.5,421.87L357.09,420.46L358,420.97L357.93,419.66L358.87,419.43L358.16,415.54L358.74,413.82L357.14,411.93L356.11,408.77L353.85,407.39L352.79,404.62L350.37,402.8L351.24,402.78L351.45,398.29L348.44,397.16L348.65,395.39L347.44,394.87L348.68,387.33L348.42,385.32L347.46,384.68L347.33,382.83L346.73,382.42L346.76,380.23L345.43,380.95L345.02,380.47L343.61,380.83L344.94,384.27L344.83,384.78L343.55,384.4L343.04,385.3L344.09,385.73L342.57,387.35L339.85,387.17L339.44,388.56L336.24,389.05L332.49,386.45L334.2,384.27L336.48,382.83L336.27,380.16L337.73,378.33L336.62,377.61L339.89,377.95L339.26,376.35L337.76,375.2L338.34,373.83L340.07,373.58L340.84,372.09L338.71,371.03L338.9,369.15L335.99,368.54L336.21,366.81L334.75,365.29L326.19,363.57L325.54,360.58L328.06,359.86L328.42,356.36L321.95,351.08L323.77,343.25L323.77,343.25L343.69,337.32L350.58,317.41L352.43,314.54L355.12,313.66L356.38,314.65L361.38,311.91L362.71,312.89L367.03,309.19L369.99,308.78L373.06,306.79L375.39,304.67L375.21,303.81L376.56,304.3L384.41,297.94L387.49,294.91L389.57,291.36L388.71,289.75L390.93,285.53L392.78,285.38z\"/>\n\t\t<path id=\"NL-GR\" title=\"Groningen\" class=\"land\" 
d=\"M542.88,27.47l1.39,0.34l2.55,-2.07l2.75,1.11l0.33,0.93l-0.4,1.3l-2.75,-0.56l-0.11,0.5l2.47,0.61l-0.17,0.72l-1.44,0.19l4.35,0.69l0.11,-0.53l-1.54,-0.53l0.67,-2.54l3.1,0.98l3.5,3.34l-1.33,4.77l-0.17,3.71l2.29,3.31l-0.43,0.69l0.78,0.71l-0.21,2.78l1.74,7.91l5.67,4.1l-0.93,1.85l10.13,3.81l2.22,1.66l6.06,1.69l4.68,-0.11l0.66,-0.95l2.82,-1l0.03,0.69l-2.33,2.14l-0.05,4.23l-1.27,3.51l1.19,2.56l2.45,1.66l5.47,0.4l1.61,1.19l1.87,0.5l1.09,-0.42l1.47,1.11l5.41,1.66l0.55,-2.03l1.34,11.63l-1.49,2.61l2.96,2.74l-3.78,1.16l-2.15,4.69l-0.06,2.53l-1.55,2.21l0.63,4.39l2.85,2.1l-0.33,8.75l2.06,17.36l0.81,2.07l-1.28,1.6l-4.16,15.26l-12.14,20.13l-2.63,3.37l-0.35,0.76l0.79,2.95l0,0l-3.18,0.16l0.54,-2.72l-1.25,0.13l-1.03,-1.05l-2.34,-3.42l-0.59,0.05l-0.49,-1.86l-3.77,-0.39l4.49,-10.91l-3.34,-0.89l-1.54,-1.33l-12.71,-18.6l-29.78,-31.94l-1.25,-0.84l-6.57,-0.34l-9.26,3.71l-0.03,-1.81l-1.74,-2.13l-1.06,-2.92l-3.54,-3.1h-1.31l-1.19,-4.11l-2.39,0.82l-1.14,-3.19l-2.34,-4.08l-0.6,-0.03l-1,-2.58l-5.4,-1.92l-2.75,0.32l-5.35,1.4l-0.09,0.92l-1.39,1.34l-5.03,2.74l-0.49,2.55l-3.39,3.69l-2.69,8.21l-0.36,3.37l-1.17,0.37l-2.01,5.28l-4.34,-1.6l0,0l-3.8,-1.52l-2.09,-2.6l-2.31,-1.24l-4.37,0.18l-5.21,-0.79l-4.83,-5.29l0.7,-9.42l3.32,-5.58l0.22,-1.4l1.42,-0.42l1.6,-1.74l1.49,-2.79l0.22,-1.19l-2.67,-4.88l2.74,-0.45l-0.16,-2.03l0.81,-0.98l-0.46,-0.66l0.96,-0.37l1.28,-2.74l0.71,0.5l0.82,-0.4l-0.17,-5.52l1.17,-0.69l1.47,0.19l1.23,-3.38l2.31,-1.58l-1.96,-1.22l0.84,-7.32l-5.52,-1.9l-2.96,1.43l-1.72,-3.41l-5.9,-1.8l-1.68,-1.8l-0.93,-5.74l3.69,-5.38l0,0l1.39,-0.4l0.05,1.01l0.63,0.03l7.55,-1.88l6.52,3.31l2.36,2.52l4.1,-3.04l9.29,-3.92l26.32,-3.36l21.9,-8.03l8.85,-0.88L542.88,27.47zM482.21,10.71l0.74,0.13l-0.08,0.66l-4.07,-0.61l1.17,-0.53L482.21,10.71zM510.46,9.17l1.11,0.19l-0.66,1.25l-2.55,-1.35l1.2,-0.56L510.46,9.17zM515.16,4.65l-0.11,0.53l-2.23,-0.29l0.79,1.94l-3.24,-1.54l-2.56,0.24l-0.38,-0.5l1,-1.54l1.01,-0.29l3.42,0.37L515.16,4.65zM495.68,4.01l2.56,0.58l2.67,2.63l-3.21,-0.48l-1.01,-0.98l-0.33,2.07L494,7.57l-0.85,0.58l0.49,1.27l-0.78,0.53l-1.39,-0.4l-0.71,-0.58l-0.35,-2.1l-2.52,-2.31l0.78,-4.46l0.6,0.08l0.68,2.31L495.68,4.01z\"/>\n\t\t<path id=\"NL-LI\" title=\"Limburg\" class=\"land\" 
d=\"M396.56,469.39L397.45,468.45L396.65,464.72L400.93,464.47L403.92,468.29L404.44,471.15L406.24,471.71L407.59,473.44L410.52,472.65L410.52,472.65L410.8,474.08L416.83,474.23L421.99,476.99L423.41,479.13L424.84,479.64L424.63,480.78L423.02,480.78L422.04,482L423.05,485.21L422.34,487.02L422.99,490.56L430.36,493.92L431.31,493.59L431.67,494.3L436.31,495.11L434.52,501.32L433.32,504.19L432.72,503.91L432.29,507.93L437.04,511.64L438.03,513.54L442.31,517.68L446.04,525.1L449.43,527.76L451.69,531.72L451.66,537.01L453.27,541.96L452.58,548.31L451.75,548.67L451.69,553.25L450.5,560.21L453.64,560.06L451.77,563.55L453.64,570L448.02,575.18L448.41,576.67L444.58,577.5L442.29,583.74L438.35,589.19L437.35,592.12L431.34,598.75L429.24,600.04L431.48,604.82L428.63,605.86L429.63,615.08L431.04,617.82L433.73,618.22L437.34,615.4L444.11,612.13L446.74,614.17L440.04,617.49L445.61,621.29L443.67,622.63L443.59,623.68L438.18,624.49L434.79,626.7L432.38,627.13L431.5,629.82L430.68,629.34L429.46,631.15L426.99,632.18L426.78,633.46L424.82,635.73L422.99,637.13L421.23,637.66L420.53,637.23L419.58,637.94L417.08,640.33L416.13,642.23L414.66,642.76L414.92,643.57L413.21,645.65L412.89,649.72L411.12,650.54L411.42,651.95L407.89,652.1L404.23,644.27L400.71,647.53L396.53,648.11L396.81,649.67L398.7,651.98L397.81,653.73L398.82,656.29L401.43,658.25L401.39,659.73L402.86,660.38L402.64,663.94L401.01,665.75L401.75,667.2L402.89,667.27L407.49,664.62L410.61,663.92L412.73,666.15L415.03,665.17L421.89,664.94L420.31,670.06L418.74,671.61L420.56,672.74L421.02,677.35L425.2,678.85L426.05,678.55L426.74,679.2L426.56,680.43L429.79,680.2L432.04,681.6L432.97,680.58L430.93,683.63L429.85,687.73L432.05,692.66L430.15,695.94L429.68,699.31L427.1,698.01L426.78,696.96L420.96,699.49L420.85,702.96L422.46,705.96L421.62,706.41L421.89,707.18L419.2,710.2L418.44,710.4L415.48,708.53L413.92,710.35L413.79,711.85L415.82,712.45L417.7,714.82L422.1,717.52L420.53,720.44L421.21,722.06L414.93,722.48L413.57,720.56L411.88,720.16L404.83,723.11L404.39,722.01L403.03,722.16L402.33,723.01L400.4,721.61L399.82,718.52L396.05,720.46L394.39,722.18L393.43,719.69L391.34,721.04L387.67,721.81L385.09,718.17L383.71,718.74L382.37,715.2L380.78,715.47L376.7,720.76L374.9,720.76L373.57,719.84L372.73,720.89L370.07,722.03L367.71,721.39L370.61,715.32L369.17,711.85L370.21,709.38L369.31,707.96L367.19,707.78L366.94,706.78L363.46,705.86L363.33,704.71L362.32,704.13L362.62,702.81L360.88,699.44L361.08,694.91L362.22,693.94L361.62,693.19L362.03,692.64L365.61,690.74L366.94,690.74L367.51,689.06L369.45,687.01L370.29,683.4L371.49,683.15L373.14,684.1L373.96,683.68L374.91,679.52L376.81,677.12L377.7,674.32L379.42,673.46L378.43,671.03L377.32,670.83L374.85,672.24L373.77,671.66L373.63,670.38L380.8,661.48L380.81,658.45L382.37,655.11L379.5,652.43L381.62,648.14L381.99,645.27L384.77,646.28L386.12,645.57L385.59,642.74L386.91,640.98L385.44,638.01L387.57,636.81L389.9,637.74L391.15,636.38L391.37,634.95L390.72,633.99L387.59,633.24L387.62,631.35L389.87,628.59L392.51,628.06L393.24,625.77L394.79,624.69L391.84,622.4L392.05,621.52L389.99,618.7L388.39,620.89L386.86,620.03L382.59,622.75L382.21,621.82L382.84,620.01L381.59,619.48L381.42,618.73L382.59,617.74L381.81,617.57L382.21,616.21L380.74,614.7L377.38,613.49L376.61,614.37L373.14,614.55L371.91,615.5L368.73,614.4L366.78,614.97L365.96,614.22L363.84,614.34L362.84,612.73L362.81,611.15L348.91,605.28L348.91,605.28L353.53,603.87L353.5,604.42L357.52,602.4L358.49,591.89L365.53,581.87L397.67,571.84L406.96,563.88L397.37,547.53L395.51,536.96L394.3,533.87L391.97,517.81L400.37,519.46L403.03,521.57L408.24,521.03L418.65,517.
18L422.54,521.16L423.46,520.27L425.09,520.12L423.95,517.94L423.93,513.67L421.8,510.06L421.17,504.24L417.19,500.35L413.25,497.71L413,496.44L414.23,492.85L410.93,491.81L412.11,489.01L410.67,481.55L407.78,480.02L403.4,479.43L400.75,478.36L399.65,476.96L398.58,471.3z\"/>\n\t\t<path id=\"NL-NB\" title=\"Noord-Brabant\" class=\"land\" d=\"M259.91,453.21l3.01,3.8l4.24,1.94l3.23,4.64l1.69,0.08l1.54,-1.63l1.88,-0.25l2.39,2.07l3.61,1.76l-0.43,3.11l1.03,1.3l-2.53,2.57l0.06,1.86l5.59,-1.43l5.19,0.69l3.72,-0.82l4.84,2.5l3.35,-1.73l4.62,0.87l8.97,-5.07l1.74,-7.68l5.9,-8.91l1.3,-0.03l1.96,2.4l1.58,0.66l3.23,0.05l2.85,-1.1l1.49,-3.52l1.54,-0.66l1.57,0.51l2.44,2.43l3.29,0.82l1.01,-0.31l1.88,-2.55l4.72,-0.59l5.57,2.58l2.79,0.18l3.07,5.54l1.69,1.07l2.96,0.51l3.08,3.83l3.75,0.92l2.28,3.44l2.23,1.43l3.4,0.26l10.97,-1.79l2.37,0.43l0,0l2.03,1.91l1.06,5.66l1.11,1.4l2.64,1.07l4.38,0.59l2.9,1.53l1.44,7.46l-1.19,2.8l3.31,1.04l-1.23,3.59l0.25,1.27l3.94,2.64l3.97,3.89l0.63,5.82l2.14,3.61l0.02,4.27l1.14,2.18l-1.63,0.15l-0.92,0.89l-3.89,-3.99l-10.41,3.86l-5.21,0.53l-2.66,-2.11l-8.4,-1.65l2.33,16.06l1.22,3.09l1.85,10.57l9.59,16.35l-9.29,7.96l-32.14,10.03l-7.04,10.02l-0.96,10.52l-4.02,2.02l0.03,-0.55l-4.62,1.41l0,0l-0.6,-0.53l-0.71,-4.92l0.14,-5.77l-4.62,-4.31l-1.76,-3.08l-4.05,-1.14l-0.95,-0.08l-2.86,3.79l-3.85,0.73l-0.49,1.49l-0.89,0.1l-2.6,3.46l-11.31,-3.03l-1.57,3.08l-6.73,0.35l-4.78,-1.21l-4.41,1.01l-1.44,-1.39l2.14,-9.51l-6.19,-4.01l-4.13,1.36l-1.77,1.39l-4.46,-1.36l-0.41,-8.26l-9.53,-11.53l5.16,-9.84l-4.15,-9.83l-5.22,-0.33l-1.12,-1.93l0.08,-1.67l-0.71,-0.3l-3.99,3.85l-0.9,6.89l-6.22,5.27l-5.55,6.98l-0.54,-0.83l0.3,0.99l-1.01,0.46l-1.14,-0.33l1.6,-0.43l-2.14,-1.47l-1.57,-2.83l-0.93,0.56l-1.28,-0.86l0.19,-0.61l-0.6,0.48l-1.38,-0.35l-1.41,1.95l-2.52,-1.49l-0.81,0.68l-1.17,-0.53l-7.47,1.49l-2.77,-1.52l-0.68,-3.34l0.93,-0.94l1.72,-0.15l0.41,0.81l6.3,2.03l1.23,1.11l1.28,-0.03l-0.51,-0.56l1,-0.86h-2.71l-1.03,-5.97l1.68,-2.91l0.84,0.66l0.87,-0.79l-1.57,-1.19l0.85,-4.51l-3.15,-0.76l-0.85,-2.86l-6.92,-2.31l-3.17,1.19l-1.12,2.53l-2.55,1.34l-2.18,3.85l-1.93,0.46l-1.63,3.85l-4.1,1.87l0.38,4.43l-1.63,0.53l-1.17,-0.43l-1.9,0.99l-1.74,-0.89l-8.42,-1.57l-6.31,2.35l-0.85,-6.58l2.72,-5.95l-1.68,-2.46l-10.1,1.19l-1.58,1.65l-3.42,0.61l-9.27,4.91l-0.73,0.91l2.26,0.28l1,2.68l-0.92,0.56l0.35,1.21l-1.76,1.7l1.35,3.31l6.2,8.17l-0.87,1.04l0.82,1.72l-1.44,-0.28l-4.37,2.2l-2.71,0.25l-5.55,-0.76l-1.22,-4.75l-9.19,0.28l0,0l-0.54,-1.59l-1,-0.13l-0.52,-1.14l0.66,-2.28l-0.81,-4.96l1.9,-2.4l-8.85,-19.84l-0.03,-1.95l2.67,-8.07l0.09,-2.54l-1.41,-2.79l-5.22,-6.4l-0.74,-2.59l1.34,-4.75l1.8,-1.78l4.35,-1.91l1.68,-3.15l0,0l4.29,0.51l9.08,-2.52l3.29,-1.53l2.79,-2.26l1.46,-3.44l2.44,-2.98l0.89,-0.03l0.44,-1.15l3.21,-2.7l11.88,3.97l3.48,0.36l11.87,-3.26l2.5,-2.01l9.97,-2.85l4.79,-3.39l5.51,-5.33l2.25,-5.74l2.45,-2.91l4.57,-1.99l7.91,0.1l3.39,-2.14l2.86,-2.96l3.73,-1.86l4.38,-0.41L259.91,453.21zM249.37,548.14l-1.35,1.97l-0.14,-1.65l-0.51,0.43v-0.94l-0.87,0.53l0.59,0.71l-0.27,1.54l-0.9,0.46l3.59,1.57l0.55,-0.78l-0.38,-0.63l0.98,-0.68l-1.42,-0.38l-0.19,-0.81l0.97,0.1l-0.62,-1.11l1.5,-0.76l0.22,0.53l0.98,-1.16l-0.35,-0.58l-0.74,0.73l-2.93,0.05L249.37,548.14zM251.87,548.95l-0.71,-0.58l-0.41,0.43L251.87,548.95zM242.78,553.78l-0.4,0.53l0.43,-0.1L242.78,553.78zM249.51,557.48l-1.6,0.46l0.59,0.58l-0.66,0.56l1.84,-0.3L249.51,557.48z\"/>\n\t\t<path id=\"NL-NH\" title=\"Noord-Holland\" class=\"land\" 
d=\"M307.38,228.33l8.62,3.83l7.09,7.5l0,0l-0.36,0.26l0,0l-7.01,-7.42l-8.23,-3.59h-2.78L307.38,228.33zM212.99,155.72l-8.24,1.02l-1.69,0.92l0.81,0.81l-0.55,0.55l-1.39,-0.1l-0.87,-1.34l0.9,-1.68l4.13,-2.44l3.37,0.47l3.42,1.23L212.99,155.72zM268.4,164.24l-2.14,1.73l-0.38,2.49l1.47,-0.26l10.35,20.25l-1.52,18.28l1.22,0.08l0.67,1.17l0.87,3.63l4.43,4.51l1.17,0.39l1.77,0.03l1.41,-0.94l2.52,-3.7l4.46,0.26l3.39,1.77l3.54,-0.7l0.43,0.5l-1.55,0.55l2.15,-0.13l1.85,1.23l0.68,7.14l3.1,1.9l-1.2,-0.1l-0.22,1.38l-2.94,1.67l0.51,1.3l-5.79,1.48l-1.65,7.31l-0.79,1.17l-3.69,2.06l-1.5,2.16l-3.92,1.01l-2.33,2.47l-5.05,0.88l-1.17,-3.33l-2.15,0.16l-2.63,-2.78l-2.2,0.65l-1.17,1.2l-0.21,-0.68l-1.36,0.21l0.11,0.96l-1.08,-1.95l-0.93,0.03l-1.16,1.25l-2.75,1.25l-0.44,1.01l0.81,7.2l-0.43,1.01l1.93,5.38l-0.25,1.66l1.91,1.79l1.91,4.26l-0.11,1.53l2.5,5.16l0.7,0.03l-0.17,1.01l1.08,0.88l0.66,2.2l-2.34,3.53l-1.11,-0.08l-0.47,0.62l0.08,2.72l1.16,1.35l-0.1,1.11l-2.72,-0.13l-1.91,0.96l-0.7,-0.41l1.31,2.07l0.79,-1.06l1.08,0.26l-0.81,1.66l0.02,3.68l4.78,0.91l1.65,-0.41l0.3,-3.26l1.46,-1.92l-1.27,-7.72l1.82,5.26l1.36,1.66l3.75,0.28l-3.75,1.61l-1.09,1.14h-1.17l-0.47,3.21L273.7,296l-0.7,0.34l-0.08,-0.91l-1.15,1.04l-1.5,4.35l-2.07,-0.1l-1.22,1.79l-2.2,1.47l-1.22,3.41l-2.14,1.14l0.43,0.67l-0.63,1.86l-1.82,-1.27l-2.83,1.19l-2.64,-2.38l-0.71,0.83l0.47,1.24l2.55,1.32l-1.22,1.4l-0.47,-1.06l-0.85,0.72l3.29,2.12l2.42,2.92l2.34,0.47l2.18,-1.03l1.12,1.09l-0.98,1.37l0.44,0.96l0.74,-0.08l-0.16,-0.7l1.42,0.54l4.02,-0.21l1.28,-0.93l2.28,0.34l2.99,2.25l4.15,1.06l0,0l2.23,2.07l11,-1.88l9.73,2.45l4.65,3.07l3.81,3.77l0,0l-4.73,3.38l-4.62,-0.83l-1.5,1.37l-0.93,-0.41l-2.53,5.21l-3.47,3.09l-0.49,7.94l-4.37,8.73l-10.82,-0.59l-0.28,-1.13l2.93,-1.96l0.84,-1.49l-1.99,-3.97l-1.9,-1.06l-0.49,0.95l-6.79,0.18l-5.24,-2.14l-0.63,-2.19l3.31,-1.49l-2.18,-2.14l-0.57,-2.86l-1.36,0.57l-1.5,-4.69l4.05,-1.21l0.17,-1.14l1.28,-0.67l1.2,0.93l-0.76,-1.99l-1.68,0.13l-1.23,1.47l-1.5,-1.26l-0.71,0.21l-2.14,2.68h-0.7l-1.36,-3.2l-0.98,-0.41l-2.45,0.18l-2.63,2.63l-4.45,-0.93l-1.2,0.52l0.21,2.24l-2.33,1.7l-1.17,2.24l-6.77,0.13l-5.54,5.26l-2.98,1.37h-2.47l0,0l-1.65,-0.36l-1.55,0.67l-0.92,1.34l-4.76,2.65l-1.23,1.55l-0.92,-0.7l-0.24,-5.59l-3.09,-0.49l-2.66,1.83l-3.61,0.28l-5.13,2.99l-3.24,0.23l-4.6,-1.21l-1.68,0.57l-2.33,-0.54l-1.08,-0.88l-0.16,-1.31l1.39,-2.11l1.25,-6.55l-0.29,-2.04l3.13,-3.17l3.81,-8.44l-4.3,-1.63l-2.14,0.59l-1,1.86l-10.74,-4.85l0,0l10.57,-27.46l0.85,-6.03l0.81,-1.11l-0.6,-1.71l3.07,-7.78l4.05,-22.58l4.65,-39.09l1.42,-6.7l2.28,-5.04l2.67,-9.06l3.67,-14.66l0.82,-3.79l0.7,-14.14l1.68,-4.48l1.42,-1.31l6.98,-0.63l-0.13,0.58l1,-0.05l-0.79,0.66l1.19,1.83l1.16,-0.31l0.55,-1.28l-0.97,-0.86l1.54,-0.31l0.4,2.04l-1.98,5.48l2.6,5.55l10.79,6.54l5.14,-1.36l4.97,-2.51l2.42,-3.06l0.68,-2.33l1.5,-1.34l5.43,-1.57l1.84,0.52l2.47,-0.81l1.33,0.94l19.91,-17.64l0,0l0.65,0.87l0,0L268.4,164.24zM240.79,107.05l1.28,0.26l3.04,5.21l-0.81,1.68l-0.32,-0.87l-0.13,12.46l-0.62,1.89l-3.85,3.39l-0.14,2.71l-1.69,3.05l-1.68,0.87l-1.28,1.89l-3.88,0.94l-4.26,5.54l0.71,0.89l-0.46,0.66l-6.46,-2.05l-0.41,0.34l0.6,1l1.84,0.73l1.44,1.55l-1.52,-0.47l-1.71,1l0.43,0.58l-2.9,0.31l-1.61,1.1l-2.69,-4.01l-0.55,-2.86l0.25,-4.88l1.82,-7.9l4.53,-9.35l11.52,-18.15l4.07,-4.32l1.46,-0.55l1,0.42l2.48,3.92L240.79,107.05z\"/>\n\t\t<path id=\"NL-OV\" title=\"Overijssel\" class=\"land\" 
d=\"M436.89,186.27L439.24,189.9L450.01,202.23L444.85,207.94L444.66,209.2L437.95,211.62L436.89,213.11L442.69,228.07L445.69,231.64L446.91,231.14L449.32,231.82L450.17,230.65L452.28,230.15L453.87,231.64L456.56,232.36L458.33,234.47L459.46,234.55L460.8,235.8L462.51,234.92L463.94,236.5L465.04,235.59L469.16,234.53L469.7,235.46L469.32,237.15L475.16,241.37L476.94,245.19L477.34,247.9L478.68,249.46L480.5,248.03L482.01,248.08L482.51,247.17L483.94,247.9L485.9,245.43L486.45,247.45L487.93,248.73L490.6,248.36L491.72,245.89L493.23,247.58L497.72,247.9L499.94,249.98L499,240.2L505.03,235.57L515.13,233.3L530.16,239.76L528.64,245.5L530.46,245.61L530.46,245.61L532.66,248.55L531.58,255.51L539.08,262.13L536.01,262.99L532.53,262.21L530.86,265.93L525.35,264.81L529.29,273.04L528.17,281.98L538.12,288.43L540.73,289.21L552.83,289.19L552.82,291.62L554.45,291.28L566.81,295.3L570.11,292.73L572.44,287.81L574.2,286.57L576.38,291.36L577.68,296.77L579.45,298.38L581.68,304.02L585.31,304.43L587.68,311.18L587.57,316.69L584.91,320.41L583.5,323.84L583.23,326.37L580.19,332.75L580.43,336.98L582.96,341.67L586.13,345.21L585.43,346.83L582,349.05L578.88,349.61L576.49,348.66L573.61,349.36L572.73,350.57L572.11,354.15L568.23,360.77L561.36,362.1L560.91,363.8L557.36,367.1L555.86,373.81L553.2,376.38L538.18,376.97L538.18,376.97L531.05,377.02L522.56,374.04L524.24,371.34L523.83,367.33L524.37,365.96L523.64,364.7L520.38,362.98L519.72,362.93L518.94,364.29L517.33,362.95L514.68,365.32L513.35,364.09L513.75,362.62L511.23,360.4L505.43,361.82L499.14,360.79L495.58,361.69L493,359.74L492.06,356.36L489.76,355.64L488.46,353.01L485.27,349.67L483.51,346.24L483.56,344.95L478.04,344.33L473.29,345.26L471.29,346.75L471.2,348.94L468.54,349.36L465.27,349.33L464.06,347.94L461.86,348.94L460.82,348.58L460.74,349.33L457.12,349.07L452.07,348.02L449.76,346.29L445.63,346.26L445.04,348.79L444.32,348.02L444.16,349.18L443.54,348.92L444.41,350.44L444.09,351.21L443.37,349.33L440.95,348.22L440.52,346.57L441.96,345L442.15,343.58L440.19,343.76L437.5,342.71L438.41,340.39L435.57,335.79L435.44,333.29L434.17,330.69L433.84,330.09L431.13,329.73L430.5,325.73L428.93,325.52L429.84,322.4L429.85,317.26L432.72,315.37L430.38,313.82L430.12,312.29L430.75,311.42L433.89,310.82L437.2,309.24L439.17,301.28L438.18,297.73L433.97,292.53L433.51,286L432.16,284.37L428.51,282.27L426.69,278.59L422.05,275.58L420.94,276.9L419.23,277.5L412.34,284.63L410.97,282.66L408.03,284.39L407.41,283.82L406.21,284.37L404.06,279.26L400.85,274.75L398.39,272.57L396.24,273.37L396.24,273.37L394.5,264.76L387.76,257.56L382.57,257.36L382.63,250.5L392.02,250.52L393.27,249.48L402.59,247.43L404.65,248.78L406.99,248.49L410.86,245.87L416.1,244.72L419.66,242.36L420.45,241L418.17,238.48L415.6,237.78L413.08,235.57L407.27,233.98L407.21,233.09L409,231.06L411.88,228.9L412.26,227.31L410.2,223.56L409.09,222.99L405.8,213.68L395.58,205.33L391.99,204.08L389.09,204.08L385.51,199.2L385.51,199.2L389.3,196.56L391.46,198.05L391.81,199.12L392.81,198.55L398.68,200.11L401.99,198.55L406.2,194.58L406.18,192.54L406.89,191.68L409.3,191.16L410.77,191.76L410.79,190.37L413.62,189.43L415.77,194.81L417.86,195.81L421.99,194.68L422.7,196.12L426.43,194.11L426.23,190.92L428.02,190.19L430.79,190.29z\"/>\n\t\t<path id=\"NL-UT\" title=\"Utrecht\" class=\"land\" 
d=\"M227.2,348.02L229.67,348.02L232.64,346.65L238.18,341.39L244.95,341.26L246.12,339.02L248.45,337.32L248.24,335.07L249.45,334.56L253.89,335.48L256.52,332.85L258.97,332.67L259.95,333.09L261.31,336.28L262.01,336.28L264.15,333.6L264.86,333.4L266.36,334.66L267.6,333.19L269.27,333.06L270.03,335.05L268.83,334.12L267.55,334.79L267.37,335.92L263.32,337.14L264.83,341.83L266.19,341.26L266.76,344.12L268.94,346.26L265.63,347.76L266.27,349.95L271.5,352.09L278.29,351.91L278.78,350.95L280.68,352.01L282.68,355.98L281.84,357.47L278.91,359.43L279.19,360.56L290.02,361.15L294.39,352.42L294.88,344.48L298.34,341.39L300.87,336.18L301.81,336.59L303.31,335.23L307.93,336.05L312.66,332.67L312.66,332.67L323.77,343.25L323.77,343.25L321.95,351.08L328.42,356.36L328.06,359.86L325.54,360.58L326.19,363.57L334.75,365.29L336.21,366.81L335.99,368.54L338.9,369.15L338.71,371.03L340.84,372.09L340.07,373.58L338.34,373.83L337.76,375.2L339.26,376.35L339.89,377.95L336.62,377.61L337.73,378.33L336.27,380.16L336.48,382.83L334.2,384.27L332.49,386.45L336.24,389.05L339.44,388.56L339.85,387.17L342.57,387.35L344.09,385.73L343.04,385.3L343.55,384.4L344.83,384.78L344.94,384.27L343.61,380.83L345.02,380.47L345.43,380.95L346.76,380.23L346.73,382.42L347.33,382.83L347.46,384.68L348.42,385.32L348.68,387.33L347.44,394.87L348.65,395.39L348.44,397.16L351.45,398.29L351.24,402.78L350.37,402.8L352.79,404.62L353.85,407.39L356.11,408.77L357.14,411.93L358.74,413.82L358.16,415.54L358.87,419.43L357.93,419.66L358,420.97L357.09,420.46L355.5,421.87L352.82,420.07L347.49,418.49L345.21,415.77L340.1,413.88L336.9,411.62L333.2,411.7L328.88,410.85L325.76,413.18L320.79,415.08L315.64,414.9L312.82,417.79L310.89,418.77L309.54,418.69L307.09,417.05L302.52,416.18L297.66,412.57L296.46,412.88L293.26,417.41L290.38,417.18L288.21,415.77L288.21,415.77L283.23,413.95L281.68,408.88L278.04,407.08L275.03,406.6L270.33,408.57L265.95,412.72L264.72,415.08L263.12,415.26L260.68,413.34L258.85,413.23L254.45,417.62L249.97,416.59L246.63,421.76L240.43,423.25L239.14,418.2L234.59,413.05L232.55,409.06L231.34,408.65L229.94,406.78L236.28,405.21L235.13,402.39L228.75,403.7L228.31,402.26L228.31,401.34L229.7,400.34L229.43,398.41L232.61,394.39L235.22,392.41L238.53,391.2L239.33,389.56L232.85,390L232.91,388.63L231.74,386.3L231.56,380.06L226.15,376.17L226.41,375.12L228.35,374.25L229.57,370.88L230.84,370.34L233.24,369.57L233.61,371.08L234.49,371.78L238.72,372.01L239.51,371.39L238.92,367.35L242.21,365.73L236.91,362.05L234.52,361.2L229.68,355.41L228.61,355.46L228.38,351.57L227.26,349.72z\"/>\n\t\t<path id=\"NL-ZE\" title=\"Zeeland\" class=\"land\" 
d=\"M122.86,569.59l0.62,-0.25l0.73,1.57l-1.84,0.35l-2.44,-1.42l1.19,-0.78L122.86,569.59zM29.64,559.63l2.94,0.94l-0.02,0.73l1.47,0.05l1.57,1.85l3.5,0.63l1.41,1.21l11.11,2.28l5.13,5.36l2.75,-0.25l3.88,0.78l-0.57,1.95l1.66,0.88l0.67,-0.25l-0.17,-3.08l3.05,0.08l1.72,1.34l1.52,0.23l-0.49,0.96l1.16,2.25l0.08,-3.06l0.6,1.24l0.82,-1.19l2.85,1.67l4.04,-0.1l1.41,-2.3l2.36,-0.71l3.51,-2.83l6.44,-2.12l0.78,0.61l0.68,-4.05l0.87,-0.94l-0.51,-3.08l1.65,-2.33l6.9,1.37l-0.32,0.53l1.96,1.44l0.57,3.14h0.65l2.14,3.72l6.57,0.63l8.1,7.45l-2.58,-4.12l2.71,-0.71l0.98,1.37l1.6,0.5l1.08,1.74l0.41,-0.63l-1.03,-1.37l2.48,0.13l-2.94,-1.19l4.79,0.1l-0.66,-0.66l-4.43,-0.45l-1.31,-2.25l0.08,-1.16l1.69,-0.1l1.57,-0.96l0.43,3.08l3.4,1.54l-3.07,-2.5l0.33,-1.57l0.6,1.39l0.81,-2.88l2.86,0.43l1.12,0.86l0.65,2.15l2.66,3.49l-10.35,13.89l-16.33,11.71l-0.54,0.96l-2.91,0.68l-0.95,-1.06l-1.84,1.01l-0.57,-0.96l-1.04,0.03l-4.27,2.97l-0.3,-0.38l-1.41,2.34l-2.26,0.4l-0.71,1.94l-3.59,1.03l-1.23,-1.97l-0.48,0.81l-0.92,0.05l-0.29,1.94l-1.06,0.88l-3.77,1.28l-1.23,-1.84l2.29,-2.32l-0.93,-1.56l-4.59,3l-1.11,-0.68l-1.49,0.55l-0.92,-0.38l-2.18,1.11l-2.83,-1.03l-0.09,1.16l-2.01,-1.56l-0.25,-7.59l0.97,-2.67l-2.86,-1.97l-0.71,-0.3l-0.95,0.93l-2.28,-2.6l-4.54,-0.61l-1.01,0.4l-3.5,-1.01l-0.93,-1.31l-1.68,-0.03l-3.34,-2.4l-2.74,0.5l-7.83,-4.37l-1.57,1.87l0.76,1.36l-0.62,1.59l-3.13,-1.99l-1.22,1.21l-1.79,-0.23l-0.81,1.72l-0.87,0.13l-1.19,-1.19l-1.55,0.3l1.92,10.37l-11.93,1.18l-3.85,-0.91l-0.71,-2.7l-1.14,-1.08l-1.14,0.73l-3.24,-4.21l-1.96,-1.06l0.95,-0.78l-0.03,-1.44l-1.68,-1.41l-0.62,-1.57l1.42,-0.76L0,581.39l3.99,-4.52L3.7,574.6l-1.36,-1.77l0.1,-2.65l-1.31,-2.53l2.15,-2.1l0.3,0.38l6.63,-3.39l1.31,-0.2l0.9,1.01l2.86,-1.72l4.64,-1.09l3.5,-2.3l5.13,0.38L29.64,559.63zM52.44,525.81l0.54,-1.01l0.14,0.66L52.44,525.81zM63.8,523.22l1.82,0.41l-3.77,0.23l0.27,-0.96L63.8,523.22zM66.14,522.63l-0.93,-0.69l1.15,0.2L66.14,522.63zM64.77,521.92l0.17,0.84l-2.15,-0.61L64.77,521.92zM51.4,521.69l1.61,1.63l-0.79,2.11l-1.25,-3.12L51.4,521.69zM49.07,519.21l0.41,0.53l-1,-0.18L49.07,519.21zM75.57,469.26l1.31,1.28l4.53,1.43l1.61,-0.31l3.21,-4.08l5.47,-2.24l2.93,0.41l2.25,2.04l6.04,1.35l1.08,3.39l2.61,3.7l0.24,4.94l1.9,4.46l8.1,1.02l2.99,1.07l5.3,0.18l12.41,9.06l2.96,0.97l0,0l-1.68,3.15l-4.35,1.91l-1.8,1.78l-1.34,4.75l0.74,2.59l5.22,6.4l1.41,2.79l-0.09,2.54l-2.67,8.07l0.03,1.95l8.85,19.84l-1.9,2.4l0.81,4.96l-0.66,2.28l0.52,1.14l1,0.13l0.54,1.59l0,0l-5.7,0.25l0.4,-1.92l-1.95,-3.24l-2.88,-1.32l-1.04,0.2l-1.03,-1.62l-7.8,2.81l-11.3,-3.14l-4.32,-3.26l-0.78,-1.52l0.55,-0.86l-2.55,-0.79l-0.16,-1.09L106,551l-3.97,-2l-4.54,-4.53L91,547.86l-1.47,-0.48l0.17,1.29l-0.71,0.15l0.7,2.46l-3.97,9.97l-1.11,0.53l-3.53,-1.06l-2.14,1.54l-6.82,1.64l-6.41,-5.26l0.27,-1.09l-1.95,-0.13l-1.71,-1.11l-1.49,0.83l-2.01,-0.23l-0.92,-0.76l-0.84,-2.94l-0.79,0.08l-2.39,-3.21l-3.54,-2.56l-4.86,-0.2l-1.91,2.15l-4.18,0.41l-1.17,-0.94L38.14,547l-0.55,0.08l-0.1,2.1l-0.79,-0.23v-0.83l-0.33,0.68l-0.33,-0.89l-0.21,0.63l1.63,1.21l-4.3,0.08l-1.79,-2.28l-0.93,0.08l-4,-3.42l-4.53,-7.45l-6.85,-4.92l-3.34,-3.83l0.93,-4.01l9.34,-6.32l11.57,-6.58l4.75,0.05l4.21,1.55l7.77,-2.79l0.97,-5.16l-0.85,-1.04l1.14,-0.2l3.78,-4.22l-0.24,-0.48l0.78,-0.2l0.96,-2.11l-0.51,-0.64l0.78,-0.23l0.03,-2.88l-2.34,-0.74l-1.82,-1.5l-2.22,-4.2l-0.17,-3.29l2.53,-5.2l1.84,-1.4l9.84,-3.19l8.26,0.82l0.68,-3.16L75.57,469.26zM70.57,486l-0.98,-0.99l-0.73,0.13l-0.71,0.51l-0.33,1.76l-4.68,3.18l-5.51,2.16l-0.05,2.82l0.79,0.1l-0.3,1.07l-0.87,0.03l-1.31,3.18l0.9,-0.18l-1.44,1.58l1.46,0.76l-1.58,-0.46l-0.62,2.21l-0.89,-1.53l-1.46,1.27l-1.22,6.05l0.24,0.48l0.84,-0.6
1l0.03,0.51l1.57,-0.33l2.64,1.91l1.84,0.03l5.6,-1.93l1.9,0.41l1.01,-0.96l3.59,-0.86l2.93,0.56l2.58,-0.61l0.62,0.79l3.05,0.56l1.69,1.8l2.61,4.55l0.52,-0.46l-0.49,0.71l1.65,2.56l-0.19,0.79l-3.75,1.42l-1.77,2.39l-1.27,0.41l-3.47,-3.2l-2.75,0.63l-1.87,1.52l-6.96,-1.83l-4.23,2.46l-4.84,4.9l-0.79,-1.78l0.44,-1.52l1,1.9l0.05,-0.68l-2.96,-5.53l1.9,-2.49l-2.41,2.29l-3.97,-3.2l-1.23,-0.2l-1.25,-3.17l0.29,-1.32l-3.04,1.27l0.28,2.36l2.63,3.76l3.73,2.21l2.36,5.76l2.74,4.24l0.25,-0.46l0.54,0.99l-0.35,-0.91l2.53,-0.41l-1.87,-0.05l-0.46,-0.61l3.58,-1.8l-0.54,-1.04l1.2,-3.04l1.77,-1.14l0.76,0.05l-0.43,1.02l0.68,-0.74l7.03,0.36l2.06,-1.22l3.64,-0.63l1.99,0.81l0.84,1.37l1.88,0.38l0.93,-1.29l0.59,0.84l2.5,-0.69l2.22,0.43l1.91,-1.14l2.25,0.38l2.37,4.64l2.45,-1.07l1.84,0.33l5.41,1.95l0.16,2.05h0.49l-0.17,-2.03l1.96,0.81l0.66,1.34l3.43,2.64l1.17,-0.36l1.3,4.03l-0.6,1.7l3.73,5.6l0.82,0.03l1.42,3.09l7.12,3.65l2.42,-0.41l4.48,-2.61l3.05,0.53l1.55,0.96l1.74,-0.13l-1.03,-13.02l-6.8,-3.07l-0.63,-1.57l0.89,-1.83l-1.52,-1.75l-2.39,-0.41l-1.17,0.74l-1.23,-1.12l-5.16,0.36l-4.41,-0.81l-0.17,-0.96l-1.17,-0.53l-0.57,-2.56l-1.65,-1.04l-0.09,-1.19l-3.69,-1.07l0.43,-1.19l-1.01,-1.5l-3.29,-2.08l-2.17,-0.33l-0.87,-1.17l1.08,-2.87l5.14,-0.99l4.84,-3.13l3.48,-1.07l4.05,0.69l0.98,1.88l1.72,0.97l-0.21,-0.71l4.46,-0.53l2.04,0.51l0.27,-0.74l1.84,-0.28l0.08,-0.56l-0.74,-0.2l1.04,-0.58l-0.06,-0.92l-2.78,0.69l-4.68,-0.13l-1.53,-3.08l-2.48,-1.58l-0.13,-2.64l0.92,-0.1v-0.81l2.37,-2.09l4.49,0.89l6.85,3.89l-0.79,-2.52l-4.83,-2.14l0.16,-1.4l-1.08,0.99l-2.12,-0.74l-0.11,-0.74l3.29,0.08l-0.78,-1.04l-2.61,0.51l2.47,-0.99l0.71,0.36l0.47,-0.64l-1.52,-2.88l-6.52,1.45l-2.17,1.5l1.71,2.06l-0.27,0.76h-0.74l-0.6,2.31l-2.17,1.35l-1.87,2.47l-1.88,0.92l-1.49,-0.66l-6.31,3.36L96.56,506l-3.61,-2.72l-1.36,-2.37l-2.03,1.6l-4.43,-1.09l4.57,-3.99l-1.31,0.33l-4.37,3.46l0.08,-2.42l-1.01,-3.38l-1.39,-1.71l0.08,-1.12l-2.77,-2.62l-1.71,0.2l-3.91,-2.01l0.11,-1.53l-0.82,-1.22l-1.71,-0.53l-1.01,0.13l0.08,0.61L70.57,486z\"/>\n\t\t<path id=\"NL-ZH\" title=\"Zuid-Holland\" class=\"land\" 
d=\"M155.8,478.39l3.15,2.34l-3.97,-0.15l-1.09,-1.2L155.8,478.39zM143.96,470.44l5.3,1.02l7.77,3.19l0.95,1.27l-3.66,1.43l-4.53,-0.59l-7.82,-5.3l0.79,-1.12L143.96,470.44zM179.49,322.71l10.74,4.85l1,-1.86l2.14,-0.59l4.3,1.63l-3.81,8.44l-3.13,3.17l0.29,2.04l-1.25,6.55l-1.39,2.11l0.16,1.31l1.08,0.88l2.33,0.54l1.68,-0.57l4.6,1.21l3.24,-0.23l5.13,-2.99l3.61,-0.28l2.66,-1.83l3.09,0.49l0.24,5.59l0.92,0.7l1.23,-1.55l4.76,-2.65l0.92,-1.34l1.55,-0.67l1.65,0.36l0,0l0.06,1.7l1.12,1.85l0.22,3.89l1.08,-0.05l4.84,5.79l2.39,0.85l5.3,3.68l-3.29,1.62l0.59,4.04l-0.79,0.62l-4.23,-0.23l-0.89,-0.69l-0.36,-1.52l-2.41,0.77l-1.27,0.54l-1.22,3.37l-1.95,0.87l-0.25,1.05l5.41,3.88l0.17,6.24l1.17,2.34l-0.06,1.36l6.49,-0.44l-0.81,1.64l-3.31,1.21l-2.61,1.98l-3.18,4.03l0.27,1.92l-1.39,1v0.92l0.44,1.44l6.38,-1.31l1.15,2.82l-6.35,1.56l1.41,1.87l1.2,0.41l2.04,4l4.56,5.15l1.28,5.04l6.2,-1.48l3.34,-5.17l4.48,1.03l4.4,-4.38l1.84,0.1l2.44,1.92l1.6,-0.18l1.23,-2.36l4.38,-4.15l4.7,-1.97l3.01,0.49l3.64,1.79l1.55,5.07l4.98,1.82l0,0l-6.93,12.39l0.1,1.61l-0.74,-0.36l-2.2,4.07l-0.65,2.86l-3.67,-0.43l-1.66,3.17l-4.32,1.28l0.14,1.74l1.09,0.67l-2.9,1.87l-1.53,-0.61l-4,0.1l-1.8,-1.58l-0.41,0.43l0.59,1.87l-0.35,1.41l0.95,1.58l3.4,0.46l-1.03,4.98l-2.37,-0.05l0,0l-9.95,-1.81l-4.38,0.41l-3.73,1.86l-2.86,2.96l-3.39,2.14l-7.91,-0.1l-4.57,1.99l-2.45,2.91l-2.25,5.74l-5.51,5.33l-4.79,3.39l-9.97,2.85l-2.5,2.01l-11.87,3.26l-3.48,-0.36l-11.88,-3.97l-3.21,2.7l-0.44,1.15l-0.89,0.03l-2.44,2.98l-1.46,3.44l-2.79,2.26l-3.29,1.53l-9.08,2.52l-4.29,-0.51l0,0l-2.96,-0.97l-12.41,-9.06l-5.3,-0.18l-2.99,-1.07l-8.1,-1.02l-1.9,-4.46l-0.24,-4.94l-2.61,-3.7l-1.08,-3.39l-6.04,-1.35l-2.25,-2.04l-2.93,-0.41l-5.47,2.24l-3.21,4.08l-1.61,0.31l-4.53,-1.43l-1.31,-1.28l0,0l2.9,-1.86l1.44,-3.75l0.32,-1.86l-1.66,-5.51l2.63,-2.99l6.93,-1.58l11.16,-4.44l3.26,0.05l1.58,0.74l0.68,0.64l-1.42,-0.28l-0.22,1.12l3.85,2.78l-0.3,-1.33h1.49l1.49,-2.4l2.42,-1.51l-0.63,-2.58l-5.1,-7.92l0.25,-1.94l3.1,-5.37l-0.11,-0.79l-2.09,-1.43l-0.55,2.07l-2.28,-1.05l-2.91,1.05l-0.85,-0.79l2.44,-9.37l0.05,-2.1l-0.79,0.1v-1.38l0.97,-2.25l1.8,-0.33l1.55,-1.82l1.47,0.67l1.46,-0.23l4,2.28l8.67,-7.33l3.77,-5.75l15.65,-18.89l1.06,0.46l0.74,-1.1l-0.82,0.64l-0.38,-0.75l6.93,-6.94l11.79,-16l10.54,-18.89L179.49,322.71zM108.57,451.27l-0.41,2.76l1,-0.69l17.94,10.29l4.94,1.76l3.45,3.77l3.99,3.11l3.1,4.18l4.24,2.88l6.84,3.21l7.88,0.69l1.58,-0.56l1.79,-4.81l-13.32,-7.26l-10.85,-3.29l-2.91,-3.34l0.06,-2.35l-2.41,-2.68l-2.26,-1.25l-2.82,-0.03l-4.27,-4.8l-4.79,0.51l-1.09,-1.48l-0.32,1.05l-0.6,-0.77l-3.77,-1.33l-2.37,-2.99l-3.05,1.53L108.57,451.27z\"/>\n\t</g>\n</svg>"
+      },
+      "targets": [
+        {
+          "columns": [],
+          "datasource": {
+            "uid": "grafanaapi"
+          },
+          "filters": [],
+          "format": "table",
+          "global_query_id": "",
+          "hide": false,
+          "refId": "GrafanaAlerts",
+          "root_selector": "",
+          "source": "url",
+          "type": "json",
+          "url": "http://localhost:3000/api/alertmanager/grafana/api/v2/alerts",
+          "url_options": {
+            "data": "",
+            "method": "GET"
+          }
+        },
+        {
+          "datasource": {
+            "uid": "6W2nM-Vnz"
+          },
+          "editorMode": "code",
+          "expr": "device_scraping",
+          "hide": false,
+          "range": true,
+          "refId": "B"
+        },
+        {
+          "columns": [],
+          "datasource": {
+            "uid": "alertaui"
+          },
+          "filters": [],
+          "format": "table",
+          "global_query_id": "",
+          "hide": false,
+          "refId": "AlertaAlerts",
+          "root_selector": "alerts",
+          "source": "url",
+          "type": "json",
+          "url": "http://alerta-web:8080/api/alerts",
+          "url_options": {
+            "data": "",
+            "method": "GET",
+            "params": [
+              {
+                "key": "status",
+                "value": "open"
+              }
+            ]
+          }
+        }
+      ],
+      "title": "Panel Title",
+      "type": "aceiot-svg-panel"
+    }
+  ],
+  "refresh": "5s",
+  "schemaVersion": 36,
+  "style": "dark",
+  "tags": [],
+  "templating": {
+    "list": []
+  },
+  "time": {
+    "from": "now-5m",
+    "to": "now"
+  },
+  "timepicker": {},
+  "timezone": "",
+  "title": "ace.avg",
+  "uid": "tMZW8-U7z",
+  "version": 11,
+  "weekStart": ""
+}
diff --git a/docker-compose/grafana/datasources/alertaui.yaml b/docker-compose/grafana/datasources/alertaui.yaml
index 8fa7ddcfe36d5b1fcaf04a79a7defe166c26bcf8..7a3b62425a71ddf39642fa5f0fd515f7032170f7 100644
--- a/docker-compose/grafana/datasources/alertaui.yaml
+++ b/docker-compose/grafana/datasources/alertaui.yaml
@@ -12,7 +12,7 @@ datasources:
     # <string> custom UID which can be used to reference this datasource in other parts of the configuration, if not specified will be generated automatically
     uid: alertaui
     # <string> url
-    url: http://alerta-web:8080/api
+    url: http://alerta-server:8080/api
     # <string> Deprecated, use secureJsonData.password
     password:
     # <string> database user, if used
diff --git a/docker-compose/grafana/datasources/grafanaapi.yaml b/docker-compose/grafana/datasources/grafanaapi.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..a2310cdf2f4432c09581b1f60bbf9ec16a573606
--- /dev/null
+++ b/docker-compose/grafana/datasources/grafanaapi.yaml
@@ -0,0 +1,36 @@
+apiVersion: 1
+
+datasources:
+  # <string, required> name of the datasource. Required
+  - name: Grafana API
+    # <string, required> datasource type. Required
+    type: yesoreyeram-infinity-datasource
+    # <string, required> access mode. proxy or direct (Server or Browser in the UI). Required
+    access: proxy
+    # <int> org id. will default to orgId 1 if not specified
+    orgId: 1
+    # <string> custom UID which can be used to reference this datasource in other parts of the configuration, if not specified will be generated automatically
+    uid: grafanaapi
+    # <string> url
+    url: http://localhost:3000/api
+    # <string> Deprecated, use secureJsonData.password
+    password:
+    # <string> database user, if used
+    user: postgres
+    # <string> database name, if used
+    database: hdb
+    # <bool> enable/disable basic auth
+    basicAuth: false
+    # <string> basic auth username
+    basicAuthUser:
+    # <string> Deprecated, use secureJsonData.basicAuthPassword
+    basicAuthPassword:
+    # <bool> enable/disable with credentials headers
+    withCredentials:
+    # <bool> mark as default datasource. Max one per org
+    isDefault: false
+    # <map> fields that will be converted to json and stored in jsonData
+    version: 1
+    # <bool> allow users to edit datasources from the UI.
+    editable: false
+    
diff --git a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
index ec07d5b8bb3c1a382dded3d2b6d3c596065a1cdb..dd3c941b9fbdc1fa9fef279a9e0cbb9c250bf51b 100644
--- a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
+++ b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
@@ -11,8 +11,10 @@ tilebeam = DeviceProxy("STAT/TileBeam/1")
 pdu = DeviceProxy("STAT/PDU/1")
 beamlet = DeviceProxy("STAT/Beamlet/1")
 digitalbeam = DeviceProxy("STAT/DigitalBeam/1")
+antennafield = DeviceProxy("STAT/AntennaField/1")
 docker = DeviceProxy("STAT/Docker/1")
-temperature_manager = DeviceProxy("STAT/Temperature_manager/1")
+temperaturemanager = DeviceProxy("STAT/TemperatureManager/1")
 
 # Put them in a list in case one wants to iterate
-devices = [apsct, apspu, recv, sdp, sst, xst, unb2, boot, tilebeam, beamlet, digitalbeam, docker, temperature_manager]
+devices = [apsct, apspu, recv, sdp, sst, xst, unb2, boot, tilebeam, beamlet, digitalbeam, antennafield, temperaturemanager, docker]
+
diff --git a/docker-compose/prometheus-node-exporter.yml b/docker-compose/prometheus-node-exporter.yml
new file mode 100644
index 0000000000000000000000000000000000000000..da82726fc2ebec19bdba44067ea813adeb618170
--- /dev/null
+++ b/docker-compose/prometheus-node-exporter.yml
@@ -0,0 +1,25 @@
+#
+# Docker compose file that launches Prometheus Node Exporter
+#
+# Provides system metrics for Prometheus to scrape
+#
+
+version: '2'
+
+services:
+  prometheus-node-exporter:
+    image: prom/node-exporter
+    container_name: ${CONTAINER_NAME_PREFIX}prometheus-node-exporter
+    network_mode: host # run on the host to be able to access host network statistics
+    logging:
+      driver: syslog
+      options:
+        syslog-address: udp://${LOG_HOSTNAME}:1514
+        syslog-format: rfc3164
+        tag: "{{.Name}}"
+    restart: unless-stopped
diff --git a/docker-compose/prometheus.yml b/docker-compose/prometheus.yml
index 8029e9ba6d83e4af824ca307b8aae17af919333e..f91a1a17428b5abf7e53149377a21629539b3bb9 100644
--- a/docker-compose/prometheus.yml
+++ b/docker-compose/prometheus.yml
@@ -23,6 +23,8 @@ services:
         max-file: "10"
     networks:
       - control
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
     volumes:
       - prometheus-data:/prometheus
     ports:
diff --git a/docker-compose/prometheus/Dockerfile b/docker-compose/prometheus/Dockerfile
index ad8e5165b06b55a3ca1e273d09ee2fbf6c69db1c..ed4c142dbe0f11c339abf71ff57854b7c345ec29 100644
--- a/docker-compose/prometheus/Dockerfile
+++ b/docker-compose/prometheus/Dockerfile
@@ -2,4 +2,4 @@ FROM prom/prometheus
 
 COPY prometheus.yml /etc/prometheus/prometheus.yml
 
-CMD ["--config.file=/etc/prometheus/prometheus.yml", "--storage.tsdb.path=/prometheus", "--web.console.libraries=/usr/share/prometheus/console_libraries", "--web.console.templates=/usr/share/prometheus/consoles", "--storage.tsdb.retention.time=31d"]
+CMD ["--config.file=/etc/prometheus/prometheus.yml", "--storage.tsdb.path=/prometheus", "--web.console.libraries=/usr/share/prometheus/console_libraries", "--web.console.templates=/usr/share/prometheus/consoles", "--storage.tsdb.retention.time=5y", "--storage.tsdb.retention.size=500GB", "--web.enable-admin-api"]
diff --git a/docker-compose/prometheus/prometheus.yml b/docker-compose/prometheus/prometheus.yml
index ac9c549be45d6aab48f585dd6ab234cfc1f15449..32746772773146e4b356c8c019e41c5356fecfd1 100644
--- a/docker-compose/prometheus/prometheus.yml
+++ b/docker-compose/prometheus/prometheus.yml
@@ -9,3 +9,8 @@ scrape_configs:
       - targets:
         - "tango-prometheus-exporter:8000"
 
+  - job_name: host
+    scrape_interval: 60s
+    static_configs:
+      - targets:
+        - "host.docker.internal:9100"
diff --git a/docker-compose/tango-prometheus-exporter/Dockerfile b/docker-compose/tango-prometheus-exporter/Dockerfile
index 1df83afa690c008f83868c1bc9c8d6c1a09323ef..e7ca8093801ee43ec8b5db9684ddfe1e7dd7d08f 100644
--- a/docker-compose/tango-prometheus-exporter/Dockerfile
+++ b/docker-compose/tango-prometheus-exporter/Dockerfile
@@ -1,15 +1,17 @@
 FROM tangocs/tango-pytango
 
+# curl is needed by pip
 USER root
-
 RUN apt-get update && apt-get install curl -y
 
 USER tango
 
-ADD ska-tango-grafana-exporter/exporter/code /code
-RUN pip install -r /code/pip-requirements.txt
+COPY code/pip-requirements.txt /tmp/
+RUN pip install -r /tmp/pip-requirements.txt
 
+ADD code /code
+COPY lofar2-policy.json /code/
 WORKDIR /code
 ENV PYTHONPATH '/code/'
 
-CMD ["python", "-u", "/code/collector.py"]
+CMD ["python", "-u", "/code/tango-prometheus-client.py", "--config=/code/lofar2-policy.json", "--timeout=250"]
diff --git a/docker-compose/tango-prometheus-exporter/code/pip-requirements.txt b/docker-compose/tango-prometheus-exporter/code/pip-requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..771b4421928ac8ddc6ac2c328b4827b238b94c06
--- /dev/null
+++ b/docker-compose/tango-prometheus-exporter/code/pip-requirements.txt
@@ -0,0 +1,2 @@
+prometheus_client
+python-logstash-async
diff --git a/docker-compose/tango-prometheus-exporter/code/tango-prometheus-client.py b/docker-compose/tango-prometheus-exporter/code/tango-prometheus-client.py
new file mode 100644
index 0000000000000000000000000000000000000000..489d5282acff1618d2c436a741f1a3c5d9f6db3b
--- /dev/null
+++ b/docker-compose/tango-prometheus-exporter/code/tango-prometheus-client.py
@@ -0,0 +1,245 @@
+import time
+import argparse
+from prometheus_client.core import GaugeMetricFamily, REGISTRY, CounterMetricFamily
+from prometheus_client import start_http_server
+from tango import Database, DeviceProxy, CmdArgType as ArgType, AttrDataFormat, DevState, DevFailed
+import logging
+import json
+import fnmatch
+from logstash_async.handler import AsynchronousLogstashHandler, LogstashFormatter
+
+logger = logging.getLogger()
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG)
+
+# log to memory only, with an event TTL of 600 seconds. this container is not important enough
+# to keep all logs around for, and as a scraper it must be robust against overloading on all sides
+handler = AsynchronousLogstashHandler("elk", 5959, database_path=None, event_ttl=600)
+handler.setLevel(logging.INFO)
+logger.addHandler(handler)
+
+""" Functions to parse and apply policy files. """
+
+class ArchiverPolicy(object):
+    EMPTY_POLICY = {
+      # default policy
+      "default": {
+      },
+      # device-specific policies
+      "devices": {
+      }
+    }
+
+    @staticmethod
+    def load_config(resource: str) -> dict:
+        with open(resource) as fd:
+            return json.load(fd)
+
+    def __init__(self, config: dict = None):
+        self.config = config or self.EMPTY_POLICY
+
+    def devices(self) -> list:
+        return list(self.config["devices"].keys())
+
+    def attribute_list(self, device_name: str, attribute_list: list) -> list:
+        """ Return the sorted list of attributes of the given device that are to be scraped, according to the policy. """
+
+        if device_name not in self.devices():
+            return []
+
+        attributes = set()
+
+        for config_set in [self.config["default"], self.config["devices"][device_name]]:
+            # include all specified attributes by pattern,
+            for include in config_set.get("include", []):
+                for attr in attribute_list:
+                    if fnmatch.fnmatch(attr, include):
+                        attributes.add(attr)
+
+            # then, remove any explicitly excluded attributes
+            for exclude in config_set.get("exclude", []):
+                for attr in attribute_list:
+                    if fnmatch.fnmatch(attr, exclude) and attr in attributes:
+                        attributes.remove(attr)
+
+        return sorted(list(attributes))
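+
+# For illustration (names are hypothetical): with the default policy
+# {"include": ["*"], "exclude": ["*_RW"]} and a device-specific {"include": ["ANT_mask_RW"]},
+# calling
+#
+#   ArchiverPolicy(config).attribute_list("STAT/RECV/1", ["ANT_mask_RW", "temperature_R"])
+#
+# returns ["ANT_mask_RW", "temperature_R"]: the wildcard include matches both attributes,
+# "*_RW" then removes ANT_mask_RW, and the device-specific include re-adds it.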
+
+class CustomCollector(object):
+    def __init__(self, station, config, proxy_timeout=250):
+        self.station = station
+        self.policy = ArchiverPolicy(config)
+        self.proxy_timeout = proxy_timeout
+
+    def _to_metric(self, dev, attr_info, x, y, idx, value):
+        """ Convert the given values to a (labels, value) pair, used to construct a Metric. """
+
+        if attr_info.data_type in [ArgType.DevShort, ArgType.DevLong, ArgType.DevUShort, ArgType.DevULong, ArgType.DevLong64, ArgType.DevULong64, ArgType.DevInt, ArgType.DevFloat, ArgType.DevDouble]:
+            data_type = 'float'
+            str_value = ''
+            float_value = float(value)
+        elif attr_info.data_type == ArgType.DevBoolean:
+            data_type = 'bool'
+            str_value = ''
+            float_value = int(value)
+        elif attr_info.data_type == ArgType.DevString:
+            data_type = 'string'
+            str_value = str(value)
+            float_value = len(str_value)
+        elif attr_info.data_type == ArgType.DevEnum:
+            attr_config = dev.get_attribute_config(attr_info.name)
+            data_type = 'enum'
+            str_value = str(attr_config.enum_labels[value])
+            float_value = int(value)
+        elif attr_info.data_type == ArgType.DevState:
+            data_type = 'state'
+            str_value = ''
+            float_value = int(value)
+        else:
+            return None
+
+        # (labels, value)
+        return ([self.station, dev.dev_name(), attr_info.name, str_value, data_type, f"{x:02}", f"{y:02}", f"{idx:03}"], float_value)
+
+    def metrics_scalar(self, dev, attr_info, attr_value):
+        """ Return all metrics for a given SCALAR attribute. """
+
+        new_metric = self._to_metric(dev, attr_info, 0, 0, 0, attr_value.value)
+        return [new_metric] if new_metric else []
+
+    def metrics_spectrum(self, dev, attr_info, attr_value):
+        """ Return all metrics for a given SPECTRUM attribute. """
+
+        metrics = []
+        for x in range(int(attr_value.dim_x)):
+            new_metric = self._to_metric(dev, attr_info, x, 0, x, attr_value.value[x])
+            if new_metric:
+                metrics.append(new_metric)
+
+        return metrics
+
+    def metrics_image(self, dev, attr_info, attr_value):
+        """ Return all metrics for a given IMAGE attribute. """
+
+        metrics = []
+        for y in range(int(attr_value.dim_y)): 
+            for x in range(int(attr_value.dim_x)):
+                """ NOTE: We switch x and y in the annotation, to allow queries to combine 1D and 2D arrays in their first dimension using the same label (x). We effectively expose
+                          the array as [x][y] instead of [y][x]. """
+
+                new_metric = self._to_metric(dev, attr_info, y, x, y * attr_value.dim_x + x, attr_value.value[y][x])
+                metrics.append(new_metric) if new_metric else None
+
+        return metrics
+
+    def metrics(self, dev, attr_info, attr_value):
+        """ Return all metrics for a given attribute. """
+
+        if attr_info.data_format == AttrDataFormat.SCALAR:
+            return self.metrics_scalar(dev, attr_info, attr_value)
+        elif attr_info.data_format == AttrDataFormat.SPECTRUM:
+            return self.metrics_spectrum(dev, attr_info, attr_value)
+        elif attr_info.data_format == AttrDataFormat.IMAGE:
+            return self.metrics_image(dev, attr_info, attr_value)
+        else:
+            return []
+
+    def device_metrics(self, device_name):
+        """ Return all metrics for a given device, as configured. """
+
+        dev = DeviceProxy(device_name)
+        dev.set_timeout_millis(self.proxy_timeout)
+
+        # obtain extended info about all attributes
+        attr_infos = {attr_info.name: attr_info for attr_info in dev.attribute_list_query()}
+
+        if dev.state() not in [DevState.STANDBY, DevState.ON, DevState.ALARM]:
+            logger.error(f"Error processing device {device_name}: it is in state {dev.state()}")
+
+            # at least log state & status
+            attrs_to_scrape = ["State", "Status"]
+        else:
+            # obtain list of attributes to scrape
+            attrs_to_scrape = self.policy.attribute_list(device_name, attr_infos.keys())
+
+        logger.info(f"Processing device {device_name} attributes {attrs_to_scrape}")
+
+        # scrape each attribute
+        metrics = []
+        for attr_name in attrs_to_scrape:
+            try:
+                attr_value = dev.read_attribute(attr_name)
+
+                metrics.extend(self.metrics(dev, attr_infos[attr_name], attr_value))
+            except DevFailed as e:
+                reason = e.args[0].desc.replace("\n", " ")
+                logger.error(f"Error processing device {device_name} attribute {attr_name}: {reason}")
+            except Exception as e: 
+                logger.exception(f"Error processing device {device_name} attribute {attr_name}")
+
+        return metrics
+
+    def collect(self):
+        """ Yield all scraped metrics from all devices, as configured. """
+
+        logger.info("Start scraping")
+        scrape_begin = time.time()
+
+        attribute_metrics = GaugeMetricFamily("device_attribute", 'Device attribute value', labels=['station', 'device', 'name', 'str_value', 'type', 'x', 'y', 'idx'])
+        scraping_metrics = GaugeMetricFamily("device_scraping", 'Device scraping duration', labels=['station', 'device'])
+
+        for device_name in self.policy.devices():
+            logger.debug(f"Processing device {device_name}")
+            dev_scrape_begin = time.time()
+
+            try:
+                metrics = self.device_metrics(device_name)
+                for metric in metrics:
+                    attribute_metrics.add_metric(*metric)
+            except DevFailed as e: 
+                reason = e.args[0].desc.replace("\n", " ")
+                logger.error(f"Error processing device {device_name}: {reason}")
+            except Exception as e:
+                logger.exception(f"Error processing device {device_name}")
+            finally:
+                dev_scrape_end = time.time()
+
+            logger.info(f"Done processing device {device_name}. Took {dev_scrape_end - dev_scrape_begin} seconds.")
+
+            scraping_metrics.add_metric([self.station, device_name], dev_scrape_end - dev_scrape_begin)
+
+        scrape_end = time.time()
+        logger.info(f"Done scraping. Took {scrape_end - scrape_begin} seconds.")
+
+        scraping_metrics.add_metric(["total"], scrape_end - scrape_begin)
+        
+        yield attribute_metrics
+        yield scraping_metrics
+
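+# The exposed metrics then look roughly as follows (station name and values are hypothetical):
+#
+#   device_attribute{station="DevStation",device="stat/recv/1",name="ANT_mask_RW",str_value="",type="bool",x="00",y="00",idx="000"} 1.0
+#   device_scraping{station="DevStation",device="stat/recv/1"} 0.123
+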
+if __name__ == '__main__':
+    import sys
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-c', '--config', type=str, required=True, help='configuration file')
+    parser.add_argument('-t', '--timeout', type=int, required=False, default=250, help='device proxy timeout (ms)')
+    parser.add_argument('-p', '--port', type=int, required=False, default=8000, help='HTTP server port to open')
+    args = parser.parse_args()
+
+    config = ArchiverPolicy.load_config(args.config)
+
+    db = Database()
+    try:
+        station = db.get_property("station","name")["name"][0]
+    except Exception as e:
+        logger.exception("Could not determine station name")
+        sys.exit(1)
+
+    collector = CustomCollector(station, config, proxy_timeout=args.timeout)
+
+    logger.info("Starting server")
+    start_http_server(args.port)
+
+    logger.info("Registering collector")
+    REGISTRY.register(collector)
+
+    logger.info("Idling")
+    while True:
+        time.sleep(1)
diff --git a/docker-compose/tango-prometheus-exporter/lofar2-policy.json b/docker-compose/tango-prometheus-exporter/lofar2-policy.json
new file mode 100644
index 0000000000000000000000000000000000000000..c0922be468f277195cb469d7b6637c3761cde90a
--- /dev/null
+++ b/docker-compose/tango-prometheus-exporter/lofar2-policy.json
@@ -0,0 +1,74 @@
+{
+    "default": {
+        "include": ["*"],
+        "exclude": ["*_RW"]
+    },
+
+    "devices": {
+        "STAT/APSCT/1": {
+        },
+        "STAT/APSPU/1": {
+        },
+        "STAT/Beamlet/1": {
+            "exclude": [
+                "FPGA_beamlet_subband_select_*",
+                "FPGA_bf_weights_*"
+            ]
+        },
+        "STAT/Boot/1": {
+        },
+        "STAT/DigitalBeam/1": {
+        },
+        "STAT/Docker/1": {
+        },
+        "STAT/PDU/1": {
+        },
+        "STAT/RECV/1": {
+            "include": [
+                "ANT_mask_RW",
+                "RCU_mask_RW"
+            ],
+            "exclude": [
+                "HBAT_BF_delay_steps_*",
+                "*_ITRF_R",
+                "*_ITRF_offsets_R"
+            ]
+        },
+        "STAT/SDP/1": {
+            "include": [
+                "TR_fpga_mask_RW"
+            ],
+            "exclude": [
+                "FPGA_subband_weights_*",
+                "FPGA_signal_input_samples_delay_*",
+                "FPGA_jesd204b_*",
+                "FPGA_scrap_*",
+                "FPGA_wg_amplitude_*",
+                "FPGA_wg_frequency_*",
+                "FPGA_wg_phase_*"
+            ]
+        },
+        "STAT/SST/1": {
+            "exclude": [
+                "sst_R",
+                "sst_timestamp_R",
+                "last_packet_R",
+                "integration_interval_R",
+                "subbands_calibrated_R"
+            ]
+        },
+        "STAT/TileBeam/1": {
+        },
+        "STAT/UNB2/1": {
+            "include": [
+                "UNB2_mask_RW"
+            ]
+        },
+        "STAT/XST/1": {
+            "exclude": [
+                "last_packet_R",
+                "xst_*_R"
+            ]
+        }
+    }
+}
diff --git a/docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter b/docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter
deleted file mode 160000
index e313399d197d266e49d6da0442ea983c6f92adad..0000000000000000000000000000000000000000
--- a/docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit e313399d197d266e49d6da0442ea983c6f92adad
diff --git a/docker-compose/tango.yml b/docker-compose/tango.yml
index 5a6839f44a356113ae1fc525a0ff6e3290e777cd..c9cdac909bf4a863367f3541b1e77d5be659fd2a 100644
--- a/docker-compose/tango.yml
+++ b/docker-compose/tango.yml
@@ -70,7 +70,10 @@ services:
     restart: unless-stopped
 
   dsconfig:
-    image: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-dsconfig:${TANGO_DSCONFIG_VERSION}
+    build:
+        context: dsconfig
+        args:
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-dsconfig:${TANGO_DSCONFIG_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}dsconfig
     networks:
       - control
diff --git a/sbin/run_integration_test.sh b/sbin/run_integration_test.sh
index 8b12f62eb6d49e341901b38b6c8671444445e994..7e0cf3b27ba111077c5739ce5fd832b4b6ea6e05 100755
--- a/sbin/run_integration_test.sh
+++ b/sbin/run_integration_test.sh
@@ -20,7 +20,7 @@ sleep 1 # dsconfig container must be up and running...
 # shellcheck disable=SC2016
 echo '/usr/local/bin/wait-for-it.sh ${TANGO_HOST} --strict --timeout=300 -- true' | make run dsconfig bash -
 
-DEVICES="device-boot device-apsct device-apspu device-sdp device-pdu device-recv device-sst device-unb2 device-xst device-beamlet device-digitalbeam device-tilebeam device-pdu device-temperature-manager"
+DEVICES="device-boot device-apsct device-apspu device-sdp device-recv device-sst device-unb2 device-xst device-beamlet device-digitalbeam device-tilebeam device-pdu device-antennafield device-temperature-manager"
 SIMULATORS="sdptr-sim recv-sim unb2-sim apsct-sim apspu-sim"
 
 # Build only the required images, please do not build everything that makes CI
diff --git a/sbin/update_ConfigDb.sh b/sbin/update_ConfigDb.sh
index 1255f1ea141a75940f2cd858dfc2b40818bd6ec2..f1401d9c6e40601036449553d2919c434c7f8bf1 100755
--- a/sbin/update_ConfigDb.sh
+++ b/sbin/update_ConfigDb.sh
@@ -11,6 +11,9 @@ fi
 # in the container won't be the same as on the host.
 docker cp "${file}" "${CONTAINER_NAME_PREFIX}"dsconfig:/tmp/dsconfig-update-settings.json || exit 1
 
+# write the object properties, Do not change -i into -it this will break integration tests in gitlab ci!
+docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig /manage_object_properties.py --write < "${file}"
+
 # update settings, Do not change -i into -it this will break integration tests in gitlab ci!
 docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig json2tango --write --update /tmp/dsconfig-update-settings.json
 
diff --git a/tangostationcontrol/docs/source/alerting.rst b/tangostationcontrol/docs/source/alerting.rst
index 032bcd379f68d3fa719dc8956334a910bf6227ee..88cc07db4afde1abfff7f2ef7c2a0cf9d2668895 100644
--- a/tangostationcontrol/docs/source/alerting.rst
+++ b/tangostationcontrol/docs/source/alerting.rst
@@ -103,19 +103,32 @@ The following enhancements are useful to configure for the alerts:
 - You'll want to alert on a query, followed by a ``Reduce`` step with Function ``Last`` and Mode ``Drop Non-numeric Value``. This triggers the alert on the latest value(s), but keeps the individual array elements separated,
 - In ``Add details``, the ``Dashboard UID`` and ``Panel ID`` annotations are useful to configure to where you want the user to go, as Grafana will generate hyperlinks from them. To obtain a dashboard uid, go to ``Dashboards -> Browse`` and check out its URL. For the panel id, view a panel and check the URL,
 - In ``Add details``, the ``Summary`` annotation will be used as the alert description,
-- In ``Custom labels``, add ``severity = major`` to raise the severity of the alert (default: warning). See also the `supported values <https://docs.alerta.io/webui/configuration.html#severity-colors>`_.
+- In ``Custom labels``, add ``severity = High`` to raise the severity of the alert (default: Low). See also the `supported values <https://github.com/alerta/alerta/blob/master/alerta/models/alarms/isa_18_2.py#L14>`_.
 
 Alerta dashboard
 ``````````````````
 
-The Alerta dashboard (http://localhost:8081) provides an overview of received alerts, which stay in the list until the alert condition disappears, and the alert is explicitly acknowledged or deleted:
+The Alerta dashboard (http://localhost:8081) provides an overview of received alerts, according to the ISA 18.2 Alarm Model. It distinguishes the following states:
 
-- *Acknowledging* an alert silences it for a day,
-- *Shelving* an alert silences it for 2 hours, and removes it from more overviews,
+- ``NORM``: the situation is nominal (any past alarm condition has been acknowledged),
+- ``UNACK``: an alarm condition is active, which has not been acknowledged by an operator,
+- ``RTNUN``: an alarm condition came and went, but has not been acknowledged by an operator,
+- ``ACKED``: an alarm condition is active, and has been acknowledged by an operator.
+
+Furthermore, the following rarer states are known:
+
+- ``SHLVD``: the alert is put aside, regardless of its condition,
+- ``DSUPR``: the alert is intentionally suppressed,
+- ``OOSRV``: the alert concerns something out of service, and thus should be ignored.
+
+An alert stays in the displayed list until its alarm condition disappears, *and* it is explicitly acknowledged, shelved, or deleted:
+
+- *Acknowledging* an alert silences it for a day, unless its severity rises,
+- *Shelving* an alert silences it for a week, regardless of what happens,
 - *Watching* an alert means receiving browser notifications on changes,
 - *Deleting* an alert removes it until Grafana sends it again (default: 10 minutes).
 
-See ``docker-compose/alerta-web/alertad.conf`` for these settings.
+See ``docker-compose/alerta-server/alertad.conf`` for these settings.
 
 Several installed plugins enhance the received events:
 
@@ -135,9 +148,9 @@ Our Alerta setup is configured to send alerts to Slack. To set this up, you need
 
 .. hint:: To obtain the ``OAuth Token`` later on, go to https://api.slack.com/apps, click on your App, and look under ``Install App``.
 
-Now, edit ``docker-compose/alerta-web/alerta-secrets.json``:
+Now, edit ``docker-compose/alerta-server/alerta-secrets.json``:
 
-.. literalinclude:: ../../../docker-compose/alerta-web/alerta-secrets.json
+.. literalinclude:: ../../../docker-compose/alerta-server/alerta-secrets.json
 
 The ``SLACK_TOKEN`` is the ``OAuth Token``, and the ``SLACK_CHANNEL`` is the channel in which to post the alerts.
 
diff --git a/tangostationcontrol/docs/source/devices/antennafield.rst b/tangostationcontrol/docs/source/devices/antennafield.rst
new file mode 100644
index 0000000000000000000000000000000000000000..e91a3ba71ec4b417fdadc7a9183baef16e75bf31
--- /dev/null
+++ b/tangostationcontrol/docs/source/devices/antennafield.rst
@@ -0,0 +1,4 @@
+antennafield
+====================
+
+``antennafield == DeviceProxy("STAT/AntennaField/1")``
diff --git a/tangostationcontrol/docs/source/devices/temperature-manager.rst b/tangostationcontrol/docs/source/devices/temperature-manager.rst
index a69b807b7ed9f504964bbba808f5de49430ed4fd..c4f919377d5fbcb79338b0ea28e24c4cbf35c975 100644
--- a/tangostationcontrol/docs/source/devices/temperature-manager.rst
+++ b/tangostationcontrol/docs/source/devices/temperature-manager.rst
@@ -1,4 +1,4 @@
 temperature-manager
 ====================
 
-``temperature_manager == DeviceProxy("STAT/Temperature_manager/1")``
+``temperature_manager == DeviceProxy("STAT/TemperatureManager/1")``
diff --git a/tangostationcontrol/docs/source/index.rst b/tangostationcontrol/docs/source/index.rst
index 659b7ffc1b3fd12c738f660706ff2241449f2330..3808637151423688e7b25dd0a50d0e05a0bf3ae3 100644
--- a/tangostationcontrol/docs/source/index.rst
+++ b/tangostationcontrol/docs/source/index.rst
@@ -22,6 +22,7 @@ Even without having access to any LOFAR2.0 hardware, you can install the full st
    devices/tilebeam
    devices/beamlet
    devices/digitalbeam
+   devices/antennafield
    devices/boot
    devices/docker
    devices/pdu
diff --git a/tangostationcontrol/requirements.txt b/tangostationcontrol/requirements.txt
index 62cf05034ea7b40c479f6b75eb4c8d2641f2dcf9..b65be92168032276a4120804d581d4bc95e6c028 100644
--- a/tangostationcontrol/requirements.txt
+++ b/tangostationcontrol/requirements.txt
@@ -13,3 +13,4 @@ docker >= 5.0.3 # Apache 2
 python-logstash-async >= 2.3.0 # MIT
 python-casacore >= 3.3.1 # LGPLv3
 etrs-itrs@git+https://github.com/brentjens/etrs-itrs # license pending
+lofarantpos >= 0.5.0 # Apache 2
diff --git a/tangostationcontrol/setup.cfg b/tangostationcontrol/setup.cfg
index b9137a437285068f8ad03a7d324dfa8b9cfdb945..1d52727f782ff41dd2e7e75b2d3137314bfdb82f 100644
--- a/tangostationcontrol/setup.cfg
+++ b/tangostationcontrol/setup.cfg
@@ -39,6 +39,7 @@ console_scripts =
     l2ss-tilebeam = tangostationcontrol.devices.tilebeam:main
     l2ss-beamlet = tangostationcontrol.devices.sdp.beamlet:main
     l2ss-digitalbeam = tangostationcontrol.devices.sdp.digitalbeam:main
+    l2ss-antennafield = tangostationcontrol.devices.antennafield:main
     l2ss-boot = tangostationcontrol.devices.boot:main
     l2ss-docker-device = tangostationcontrol.devices.docker_device:main
     l2ss-observation = tangostationcontrol.devices.observation:main
diff --git a/tangostationcontrol/tangostationcontrol/beam/geo.py b/tangostationcontrol/tangostationcontrol/beam/geo.py
index 033a6c4e4293573d05cb3fa09bcab5071c88a97b..2c8195796811237750becb29f28fcf904f34f03d 100644
--- a/tangostationcontrol/tangostationcontrol/beam/geo.py
+++ b/tangostationcontrol/tangostationcontrol/beam/geo.py
@@ -1,5 +1,7 @@
 import etrsitrs
+import lofarantpos.geo
 import numpy
+import math
 
 """
    LOFAR station positions are measured in ETRS89, which are the coordinates of the position as it would be in 1989.
@@ -16,18 +18,34 @@ import numpy
    The ETRSitrs package does all the transformation calculations for us.
 """
 
+def _apply_fn_on_one_element_or_array(fn, array: numpy.array) -> numpy.array:
+    if array.ndim == 1:
+       # convert a single coordinate triple
+       return fn(array)
+    else:
+       # convert each coordinate triple
+       return numpy.apply_along_axis(fn, 1, array)
+
 def ETRS_to_ITRF(ETRS_coordinates: numpy.array, ITRF_reference_frame: str = "ITRF2005", ITRF_reference_epoch: float = 2015.5) -> numpy.array:
     """ Convert an array of coordinate triples from ETRS to ITRF, in the given reference frame and epoch. """
 
     # fetch converter
     ETRS_to_ITRF_fn = etrsitrs.convert_fn("ETRF2000", ITRF_reference_frame, ITRF_reference_epoch)
 
-    if ETRS_coordinates.ndim == 1:
-       # convert a single coordinate triple
-       ITRF_coordinates = ETRS_to_ITRF_fn(ETRS_coordinates)
-    else:
-       # convert each coordinate triple
-       ITRF_coordinates = numpy.apply_along_axis(ETRS_to_ITRF_fn, 1, ETRS_coordinates)
+    return _apply_fn_on_one_element_or_array(ETRS_to_ITRF_fn, ETRS_coordinates)
+
+def ETRS_to_GEO(ETRS_coordinates: numpy.array) -> numpy.array:
+    """ Convert an array of coordinate triples from ETRS to latitude/longitude (degrees). """
+
+    def ETRS_to_GEO_fn(etrs_coords):
+        geo_coords = lofarantpos.geo.geographic_from_xyz(etrs_coords)
+
+        return numpy.array([
+            geo_coords['lat_rad'] * 180 / math.pi,
+            geo_coords['lon_rad'] * 180 / math.pi
+            ])
+
+    return _apply_fn_on_one_element_or_array(ETRS_to_GEO_fn, ETRS_coordinates)
 
-    # return computed ITRF coordinates
-    return ITRF_coordinates
+# Geo coordinates are only used for rough positioning. The difference between ITRF and ETRS matters little here
+ITRF_to_GEO = ETRS_to_GEO
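+
+# Rough usage sketch (coordinates approximate, for illustration only): converting the ETRS
+# position of the LOFAR core to latitude/longitude in degrees yields roughly [52.9, 6.9]:
+#
+#   ETRS_to_GEO(numpy.array([3826577.066, 461022.948, 5064892.786]))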
diff --git a/tangostationcontrol/tangostationcontrol/clients/snmp_client.py b/tangostationcontrol/tangostationcontrol/clients/snmp_client.py
index 7a7f45808cdc2d160cb9db3356d3a0e9beda4be0..83bdfb2e6da940cdb3d297e8fb521ce533fb8ab8 100644
--- a/tangostationcontrol/tangostationcontrol/clients/snmp_client.py
+++ b/tangostationcontrol/tangostationcontrol/clients/snmp_client.py
@@ -2,6 +2,8 @@
 from tangostationcontrol.clients.comms_client import CommClient
 
 from pysnmp import hlapi
+from pysnmp.smi import builder
+from os import path
 
 import numpy
 import logging
@@ -12,8 +14,9 @@ __all__ = ["SNMP_client"]
 
 snmp_to_numpy_dict = {
     hlapi.Integer32: numpy.int64,
+    hlapi.Integer: numpy.int64,
     hlapi.TimeTicks: numpy.int64,
-    str: str,
+    hlapi.OctetString: str,
     hlapi.ObjectIdentity: str,
     hlapi.Counter32: numpy.int64,
     hlapi.Gauge32: numpy.int64,
@@ -21,6 +24,27 @@ snmp_to_numpy_dict = {
 }
 
 
+class SNMP_comm:
+    """
+    Holds information for communicating with the SNMP server
+    """
+
+    def __init__(self, community, host, port):
+        self.port = port
+        self.engine = hlapi.SnmpEngine()
+        self.community = hlapi.CommunityData(community)
+        self.transport = hlapi.UdpTransportTarget((host, port))
+
+        # context data sets the version used. Default SNMPv2
+        self.ctx_data = hlapi.ContextData()
+
+    def getter(self, objs):
+        return next(hlapi.getCmd(self.engine, self.community, self.transport, self.ctx_data, *objs))
+
+    def setter(self, objs):
+        return next(hlapi.setCmd(self.engine, self.community, self.transport, self.ctx_data, *objs))
+
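+# Usage sketch (host and community are hypothetical): query a single OID through the
+# shared communication object:
+#
+#   comm = SNMP_comm("public", "localhost", 161)
+#   errorIndication, errorStatus, errorIndex, *varBinds = comm.getter(
+#       (hlapi.ObjectType(hlapi.ObjectIdentity("SNMPv2-MIB", "sysDescr", 0)),))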
+
 class SNMP_client(CommClient):
     """
         messages to keep a check on the connection. On connection failure, reconnects once.
@@ -36,26 +60,23 @@ class SNMP_client(CommClient):
         super().__init__(fault_func, try_interval)
 
         logger.debug(f"setting up SNMP engine with host: {host} and community: {community}")
-        self.port = port
-
-        self.engine = hlapi.SnmpEngine()
-        self.community = hlapi.CommunityData(community)
-        self.transport = hlapi.UdpTransportTarget((host, port))
-
-        # context data sets the version used. Default SNMPv2
-        self.ctx_data = hlapi.ContextData()
+        self.SNMP_comm = SNMP_comm(community, host, port)
 
         # only sets up the engine, doesn't connect
         self.connected = True
 
+    def _process_annotation(self, annotation):
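+        """ Parse the given attribute annotation into its (mib, name, index) components. """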
 
-    def _setup_annotation(self, annotation):
-        """
-        parses the annotation this attribute received for its initialisation.
-        """
+        try:
+            mib = annotation["mib"]
+            name = annotation["name"]
+
+            # SNMP has tables that require an index number to access them. Regular non-table variables have an index of 0.
+            idx = annotation.get('index', 0)
 
-        wrapper = annotation_wrapper(annotation)
-        return wrapper
+            return mib, name, idx
+        except KeyError:
+            raise ValueError(f"SNMP attribute annotation requires a dict argument with both a 'name' and 'mib' key. Instead got: {annotation}")        
 
     def setup_value_conversion(self, attribute):
         """
@@ -78,11 +99,11 @@ class SNMP_client(CommClient):
         """
 
         # process the annotation
-        wrapper = self._setup_annotation(annotation)
+        mib, name, idx = self._process_annotation(annotation)
 
         # get all the necessary data to set up the read/write functions from the attribute_wrapper
         dim_x, dim_y, dtype = self.setup_value_conversion(attribute)
-        snmp_attr = snmp_attribute(self, wrapper, dtype, dim_x, dim_y)
+        snmp_attr = snmp_attribute(self, mib, name, idx, dtype, dim_x, dim_y)
 
         # return the read/write functions
         def read_function():
@@ -93,145 +114,48 @@ class SNMP_client(CommClient):
 
         return read_function, write_function
 
+class snmp_attribute:
 
-class annotation_wrapper:
-    def __init__(self, annotation):
-        """
-        The SNMP client uses a dict and takes the following keys:
-
-        either
-            oids: Required. An oid string of the object
-        or
-            mib: the mib name
-            name: name of the value to read
-            index (optional) the index if the value thats being read from is a table.
-        """
-
-        # values start as None because we have a way too complicated interface
-        self.oids = None
-        self.mib = None
-        self.name = None
-        self.idx = None
-
-        # check if the 'oids' key is used and not the 'mib' and 'name' keys
-
-        if 'oids' in annotation and 'mib' not in annotation and 'name' not in annotation:
-            self.oids = annotation["oids"]
-
-            # checks to make sure this isn't present
-            if 'index' in annotation:
-                raise ValueError(f"SNMP attribute annotation doesn't support oid type declarations with an index present.")
-
+    def __init__(self, comm: SNMP_comm, mib, name, idx, dtype, dim_x, dim_y):
 
-        # check if the 'oids' key is NOT used but instead the 'mib' and 'name' keys
-        elif 'oids' not in annotation and 'mib' in annotation and 'name' in annotation:
-            self.mib = annotation["mib"]
-            self.name = annotation["name"]
+        self.comm = comm
+        self.mib = mib
+        self.name = name
+        self.idx = idx
+        self.dtype = dtype
 
-            # SNMP has tables that require an index number to access them. regular non-table variable have an index of 0
-            self.idx = annotation.get('index', 0)
+        self.len = self.get_len(dim_x, dim_y)
+        self.is_scalar = self.len == 1
 
-        else:
-            raise ValueError(
-                f"SNMP attribute annotation requires a dict argument with either a 'oids' key or both a 'name' and 'mib' key. Not both. Instead got: {annotation}")
+        self.objID = self.create_objID()
 
-    def create_objID(self, x, y):
-        is_scalar = (x + y) == 1
+    def get_len(self, dim_x, dim_y):
+        """""Small helper function to not clutter the __init__"""
 
-        # if oids are used
-        if self.oids is not None:
-            # get a list of str of the oids
-            self.oids = self._get_oids(x, y, self.oids)
+        if dim_x == 0:
+            dim_x = 1
+        if dim_y == 0:
+            dim_y = 1
+        return dim_x * dim_y
 
-            # turn the list of oids in to a tuple of pysnmp object identities. These are used for the
-            objID = tuple(hlapi.ObjectIdentity(self.oids[i]) for i in range(len(self.oids)))
+    def create_objID(self):
 
-        # if mib + name is used
+        if self.is_scalar:
+            objID = hlapi.ObjectIdentity(self.mib, self.name, self.idx)
         else:
-
-            # only scalars can be used at the present time.
-            if not is_scalar:
-                # tuple(hlapi.ObjectIdentity(mib, name, idx) for i in range(len(oids)))
-
-                raise ValueError(f"MIB + name type attributes can only be scalars, got dimensions of: ({x}, {y})")
-            else:
-                objID = hlapi.ObjectIdentity(self.mib, self.name, self.idx)
+            objID = tuple(hlapi.ObjectIdentity(self.mib, self.name, self.idx + i) for i in range(self.len))
 
         return objID
 
-    def _get_oids(self, x, y, in_oid):
-        """
-        This function expands oids depending on dimensionality.
-        if its a scalar its left alone, but if its an array it creates a list of sequential oids if not already provided
-
-        scalar "1.1.1.1" -> stays the same
-        spectrum: "1.1.1.1" -> ["1.1.1.1.1", "1.1.1.1.2, ..."]
-        """
-
-        if x == 0:
-            x = 1
-        if y == 0:
-            y = 1
-
-        is_scalar = (x * y) == 1
-        nof_oids = x * y
-
-        # if scalar
-        if is_scalar:
-            if type(in_oid) is str:
-                # for ease of handling put single oid in a 1 element list
-                in_oid = [in_oid]
-
-            return in_oid
-
-        else:
-            # if we got a single str oid, make a list of sequential oids
-            if type(in_oid) is str:
-                return ["{}.{}".format(in_oid, i + 1) for i in range(nof_oids)]
-
-            # if its an already expanded list of all oids
-            elif type(in_oid) is list and len(in_oid) == nof_oids:
-                return in_oid
-
-            # if its a list of attributes with the wrong length.
-            else:
-                raise ValueError(
-                    "SNMP oids need to either be a single value or an array the size of the attribute dimensions. got: {} expected: {}x{}={}".format(
-                        len(in_oid), x, y, x * y))
-
-
-class snmp_attribute:
-
-    def __init__(self, client : SNMP_client, wrapper, dtype, dim_x, dim_y):
-
-        self.client = client
-        self.wrapper = wrapper
-        self.dtype = dtype
-        self.dim_x = dim_x
-        self.dim_y = dim_y
-        self.is_scalar = (self.dim_x + self.dim_y) == 1
-
-        self.objID = self.wrapper.create_objID(self.dim_x, self.dim_y)
-
-    def next_wrap(self, cmd):
-        """
-        This function exists to allow the next(cmd) call to be mocked for unit testing. As the
-        """
-        return next(cmd)
-
     def read_function(self):
         """
         Read function we give to the attribute wrapper
         """
-
-        # must be recreated for each read it seems
+        # the ObjectType tuple must be recreated for each read, it seems
         self.objs = tuple(hlapi.ObjectType(i) for i in self.objID)
 
-        # get the thingy to get the values
-        get_cmd = hlapi.getCmd(self.client.engine, self.client.community, self.client.trasport, self.client.ctx_data, *self.objs)
-
-        # dont ask me why 'next' is used to get all of the values
-        errorIndication, errorStatus, errorIndex, *varBinds = self.next_wrap(get_cmd)
+        # get all of the values
+        errorIndication, errorStatus, errorIndex, *varBinds = self.comm.getter(self.objs)
 
         # get all the values in a list converted to the correct type
         val_lst = self.convert(varBinds)
@@ -250,8 +174,7 @@ class snmp_attribute:
         else:
             write_obj = tuple(hlapi.ObjectType(self.objID[i], value[i]) for i in range(len(self.objID)))
 
-        set_cmd = hlapi.setCmd(self.client.engine, self.client.community, self.client.trasport, self.client.ctx_data, *write_obj)
-        errorIndication, errorStatus, errorIndex, *varBinds = self.next_wrap(set_cmd)
+        errorIndication, errorStatus, errorIndex, *varBinds = self.comm.setter(write_obj)
 
     def convert(self, varBinds):
         """
@@ -259,20 +182,43 @@ class snmp_attribute:
         """
 
         vals = []
+
         if not self.is_scalar:
             #just the first element of this single element list
             varBinds = varBinds[0]
 
         for varBind in varBinds:
-            # class 'DisplayString' doesnt want to play along for whatever reason
-            if "DisplayString" in str(type(varBind[1])):
-                vals.append(varBind[1].prettyPrint())
-            elif type(varBind[1]) == hlapi.IpAddress:
+
+            # Some MIBs use custom types, some don't. Custom types are merely wrapped base types.
+            varbind_types = varBind[1].__class__.__bases__ + (type(varBind[1]),)
+
+            snmp_type = None
+
+            # find if one of the base types is present.
+            for i in varbind_types:
+                if i in snmp_to_numpy_dict.keys():
+                    snmp_type = i
+
+            if snmp_type is None:
+                raise TypeError(f"Error: did not find a valid snmp type. Got: {varbind_types}, expected one of: '{snmp_to_numpy_dict.keys()}'")
+
+            if snmp_type is hlapi.IpAddress:
                 # IpAddress values get printed as their raw value but in hex (7F 20 20 01 for 127.0.0.1 for example)
                 vals.append(varBind[1].prettyPrint())
+
+            elif (snmp_type is hlapi.Integer32 or snmp_type is hlapi.Integer) and self.dtype == str:
+                # Integers can have 'named values', where a value can be translated to a specific name (basically a dict).
+                # Example: {1: "other", 2: "invalid", 3: "dynamic", 4: "static"}
+
+                if varBind[1].namedValues == {}:
+                    # An empty dict {} means no namedValue's are present.
+                    vals.append(snmp_to_numpy_dict[snmp_type](varBind[1]))
+                else:
+                    # append the named values string instead of the raw number.
+                    vals.append(varBind[1].prettyPrint())
             else:
                 # convert from the funky pysnmp types to numpy types and then append
-                vals.append(snmp_to_numpy_dict[type(varBind[1])](varBind[1]))
+                vals.append(snmp_to_numpy_dict[snmp_type](varBind[1]))
 
         if self.is_scalar:
             vals = vals[0]
@@ -280,3 +226,16 @@ class snmp_attribute:
         return vals
 
 
+class mib_loader:
+
+    def __init__(self, mib_dir: str):
+        self.mibBuilder = builder.MibBuilder()
+
+        if not path.isabs(mib_dir):
+            mib_dir = "/" + mib_dir
+
+        mib_source = builder.DirMibSource(mib_dir)
+        self.mibBuilder.addMibSources(mib_source)
+
+    def load_pymib(self, mib_name):
+        self.mibBuilder.loadModule(mib_name)
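+
+# Usage sketch (directory and module name are hypothetical): point the loader at a directory
+# of pysnmp-compiled MIB modules and load one by name:
+#
+#   loader = mib_loader("mibs")
+#   loader.load_pymib("TEST-MIB")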
diff --git a/tangostationcontrol/tangostationcontrol/devices/README.md b/tangostationcontrol/tangostationcontrol/devices/README.md
index 19d72e35c4f09d06d500d997d5255c1abdd53b77..378cece58eaa3ebe181e129e6f2f6cb6c2b8b1c3 100644
--- a/tangostationcontrol/tangostationcontrol/devices/README.md
+++ b/tangostationcontrol/tangostationcontrol/devices/README.md
@@ -10,7 +10,7 @@ If a new device is added, it will (likely) need to be referenced in several plac
 - Adjust `docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py` to make an alias for it available in Jupyter,
 - Adjust `tangostationcontrol/tangostationcontrol/devices/boot.py` to add the device to the station initialisation sequence,
 - Add to `docker-compose/` to create a YaML file to start the device in a docker container. NOTE: it needs a unique 57xx port assigned,
-                            current _unused_ port value: 5715
+                            current _unused_ port value: 5716
 - Adjust `tangostationcontrol/setup.cfg` to add an entry point for the device in the package installation,
 - Add to `tangostationcontrol/tangostationcontrol/integration_test/default/devices/` to add an integration test,
 - Adjust `sbin/run_integration_test.sh` to have the device started when running the integration tests,
diff --git a/tangostationcontrol/tangostationcontrol/devices/antennafield.py b/tangostationcontrol/tangostationcontrol/devices/antennafield.py
new file mode 100644
index 0000000000000000000000000000000000000000..e694649c3de773d4e380cd3feabbe02eda2e3a12
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/devices/antennafield.py
@@ -0,0 +1,195 @@
+# -*- coding: utf-8 -*-
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+""" AntennaField Device Server for LOFAR2.0
+
+"""
+from tango import DeviceProxy, DevSource
+from tango.server import device_property, attribute, AttrWriteType
+import numpy
+
+from tangostationcontrol.common.entrypoint import entry
+from tangostationcontrol.devices.lofar_device import lofar_device
+from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
+from tangostationcontrol.devices.device_decorators import fault_on_error
+
+import logging
+logger = logging.getLogger()
+
+
+__all__ = ["AntennaField", "main"]
+
+NUMBER_OF_HBAT = 48
+NUMBER_OF_ELEMENTS_PER_TILE = 16
+
+class mapped_attribute(attribute):
+    def __init__(self, mapping_attribute, dtype, max_dim_x, max_dim_y=0, access=AttrWriteType.READ, **kwargs):
+
+        if access == AttrWriteType.READ_WRITE:
+            @fault_on_error()
+            def write_func_wrapper(device, value):
+                device.set_mapped_attribute(mapping_attribute, value)
+
+            self.fset = write_func_wrapper
+
+        @fault_on_error()
+        def read_func_wrapper(device):
+            return device.get_mapped_attribute(mapping_attribute)
+
+        self.fget = read_func_wrapper
+        
+        super().__init__(dtype=dtype, max_dim_y=max_dim_y, max_dim_x=max_dim_x, access=access, fisallowed="is_attribute_wrapper_allowed", **kwargs)
+
+            
+@device_logging_to_python()
+class AntennaField(lofar_device):
+
+    HBAT_Power_to_RECV_mapping = device_property(
+        dtype=(numpy.int32,),
+        doc='The mapping of HBAT power lines to RECV mapping. Each RECV can handle 96 inputs. The HBAT number is the index and the value shows to which receiver device it is connected and on which input. The first integer is the input. The second integer is the RECV id. Example: [0, 3] = first receiver of property RECV_devices with input 3. -1 means that the HBAT is not connected. The property is stored in a one dimensional structure. It needs to be reshaped to a list of lists of two items.',
+        mandatory=False,
+        default_value = [-1] * NUMBER_OF_HBAT * 2
+    )
+
+    HBAT_Control_to_RECV_mapping = device_property(
+        dtype=(numpy.int32,),
+        doc='The mapping of HBAT control lines to RECV mapping. Each RECV can handle 96 inputs. The HBAT number is the index and the value shows to which receiver device it is connected and on which input. The first integer is the input. The second integer is the RECV id. Example: [1, 3] = STAT/RECV/1 with input 3. -1 means that the HBAT is not connected. The property is stored in a one dimensional structure. It needs to be reshaped to a list of lists of two items.',
+        mandatory=False,
+        default_value = [-1] * NUMBER_OF_HBAT * 2
+    )
+
+    RECV_devices = device_property(
+        dtype=(str,),
+        doc='Which RECV devices are in use by the AntennaField. The order is important: it must match the order used in the mapping properties.',
+        mandatory=False,
+        default_value = []
+    )
+
+    HBAT_ANT_mask_RW             = mapped_attribute("ANT_mask_RW", dtype=(numpy.bool_,), max_dim_x=NUMBER_OF_HBAT, access=AttrWriteType.READ_WRITE)
+    HBAT_BF_delay_steps_R        = mapped_attribute("HBAT_BF_delay_steps_R", dtype=((numpy.int64,),), max_dim_x=NUMBER_OF_ELEMENTS_PER_TILE * 2, max_dim_y=NUMBER_OF_HBAT)
+    HBAT_BF_delay_steps_RW       = mapped_attribute("HBAT_BF_delay_steps_RW", dtype=((numpy.int64,),), max_dim_x=NUMBER_OF_ELEMENTS_PER_TILE * 2, max_dim_y=NUMBER_OF_HBAT, access=AttrWriteType.READ_WRITE)
+    HBAT_LED_on_R                = mapped_attribute("HBAT_LED_on_R", dtype=((numpy.bool_,),), max_dim_x=NUMBER_OF_ELEMENTS_PER_TILE * 2, max_dim_y=NUMBER_OF_HBAT)
+    HBAT_LED_on_RW               = mapped_attribute("HBAT_LED_on_RW", dtype=((numpy.bool_,),), max_dim_x=NUMBER_OF_ELEMENTS_PER_TILE * 2, max_dim_y=NUMBER_OF_HBAT, access=AttrWriteType.READ_WRITE)
+    HBAT_PWR_LNA_on_R            = mapped_attribute("HBAT_PWR_LNA_on_R", dtype=((numpy.bool_,),), max_dim_x=NUMBER_OF_ELEMENTS_PER_TILE * 2, max_dim_y=NUMBER_OF_HBAT)
+    HBAT_PWR_LNA_on_RW           = mapped_attribute("HBAT_PWR_LNA_on_RW", dtype=((numpy.bool_,),), max_dim_x=NUMBER_OF_ELEMENTS_PER_TILE * 2, max_dim_y=NUMBER_OF_HBAT, access=AttrWriteType.READ_WRITE)
+    HBAT_PWR_on_R                = mapped_attribute("HBAT_PWR_on_R", dtype=((numpy.bool_,),), max_dim_x=NUMBER_OF_ELEMENTS_PER_TILE * 2, max_dim_y=NUMBER_OF_HBAT)
+    HBAT_PWR_on_RW               = mapped_attribute("HBAT_PWR_on_RW", dtype=((numpy.bool_,),), max_dim_x=NUMBER_OF_ELEMENTS_PER_TILE * 2, max_dim_y=NUMBER_OF_HBAT, access=AttrWriteType.READ_WRITE)
+
+    @log_exceptions()
+    def configure_for_initialise(self):
+        super().configure_for_initialise()
+        self.__setup_all_receiver_proxies()
+        self.__setup_mapper()
+
+    def __setup_all_receiver_proxies(self):
+        self.recv_proxies = []
+
+        for recv in self.RECV_devices:
+            recv_proxy = DeviceProxy(recv)
+            # Want to force non-cached results from the receiver proxies
+            recv_proxy.set_source(DevSource.DEV)
+            self.recv_proxies.append(recv_proxy)
+
+    def __setup_mapper(self):
+        number_of_receivers = len(self.RECV_devices)
+        # Reshape of mapping is needed because properties are stored in 1d arrays
+        control_mapping = numpy.reshape(self.HBAT_Control_to_RECV_mapping, (NUMBER_OF_HBAT, 2))
+        power_mapping = numpy.reshape(self.HBAT_Power_to_RECV_mapping, (NUMBER_OF_HBAT, 2))
+        self.__mapper = HBATToRecvMapper(control_mapping, power_mapping, number_of_receivers)
+
+    def get_mapped_attribute(self, mapped_point):
+        recv_results = []
+
+        for recv_proxy in self.recv_proxies:
+            result = recv_proxy.read_attribute(mapped_point).value
+            recv_results.append(result)
+
+        mapped_values = self.__mapper.map_read(mapped_point, recv_results)
+
+        return mapped_values
+
+    def set_mapped_attribute(self, mapped_point, value):
+        mapped_value = self.__mapper.map_write(mapped_point, value)
+
+        for idx, recv_proxy in enumerate(self.recv_proxies):
+            recv_proxy.write_attribute(mapped_point, mapped_value[idx])
+
+
+class HBATToRecvMapper(object):
+    def __init__(self, hbat_control_to_recv_mapping, hbat_power_to_recv_mapping, number_of_receivers):
+        self.__control_mapping = hbat_control_to_recv_mapping
+        self.__power_mapping = hbat_power_to_recv_mapping
+        self.__number_of_receivers = number_of_receivers
+        self.__default_value_mapping_read = {
+            "ANT_mask_RW": [False] * 48,
+            "HBAT_BF_delay_steps_R": [[0] * 32] * 48,
+            "HBAT_BF_delay_steps_RW": [[0] * 32] * 48,
+            "HBAT_LED_on_R": [[False] * 32] * 48,
+            "HBAT_LED_on_RW": [[False] * 32] * 48,
+            "HBAT_PWR_LNA_on_R": [[False] * 32] * 48,
+            "HBAT_PWR_LNA_on_RW": [[False] * 32] * 48,
+            "HBAT_PWR_on_R": [[False] * 32] * 48,
+            "HBAT_PWR_on_RW": [[False] * 32] * 48
+        }
+        self.__default_value_mapping_write = {
+            "ANT_mask_RW": [False] * 96,
+            "HBAT_BF_delay_steps_RW": [[0] * 32] * 96,
+            "HBAT_LED_on_RW": [[False] * 32] * 96,
+            "HBAT_PWR_LNA_on_RW": [[False] * 32] * 96,
+            "HBAT_PWR_on_RW": [[False] * 32] * 96
+        }
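+        # ANT_mask_RW is exposed by RECV as a 32x3 image, so the flat 96-value write
+        # buffer is reshaped per receiver before it is written back (see map_write).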
+        self.__reshape_attributes = {
+            "ANT_mask_RW": [32, 3]
+        }
+
+    def map_read(self, mapped_attribute, recv_results):
+        default_values = self.__default_value_mapping_read[mapped_attribute]
+
+        return self._mapped_r_values(recv_results, default_values)
+
+    def map_write(self, mapped_attribute, set_values):
+        default_values = self.__default_value_mapping_write[mapped_attribute]
+
+        mapped_values = self._mapped_rw_values(set_values, default_values)
+
+        if mapped_attribute in self.__reshape_attributes:
+            mapped_values = numpy.reshape(mapped_values,
+                                          (self.__number_of_receivers,
+                                           self.__reshape_attributes[mapped_attribute][0],
+                                           self.__reshape_attributes[mapped_attribute][1]))
+
+        return mapped_values
+
+    def _mapped_r_values(self, recv_results, default_values):
+        # start from a copy, so the shared default table is not modified in place
+        mapped_values = numpy.array(default_values)
+
+        for idx, mapping in enumerate(self.__control_mapping):
+            recv = mapping[0]
+            rcu = mapping[1]
+            if recv > 0:
+                mapped_values[idx] = recv_results[recv - 1][rcu]
+
+        return mapped_values
+
+    def _mapped_rw_values(self, set_values, default_values):
+        mapped_values = []
+
+        for _ in range(self.__number_of_receivers):
+            # one copy per receiver, so writes do not alias the shared defaults
+            mapped_values.append(numpy.array(default_values))
+
+        for idx, mapping in enumerate(self.__control_mapping):
+            recv = mapping[0]
+            rcu = mapping[1]
+            if recv > 0:
+                mapped_values[recv - 1][rcu] = set_values[idx]
+
+        return mapped_values
+
+# ----------
+# Run server
+# ----------
+def main(**kwargs):
+    """Main function of the ObservationControl module."""
+    return entry(AntennaField, **kwargs)
diff --git a/tangostationcontrol/tangostationcontrol/devices/boot.py b/tangostationcontrol/tangostationcontrol/devices/boot.py
index 3760fc5cbf5d7f01ad7e72c55ab73c0537c1cc40..90daa4954b90cd8722e4162f651532db072cb93f 100644
--- a/tangostationcontrol/tangostationcontrol/devices/boot.py
+++ b/tangostationcontrol/tangostationcontrol/devices/boot.py
@@ -244,7 +244,8 @@ class Boot(lofar_device):
                        "STAT/Beamlet/1",
                        "STAT/TileBeam/1",   # Accesses RECV and Beamlet
                        "STAT/DigitalBeam/1",
-                       "STAT/Temperature_manager/1",
+                       "STAT/TemperatureManager/1",
+                       "STAT/AntennaField/1",
                       ],
     )
 
diff --git a/tangostationcontrol/tangostationcontrol/devices/lofar_device.py b/tangostationcontrol/tangostationcontrol/devices/lofar_device.py
index cc276428d962fa006d2ef0619790b65c1f2ba270..6f0f1de56b5bd0ade282aeede42fd719ce375b13 100644
--- a/tangostationcontrol/tangostationcontrol/devices/lofar_device.py
+++ b/tangostationcontrol/tangostationcontrol/devices/lofar_device.py
@@ -143,11 +143,12 @@ class lofar_device(Device, metaclass=DeviceMeta):
     @log_exceptions()
     def Initialise(self):
         """
-        Command to ask for initialisation of this device. Can only be called in FAULT or OFF state.
+        Command to ask for initialisation of this device. Can only be called in OFF state.
 
         :return:None
         """
 
+
         self.set_state(DevState.INIT)
         self.set_status("Device is in the INIT state.")
 
diff --git a/tangostationcontrol/tangostationcontrol/devices/recv.py b/tangostationcontrol/tangostationcontrol/devices/recv.py
index 93233f86f4a0dd0291205dea5f30e1ab9ae8c5fc..ec09e216f63ccc7ce39cb3ed7bad589f05a88a50 100644
--- a/tangostationcontrol/tangostationcontrol/devices/recv.py
+++ b/tangostationcontrol/tangostationcontrol/devices/recv.py
@@ -22,7 +22,7 @@ from math import pi
 
 # Additional import
 from tangostationcontrol.beam.hba_tile import HBATAntennaOffsets
-from tangostationcontrol.beam.geo import ETRS_to_ITRF
+from tangostationcontrol.beam.geo import ETRS_to_ITRF, ITRF_to_GEO
 from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.common.lofar_logging import device_logging_to_python
 from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper
@@ -85,14 +85,14 @@ class RECV(opcua_device):
         doc='Maximum amount of time to wait after turning RCU(s) on or off',
         dtype='DevFloat',
         mandatory=False,
-        default_value=10.0
+        default_value=30.0
     )
 
     RCU_DTH_On_Off_timeout = device_property(
         doc='Maximum amount of time to wait after turning dithering on or off',
         dtype='DevFloat',
         mandatory=False,
-        default_value=20.0
+        default_value=30.0
     )
 
     # ----- Calibration values
@@ -234,6 +234,10 @@ class RECV(opcua_device):
         doc='Absolute reference position of antenna field, in ITRF',
         dtype=(numpy.float,), max_dim_x=3)
 
+    Antenna_Field_Reference_GEO_R = attribute(access=AttrWriteType.READ,
+        doc='Absolute reference position of antenna field, in latitude/longitude (degrees)',
+        dtype=(numpy.float,), max_dim_x=2)
+
     HBAT_antenna_ITRF_offsets_R = attribute(access=AttrWriteType.READ,
         doc='Offsets of the antennas within a tile, in ITRF ("iHBADeltas"). True shape: 96x16x3.',
         dtype=((numpy.float,),), max_dim_x=48, max_dim_y=96)
@@ -242,6 +246,10 @@ class RECV(opcua_device):
         doc='Absolute reference position of each tile, in ITRF',
         dtype=((numpy.float,),), max_dim_x=3, max_dim_y=96)
 
+    HBAT_reference_GEO_R = attribute(access=AttrWriteType.READ,
+        doc='Absolute reference position of each tile, in latitude/longitude (degrees)',
+        dtype=((numpy.float,),), max_dim_x=2, max_dim_y=96)
+
     def read_Antenna_Field_Reference_ITRF_R(self):
         # provide ITRF field coordinates if they were configured
         if self.Antenna_Field_Reference_ITRF:
@@ -250,6 +258,9 @@ class RECV(opcua_device):
         # calculate them from ETRS coordinates if not, using the configured ITRF reference
         ETRS_coordinates = numpy.array(self.Antenna_Field_Reference_ETRS).reshape(3)
         return ETRS_to_ITRF(ETRS_coordinates, self.ITRF_Reference_Frame, self.ITRF_Reference_Epoch)
+
+    def read_Antenna_Field_Reference_GEO_R(self):
+        return ITRF_to_GEO(self.read_Antenna_Field_Reference_ITRF_R())
     
     def read_HBAT_antenna_ITRF_offsets_R(self):
         base_antenna_offsets        = numpy.array(self.HBAT_base_antenna_offsets).reshape(16,3)
@@ -274,6 +285,9 @@ class RECV(opcua_device):
         ETRS_coordinates = numpy.array(self.HBAT_reference_ETRS).reshape(96,3)
         return ETRS_to_ITRF(ETRS_coordinates, self.ITRF_Reference_Frame, self.ITRF_Reference_Epoch)
 
+    def read_HBAT_reference_GEO_R(self):
+        return ITRF_to_GEO(self.read_HBAT_reference_ITRF_R())
+
     # ----------
     # Summarising Attributes
     # ----------
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/beamlet.py b/tangostationcontrol/tangostationcontrol/devices/sdp/beamlet.py
index ae61d77bd51299d8dba44e77422d09d324d1ec53..ddd3e7899e354c3442b38c0df27737e267db0952 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/beamlet.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/beamlet.py
@@ -30,6 +30,7 @@ class Beamlet(opcua_device):
     A_pn = 6
     N_pol = 2
     N_beamlets_ctrl = 488
+    N_beamsets_ctrl = 2
     N_pol_bf = 2
 
     # -----------------
@@ -91,6 +92,7 @@ class Beamlet(opcua_device):
     FPGA_beamlet_output_hdr_udp_destination_port_RW = attribute_wrapper(comms_annotation=["FPGA_beamlet_output_hdr_udp_destination_port_RW"], datatype=numpy.uint16, dims=(16,), access=AttrWriteType.READ_WRITE)
     FPGA_beamlet_output_scale_R = attribute_wrapper(comms_annotation=["FPGA_beamlet_output_scale_R"], datatype=numpy.double, dims=(16,))
     FPGA_beamlet_output_scale_RW = attribute_wrapper(comms_annotation=["FPGA_beamlet_output_scale_RW"], datatype=numpy.double, dims=(16,), access=AttrWriteType.READ_WRITE)
+    FPGA_beamlet_output_bsn_R = attribute_wrapper(comms_annotation=["FPGA_beamlet_output_bsn_R"], datatype=numpy.int64, dims=(N_pn, N_beamsets_ctrl))
 
     FPGA_beamlet_output_nof_packets_R = attribute_wrapper(comms_annotation=["FPGA_beamlet_output_nof_packets_R"], datatype=numpy.int32, dims=(N_beamlets_ctrl, N_pn))
     FPGA_beamlet_output_nof_valid_R = attribute_wrapper(comms_annotation=["FPGA_beamlet_output_nof_valid_R"], datatype=numpy.int32, dims=(N_beamlets_ctrl, N_pn))
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py b/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py
index f507ef57c38b7b271e50e0404fce906a204b7227..914c84441b2170a68cba9b0372639ea25bc76e4d 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py
@@ -154,7 +154,8 @@ class SDP(opcua_device):
     # TODO: needs to not be statically declared as this can change depending on the station and configuration
     S_pn = 12 # Number of ADC signal inputs per Processing Node (PN) FPGA.
     N_pn = 16 # Number of FPGAs per antenna band that is controlled via the SC - SDP interface.
-
+    N_beamsets_ctrl = 2
+
     # OPC-UA MP only points for AIT
     FPGA_signal_input_mean_R = attribute_wrapper(comms_annotation=["FPGA_signal_input_mean_R"], datatype=numpy.double , dims=(S_pn, N_pn))
     FPGA_signal_input_rms_R = attribute_wrapper(comms_annotation=["FPGA_signal_input_rms_R"], datatype=numpy.double, dims=(S_pn, N_pn))
@@ -170,6 +171,8 @@ class SDP(opcua_device):
     FPGA_signal_input_samples_delay_R = attribute_wrapper(comms_annotation=["FPGA_signal_input_samples_delay_R"], datatype=numpy.uint32, dims=(S_pn, N_pn))
     FPGA_signal_input_samples_delay_RW = attribute_wrapper(comms_annotation=["FPGA_signal_input_samples_delay_RW"], datatype=numpy.uint32, dims=(S_pn, N_pn), access=AttrWriteType.READ_WRITE)
 
+    FPGA_bst_offload_bsn_R = attribute_wrapper(comms_annotation=["FPGA_bst_offload_bsn_R"], datatype=numpy.int64, dims=(N_pn, N_beamsets_ctrl))
+
     # ----------
     # Summarising Attributes
     # ----------
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py b/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py
index 8f21a486fb5dcb9ff21b5a22dc6b4ad252cdd7c0..2ce9d18a718ec3ee61378299b6a45ce7d1acaf83 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py
@@ -87,6 +87,7 @@ class SST(Statistics):
     FPGA_sst_offload_hdr_ip_destination_address_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_hdr_ip_destination_address_R"], datatype=numpy.str, dims=(16,))
     FPGA_sst_offload_hdr_udp_destination_port_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_hdr_udp_destination_port_RW"], datatype=numpy.uint16, dims=(16,), access=AttrWriteType.READ_WRITE)
     FPGA_sst_offload_hdr_udp_destination_port_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_hdr_udp_destination_port_R"], datatype=numpy.uint16, dims=(16,))
+    FPGA_sst_offload_bsn_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_bsn_R"], datatype=numpy.int64, dims=(16,))
     FPGA_sst_offload_weighted_subbands_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_weighted_subbands_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
     FPGA_sst_offload_weighted_subbands_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_sst_offload_weighted_subbands_R"], datatype=numpy.bool_, dims=(16,))
 
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py b/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py
index c12eb5955950aa0af9ff1606ab53147106dea59d..443012ced8b29c6a34c4a4c57932554733ba022c 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py
@@ -103,6 +103,7 @@ class XST(Statistics):
     FPGA_xst_offload_hdr_ip_destination_address_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_hdr_ip_destination_address_R"], datatype=numpy.str, dims=(16,))
     FPGA_xst_offload_hdr_udp_destination_port_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_hdr_udp_destination_port_RW"], datatype=numpy.uint16, dims=(16,), access=AttrWriteType.READ_WRITE)
     FPGA_xst_offload_hdr_udp_destination_port_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_hdr_udp_destination_port_R"], datatype=numpy.uint16, dims=(16,))
+    FPGA_xst_offload_bsn_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_offload_bsn_R"], datatype=numpy.int64, dims=(16,))
     FPGA_xst_processing_enable_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_processing_enable_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
     FPGA_xst_processing_enable_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_processing_enable_R"], datatype=numpy.bool_, dims=(16,))
     FPGA_xst_subband_select_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_subband_select_RW"], datatype=numpy.uint32, dims=(8,16), access=AttrWriteType.READ_WRITE)
@@ -116,6 +117,35 @@ class XST(Statistics):
     FPGA_xst_offload_nof_packets_R = attribute_wrapper(comms_annotation=["FPGA_xst_offload_nof_packets_R"], datatype=numpy.int32, dims=(16,))
     FPGA_xst_offload_nof_valid_R = attribute_wrapper(comms_annotation=["FPGA_xst_offload_nof_valid_R"], datatype=numpy.int32, dims=(16,))
 
+    N_pn = 16   # Number of FPGAs per antenna band that is controlled via the SC - SDP interface.
+    P_sq = 9
+
+    FPGA_xst_ring_rx_clear_total_counts_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_clear_total_counts_RW"], datatype=numpy.bool_, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
+    FPGA_xst_ring_rx_clear_total_counts_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_clear_total_counts_R"], datatype=numpy.bool_, dims=(N_pn,))
+    FPGA_xst_rx_align_stream_enable_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_rx_align_stream_enable_RW"], datatype=numpy.uint32, dims=(N_pn,), access=AttrWriteType.READ_WRITE)
+    FPGA_xst_rx_align_stream_enable_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_rx_align_stream_enable_R"], datatype=numpy.uint32, dims=(N_pn,))
+    FPGA_xst_ring_rx_total_nof_packets_received_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_total_nof_packets_received_R"], datatype=numpy.uint32, dims=(N_pn,))
+    FPGA_xst_ring_rx_total_nof_packets_discarded_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_total_nof_packets_discarded_R"], datatype=numpy.uint32, dims=(N_pn,))
+    FPGA_xst_ring_rx_total_nof_sync_received_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_total_nof_sync_received_R"], datatype=numpy.uint32, dims=(N_pn,))
+    FPGA_xst_ring_rx_total_nof_sync_discarded_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_total_nof_sync_discarded_R"], datatype=numpy.uint32, dims=(N_pn,))
+    FPGA_xst_ring_rx_bsn_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_bsn_R"], datatype=numpy.int64, dims=(N_pn, N_pn))
+    FPGA_xst_ring_rx_nof_packets_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_nof_packets_R"], datatype=numpy.int32, dims=(N_pn, N_pn))
+    FPGA_xst_ring_rx_nof_valid_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_nof_valid_R"], datatype=numpy.int32, dims=(N_pn, N_pn))
+    FPGA_xst_ring_rx_latency_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_rx_latency_R"], datatype=numpy.int32, dims=(N_pn, N_pn))
+    FPGA_xst_rx_align_bsn_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_rx_align_bsn_R"], datatype=numpy.int64, dims=(N_pn,P_sq))
+    FPGA_xst_rx_align_nof_packets_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_rx_align_nof_packets_R"], datatype=numpy.int32, dims=(N_pn,P_sq))
+    FPGA_xst_rx_align_nof_valid_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_rx_align_nof_valid_R"], datatype=numpy.int32, dims=(N_pn,P_sq))
+    FPGA_xst_rx_align_latency_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_rx_align_latency_R"], datatype=numpy.int32, dims=(N_pn,P_sq))
+    FPGA_xst_rx_align_nof_replaced_packets_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_rx_align_nof_replaced_packets_R"], datatype=numpy.int32, dims=(N_pn,P_sq))
+    FPGA_xst_aligned_bsn_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_aligned_bsn_R"], datatype=numpy.int64, dims=(N_pn,))
+    FPGA_xst_aligned_nof_packets_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_aligned_nof_packets_R"], datatype=numpy.int32, dims=(N_pn,))
+    FPGA_xst_aligned_nof_valid_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_aligned_nof_valid_R"], datatype=numpy.int32, dims=(N_pn,))
+    FPGA_xst_aligned_latency_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_aligned_latency_R"], datatype=numpy.int32, dims=(N_pn,))
+    FPGA_xst_ring_tx_bsn_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_tx_bsn_R"], datatype=numpy.int64, dims=(N_pn,N_pn))
+    FPGA_xst_ring_tx_nof_packets_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_tx_nof_packets_R"], datatype=numpy.int32, dims=(N_pn,N_pn))
+    FPGA_xst_ring_tx_nof_valid_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_tx_nof_valid_R"], datatype=numpy.int32, dims=(N_pn,N_pn))
+    FPGA_xst_ring_tx_latency_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["FPGA_xst_ring_tx_latency_R"], datatype=numpy.int32, dims=(N_pn,N_pn))
+
     # number of packets with valid payloads
     nof_valid_payloads_R    = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_valid_payloads"}, dims=(XSTCollector.MAX_FPGAS,), datatype=numpy.uint64)
     # number of packets with invalid payloads
diff --git a/tangostationcontrol/tangostationcontrol/devices/snmp_device.py b/tangostationcontrol/tangostationcontrol/devices/snmp_device.py
index 04d5a1425e19b0c5fbcb076f206bcd4ed122618a..b5bfd8395a75a9831bdaaf47fb17bbfdf947c36a 100644
--- a/tangostationcontrol/tangostationcontrol/devices/snmp_device.py
+++ b/tangostationcontrol/tangostationcontrol/devices/snmp_device.py
@@ -12,16 +12,14 @@
 """
 
 # PyTango imports
-from tango.server import run
-from tango.server import device_property
-from tango import AttrWriteType
-
-# Additional import
-from tangostationcontrol.clients.snmp_client import SNMP_client
-from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper
+from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.devices.lofar_device import lofar_device
 
-import numpy
+from tango.server import device_property, command
+import os
+
+# Additional import
+from tangostationcontrol.clients.snmp_client import SNMP_client, mib_loader
 
 import logging
 logger = logging.getLogger()
@@ -39,6 +37,10 @@ class SNMP(lofar_device):
         - Type:'DevString'
         SNMP_host
         - Type:'DevULong'
+        SNMP_community
+        - Type:'DevString'
+        SNMP_rel_mib_dir
+        - Type:'DevString'
         SNMP_timeout
         - Type:'DevDouble'
         """
@@ -57,6 +59,11 @@ class SNMP(lofar_device):
         mandatory=True
     )
 
+    SNMP_rel_mib_dir = device_property(
+        dtype='DevString',
+        mandatory=False
+    )
+
     SNMP_timeout = device_property(
         dtype='DevDouble',
         mandatory=True
@@ -66,43 +73,14 @@ class SNMP(lofar_device):
     # Attributes
     # ----------
 
+    # Example attributes. The 'mib' and 'name' annotation keys are mandatory; 'index' is optional.
 
-    # octetstring
-    sysDescr_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "sysDescr"}, datatype=numpy.str)
-    sysName_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "sysName"}, datatype=numpy.str)
-
-    # get a table element with the oid
-    ifDescr31_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.2.2.1.2.31"}, datatype=numpy.str)
-
-    # get 10 table elements with the oid and dimension
-    ifDescr_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.2.2.1.2"}, dims=(10,), datatype=numpy.str)
-
-    #timeticks
-    sysUpTime_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "sysUpTime"}, datatype=numpy.int64)
-
-    # OID
-    sysObjectID_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "sysObjectID"}, datatype=numpy.int64)
-
-    # integer
-    sysServices_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "sysServices"}, datatype=numpy.int64)
-    tcpRtoAlgorithm_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "tcpRtoAlgorithm"}, datatype=numpy.int64)
-    snmpEnableAuthenTraps_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "snmpEnableAuthenTraps"}, datatype=numpy.int64)
-
-    #gauge
-    tcpCurrEstab_R = attribute_wrapper(comms_annotation={"mib": "RFC1213-MIB", "name": "tcpCurrEstab"}, datatype=numpy.int64)
-
-    #counter32
-    tcpActiveOpens_R = attribute_wrapper(comms_annotation={"mib": "RFC1213-MIB", "name": "tcpActiveOpens"}, datatype=numpy.int64)
-    snmpInPkts_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "snmpInPkts"}, datatype=numpy.int64)
-
-    #IP address
-    ipAdEntAddr_R = attribute_wrapper(comms_annotation={"mib": "RFC1213-MIB", "name": "ipAdEntAddr", "index": (127,0,0,1)}, datatype=numpy.str)
-    ipAdEntIfIndex_R = attribute_wrapper(comms_annotation={"mib": "RFC1213-MIB", "name": "ipAdEntIfIndex", "index": (10, 87, 6, 14)}, datatype=numpy.str)
-
-    #str RW attribute
-    sysContact_obj_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "sysContact"}, datatype=numpy.str)
-    sysContact_obj_RW = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "sysContact"}, datatype=numpy.str, access=AttrWriteType.READ_WRITE)
-
+    # Reads a table and returns an array of table entries 1 to 10 (note: tables require an index key and start at 1):
+    # test_attr1_R = attribute_wrapper(comms_annotation={"mib": "TEST-MIB", "name": "test_attr1", "index": 1}, dims=(10,), datatype=numpy.str)
+    # Indices can also be IP addresses. This reads a single scalar value:
+    # test_attr2_R = attribute_wrapper(comms_annotation={"mib": "TEST-MIB", "name": "test_attr2", "index": (127,0,0,1)}, datatype=numpy.int64)
+    # If the attribute does not come from a table, no index is needed (or the default of 0 can be supplied):
+    # test_attr3_R = attribute_wrapper(comms_annotation={"mib": "TEST-MIB", "name": "test_attr3"}, datatype=numpy.int64)
 
 
     # --------
@@ -125,6 +103,30 @@ class SNMP(lofar_device):
 
         self.snmp_manager.start()
 
+    @command(dtype_out=str)
+    def get_mib_dir(self):
+        # relative MIB directories are resolved against the directory of this module
+        if not os.path.isabs(self.SNMP_rel_mib_dir):
+            mib_path = os.path.dirname(__file__) + "/" + self.SNMP_rel_mib_dir
+        else:
+            # absolute paths are used as-is
+            mib_path = self.SNMP_rel_mib_dir
+        return mib_path
+
+    def init_device(self):
+        super().init_device()
+
+        # create the mib_loader and set the mib path
+        loader = mib_loader(self.get_mib_dir())
+
+        for i in self.attr_list():
+            try:
+                # for all of the attributes attempt to load the pre-compiled MIB. Skips already loaded ones
+                loader.load_pymib(i.comms_annotation["mib"])
+            except Exception as e:
+                raise Exception(f"Failed to load MIB file: {i.comms_annotation.get('mib')} for attribute {i.get_name()}") from e
+
+
 
 # --------
 # Commands
@@ -136,8 +138,5 @@ class SNMP(lofar_device):
 # ----------
 def main(args=None, **kwargs):
     """Main function of the module."""
+    return entry((SNMP,), args=args, **kwargs)
 
-    from tangostationcontrol.common.lofar_logging import configure_logger
-    configure_logger()
-
-    return run((SNMP,), args=args, **kwargs)
diff --git a/tangostationcontrol/tangostationcontrol/devices/temperature_manager.py b/tangostationcontrol/tangostationcontrol/devices/temperature_manager.py
index 68fc910f3f9d0b07809b2f3556fabdf594ed651c..20dd1875b64ad517247353a769668cd5fc0ad036 100644
--- a/tangostationcontrol/tangostationcontrol/devices/temperature_manager.py
+++ b/tangostationcontrol/tangostationcontrol/devices/temperature_manager.py
@@ -23,7 +23,7 @@ import logging
 
 logger = logging.getLogger()
 
-__all__ = ["temperature_manager", "main"]
+__all__ = ["TemperatureManager", "main"]
 
 
 class temp_attr:
@@ -39,7 +39,7 @@ class temp_attr:
 
 
 @device_logging_to_python()
-class temperature_manager(lofar_device):
+class TemperatureManager(lofar_device):
     # -----------------
     # Device Properties
     # -----------------
@@ -133,4 +133,4 @@ class temperature_manager(lofar_device):
 # ----------
 def main(**kwargs):
     """Main function of the temperature manager module."""
-    return entry(temperature_manager, **kwargs)
+    return entry(TemperatureManager, **kwargs)
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_antennafield.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_antennafield.py
new file mode 100644
index 0000000000000000000000000000000000000000..2b34767d6c74df2199017e1a884a0e2165af7fb0
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_antennafield.py
@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
+
+from .base import AbstractTestBases
+
+class TestAntennaFieldDevice(AbstractTestBases.TestDeviceBase):
+
+    def setUp(self):
+        super().setUp("STAT/AntennaField/1")
+        self.proxy.put_property({"RECV_devices": ["STAT/RECV/1"],
+                                 "HBAT_Power_to_RECV_mapping": [1, 1, 1, 0] + [-1] * 92})
+        self.recv_proxy = self.setup_recv_proxy()
+
+    def setup_recv_proxy(self):
+        # setup RECV
+        recv_proxy = TestDeviceProxy("STAT/RECV/1")
+        recv_proxy.off()
+        recv_proxy.warm_boot()
+        recv_proxy.set_defaults()
+        return recv_proxy
+
+    def test_property_recv_devices_has_one_receiver(self):
+        result = self.proxy.get_property("RECV_devices")
+        self.assertSequenceEqual(result["RECV_devices"], ["STAT/RECV/1"])
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_temperature_manager.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_temperature_manager.py
index 2d64309cb6273dd73d9b3fabcad8882b019c5810..ea884acf1d7557b1e72f64ee71eee756145f083f 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_temperature_manager.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_temperature_manager.py
@@ -13,4 +13,4 @@ class TestTemperatureManager(AbstractTestBases.TestDeviceBase):
 
     def setUp(self):
         """Intentionally recreate the device object in each test"""
-        super().setUp("STAT/Temperature_manager/1")
+        super().setUp("STAT/TemperatureManager/1")
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py
index 091db2c253e5e163167afc204170d89cf087c61a..05cfe5bf41241d2cf142479026d91b07f332e384 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py
@@ -136,6 +136,55 @@ class TestArchiver(BaseIntegrationTestCase):
         """
         sdp_proxy.off()
     
+    def test_archive_image_boolean_attribute(self):
+        """Test if a boolean image attribute is correctly archived"""
+        # Start RECV Device
+        recv_proxy = TestDeviceProxy("STAT/RECV/1")
+        recv_proxy.off()
+        time.sleep(1)   # To be deleted with L2SS-592
+        recv_proxy.initialise()
+        time.sleep(1)   # To be deleted with L2SS-592
+        self.assertEqual(DevState.STANDBY, recv_proxy.state())
+        recv_proxy.set_defaults()
+        recv_proxy.on()
+        self.assertEqual(DevState.ON, recv_proxy.state())
+
+        """
+        # Safety operation that prevents event subscriber to go in Fault state
+        self.archiver.remove_attributes_in_error()
+        time.sleep(3)
+        """
+        polling_period=1000
+        archive_event_period=5000
+        attr_fullname = 'stat/recv/1/ant_mask_rw'  # boolean 3x32
+        self.archiver.add_attribute_to_archiver(attr_fullname, polling_period, archive_event_period)
+        time.sleep(3)
+        # Test if the attribute has been correctly added to event subscriber
+        self.assertTrue(self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname)))
+
+        # Retrieve data from DB views
+        self.retriever = RetrieverTimescale()
+        self.assertIsNotNone(self.retriever)
+        records = self._wait_for_archiving(attr_fullname, archive_event_period)  
+        self.assertTrue(len(records)>0)
+        item = records[-1]                                                  # last table record
+        self.assertEqual('stat/recv/1',item.device)                         # column device
+        self.assertEqual('ant_mask_rw',item.name)                           # column attribute
+        self.assertEqual(datetime,type(item.data_time))                     # column datetime
+        self.assertEqual(int,type(item.x))                                  # column index x
+        self.assertEqual(int,type(item.y))                                  # column index y
+        self.assertEqual(int,type(item.value))                              # column value (bool stored as int)
+        self.assertLessEqual(item.value,1)                                  # column value (must be 0 or 1)
+
+        """
+        # Remove attribute at the end of the test
+        self.archiver.remove_attribute_from_archiver(attr_fullname)
+        time.sleep(3)
+        # Test if the attribute has been correctly removed
+        self.assertFalse(self.archiver.is_attribute_archived(attribute_fqdn(attr_fullname)))
+        """
+        recv_proxy.off()
+    
     def test_get_maximum_device_load(self):
         """ Test if the maximum device load is correctly computed """
         # Start RECV Device
diff --git a/tangostationcontrol/tangostationcontrol/test/beam/test_geo.py b/tangostationcontrol/tangostationcontrol/test/beam/test_geo.py
index 858b3f32e954d19271f8a0dc6fc3cba7b92f47e2..5694376be684b96e74a6197e5e8c3d13f3df6d39 100644
--- a/tangostationcontrol/tangostationcontrol/test/beam/test_geo.py
+++ b/tangostationcontrol/tangostationcontrol/test/beam/test_geo.py
@@ -7,7 +7,7 @@
 # Distributed under the terms of the APACHE license.
 # See LICENSE.txt for more info.
 
-from tangostationcontrol.beam.geo import ETRS_to_ITRF
+from tangostationcontrol.beam.geo import ETRS_to_ITRF, ETRS_to_GEO
 
 from tangostationcontrol.test import base
 
@@ -41,3 +41,32 @@ class TestETRS_to_ITRF(base.TestCase):
         LOFAR1_CS001_LBA_ITRF = [3826923.50275, 460915.488115, 5064643.517]
 
         numpy.testing.assert_almost_equal(CS001_LBA_ITRF, LOFAR1_CS001_LBA_ITRF, decimal=1.5)
+
+class TestETRS_to_GEO(base.TestCase):
+    def test_convert_single_coordinate(self):
+        """ Convert a single coordinate. """
+        ETRS_coords = numpy.array([1.0, 1.0, 1.0])
+        GEO_coords = ETRS_to_GEO(ETRS_coords)
+
+        self.assertEqual((2,), GEO_coords.shape)
+
+    def test_convert_array(self):
+        """ Convert an array of coordinates. """
+        ETRS_coords = numpy.array([ [1.0, 1.0, 1.0], [2.0, 2.0, 2.0], [3.0, 3.0, 3.0] ])
+        GEO_coords = ETRS_to_GEO(ETRS_coords)
+
+        self.assertEqual((3,2), GEO_coords.shape)
+
+    def test_verify_CS001_LBA(self):
+        """ Verify if the calculated CS001LBA phase center matches those calculated in LOFAR1. """
+
+        # See CLBA in MAC/Deployment/data/Coordinates/ETRF_FILES/CS001/CS001-antenna-positions-ETRS.csv 
+        CS001_LBA_ETRS = [3826923.942, 460915.117, 5064643.229]
+
+        # Convert to GEO
+        CS001_LBA_GEO = ETRS_to_GEO(numpy.array(CS001_LBA_ETRS))
+
+        # verify against actual position
+        LOFAR1_CS001_LBA_GEO = [52.911, 6.868]
+
+        numpy.testing.assert_almost_equal(CS001_LBA_GEO, LOFAR1_CS001_LBA_GEO, decimal=3)
diff --git a/tangostationcontrol/tangostationcontrol/test/clients/SNMP_mib_loading/TEST-MIB.py b/tangostationcontrol/tangostationcontrol/test/clients/SNMP_mib_loading/TEST-MIB.py
new file mode 100644
index 0000000000000000000000000000000000000000..de190adc7e763c307b2e7548a200a3f22b3f5cf2
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/test/clients/SNMP_mib_loading/TEST-MIB.py
@@ -0,0 +1,7 @@
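+# Minimal pre-compiled pysnmp MIB used by test_mib_loading. It defines a single
+# named-value scalar (testNamedValue), so MIB loading and value conversion can be tested.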
+Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, iso, Gauge32, MibIdentifier, Bits, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "iso", "Gauge32", "MibIdentifier", "Bits","Counter32")
+ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint")
+NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
+
+testNamedValue = MibScalar((9, 8, 7, 6, 5, 4, 3, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("A", 1), ("B", 2), ("C", 3), ("D", 4)))).setMaxAccess("readonly")
+mibBuilder.exportSymbols("TEST-MIB", testNamedValue=testNamedValue)
+
diff --git a/tangostationcontrol/tangostationcontrol/test/clients/test_mib_loading.py b/tangostationcontrol/tangostationcontrol/test/clients/test_mib_loading.py
new file mode 100644
index 0000000000000000000000000000000000000000..6abf0f52e6ccda67f5ba482a8c12f811d5421fcb
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/test/clients/test_mib_loading.py
@@ -0,0 +1,53 @@
+from tangostationcontrol.test import base
+
+from tangostationcontrol.clients.snmp_client import mib_loader
+from pysnmp.smi import view
+import pysnmp.hlapi as pysnmp
+from pysnmp.smi.rfc1902 import ObjectIdentity
+
+from os import path
+
+
+class TestMibLoading(base.TestCase):
+
+    # name of the pymib file (without the .py extension)
+    mib = "TEST-MIB"
+
+    # the MIB file lives in a folder next to this test file
+    rel_dir = "SNMP_mib_loading"
+
+    def test_content(self):
+        """
+        TEST-MIB contains a single variable named testNamedValue with OID "9.8.7.6.5.4.3.2.1"
+        and named values: ("A", 1), ("B", 2), ("C", 3), ("D", 4).
+        To confirm that the MIB is loaded correctly, this test retrieves the OID, the value
+        and the named value.
+        """
+
+        abs_dir = path.dirname(__file__) + "/" + self.rel_dir
+        loader = mib_loader(abs_dir)
+        loader.load_pymib(self.mib)
+
+        # used to view mibs client side
+        mibView = view.MibViewController(loader.mibBuilder)
+
+        # The expected testNamedValue parameters as written in TEST-MIB.py
+        testNamedValue = "testNamedValue"
+        testNamedValue_oid = "9.8.7.6.5.4.3.2.1"
+        testNamedValue_named = "A"
+        testNamedValue_value = 1
+
+        # create an ObjectType for testNamedValue with a value of 1
+        obj_T = pysnmp.ObjectType(ObjectIdentity(self.mib, testNamedValue), pysnmp.Integer32(1))
+        obj_T.resolveWithMib(mibView)
+
+        # get the oid
+        self.assertEqual(str(obj_T[0]), testNamedValue_oid)
+
+        # get the name format: mib::name
+        self.assertEqual(obj_T[0].prettyPrint(), f"{self.mib}::{testNamedValue}")
+
+        # get the namedValue
+        self.assertEqual(str(obj_T[1]), testNamedValue_named)
+
+        # get the numerical value
+        self.assertEqual(int(obj_T[1]), testNamedValue_value)
diff --git a/tangostationcontrol/tangostationcontrol/test/clients/test_snmp_client.py b/tangostationcontrol/tangostationcontrol/test/clients/test_snmp_client.py
index 4d3a5c22ab3b7ac61ccbdfd78671f0c9ed4cf56a..9db811338d77f5afab496dc5f5ec1e305cc98a94 100644
--- a/tangostationcontrol/tangostationcontrol/test/clients/test_snmp_client.py
+++ b/tangostationcontrol/tangostationcontrol/test/clients/test_snmp_client.py
@@ -5,7 +5,7 @@ from unittest import mock
 
 from tangostationcontrol.test import base
 
-from tangostationcontrol.clients.snmp_client import SNMP_client, snmp_attribute, annotation_wrapper
+from tangostationcontrol.clients.snmp_client import SNMP_client, snmp_attribute, SNMP_comm
 
 
 class server_imitator:
@@ -13,7 +13,7 @@ class server_imitator:
     snmp_to_numpy_dict = {
         hlapi.Integer32: numpy.int64,
         hlapi.TimeTicks: numpy.int64,
-        str: str,
+        hlapi.OctetString: str,
         hlapi.Counter32: numpy.int64,
         hlapi.Gauge32: numpy.int64,
         hlapi.IpAddress: str,
@@ -35,6 +35,8 @@ class server_imitator:
                 read_val = (None, snmp_type("1.3.6.1.2.1.1.1.0"))
             elif snmp_type is hlapi.IpAddress:
                 read_val = (None, snmp_type("1.1.1.1"))
+            elif snmp_type is hlapi.OctetString:
+                read_val = (None, snmp_type("1"))
             else:
                 read_val = (None, snmp_type(1))
 
@@ -48,12 +50,16 @@ class server_imitator:
                 read_val = []
                 for _i in range(dims[0]):
                     read_val.append((None, snmp_type(f"1.1.1.1")))
+            elif snmp_type is hlapi.OctetString:
+                read_val = []
+                for _i in range(dims[0]):
+                    read_val.append((None, snmp_type("1")))
             else:
                 read_val = []
                 for _i in range(dims[0]):
                     read_val.append((None, snmp_type(1)))
         else:
-            raise Exception("Image not yet supported :(")
+            raise Exception("Image not supported :(")
 
         return read_val
 
@@ -66,14 +72,14 @@ class server_imitator:
         if dims == self.dim_list["scalar"]:
             snmp_type_dict = {hlapi.ObjectIdentity:"1.3.6.1.2.1.1.1.0.1",
                             hlapi.IpAddress: "1.1.1.1",
-                            str: "1"}
+                            hlapi.OctetString: "1"}
             check_val = 1
             for k,v in snmp_type_dict.items():
                 if snmp_type is k:  check_val = v
         elif dims == self.dim_list["spectrum"]:
             snmp_type_dict = {hlapi.ObjectIdentity:["1.3.6.1.2.1.1.1.0.1"] * dims[0],
                             hlapi.IpAddress: ["1.1.1.1"] * dims[0],
-                            str: ["1"] * dims[0]}
+                            hlapi.OctetString: ["1"] * dims[0]}
             check_val = check_val = [1] * dims[0]
             for k,v in snmp_type_dict.items():
                 if snmp_type is k:  check_val = v
@@ -84,39 +90,6 @@ class server_imitator:
 
 class TestSNMP(base.TestCase):
 
-
-    def test_annotation_success(self):
-        """
-        unit test for the processing of annotation. Has 2 lists. 1 with things that should succeed and 1 with things that should fail.
-        """
-
-        client = SNMP_client(community='public', host='localhost', timeout=10, fault_func=None, try_interval=2)
-
-        test_list = [
-            # test name nad MIB type annotation
-            {"mib": "SNMPv2-MIB", "name": "sysDescr"},
-
-            # test name nad MIB type annotation with index
-            {"mib": "RFC1213-MIB", "name": "ipAdEntAddr", "index": (127, 0, 0, 1)},
-            {"mib": "random-MIB", "name": "aName", "index": 2},
-
-            #oid
-            {"oids": "1.3.6.1.2.1.2.2.1.2.31"}
-        ]
-
-
-        for i in test_list:
-            wrapper = client._setup_annotation(annotation=i)
-
-            if wrapper.oids is not None:
-                self.assertEqual(wrapper.oids, i["oids"])
-
-            else:
-                self.assertEqual(wrapper.mib, i["mib"], f"expected mib with: {i['mib']}, got: {wrapper.idx} from: {i}")
-                self.assertEqual(wrapper.name, i["name"], f"expected name with: {i['name']}, got: {wrapper.idx} from: {i}")
-                self.assertEqual(wrapper.idx, i.get('index', 0), f"expected idx with: {i.get('index', 0)}, got: {wrapper.idx} from: {i}")
-
-
     def test_annotation_fail(self):
         """
         unit test for the processing of annotation. Has 2 lists. 1 with things that should succeed and 1 with things that should fail.
@@ -125,56 +98,19 @@ class TestSNMP(base.TestCase):
         client = SNMP_client(community='public', host='localhost', timeout=10, fault_func=None, try_interval=2)
 
         fail_list = [
-            # OIDS cant use the index
-            {"oids": "1.3.6.1.2.1.2.2.1.2.31", "index": 2},
-            # mixed annotation is not allowed
-            {"oids": "1.3.6.1.2.1.2.2.1.2.31", "name": "thisShouldFail"},
             # no 'name'
             {"mib": "random-MIB", "index": 2},
+            # no MIB
+            {"name": "random-name", "index": 2},
         ]
 
         for i in fail_list:
             with self.assertRaises(ValueError):
-                client._setup_annotation(annotation=i)
-
-    def test_oids_scalar(self):
-
-        test_oid = "1.1.1.1"
-
-        server = server_imitator()
-
-        x, y = server.dim_list['scalar']
-
-        # we just need the object to call another function
-        wrapper = annotation_wrapper(annotation = {"oids": "Not None lol"})
-        # scalar
-        scalar_expected = [test_oid]
-        ret_oids = wrapper._get_oids(x, y, test_oid)
-        self.assertEqual(ret_oids, scalar_expected, f"Expected: {scalar_expected}, got: {ret_oids}")
-
-    def test_oids_spectrum(self):
-        """
-        Tests the "get_oids" function, which is for getting lists of sequential oids.
-
-        Results should basically be an incrementing list of oids with the final number incremented by 1 each time.
-        So "1.1" with dims of 3x1 might become ["1.1.1", "1.1.2", "1.1.3"]
-        """
-        server = server_imitator()
-
-        test_oid = "1.1.1.1"
-        x, y = server.dim_list['spectrum']
-
-        # we just need the object to call another function
-        wrapper = annotation_wrapper(annotation={"oids": "Not None lol"})
-
-        # spectrum
-        spectrum_expected = [test_oid + ".1", test_oid + ".2", test_oid + ".3", test_oid + ".4"]
-        ret_oids = wrapper._get_oids(x, y, test_oid)
-        self.assertListEqual(ret_oids, spectrum_expected, f"Expected: {spectrum_expected}, got: {ret_oids}")
+                client._process_annotation(annotation=i)
 
     @mock.patch('pysnmp.hlapi.ObjectIdentity')
     @mock.patch('pysnmp.hlapi.ObjectType')
-    @mock.patch('tangostationcontrol.clients.snmp_client.snmp_attribute.next_wrap')
+    @mock.patch('tangostationcontrol.clients.snmp_client.SNMP_comm.getter')
     def test_snmp_obj_get(self, m_next, m_obj_T, m_obj_i):
         """
         Attempts to read a fake SNMP variable and checks whether it got what it expected
@@ -186,21 +122,24 @@ class TestSNMP(base.TestCase):
             for i in server.snmp_to_numpy_dict:
                 m_next.return_value = (None, None, None, server.get_return_val(i, server.dim_list[j]))
 
-                m_client = mock.Mock()
+                def __fakeInit__(self):
+                    pass
 
+                with mock.patch.object(SNMP_comm, '__init__', __fakeInit__):
+                    m_comms = SNMP_comm()
 
-                wrapper = annotation_wrapper(annotation={"oids": "1.3.6.1.2.1.2.2.1.2.31"})
-                snmp_attr = snmp_attribute(client=m_client, wrapper=wrapper, dtype=server.snmp_to_numpy_dict[i], dim_x=server.dim_list[j][0], dim_y=server.dim_list[j][1])
+                    snmp_attr = snmp_attribute(comm=m_comms, mib="test", name="test", idx=0, dtype=server.snmp_to_numpy_dict[i], dim_x=server.dim_list[j][0], dim_y=server.dim_list[j][1])
 
-                val = snmp_attr.read_function()
+                    val = snmp_attr.read_function()
 
-                checkval = server.val_check(i, server.dim_list[j])
-                self.assertEqual(checkval, val, f"Expected: {checkval}, got: {val}")
+                    checkval = server.val_check(i, server.dim_list[j])
+                    self.assertEqual(checkval, val, f"During test {j} {i}; Expected: {checkval} of type {i}, got: {val} of type {type(val)}")
 
     @mock.patch('pysnmp.hlapi.ObjectIdentity')
+    @mock.patch('pysnmp.hlapi.ObjectType')
     @mock.patch('pysnmp.hlapi.setCmd')
-    @mock.patch('tangostationcontrol.clients.snmp_client.snmp_attribute.next_wrap')
-    def test_snmp_obj_set(self, m_next, m_nextCmd, m_obj_i):
+    @mock.patch('tangostationcontrol.clients.snmp_client.SNMP_comm.setter')
+    def test_snmp_obj_set(self, m_next, m_nextCmd, m_obj_T, m_obj_ID):
         """
         Attempts to write a value to an SNMP server, but instead intercepts it and compared whether the values is as expected.
         """
@@ -211,27 +150,53 @@ class TestSNMP(base.TestCase):
             for i in server.snmp_to_numpy_dict:
                 m_next.return_value = (None, None, None, server.get_return_val(i, server.dim_list[j]))
 
-                m_client = mock.Mock()
-                set_val = server.val_check(i, server.dim_list[j])
+                def __fakeInit__(self):
+                    pass
+
+                with mock.patch.object(SNMP_comm, '__init__', __fakeInit__):
+                    m_comms = SNMP_comm()
+                    
+                    set_val = server.val_check(i, server.dim_list[j])
+
+                    snmp_attr = snmp_attribute(comm=m_comms, mib="test", name="test", idx=0, dtype=server.snmp_to_numpy_dict[i], dim_x=server.dim_list[j][0], dim_y=server.dim_list[j][1])
+
+                    res_lst = []
+                    def test(*value):
+                        res_lst.append(value[1])
+                        return None, None, None, server.get_return_val(i, server.dim_list[j])
+
+                    hlapi.ObjectType = test
+
+                    snmp_attr.write_function(set_val)
+
+                    if len(res_lst) == 1:
+                        res_lst = res_lst[0]
 
-                wrapper = annotation_wrapper(annotation={"oids": "1.3.6.1.2.1.2.2.1.2.31"})
-                snmp_attr = snmp_attribute(client=m_client, wrapper=wrapper, dtype=server.snmp_to_numpy_dict[i], dim_x=server.dim_list[j][0], dim_y=server.dim_list[j][1])
+                    checkval = server.val_check(i, server.dim_list[j])
+                    self.assertEqual(checkval, res_lst, f"During test {j} {i}; Expected: {checkval}, got: {res_lst}")
 
-                res_lst = []
-                def test(*value):
-                    res_lst.append(value[1])
-                    return None, None, None, server.get_return_val(i, server.dim_list[j])
+    @mock.patch('tangostationcontrol.clients.snmp_client.SNMP_comm.getter')
+    def test_named_value(self, m_next):
+        # # {1: "other", 2: "invalid", 3: "dynamic", 4: "static",}
+        # test_val = hlapi.Integer.withNamedValues(enable=1, disable=0)
+        # test_val(1)
 
-                hlapi.ObjectType = test
+        m_comms = mock.Mock()
+        snmp_attr = snmp_attribute(comm=m_comms, mib="test", name="test", idx=0, dtype=str, dim_x=1, dim_y=0)
 
-                snmp_attr.write_function(set_val)
 
-                if len(res_lst) == 1:
-                    res_lst = res_lst[0]
+        # create a named integer with the values: 'enable' for 1 and 'disable' for 0
+        test_val = ((None, hlapi.Integer.withNamedValues(enable=1, disable=0)(1)),)
+        ret_val = snmp_attr.convert(test_val)
 
-                checkval = server.val_check(i, server.dim_list[j])
-                self.assertEqual(checkval, res_lst, f"Expected: {checkval}, got: {res_lst}")
+        # should return 'enable' since we supplied the value 1
+        self.assertEqual(ret_val, "enable", f"Expected: to get 'enable', got: {ret_val} of type {type(ret_val)}")
 
 
+        # create an unnamed integer with a value of 2
+        test_val = ((None, hlapi.Integer(2)),)
+        ret_val = snmp_attr.convert(test_val)
 
+        # check to make sure the value is indeed 2
+        self.assertEqual(ret_val, 2, f"Expected: to get {2}, got: {ret_val} of type {type(ret_val)}")
 
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_antennafield_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_antennafield_device.py
new file mode 100644
index 0000000000000000000000000000000000000000..189cd08f6536fdeaa8f895b280d889e137ad63fa
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_antennafield_device.py
@@ -0,0 +1,298 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+import numpy
+import unittest
+
+from tangostationcontrol.devices.antennafield import HBATToRecvMapper
+
+class TestHBATToRecvMapper(unittest.TestCase):
+    # A mapping where no HBAT is connected to a power RCU
+    power_not_connected = [[-1, -1]] * 48
+    # A mapping where no HBAT is connected to a control RCU
+    control_not_connected = [[-1, -1]] * 48
+    # A mapping where first two HBATs are mapped on the first Receiver.
+    # The first HBAT control line on RCU 1 and the second HBAT control line on RCU 0.
+    control_hba_0_and_1_on_rcu_1_and_0_of_recv_1 = [[1, 1], [1, 0]] + [[-1, -1]] * 46
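+    # Each mapping constant is a list of 48 [RECV, input] pairs; -1 marks an unconnected HBAT.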
+    
+    def test_ant_read_mask_r_no_mapping(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 3)
+
+        receiver_values = [[[False] * 3] * 32, [[False] * 3] * 32, [[False] * 3] * 32]
+        expected = [False] * 48
+        actual = mapper.map_read("ANT_mask_RW", receiver_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_ant_read_mask_r_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
+        mapper = HBATToRecvMapper(self.control_hba_0_and_1_on_rcu_1_and_0_of_recv_1, self.power_not_connected, 3)
+
+        receiver_values = [[False, True, False], [[False] * 3] * 31, [[False] * 3] * 32, [[False] * 3] * 32]
+        expected = [True, False] + [False] * 46
+        actual = mapper.map_read("ANT_mask_RW", receiver_values)
+
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_bf_read_delay_steps_r_no_mapping(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 3)
+
+        receiver_values = [[[0] * 32] * 96, [[0] * 32] * 96, [[0] * 32] * 96]
+        expected = [[0] * 32] * 48
+        actual = mapper.map_read("HBAT_BF_delay_steps_R", receiver_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_bf_read_delay_steps_r_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
+        mapper = HBATToRecvMapper(self.control_hba_0_and_1_on_rcu_1_and_0_of_recv_1, self.power_not_connected, 3)
+
+        receiver_values = [[[2] * 32, [1] * 32] + [[0] * 32] * 94, [[0] * 32] * 96, [[0] * 32] * 96]
+        expected = [[1] * 32, [2] * 32] + [[0] * 32] * 46
+        actual = mapper.map_read("HBAT_BF_delay_steps_R", receiver_values)
+
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_bf_read_delay_steps_rw_no_mapping(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 3)
+
+        receiver_values = [[[0] * 32] * 96, [[0] * 32] * 96, [[0] * 32] * 96]
+        expected = [[0] * 32] * 48
+        actual = mapper.map_read("HBAT_BF_delay_steps_RW", receiver_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_bf_read_delay_steps_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
+        mapper = HBATToRecvMapper(self.control_hba_0_and_1_on_rcu_1_and_0_of_recv_1, self.power_not_connected, 3)
+
+        receiver_values = [[[2] * 32, [1] * 32] + [[0] * 32] * 94, [[0] * 32] * 96, [[0] * 32] * 96]
+        expected = [[1] * 32, [2] * 32] + [[0] * 32] * 46
+        actual = mapper.map_read("HBAT_BF_delay_steps_RW", receiver_values)
+
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_read_led_on_r_unmapped(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 3)
+
+        receiver_values = [[[False] * 32] * 96, [[False] * 32] * 96, [[False] * 32] * 96]
+        expected = [[False] * 32] * 48
+        actual = mapper.map_read("HBAT_LED_on_R", receiver_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_read_led_on_r_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
+        mapper = HBATToRecvMapper(self.control_hba_0_and_1_on_rcu_1_and_0_of_recv_1, self.power_not_connected, 3)
+
+        receiver_values = [[[False, True] * 16, [True, False] * 16] + [[False] * 32] * 94, [[False] * 32] * 96, [[False] * 32] * 96]
+
+        expected = [[True, False] * 16, [False, True] * 16] + [[False] * 32] * 46
+        actual = mapper.map_read("HBAT_LED_on_R", receiver_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_read_led_on_rw_unmapped(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 3)
+
+        receiver_values = [[[False] * 32] * 96, [[False] * 32] * 96, [[False] * 32] * 96]
+        expected = [[False] * 32] * 48
+        actual = mapper.map_read("HBAT_LED_on_RW", receiver_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_read_led_on_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
+        mapper = HBATToRecvMapper(self.control_hba_0_and_1_on_rcu_1_and_0_of_recv_1, self.power_not_connected, 3)
+
+        receiver_values = [[[False, True] * 16, [True, False] * 16] + [[False] * 32] * 94, [[False] * 32] * 96, [[False] * 32] * 96]
+
+        expected = [[True, False] * 16, [False, True] * 16] + [[False] * 32] * 46
+        actual = mapper.map_read("HBAT_LED_on_RW", receiver_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_read_pwr_lna_on_r_unmapped(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 3)
+
+        receiver_values = [[[False] * 32] * 96, [[False] * 32] * 96, [[False] * 32] * 96]
+        expected = [[False] * 32] * 48
+        actual = mapper.map_read("HBAT_PWR_LNA_on_R", receiver_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_read_pwr_lna_on_r_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
+        mapper = HBATToRecvMapper(self.control_hba_0_and_1_on_rcu_1_and_0_of_recv_1, self.power_not_connected, 3)
+
+        receiver_values = [[[False, True] * 16, [True, False] * 16] + [[False] * 32] * 94, [[False] * 32] * 96, [[False] * 32] * 96]
+
+        expected = [[True, False] * 16, [False, True] * 16] + [[False] * 32] * 46
+        actual = mapper.map_read("HBAT_PWR_LNA_on_R", receiver_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_read_pwr_lna_on_rw_unmapped(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 3)
+
+        receiver_values = [[[False] * 32] * 96, [[False] * 32] * 96, [[False] * 32] * 96]
+        expected = [[False] * 32] * 48
+        actual = mapper.map_read("HBAT_PWR_LNA_on_RW", receiver_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_read_pwr_lna_on_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
+        mapper = HBATToRecvMapper(self.control_hba_0_and_1_on_rcu_1_and_0_of_recv_1, self.power_not_connected, 3)
+
+        receiver_values = [[[False, True] * 16, [True, False] * 16] + [[False] * 32] * 94, [[False] * 32] * 96, [[False] * 32] * 96]
+
+        expected = [[True, False] * 16, [False, True] * 16] + [[False] * 32] * 46
+        actual = mapper.map_read("HBAT_PWR_LNA_on_RW", receiver_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_read_pwr_on_r_unmapped(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 3)
+
+        receiver_values = [[[False] * 32] * 96, [[False] * 32] * 96, [[False] * 32] * 96]
+        expected = [[False] * 32] * 48
+        actual = mapper.map_read("HBAT_PWR_on_R", receiver_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_read_pwr_on_r_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
+        mapper = HBATToRecvMapper(self.control_hba_0_and_1_on_rcu_1_and_0_of_recv_1, self.power_not_connected, 3)
+
+        receiver_values = [[[False, True] * 16, [True, False] * 16] + [[False] * 32] * 94, [[False] * 32] * 96, [[False] * 32] * 96]
+
+        expected = [[True, False] * 16, [False, True] * 16] + [[False] * 32] * 46
+        actual = mapper.map_read("HBAT_PWR_on_R", receiver_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_read_pwr_on_rw_unmapped(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 3)
+
+        receiver_values = [[[False] * 32] * 96, [[False] * 32] * 96, [[False] * 32] * 96]
+        expected = [[False] * 32] * 48
+        actual = mapper.map_read("HBAT_PWR_on_RW", receiver_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_read_pwr_on_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
+        mapper = HBATToRecvMapper(self.control_hba_0_and_1_on_rcu_1_and_0_of_recv_1, self.power_not_connected, 3)
+
+        receiver_values = [[[False, True] * 16, [True, False] * 16] + [[False] * 32] * 94, [[False] * 32] * 96, [[False] * 32] * 96]
+
+        expected = [[True, False] * 16, [False, True] * 16] + [[False] * 32] * 46
+        actual = mapper.map_read("HBAT_PWR_on_RW", receiver_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    # map_write tests
+
+    def test_map_write_ant_mask_rw_no_mapping_and_one_receiver(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 1)
+
+        set_values = [False] * 48
+        expected = [[[False] * 3] * 32]
+        actual = mapper.map_write("ANT_mask_RW", set_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_write_ant_mask_rw_no_mapping_and_two_receivers(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 2)
+
+        set_values = [False] * 48
+        expected = [[[False] * 3] * 32] * 2
+        actual = mapper.map_write("ANT_mask_RW", set_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_write_ant_mask_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
+        mapper = HBATToRecvMapper(self.control_hba_0_and_1_on_rcu_1_and_0_of_recv_1, self.power_not_connected, 1)
+
+        set_values = [True, False] + [False] * 46
+        expected = [[[False, True, False]] + [[False] * 3] * 31]
+        actual = mapper.map_write("ANT_mask_RW", set_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_write_bf_delay_steps_rw_no_mapping_and_one_receiver(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 1)
+
+        set_values = [[1] * 32] * 48
+        expected = [[[0] * 32] * 96]
+        actual = mapper.map_write("HBAT_BF_delay_steps_RW", set_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_write_bf_delay_steps_rw_no_mapping_and_two_receivers(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 2)
+
+        set_values = [[1] * 32] * 48
+        expected = [[[0] * 32] * 96, [[0] * 32] * 96]
+        actual = mapper.map_write("HBAT_BF_delay_steps_RW", set_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_write_bf_delay_steps_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
+        mapper = HBATToRecvMapper(self.control_hba_0_and_1_on_rcu_1_and_0_of_recv_1, self.power_not_connected, 1)
+
+        set_values = [[1] * 32, [2] * 32] + [[0] * 32] * 46
+        expected = [[[2] * 32, [1] * 32] + [[0] * 32] * 94]
+        actual = mapper.map_write("HBAT_BF_delay_steps_RW", set_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_write_led_on_rw_no_mapping_and_one_receiver(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 1)
+
+        set_values = [[False] * 32] * 48
+        expected = [[[False] * 32] * 96]
+        actual = mapper.map_write("HBAT_LED_on_RW", set_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_write_led_on_rw_no_mapping_and_two_receivers(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 2)
+
+        set_values = [[False] * 32] * 48
+        expected = [[[False] * 32] * 96, [[False] * 32] * 96]
+        actual = mapper.map_write("HBAT_LED_on_RW", set_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_write_led_on_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
+        mapper = HBATToRecvMapper(self.control_hba_0_and_1_on_rcu_1_and_0_of_recv_1, self.power_not_connected, 1)
+
+        set_values = [[False, True] * 16, [True, False] * 16] + [[False] * 32] * 46
+        expected = [[[True, False] * 16, [False, True] * 16] + [[False] * 32] * 94]
+        actual = mapper.map_write("HBAT_LED_on_RW", set_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_write_pwr_lna_on_rw_no_mapping_and_one_receiver(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 1)
+
+        set_values = [[False] * 32] * 48
+        expected = [[[False] * 32] * 96]
+        actual = mapper.map_write("HBAT_PWR_LNA_on_RW", set_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_write_pwr_lna_on_rw_no_mapping_and_two_receivers(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 2)
+
+        set_values = [[False] * 32] * 48
+        expected = [[[False] * 32] * 96, [[False] * 32] * 96]
+        actual = mapper.map_write("HBAT_PWR_LNA_on_RW", set_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_write_pwr_lna_on_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
+        mapper = HBATToRecvMapper(self.control_hba_0_and_1_on_rcu_1_and_0_of_recv_1, self.power_not_connected, 1)
+
+        set_values = [[False, True] * 16, [True, False] * 16] + [[False] * 32] * 46
+        expected = [[[True, False] * 16, [False, True] * 16] + [[False] * 32] * 94]
+        actual = mapper.map_write("HBAT_PWR_LNA_on_RW", set_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_write_pwr_on_rw_no_mapping_and_one_receiver(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 1)
+
+        set_values = [[False] * 32] * 48
+        expected = [[[False] * 32] * 96]
+        actual = mapper.map_write("HBAT_PWR_on_RW", set_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_write_pwr_on_rw_no_mapping_and_two_receivers(self):
+        mapper = HBATToRecvMapper(self.control_not_connected, self.power_not_connected, 2)
+
+        set_values = [[False] * 32] * 48
+        expected = [[[False] * 32] * 96, [[False] * 32] * 96]
+        actual = mapper.map_write("HBAT_PWR_on_RW", set_values)
+        numpy.testing.assert_equal(expected, actual)
+
+    def test_map_write_pwr_on_rw_hba_0_and_1_on_rcu_1_and_0_of_recv_1(self):
+        mapper = HBATToRecvMapper(self.control_hba_0_and_1_on_rcu_1_and_0_of_recv_1, self.power_not_connected, 1)
+
+        set_values = [[False, True] * 16, [True, False] * 16] + [[False] * 32] * 46
+        expected = [[[True, False] * 16, [False, True] * 16] + [[False] * 32] * 94]
+        actual = mapper.map_write("HBAT_PWR_on_RW", set_values)
+        numpy.testing.assert_equal(expected, actual)
+
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_lofar_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_lofar_device.py
index 46004707ea59c681015b987ce97adb26931a189a..15434810dd7bf9d3162ce64282661f3fa358b3de 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_lofar_device.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_lofar_device.py
@@ -52,3 +52,4 @@ class TestLofarDevice(base.TestCase):
             proxy.initialise()
             self.assertEqual(42.0, proxy.read_attribute_A)
             self.assertListEqual([42.0, 43.0], proxy.read_attribute_B_array.tolist())
+
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_snmp_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_snmp_device.py
new file mode 100644
index 0000000000000000000000000000000000000000..6289b2a33162031b01998aeeb84cc2119fd78860
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_snmp_device.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+from tango.test_context import DeviceTestContext
+
+from tangostationcontrol.devices import snmp_device, lofar_device
+
+import mock
+from os import path
+
+from tangostationcontrol.test import base
+
+
+class TestSNMPDevice(base.TestCase):
+
+    # some dummy values for mandatory properties
+    snmp_properties = {'SNMP_community': 'localhost', 'SNMP_host': 161, 'SNMP_rel_mib_dir': "SNMP_mib_loading", 'SNMP_timeout': 5.0}
+
+    def setUp(self):
+        super(TestSNMPDevice, self).setUp()
+
+        # Patch DeviceProxy to allow making the proxies during initialisation
+        # that we otherwise avoid using
+        for device in [lofar_device]:
+            proxy_patcher = mock.patch.object(
+                device, 'DeviceProxy')
+            proxy_patcher.start()
+            self.addCleanup(proxy_patcher.stop)
+
+    def test_get_mib_dir(self):
+        with DeviceTestContext(snmp_device.SNMP, properties=self.snmp_properties, process=True) as proxy:
+
+            mib_dir = proxy.get_mib_dir()
+
+            self.assertEqual(mib_dir, f"{path.dirname(snmp_device.__file__)}/{self.snmp_properties['SNMP_rel_mib_dir']}")
diff --git a/tangostationcontrol/tangostationcontrol/test/toolkit/test_mib_compiler.py b/tangostationcontrol/tangostationcontrol/test/toolkit/test_mib_compiler.py
new file mode 100644
index 0000000000000000000000000000000000000000..8641f6483f04ef9e21c27b5bcaaaa4aff4f6587d
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/test/toolkit/test_mib_compiler.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+from tangostationcontrol.test import base
+from tangostationcontrol.toolkit.mib_compiler import mib_compiler
+
+import sys
+from os.path import isfile
+from os import getcwd
+from tempfile import TemporaryDirectory
+from unittest import mock
+
+class TestCompiler(base.TestCase):
+    def test_compile(self):
+
+        with TemporaryDirectory() as tmpdir:
+            new_sys_argv = [sys.argv[0], "--mibs", "TEST-MIB",
+                            "--source", f"{getcwd()}/tangostationcontrol/toolkit/mib_compiler/mibs",
+                            "--destination", f"{tmpdir}", "-v"]
+            with mock.patch.object(mib_compiler.sys, 'argv', new_sys_argv):
+                with self.assertRaises(SystemExit):
+                    mib_compiler.main()
+
+            # check if file was written
+            self.assertTrue(isfile(f"{tmpdir}/TEST-MIB.py"))
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_ts.py b/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_ts.py
index fd6f8f7aa0f39d52f79798e89923e75c60de0b5e..801ba8634209b73918b54e6f3fb716563cb73b1b 100644
--- a/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_ts.py
+++ b/tangostationcontrol/tangostationcontrol/toolkit/archiver_base_ts.py
@@ -154,6 +154,79 @@ class Lofar_Array_Ulong64(Lofar_Array_Attribute):
 class Lofar_Array_Ushort(Lofar_Array_Attribute):
     __tablename__ = 'lofar_array_ushort'
     value = Column(INTEGER)
+
+class Lofar_Image_Attribute(Base):
+    """
+    Abstract class that represents a Lofar customized Tango image attribute view
+    """
+    __abstract__ = True
+    __table_args__ = {'extend_existing': True}
+
+    data_time = Column(TIMESTAMP, primary_key=True)
+    device = Column(String, primary_key=True)
+    name = Column(String, primary_key=True)
+    x = Column(INTEGER, primary_key=True)
+    y = Column(INTEGER, primary_key=True)
+
+    def __repr__(self):
+        return f"<Attribute(device='{self.device}', name='{self.name}', data_time='{self.data_time}',index_x='{self.x}',index_y='{self.y}',value='{self.value}'>"
+
+class Lofar_Image_Boolean(Lofar_Image_Attribute):
+    __tablename__ = 'lofar_image_boolean'
+    value = Column(Boolean)
+
+class Lofar_Image_Double(Lofar_Image_Attribute):
+    __tablename__ = 'lofar_image_double'
+    value = Column(FLOAT)
+
+class Lofar_Image_Encoded(Lofar_Image_Attribute):
+    __tablename__ = 'lofar_image_encoded'
+    value = Column(BYTEA)
+
+class Lofar_Image_Enum(Lofar_Image_Attribute):
+    __tablename__ = 'lofar_image_enum'
+    value = Column(INTEGER)
+
+class Lofar_Image_Float(Lofar_Image_Attribute):
+    __tablename__ = 'lofar_image_float'
+    value = Column(FLOAT)
+
+class Lofar_Image_Long(Lofar_Image_Attribute):
+    __tablename__ = 'lofar_image_long'
+    value = Column(INT4RANGE)
+
+class Lofar_Image_Long64(Lofar_Image_Attribute):
+    __tablename__ = 'lofar_image_long64'
+    value = Column(INT8RANGE)
+
+class Lofar_Image_Short(Lofar_Image_Attribute):
+    __tablename__ = 'lofar_image_short'
+    value = Column(INTEGER)
+
+class Lofar_Image_State(Lofar_Image_Attribute):
+    __tablename__ = 'lofar_image_state'
+    value = Column(INTEGER)
+
+class Lofar_Image_String(Lofar_Image_Attribute):
+    __tablename__ = 'lofar_image_string'
+    value = Column(TEXT)
+
+class Lofar_Image_Uchar(Lofar_Image_Attribute):
+    __tablename__ = 'lofar_image_uchar'
+    value = Column(INTEGER)
+
+class Lofar_Image_Ulong(Lofar_Image_Attribute):
+    __tablename__ = 'lofar_image_ulong'
+    value = Column(INTEGER)
+
+class Lofar_Image_Ulong64(Lofar_Image_Attribute):
+    __tablename__ = 'lofar_image_ulong64'
+    value = Column(INTEGER)
+
+class Lofar_Image_Ushort(Lofar_Image_Attribute):
+    __tablename__ = 'lofar_image_ushort'
+    value = Column(INTEGER)
+
 # ----------------- ----------------- ----------------- #
 
 class Attribute(Base):
@@ -777,6 +850,8 @@ def get_viewclass_by_tablename(tablename: str):
                     return c
                 elif format=='array' and c.__tablename__ == f"lofar_array_{datatype}":              
                     return c
+                elif format=='image' and c.__tablename__ == f"lofar_image_{datatype}":
+                    return c
     return None
 
 def build_array_from_record(rows: List[Array], dim_x: int):
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/README.md b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..d99a55af6738b5556e72a8655e607a2a6641acd7
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/README.md
@@ -0,0 +1,28 @@
+# MIB Compiler
+
+The MIB compiler script 'compiles' .mib files into a custom Python representation that pysnmp can load directly.
+
+To compile a MIB there must be a valid .mib file in the source directory, together with any MIB files it imports.
+You can find out which MIB files need to be imported by opening the file and looking at its `IMPORTS` section, where the imported MIB files are listed.
+These imported MIBs may in turn import further MIB files. Alternatively, the missing imports can be found in the verbose debug log.
+
+This script will also generate pymib files for all the imported MIB files.
+
+`--mibs`: The list of MIB names to compile.
+
+`--destination`: The output folder for the compiled MIBs. This argument is optional. The default destination folder is `./output_pymibs`, relative to the current working directory.
+
+`--source`: A list of source folders and web addresses from which the MIB files are read. This argument is optional. The default source folder is `./mibs`, relative to the current working directory.
+It can be useful to also list a web address as a source, as various sites host MIB files.
+
+`--debug`: Enable verbose debugging. Useful for figuring out errors.
+
+Example usage:
+
+To compile the MIB TEST-MIB from the default `./mibs` location:
+`python3 mib_compiler.py --mibs TEST-MIB`
+
+To compile the MIB TEST-MIB from the default `./mibs` location and write to a custom output folder:
+`python3 mib_compiler.py --mibs TEST-MIB --destination home/user/output`
+
+To compile the MIB TEST-MIB and all its imports from the path `home/user/mibs` and the web address `http://www.net-snmp.org/docs/mibs/`:
+`python3 mib_compiler.py --mibs TEST-MIB --source home/user/mibs http://www.net-snmp.org/docs/mibs/`
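+
+As a rough sketch (the module name and output path are the ones used in the examples above), a compiled MIB can then be loaded from the destination folder with pysnmp's `MibBuilder`:
+
+```python
+from pysnmp.smi import builder
+
+mib_builder = builder.MibBuilder()
+# point pysnmp at the folder that mib_compiler.py wrote its output to
+mib_builder.addMibSources(builder.DirMibSource("output_pymibs"))
+mib_builder.loadModules("TEST-MIB")
+```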
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mib_compiler.py b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mib_compiler.py
new file mode 100644
index 0000000000000000000000000000000000000000..a96f2c34670d900665dce697c048e1460866c89f
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mib_compiler.py
@@ -0,0 +1,66 @@
+import argparse
+import sys
+
+from pysnmp.smi import builder, compiler
+
+from pathlib import Path
+
+from pysmi import debug
+import logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger("mib_compiler")
+
+
+def mib_compile(mib_list: list, src, dst):
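+    """Compile the given MIB modules with pysnmp/pysmi.
+
+    mib_list: names of the MIB modules to compile
+    src:      directories and/or URLs to read the .mib files from
+    dst:      directory the compiled pysnmp modules are written to
+    """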
+
+    mibBuilder = builder.MibBuilder()
+
+    # attach a MIB compiler to the builder, reading from the given sources and writing compiled modules to the destination
+    compiler.addMibCompiler(mibBuilder, sources=src, destination=dst)
+
+    for mib in mib_list:
+        # compile and load the module; the compiled .py file is written to dst
+        try:
+            mibBuilder.loadModules(mib)
+            logger.debug(f"loaded {mib}")
+        except Exception as e:
+            raise Exception(f"Could not compile '{mib}'. Check whether all the MIB files imported by the provided MIB files are present in the source locations ({src}). (Enable the debug option and scroll up to find the missing import.)") from e
+
+def main():
+    abs_path = str(Path().absolute()).replace("\\", "/")
+    out_path = f"{abs_path}/output_pymibs"
+    in_path = f"{abs_path}/mibs"
+
+    parser = argparse.ArgumentParser(
+        description='Compiles .mib files into the easy-to-load pysnmp format')
+    parser.add_argument(
+        '-m', '--mibs', type=str, required=True, nargs='+', help='list of mib names to compile')
+    parser.add_argument(
+        '-d', '--destination', type=str,  required=False, default=out_path,
+        help='sets the output directory for the compiled mibs. (default: '
+             '%(default)s)')
+    parser.add_argument(
+        '-s', '--source', type=str, required=False, nargs='+',  default=in_path,
+        help='sets the input paths or addresses to read the .mib files from  (default: '
+             '%(default)s)')
+    parser.add_argument(
+        '-v', '--debug', dest='debug', action='store_true', default=False,
+        help='increase log output')
+
+    args = parser.parse_args()
+
+    # argparse arguments
+    mibs = args.mibs
+    destination = args.destination
+    source = args.source
+    debug_option = args.debug
+
+    if debug_option:
+        debug.setLogger(debug.Debug('compiler'))
+
+    mib_compile(mib_list=mibs, src=source, dst=destination)
+
+    sys.exit(0)
+
+if __name__ == "__main__":
+    main()
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-CONF b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-CONF
new file mode 100644
index 0000000000000000000000000000000000000000..24a1eed95d62f81ea88c3a78017696fa05400340
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-CONF
@@ -0,0 +1,322 @@
+SNMPv2-CONF DEFINITIONS ::= BEGIN
+
+IMPORTS ObjectName, NotificationName, ObjectSyntax
+                                               FROM SNMPv2-SMI;
+
+-- definitions for conformance groups
+
+OBJECT-GROUP MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  ObjectsPart
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+
+    VALUE NOTATION ::=
+                  value(VALUE OBJECT IDENTIFIER)
+
+    ObjectsPart ::=
+                  "OBJECTS" "{" Objects "}"
+    Objects ::=
+                  Object
+                | Objects "," Object
+    Object ::=
+
+                  value(ObjectName)
+
+    Status ::=
+                  "current"
+                | "deprecated"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    -- a character string as defined in [2]
+    Text ::= value(IA5String)
+END
+
+-- more definitions for conformance groups
+
+NOTIFICATION-GROUP MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  NotificationsPart
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+
+    VALUE NOTATION ::=
+                  value(VALUE OBJECT IDENTIFIER)
+
+    NotificationsPart ::=
+                  "NOTIFICATIONS" "{" Notifications "}"
+    Notifications ::=
+                  Notification
+                | Notifications "," Notification
+    Notification ::=
+                  value(NotificationName)
+
+    Status ::=
+                  "current"
+                | "deprecated"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    -- a character string as defined in [2]
+    Text ::= value(IA5String)
+END
+
+-- definitions for compliance statements
+
+MODULE-COMPLIANCE MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+                  ModulePart
+
+    VALUE NOTATION ::=
+                  value(VALUE OBJECT IDENTIFIER)
+
+    Status ::=
+                  "current"
+                | "deprecated"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    ModulePart ::=
+                  Modules
+    Modules ::=
+                  Module
+                | Modules Module
+    Module ::=
+                  -- name of module --
+                  "MODULE" ModuleName
+                  MandatoryPart
+                  CompliancePart
+
+    ModuleName ::=
+                  -- identifier must start with uppercase letter
+                  identifier ModuleIdentifier
+                  -- must not be empty unless contained
+                  -- in MIB Module
+                | empty
+    ModuleIdentifier ::=
+                  value(OBJECT IDENTIFIER)
+                | empty
+
+    MandatoryPart ::=
+                  "MANDATORY-GROUPS" "{" Groups "}"
+                | empty
+
+    Groups ::=
+
+                  Group
+                | Groups "," Group
+    Group ::=
+                  value(OBJECT IDENTIFIER)
+
+    CompliancePart ::=
+                  Compliances
+                | empty
+
+    Compliances ::=
+                  Compliance
+                | Compliances Compliance
+    Compliance ::=
+                  ComplianceGroup
+                | Object
+
+    ComplianceGroup ::=
+                  "GROUP" value(OBJECT IDENTIFIER)
+                  "DESCRIPTION" Text
+
+    Object ::=
+                  "OBJECT" value(ObjectName)
+                  SyntaxPart
+                  WriteSyntaxPart
+                  AccessPart
+                  "DESCRIPTION" Text
+
+    -- must be a refinement for object's SYNTAX clause
+    SyntaxPart ::= "SYNTAX" Syntax
+                | empty
+
+    -- must be a refinement for object's SYNTAX clause
+    WriteSyntaxPart ::= "WRITE-SYNTAX" Syntax
+                | empty
+
+    Syntax ::=    -- Must be one of the following:
+                       -- a base type (or its refinement),
+                       -- a textual convention (or its refinement), or
+                       -- a BITS pseudo-type
+                  type
+                | "BITS" "{" NamedBits "}"
+
+    NamedBits ::= NamedBit
+                | NamedBits "," NamedBit
+
+    NamedBit ::= identifier "(" number ")" -- number is nonnegative
+
+    AccessPart ::=
+                  "MIN-ACCESS" Access
+                | empty
+    Access ::=
+                  "not-accessible"
+                | "accessible-for-notify"
+                | "read-only"
+                | "read-write"
+                | "read-create"
+
+    -- a character string as defined in [2]
+    Text ::= value(IA5String)
+END
+
+-- definitions for capabilities statements
+
+AGENT-CAPABILITIES MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  "PRODUCT-RELEASE" Text
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+                  ModulePart
+
+    VALUE NOTATION ::=
+                  value(VALUE OBJECT IDENTIFIER)
+
+    Status ::=
+                  "current"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    ModulePart ::=
+                  Modules
+                | empty
+    Modules ::=
+                  Module
+                | Modules Module
+    Module ::=
+                  -- name of module --
+                  "SUPPORTS" ModuleName
+                  "INCLUDES" "{" Groups "}"
+                  VariationPart
+
+    ModuleName ::=
+
+                  -- identifier must start with uppercase letter
+                  identifier ModuleIdentifier
+    ModuleIdentifier ::=
+                  value(OBJECT IDENTIFIER)
+                | empty
+
+    Groups ::=
+                  Group
+                | Groups "," Group
+    Group ::=
+                  value(OBJECT IDENTIFIER)
+
+    VariationPart ::=
+                  Variations
+                | empty
+    Variations ::=
+                  Variation
+                | Variations Variation
+
+    Variation ::=
+                  ObjectVariation
+                | NotificationVariation
+
+    NotificationVariation ::=
+                  "VARIATION" value(NotificationName)
+                  AccessPart
+                  "DESCRIPTION" Text
+
+    ObjectVariation ::=
+                  "VARIATION" value(ObjectName)
+                  SyntaxPart
+                  WriteSyntaxPart
+                  AccessPart
+                  CreationPart
+                  DefValPart
+                  "DESCRIPTION" Text
+
+    -- must be a refinement for object's SYNTAX clause
+    SyntaxPart ::= "SYNTAX" Syntax
+                | empty
+
+    WriteSyntaxPart ::= "WRITE-SYNTAX" Syntax
+                | empty
+
+    Syntax ::=    -- Must be one of the following:
+                       -- a base type (or its refinement),
+                       -- a textual convention (or its refinement), or
+                       -- a BITS pseudo-type
+
+                  type
+                | "BITS" "{" NamedBits "}"
+
+    NamedBits ::= NamedBit
+                | NamedBits "," NamedBit
+
+    NamedBit ::= identifier "(" number ")" -- number is nonnegative
+
+    AccessPart ::=
+                  "ACCESS" Access
+                | empty
+
+    Access ::=
+                  "not-implemented"
+                -- only "not-implemented" for notifications
+                | "accessible-for-notify"
+                | "read-only"
+                | "read-write"
+                | "read-create"
+                -- following is for backward-compatibility only
+                | "write-only"
+
+    CreationPart ::=
+                  "CREATION-REQUIRES" "{" Cells "}"
+                | empty
+    Cells ::=
+                  Cell
+                | Cells "," Cell
+    Cell ::=
+                  value(ObjectName)
+
+    DefValPart ::= "DEFVAL" "{" Defvalue "}"
+                | empty
+
+    Defvalue ::=  -- must be valid for the object's syntax
+                  -- in this macro's SYNTAX clause, if present,
+                  -- or if not, in object's OBJECT-TYPE macro
+                  value(ObjectSyntax)
+                | "{" BitsValue "}"
+
+    BitsValue ::= BitNames
+                | empty
+
+    BitNames ::=  BitName
+                | BitNames "," BitName
+
+    BitName ::= identifier
+
+    -- a character string as defined in [2]
+    Text ::= value(IA5String)
+END
+
+END
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-SMI b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-SMI
new file mode 100644
index 0000000000000000000000000000000000000000..2132646cab00e28cf2f679fc1bb308ee2d12a1a1
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-SMI
@@ -0,0 +1,352 @@
+SNMPv2-SMI DEFINITIONS ::= BEGIN
+
+
+-- the path to the root
+
+org            OBJECT IDENTIFIER ::= { iso 3 }  --  "iso" = 1
+dod            OBJECT IDENTIFIER ::= { org 6 }
+internet       OBJECT IDENTIFIER ::= { dod 1 }
+
+directory      OBJECT IDENTIFIER ::= { internet 1 }
+
+mgmt           OBJECT IDENTIFIER ::= { internet 2 }
+mib-2          OBJECT IDENTIFIER ::= { mgmt 1 }
+transmission   OBJECT IDENTIFIER ::= { mib-2 10 }
+
+experimental   OBJECT IDENTIFIER ::= { internet 3 }
+
+private        OBJECT IDENTIFIER ::= { internet 4 }
+enterprises    OBJECT IDENTIFIER ::= { private 1 }
+
+security       OBJECT IDENTIFIER ::= { internet 5 }
+
+snmpV2         OBJECT IDENTIFIER ::= { internet 6 }
+
+-- transport domains
+snmpDomains    OBJECT IDENTIFIER ::= { snmpV2 1 }
+
+-- transport proxies
+snmpProxys     OBJECT IDENTIFIER ::= { snmpV2 2 }
+
+-- module identities
+snmpModules    OBJECT IDENTIFIER ::= { snmpV2 3 }
+
+-- Extended UTCTime, to allow dates with four-digit years
+-- (Note that this definition of ExtUTCTime is not to be IMPORTed
+--  by MIB modules.)
+ExtUTCTime ::= OCTET STRING(SIZE(11 | 13))
+    -- format is YYMMDDHHMMZ or YYYYMMDDHHMMZ
+    --   where: YY   - last two digits of year (only years
+    --                 between 1900-1999)
+    --          YYYY - last four digits of the year (any year)
+    --          MM   - month (01 through 12)
+    --          DD   - day of month (01 through 31)
+    --          HH   - hours (00 through 23)
+    --          MM   - minutes (00 through 59)
+    --          Z    - denotes GMT (the ASCII character Z)
+    --
+    -- For example, "9502192015Z" and "199502192015Z" represent
+    -- 8:15pm GMT on 19 February 1995. Years after 1999 must use
+    -- the four digit year format. Years 1900-1999 may use the
+    -- two or four digit format.
+
+-- definitions for information modules
+
+MODULE-IDENTITY MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  "LAST-UPDATED" value(Update ExtUTCTime)
+                  "ORGANIZATION" Text
+                  "CONTACT-INFO" Text
+                  "DESCRIPTION" Text
+                  RevisionPart
+
+    VALUE NOTATION ::=
+                  value(VALUE OBJECT IDENTIFIER)
+
+    RevisionPart ::=
+                  Revisions
+                | empty
+    Revisions ::=
+                  Revision
+                | Revisions Revision
+    Revision ::=
+                  "REVISION" value(Update ExtUTCTime)
+                  "DESCRIPTION" Text
+
+    -- a character string as defined in section 3.1.1
+    Text ::= value(IA5String)
+END
+
+
+OBJECT-IDENTITY MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+
+    VALUE NOTATION ::=
+                  value(VALUE OBJECT IDENTIFIER)
+
+    Status ::=
+                  "current"
+                | "deprecated"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    -- a character string as defined in section 3.1.1
+    Text ::= value(IA5String)
+END
+
+
+-- names of objects
+-- (Note that these definitions of ObjectName and NotificationName
+--  are not to be IMPORTed by MIB modules.)
+
+ObjectName ::=
+    OBJECT IDENTIFIER
+
+NotificationName ::=
+    OBJECT IDENTIFIER
+
+-- syntax of objects
+
+-- the "base types" defined here are:
+--   3 built-in ASN.1 types: INTEGER, OCTET STRING, OBJECT IDENTIFIER
+--   8 application-defined types: Integer32, IpAddress, Counter32,
+--              Gauge32, Unsigned32, TimeTicks, Opaque, and Counter64
+
+ObjectSyntax ::=
+    CHOICE {
+        simple
+            SimpleSyntax,
+
+          -- note that SEQUENCEs for conceptual tables and
+          -- rows are not mentioned here...
+
+        application-wide
+            ApplicationSyntax
+    }
+
+-- built-in ASN.1 types
+
+SimpleSyntax ::=
+    CHOICE {
+        -- INTEGERs with a more restrictive range
+        -- may also be used
+        integer-value               -- includes Integer32
+            INTEGER (-2147483648..2147483647),
+
+        -- OCTET STRINGs with a more restrictive size
+        -- may also be used
+        string-value
+            OCTET STRING (SIZE (0..65535)),
+
+        objectID-value
+            OBJECT IDENTIFIER
+    }
+
+-- indistinguishable from INTEGER, but never needs more than
+-- 32-bits for a two's complement representation
+Integer32 ::=
+        INTEGER (-2147483648..2147483647)
+
+
+-- application-wide types
+
+ApplicationSyntax ::=
+    CHOICE {
+        ipAddress-value
+            IpAddress,
+
+        counter-value
+            Counter32,
+
+        timeticks-value
+            TimeTicks,
+
+        arbitrary-value
+            Opaque,
+
+        big-counter-value
+            Counter64,
+
+        unsigned-integer-value  -- includes Gauge32
+            Unsigned32
+    }
+
+-- in network-byte order
+-- (this is a tagged type for historical reasons)
+IpAddress ::=
+    [APPLICATION 0]
+        IMPLICIT OCTET STRING (SIZE (4))
+
+-- this wraps
+Counter32 ::=
+    [APPLICATION 1]
+        IMPLICIT INTEGER (0..4294967295)
+
+-- this doesn't wrap
+Gauge32 ::=
+    [APPLICATION 2]
+        IMPLICIT INTEGER (0..4294967295)
+
+-- an unsigned 32-bit quantity
+-- indistinguishable from Gauge32
+Unsigned32 ::=
+    [APPLICATION 2]
+        IMPLICIT INTEGER (0..4294967295)
+
+-- hundredths of seconds since an epoch
+TimeTicks ::=
+    [APPLICATION 3]
+        IMPLICIT INTEGER (0..4294967295)
+
+-- for backward-compatibility only
+Opaque ::=
+    [APPLICATION 4]
+        IMPLICIT OCTET STRING
+
+-- for counters that wrap in less than one hour with only 32 bits
+Counter64 ::=
+    [APPLICATION 6]
+        IMPLICIT INTEGER (0..18446744073709551615)
+
+
+-- definition for objects
+
+OBJECT-TYPE MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  "SYNTAX" Syntax
+                  UnitsPart
+                  "MAX-ACCESS" Access
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+                  IndexPart
+                  DefValPart
+
+    VALUE NOTATION ::=
+                  value(VALUE ObjectName)
+
+    Syntax ::=   -- Must be one of the following:
+                       -- a base type (or its refinement),
+                       -- a textual convention (or its refinement), or
+                       -- a BITS pseudo-type
+                   type
+                | "BITS" "{" NamedBits "}"
+
+    NamedBits ::= NamedBit
+                | NamedBits "," NamedBit
+
+    NamedBit ::=  identifier "(" number ")" -- number is nonnegative
+
+    UnitsPart ::=
+                  "UNITS" Text
+                | empty
+
+    Access ::=
+                  "not-accessible"
+                | "accessible-for-notify"
+                | "read-only"
+                | "read-write"
+                | "read-create"
+
+    Status ::=
+                  "current"
+                | "deprecated"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    IndexPart ::=
+                  "INDEX"    "{" IndexTypes "}"
+                | "AUGMENTS" "{" Entry      "}"
+                | empty
+    IndexTypes ::=
+                  IndexType
+                | IndexTypes "," IndexType
+    IndexType ::=
+                  "IMPLIED" Index
+                | Index
+    Index ::=
+                    -- use the SYNTAX value of the
+                    -- correspondent OBJECT-TYPE invocation
+                  value(ObjectName)
+    Entry ::=
+                    -- use the INDEX value of the
+                    -- correspondent OBJECT-TYPE invocation
+                  value(ObjectName)
+
+    DefValPart ::= "DEFVAL" "{" Defvalue "}"
+                | empty
+
+    Defvalue ::=  -- must be valid for the type specified in
+                  -- SYNTAX clause of same OBJECT-TYPE macro
+                  value(ObjectSyntax)
+                | "{" BitsValue "}"
+
+    BitsValue ::= BitNames
+                | empty
+
+    BitNames ::=  BitName
+                | BitNames "," BitName
+
+    BitName ::= identifier
+
+    -- a character string as defined in section 3.1.1
+    Text ::= value(IA5String)
+END
+
+
+-- definitions for notifications
+
+NOTIFICATION-TYPE MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  ObjectsPart
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+
+    VALUE NOTATION ::=
+                  value(VALUE NotificationName)
+
+    ObjectsPart ::=
+                  "OBJECTS" "{" Objects "}"
+                | empty
+    Objects ::=
+                  Object
+                | Objects "," Object
+    Object ::=
+                  value(ObjectName)
+
+    Status ::=
+                  "current"
+                | "deprecated"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    -- a character string as defined in section 3.1.1
+    Text ::= value(IA5String)
+END
+
+-- definitions of administrative identifiers
+
+zeroDotZero    OBJECT-IDENTITY
+    STATUS     current
+    DESCRIPTION
+            "A value used for null identifiers."
+    ::= { 0 0 }
+
+END
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-TC b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-TC
new file mode 100644
index 0000000000000000000000000000000000000000..a68f9690d198b2533905c8ab9baa604c7a7a9a54
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-TC
@@ -0,0 +1,786 @@
+SNMPv2-TC DEFINITIONS ::= BEGIN
+
+IMPORTS
+    TimeTicks         FROM SNMPv2-SMI;
+
+
+-- definition of textual conventions
+
+TEXTUAL-CONVENTION MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  DisplayPart
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+                  "SYNTAX" Syntax
+
+    VALUE NOTATION ::=
+                   value(VALUE Syntax)      -- adapted ASN.1
+
+    DisplayPart ::=
+                  "DISPLAY-HINT" Text
+                | empty
+
+    Status ::=
+                  "current"
+                | "deprecated"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    -- a character string as defined in [2]
+    Text ::= value(IA5String)
+
+    Syntax ::=   -- Must be one of the following:
+                       -- a base type (or its refinement), or
+                       -- a BITS pseudo-type
+                  type
+                | "BITS" "{" NamedBits "}"
+
+    NamedBits ::= NamedBit
+                | NamedBits "," NamedBit
+
+    NamedBit ::=  identifier "(" number ")" -- number is nonnegative
+
+END
+
+
+
+
+DisplayString ::= TEXTUAL-CONVENTION
+    DISPLAY-HINT "255a"
+    STATUS       current
+    DESCRIPTION
+            "Represents textual information taken from the NVT ASCII
+            character set, as defined in pages 4, 10-11 of RFC 854.
+
+            To summarize RFC 854, the NVT ASCII repertoire specifies:
+
+              - the use of character codes 0-127 (decimal)
+
+              - the graphics characters (32-126) are interpreted as
+                US ASCII
+
+              - NUL, LF, CR, BEL, BS, HT, VT and FF have the special
+                meanings specified in RFC 854
+
+              - the other 25 codes have no standard interpretation
+
+              - the sequence 'CR LF' means newline
+
+              - the sequence 'CR NUL' means carriage-return
+
+              - an 'LF' not preceded by a 'CR' means moving to the
+                same column on the next line.
+
+              - the sequence 'CR x' for any x other than LF or NUL is
+                illegal.  (Note that this also means that a string may
+                end with either 'CR LF' or 'CR NUL', but not with CR.)
+
+            Any object defined using this syntax may not exceed 255
+            characters in length."
+    SYNTAX       OCTET STRING (SIZE (0..255))
+
+PhysAddress ::= TEXTUAL-CONVENTION
+    DISPLAY-HINT "1x:"
+    STATUS       current
+    DESCRIPTION
+            "Represents media- or physical-level addresses."
+    SYNTAX       OCTET STRING
+
+
+MacAddress ::= TEXTUAL-CONVENTION
+    DISPLAY-HINT "1x:"
+    STATUS       current
+    DESCRIPTION
+            "Represents an 802 MAC address represented in the
+            `canonical' order defined by IEEE 802.1a, i.e., as if it
+            were transmitted least significant bit first, even though
+            802.5 (in contrast to other 802.x protocols) requires MAC
+            addresses to be transmitted most significant bit first."
+    SYNTAX       OCTET STRING (SIZE (6))
+
+TruthValue ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "Represents a boolean value."
+    SYNTAX       INTEGER { true(1), false(2) }
+
+TestAndIncr ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "Represents integer-valued information used for atomic
+            operations.  When the management protocol is used to specify
+            that an object instance having this syntax is to be
+            modified, the new value supplied via the management protocol
+            must precisely match the value presently held by the
+            instance.  If not, the management protocol set operation
+            fails with an error of `inconsistentValue'.  Otherwise, if
+            the current value is the maximum value of 2^31-1 (2147483647
+            decimal), then the value held by the instance is wrapped to
+            zero; otherwise, the value held by the instance is
+            incremented by one.  (Note that regardless of whether the
+            management protocol set operation succeeds, the variable-
+            binding in the request and response PDUs are identical.)
+
+            The value of the ACCESS clause for objects having this
+            syntax is either `read-write' or `read-create'.  When an
+            instance of a columnar object having this syntax is created,
+            any value may be supplied via the management protocol.
+
+            When the network management portion of the system is re-
+            initialized, the value of every object instance having this
+            syntax must either be incremented from its value prior to
+            the re-initialization, or (if the value prior to the re-
+            initialization is unknown) be set to a pseudo-randomly
+            generated value."
+    SYNTAX       INTEGER (0..2147483647)
+
+AutonomousType ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "Represents an independently extensible type identification
+            value.  It may, for example, indicate a particular sub-tree
+            with further MIB definitions, or define a particular type of
+            protocol or hardware."
+    SYNTAX       OBJECT IDENTIFIER
+
+
+InstancePointer ::= TEXTUAL-CONVENTION
+    STATUS       obsolete
+    DESCRIPTION
+            "A pointer to either a specific instance of a MIB object or
+            a conceptual row of a MIB table in the managed device.  In
+            the latter case, by convention, it is the name of the
+            particular instance of the first accessible columnar object
+            in the conceptual row.
+
+            The two uses of this textual convention are replaced by
+            VariablePointer and RowPointer, respectively."
+    SYNTAX       OBJECT IDENTIFIER
+
+
+VariablePointer ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "A pointer to a specific object instance.  For example,
+            sysContact.0 or ifInOctets.3."
+    SYNTAX       OBJECT IDENTIFIER
+
+
+RowPointer ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "Represents a pointer to a conceptual row.  The value is the
+            name of the instance of the first accessible columnar object
+            in the conceptual row.
+
+            For example, ifIndex.3 would point to the 3rd row in the
+            ifTable (note that if ifIndex were not-accessible, then
+            ifDescr.3 would be used instead)."
+    SYNTAX       OBJECT IDENTIFIER
+
+RowStatus ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "The RowStatus textual convention is used to manage the
+            creation and deletion of conceptual rows, and is used as the
+            value of the SYNTAX clause for the status column of a
+            conceptual row (as described in Section 7.7.1 of [2].)
+            The status column has six defined values:
+
+                 - `active', which indicates that the conceptual row is
+                 available for use by the managed device;
+
+                 - `notInService', which indicates that the conceptual
+                 row exists in the agent, but is unavailable for use by
+                 the managed device (see NOTE below); 'notInService' has
+                 no implication regarding the internal consistency of
+                 the row, availability of resources, or consistency with
+                 the current state of the managed device;
+
+                 - `notReady', which indicates that the conceptual row
+                 exists in the agent, but is missing information
+                 necessary in order to be available for use by the
+                 managed device (i.e., one or more required columns in
+                 the conceptual row have not been instanciated);
+
+                 - `createAndGo', which is supplied by a management
+                 station wishing to create a new instance of a
+                 conceptual row and to have its status automatically set
+                 to active, making it available for use by the managed
+                 device;
+
+                 - `createAndWait', which is supplied by a management
+                 station wishing to create a new instance of a
+                 conceptual row (but not make it available for use by
+                 the managed device); and,
+
+                 - `destroy', which is supplied by a management station
+                 wishing to delete all of the instances associated with
+                 an existing conceptual row.
+
+            Whereas five of the six values (all except `notReady') may
+            be specified in a management protocol set operation, only
+            three values will be returned in response to a management
+            protocol retrieval operation:  `notReady', `notInService' or
+            `active'.  That is, when queried, an existing conceptual row
+            has only three states:  it is either available for use by
+            the managed device (the status column has value `active');
+            it is not available for use by the managed device, though
+            the agent has sufficient information to attempt to make it
+            so (the status column has value `notInService'); or, it is
+            not available for use by the managed device, and an attempt
+            to make it so would fail because the agent has insufficient
+            information (the state column has value `notReady').
+
+                                     NOTE WELL
+
+                 This textual convention may be used for a MIB table,
+                 irrespective of whether the values of that table's
+                 conceptual rows are able to be modified while it is
+                 active, or whether its conceptual rows must be taken
+                 out of service in order to be modified.  That is, it is
+                 the responsibility of the DESCRIPTION clause of the
+                 status column to specify whether the status column must
+                 not be `active' in order for the value of some other
+                 column of the same conceptual row to be modified.  If
+                 such a specification is made, affected columns may be
+                 changed by an SNMP set PDU if the RowStatus would not
+                 be equal to `active' either immediately before or after
+                 processing the PDU.  In other words, if the PDU also
+                 contained a varbind that would change the RowStatus
+                 value, the column in question may be changed if the
+                 RowStatus was not equal to `active' as the PDU was
+                 received, or if the varbind sets the status to a value
+                 other than 'active'.
+
+
+            Also note that whenever any elements of a row exist, the
+            RowStatus column must also exist.
+
+            To summarize the effect of having a conceptual row with a
+            status column having a SYNTAX clause value of RowStatus,
+            consider the following state diagram:
+
+
+                                         STATE
+              +--------------+-----------+-------------+-------------
+              |      A       |     B     |      C      |      D
+              |              |status col.|status column|
+              |status column |    is     |      is     |status column
+    ACTION    |does not exist|  notReady | notInService|  is active
+--------------+--------------+-----------+-------------+-------------
+set status    |noError    ->D|inconsist- |inconsistent-|inconsistent-
+column to     |       or     |   entValue|        Value|        Value
+createAndGo   |inconsistent- |           |             |
+              |         Value|           |             |
+--------------+--------------+-----------+-------------+-------------
+set status    |noError  see 1|inconsist- |inconsistent-|inconsistent-
+column to     |       or     |   entValue|        Value|        Value
+createAndWait |wrongValue    |           |             |
+--------------+--------------+-----------+-------------+-------------
+set status    |inconsistent- |inconsist- |noError      |noError
+column to     |         Value|   entValue|             |
+active        |              |           |             |
+              |              |     or    |             |
+              |              |           |             |
+              |              |see 2   ->D|see 8     ->D|          ->D
+--------------+--------------+-----------+-------------+-------------
+set status    |inconsistent- |inconsist- |noError      |noError   ->C
+column to     |         Value|   entValue|             |
+notInService  |              |           |             |
+              |              |     or    |             |      or
+              |              |           |             |
+              |              |see 3   ->C|          ->C|see 6
+--------------+--------------+-----------+-------------+-------------
+set status    |noError       |noError    |noError      |noError   ->A
+column to     |              |           |             |      or
+destroy       |           ->A|        ->A|          ->A|see 7
+--------------+--------------+-----------+-------------+-------------
+set any other |see 4         |noError    |noError      |see 5
+column to some|              |           |             |
+value         |              |      see 1|          ->C|          ->D
+--------------+--------------+-----------+-------------+-------------
+
+            (1) goto B or C, depending on information available to the
+            agent.
+
+            (2) if other variable bindings included in the same PDU,
+            provide values for all columns which are missing but
+            required, and all columns have acceptable values, then
+            return noError and goto D.
+
+            (3) if other variable bindings included in the same PDU,
+            provide legal values for all columns which are missing but
+            required, then return noError and goto C.
+
+            (4) at the discretion of the agent, the return value may be
+            either:
+
+                 inconsistentName:  because the agent does not choose to
+                 create such an instance when the corresponding
+                 RowStatus instance does not exist, or
+
+                 inconsistentValue:  if the supplied value is
+                 inconsistent with the state of some other MIB object's
+                 value, or
+
+                 noError: because the agent chooses to create the
+                 instance.
+
+            If noError is returned, then the instance of the status
+            column must also be created, and the new state is B or C,
+            depending on the information available to the agent.  If
+            inconsistentName or inconsistentValue is returned, the row
+            remains in state A.
+
+            (5) depending on the MIB definition for the column/table,
+            either noError or inconsistentValue may be returned.
+
+            (6) the return value can indicate one of the following
+            errors:
+
+                 wrongValue: because the agent does not support
+                 notInService (e.g., an agent which does not support
+                 createAndWait), or
+
+                 inconsistentValue: because the agent is unable to take
+                 the row out of service at this time, perhaps because it
+                 is in use and cannot be de-activated.
+
+            (7) the return value can indicate the following error:
+
+                 inconsistentValue: because the agent is unable to
+                 remove the row at this time, perhaps because it is in
+                 use and cannot be de-activated.
+
+            (8) the transition to D can fail, e.g., if the values of the
+            conceptual row are inconsistent, then the error code would
+            be inconsistentValue.
+
+            NOTE: Other processing of (this and other varbinds of) the
+            set request may result in a response other than noError
+            being returned, e.g., wrongValue, noCreation, etc.
+
+
+                              Conceptual Row Creation
+
+            There are four potential interactions when creating a
+            conceptual row:  selecting an instance-identifier which is
+            not in use; creating the conceptual row; initializing any
+            objects for which the agent does not supply a default; and,
+            making the conceptual row available for use by the managed
+            device.
+
+            Interaction 1: Selecting an Instance-Identifier
+
+            The algorithm used to select an instance-identifier varies
+            for each conceptual row.  In some cases, the instance-
+            identifier is semantically significant, e.g., the
+            destination address of a route, and a management station
+            selects the instance-identifier according to the semantics.
+
+            In other cases, the instance-identifier is used solely to
+            distinguish conceptual rows, and a management station
+            without specific knowledge of the conceptual row might
+            examine the instances present in order to determine an
+            unused instance-identifier.  (This approach may be used, but
+            it is often highly sub-optimal; however, it is also a
+            questionable practice for a naive management station to
+            attempt conceptual row creation.)
+
+            Alternately, the MIB module which defines the conceptual row
+            might provide one or more objects which provide assistance
+            in determining an unused instance-identifier.  For example,
+            if the conceptual row is indexed by an integer-value, then
+            an object having an integer-valued SYNTAX clause might be
+            defined for such a purpose, allowing a management station to
+            issue a management protocol retrieval operation.  In order
+            to avoid unnecessary collisions between competing management
+            stations, `adjacent' retrievals of this object should be
+            different.
+
+            Finally, the management station could select a pseudo-random
+            number to use as the index.  In the event that this index
+            was already in use and an inconsistentValue was returned in
+            response to the management protocol set operation, the
+            management station should simply select a new pseudo-random
+            number and retry the operation.
+
+            A MIB designer should choose between the two latter
+            algorithms based on the size of the table (and therefore the
+            efficiency of each algorithm).  For tables in which a large
+            number of entries are expected, it is recommended that a MIB
+            object be defined that returns an acceptable index for
+            creation.  For tables with small numbers of entries, it is
+            recommended that the latter pseudo-random index mechanism be
+            used.
+
+            Interaction 2: Creating the Conceptual Row
+
+            Once an unused instance-identifier has been selected, the
+            management station determines if it wishes to create and
+            activate the conceptual row in one transaction or in a
+            negotiated set of interactions.
+
+            Interaction 2a: Creating and Activating the Conceptual Row
+
+            The management station must first determine the column
+            requirements, i.e., it must determine those columns for
+            which it must or must not provide values.  Depending on the
+            complexity of the table and the management station's
+            knowledge of the agent's capabilities, this determination
+            can be made locally by the management station.  Alternately,
+            the management station issues a management protocol get
+            operation to examine all columns in the conceptual row that
+            it wishes to create.  In response, for each column, there
+            are three possible outcomes:
+
+                 - a value is returned, indicating that some other
+                 management station has already created this conceptual
+                 row.  We return to interaction 1.
+
+                 - the exception `noSuchInstance' is returned,
+                 indicating that the agent implements the object-type
+                 associated with this column, and that this column in at
+                 least one conceptual row would be accessible in the MIB
+                 view used by the retrieval were it to exist. For those
+                 columns to which the agent provides read-create access,
+                 the `noSuchInstance' exception tells the management
+                 station that it should supply a value for this column
+                 when the conceptual row is to be created.
+
+                 - the exception `noSuchObject' is returned, indicating
+                 that the agent does not implement the object-type
+                 associated with this column or that there is no
+                 conceptual row for which this column would be
+                 accessible in the MIB view used by the retrieval.  As
+                 such, the management station can not issue any
+                 management protocol set operations to create an
+                 instance of this column.
+
+            Once the column requirements have been determined, a
+            management protocol set operation is accordingly issued.
+            This operation also sets the new instance of the status
+            column to `createAndGo'.
+
+            When the agent processes the set operation, it verifies that
+            it has sufficient information to make the conceptual row
+            available for use by the managed device.  The information
+            available to the agent is provided by two sources:  the
+            management protocol set operation which creates the
+            conceptual row, and, implementation-specific defaults
+            supplied by the agent (note that an agent must provide
+            implementation-specific defaults for at least those objects
+            which it implements as read-only).  If there is sufficient
+            information available, then the conceptual row is created, a
+            `noError' response is returned, the status column is set to
+            `active', and no further interactions are necessary (i.e.,
+            interactions 3 and 4 are skipped).  If there is insufficient
+            information, then the conceptual row is not created, and the
+            set operation fails with an error of `inconsistentValue'.
+            On this error, the management station can issue a management
+            protocol retrieval operation to determine if this was
+            because it failed to specify a value for a required column,
+            or, because the selected instance of the status column
+            already existed.  In the latter case, we return to
+            interaction 1.  In the former case, the management station
+            can re-issue the set operation with the additional
+            information, or begin interaction 2 again using
+            `createAndWait' in order to negotiate creation of the
+            conceptual row.
+
+                                     NOTE WELL
+
+                 Regardless of the method used to determine the column
+                 requirements, it is possible that the management
+                 station might deem a column necessary when, in fact,
+                 the agent will not allow that particular columnar
+                 instance to be created or written.  In this case, the
+                 management protocol set operation will fail with an
+                 error such as `noCreation' or `notWritable'.  In this
+                 case, the management station decides whether it needs
+                 to be able to set a value for that particular columnar
+                 instance.  If not, the management station re-issues the
+                 management protocol set operation, but without setting
+                 a value for that particular columnar instance;
+                 otherwise, the management station aborts the row
+                 creation algorithm.
+
+            Interaction 2b: Negotiating the Creation of the Conceptual
+            Row
+
+            The management station issues a management protocol set
+            operation which sets the desired instance of the status
+            column to `createAndWait'.  If the agent is unwilling to
+            process a request of this sort, the set operation fails with
+            an error of `wrongValue'.  (As a consequence, such an agent
+            must be prepared to accept a single management protocol set
+            operation, i.e., interaction 2a above, containing all of the
+            columns indicated by its column requirements.)  Otherwise,
+            the conceptual row is created, a `noError' response is
+            returned, and the status column is immediately set to either
+            `notInService' or `notReady', depending on whether it has
+            sufficient information to (attempt to) make the conceptual
+            row available for use by the managed device.  If there is
+            sufficient information available, then the status column is
+            set to `notInService'; otherwise, if there is insufficient
+            information, then the status column is set to `notReady'.
+            Regardless, we proceed to interaction 3.
+
+            Interaction 3: Initializing non-defaulted Objects
+
+            The management station must now determine the column
+            requirements.  It issues a management protocol get operation
+            to examine all columns in the created conceptual row.  In
+            the response, for each column, there are three possible
+            outcomes:
+
+                 - a value is returned, indicating that the agent
+                 implements the object-type associated with this column
+                 and had sufficient information to provide a value.  For
+                 those columns to which the agent provides read-create
+                 access (and for which the agent allows their values to
+                 be changed after their creation), a value return tells
+                 the management station that it may issue additional
+                 management protocol set operations, if it desires, in
+                 order to change the value associated with this column.
+
+                 - the exception `noSuchInstance' is returned,
+                 indicating that the agent implements the object-type
+                 associated with this column, and that this column in at
+                 least one conceptual row would be accessible in the MIB
+                 view used by the retrieval were it to exist. However,
+                 the agent does not have sufficient information to
+                 provide a value, and until a value is provided, the
+                 conceptual row may not be made available for use by the
+                 managed device.  For those columns to which the agent
+                 provides read-create access, the `noSuchInstance'
+                 exception tells the management station that it must
+                 issue additional management protocol set operations, in
+                 order to provide a value associated with this column.
+
+                 - the exception `noSuchObject' is returned, indicating
+                 that the agent does not implement the object-type
+                 associated with this column or that there is no
+                 conceptual row for which this column would be
+                 accessible in the MIB view used by the retrieval.  As
+                 such, the management station can not issue any
+                 management protocol set operations to create an
+                 instance of this column.
+
+            If the value associated with the status column is
+            `notReady', then the management station must first deal with
+            all `noSuchInstance' columns, if any.  Having done so, the
+            value of the status column becomes `notInService', and we
+            proceed to interaction 4.
+
+            Interaction 4: Making the Conceptual Row Available
+
+            Once the management station is satisfied with the values
+            associated with the columns of the conceptual row, it issues
+            a management protocol set operation to set the status column
+            to `active'.  If the agent has sufficient information to
+            make the conceptual row available for use by the managed
+            device, the management protocol set operation succeeds (a
+            `noError' response is returned).  Otherwise, the management
+            protocol set operation fails with an error of
+            `inconsistentValue'.
+
+                                     NOTE WELL
+
+                 A conceptual row having a status column with value
+                 `notInService' or `notReady' is unavailable to the
+                 managed device.  As such, it is possible for the
+                 managed device to create its own instances during the
+                 time between the management protocol set operation
+                 which sets the status column to `createAndWait' and the
+                 management protocol set operation which sets the status
+                 column to `active'.  In this case, when the management
+                 protocol set operation is issued to set the status
+                 column to `active', the values held in the agent
+                 supersede those used by the managed device.
+
+            If the management station is prevented from setting the
+            status column to `active' (e.g., due to management station
+            or network failure) the conceptual row will be left in the
+            `notInService' or `notReady' state, consuming resources
+            indefinitely.  The agent must detect conceptual rows that
+            have been in either state for an abnormally long period of
+            time and remove them.  It is the responsibility of the
+            DESCRIPTION clause of the status column to indicate what an
+            abnormally long period of time would be.  This period of
+            time should be long enough to allow for human response time
+            (including `think time') between the creation of the
+            conceptual row and the setting of the status to `active'.
+            In the absence of such information in the DESCRIPTION
+            clause, it is suggested that this period be approximately 5
+            minutes in length.  This removal action applies not only to
+            newly-created rows, but also to previously active rows which
+            are set to, and left in, the notInService state for a
+            prolonged period exceeding that which is considered normal
+            for such a conceptual row.
+
+                             Conceptual Row Suspension
+
+            When a conceptual row is `active', the management station
+            may issue a management protocol set operation which sets the
+            instance of the status column to `notInService'.  If the
+            agent is unwilling to do so, the set operation fails with an
+            error of `wrongValue' or `inconsistentValue'.  Otherwise,
+            the conceptual row is taken out of service, and a `noError'
+            response is returned.  It is the responsibility of the
+            DESCRIPTION clause of the status column to indicate under
+            what circumstances the status column should be taken out of
+            service (e.g., in order for the value of some other column
+            of the same conceptual row to be modified).
+
+
+                              Conceptual Row Deletion
+
+            For deletion of conceptual rows, a management protocol set
+            operation is issued which sets the instance of the status
+            column to `destroy'.  This request may be made regardless of
+            the current value of the status column (e.g., it is possible
+            to delete conceptual rows which are either `notReady',
+            `notInService' or `active'.)  If the operation succeeds,
+            then all instances associated with the conceptual row are
+            immediately removed."
+    SYNTAX       INTEGER {
+                     -- the following two values are states:
+                     -- these values may be read or written
+                     active(1),
+                     notInService(2),
+
+                     -- the following value is a state:
+                     -- this value may be read, but not written
+                     notReady(3),
+
+                     -- the following three values are
+                     -- actions: these values may be written,
+                     --   but are never read
+                     createAndGo(4),
+                     createAndWait(5),
+                     destroy(6)
+                 }
+
+TimeStamp ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "The value of the sysUpTime object at which a specific
+            occurrence happened.  The specific occurrence must be
+            defined in the description of any object defined using this
+            type.
+
+            If sysUpTime is reset to zero as a result of a re-
+            initialization of the network management (sub)system, then
+            the values of all TimeStamp objects are also reset.
+            However, after approximately 497 days without a re-
+            initialization, the sysUpTime object will reach 2^^32-1 and
+            then increment around to zero; in this case, existing values
+            of TimeStamp objects do not change.  This can lead to
+            ambiguities in the value of TimeStamp objects."
+    SYNTAX       TimeTicks
+
+
+TimeInterval ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "A period of time, measured in units of 0.01 seconds."
+    SYNTAX       INTEGER (0..2147483647)
+
+DateAndTime ::= TEXTUAL-CONVENTION
+    DISPLAY-HINT "2d-1d-1d,1d:1d:1d.1d,1a1d:1d"
+    STATUS       current
+    DESCRIPTION
+            "A date-time specification.
+
+            field  octets  contents                  range
+            -----  ------  --------                  -----
+              1      1-2   year*                     0..65536
+              2       3    month                     1..12
+              3       4    day                       1..31
+              4       5    hour                      0..23
+              5       6    minutes                   0..59
+              6       7    seconds                   0..60
+                           (use 60 for leap-second)
+              7       8    deci-seconds              0..9
+              8       9    direction from UTC        '+' / '-'
+              9      10    hours from UTC*           0..13
+             10      11    minutes from UTC          0..59
+
+            * Notes:
+            - the value of year is in network-byte order
+            - daylight saving time in New Zealand is +13
+
+            For example, Tuesday May 26, 1992 at 1:30:15 PM EDT would be
+            displayed as:
+
+                             1992-5-26,13:30:15.0,-4:0
+
+            Note that if only local time is known, then timezone
+            information (fields 8-10) is not present."
+    SYNTAX       OCTET STRING (SIZE (8 | 11))
+
+
+StorageType ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "Describes the memory realization of a conceptual row.  A
+            row which is volatile(2) is lost upon reboot.  A row which
+            is either nonVolatile(3), permanent(4) or readOnly(5), is
+            backed up by stable storage.  A row which is permanent(4)
+            can be changed but not deleted.  A row which is readOnly(5)
+            cannot be changed nor deleted.
+
+            If the value of an object with this syntax is either
+            permanent(4) or readOnly(5), it cannot be written.
+            Conversely, if the value is either other(1), volatile(2) or
+            nonVolatile(3), it cannot be modified to be permanent(4) or
+            readOnly(5).  (All illegal modifications result in a
+            'wrongValue' error.)
+
+            Every usage of this textual convention is required to
+            specify the columnar objects which a permanent(4) row must
+            at a minimum allow to be writable."
+    SYNTAX       INTEGER {
+                     other(1),       -- eh?
+                     volatile(2),    -- e.g., in RAM
+                     nonVolatile(3), -- e.g., in NVRAM
+                     permanent(4),   -- e.g., partially in ROM
+                     readOnly(5)     -- e.g., completely in ROM
+                 }
+
+TDomain ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+          "Denotes a kind of transport service.
+
+          Some possible values, such as snmpUDPDomain, are defined in
+          the SNMPv2-TM MIB module.  Other possible values are defined
+          in other MIB modules."
+    REFERENCE    "The SNMPv2-TM MIB module is defined in RFC 1906."
+    SYNTAX       OBJECT IDENTIFIER
+
+
+TAddress ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+          "Denotes a transport service address.
+
+          A TAddress value is always interpreted within the context of a
+          TDomain value.  Thus, each definition of a TDomain value must
+          be accompanied by a definition of a textual convention for use
+          with that TDomain.  Some possible textual conventions, such as
+          SnmpUDPAddress for snmpUDPDomain, are defined in the SNMPv2-TM
+          MIB module.  Other possible textual conventions are defined in
+          other MIB modules."
+    REFERENCE    "The SNMPv2-TM MIB module is defined in RFC 1906."
+    SYNTAX       OCTET STRING (SIZE (1..255))
+
+
+END
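The RowStatus handshake spelled out in the SNMPv2-TC text above (interactions 2b through 4) can be illustrated from the manager's side with a short sketch. This code is not part of the repository; the agent address, community string, column OIDs and row index are made-up placeholders, and the snippet only assumes the pysnmp high-level API is available.

    # Hypothetical manager-side sketch of RowStatus row creation via createAndWait.
    # RowStatus values as defined above: createAndWait(5), active(1).
    # All OIDs, addresses and credentials are placeholders.
    from pysnmp.hlapi import (
        SnmpEngine, CommunityData, UdpTransportTarget, ContextData,
        ObjectType, ObjectIdentity, Integer32, OctetString, setCmd,
    )

    TARGET = UdpTransportTarget(('192.0.2.10', 161))        # placeholder agent address
    AUTH = CommunityData('private')                          # placeholder community
    ROW_INDEX = '42'                                         # placeholder instance-identifier
    STATUS_COL = '1.3.6.1.4.1.99999.1.1.9.' + ROW_INDEX      # placeholder status column instance
    VALUE_COL = '1.3.6.1.4.1.99999.1.1.2.' + ROW_INDEX       # placeholder read-create column

    def snmp_set(*var_binds):
        """Issue one SET PDU and raise if the agent reports an error."""
        error_indication, error_status, _error_index, _var_binds = next(
            setCmd(SnmpEngine(), AUTH, TARGET, ContextData(), *var_binds)
        )
        if error_indication or error_status:
            raise RuntimeError(str(error_indication) or error_status.prettyPrint())

    # Interaction 2b: create the row; the agent leaves it notInService or notReady.
    snmp_set(ObjectType(ObjectIdentity(STATUS_COL), Integer32(5)))    # createAndWait(5)

    # Interaction 3: supply values for columns the agent could not default.
    snmp_set(ObjectType(ObjectIdentity(VALUE_COL), OctetString('example')))

    # Interaction 4: make the conceptual row available to the managed device.
    snmp_set(ObjectType(ObjectIdentity(STATUS_COL), Integer32(1)))    # active(1)
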
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/TEST-MIB.txt b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/TEST-MIB.txt
new file mode 100644
index 0000000000000000000000000000000000000000..d0ea67e2730c46edcc175a5326cf4d44e29a4dc3
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/TEST-MIB.txt
@@ -0,0 +1,36 @@
+TEST-MIB DEFINITIONS ::= BEGIN
+
+--
+-- Simple MIB objects for testing
+--
+
+IMPORTS
+    MODULE-IDENTITY, OBJECT-TYPE, Integer32, org FROM SNMPv2-SMI
+    ;
+
+testMib MODULE-IDENTITY
+    LAST-UPDATED "202004060000Z"
+    ORGANIZATION "astron"
+    CONTACT-INFO "astron"
+    DESCRIPTION "Test MIB"
+    ::= { org 2 }
+
+--
+-- top level structure
+--
+testVal       OBJECT IDENTIFIER ::= { testMib 1 }
+
+--
+-- Example scalars
+--
+
+testValue OBJECT-TYPE
+    SYNTAX      Integer32
+    MAX-ACCESS  read-write
+    STATUS      current
+    DESCRIPTION
+	"This is simply a test value."
+    DEFVAL { 1 }
+    ::= { testVal 1 }
+
+END
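
A MIB source such as TEST-MIB.txt is normally translated into importable Python modules before an SNMP client can reference its symbols by name. The sketch below shows an assumed, generic invocation of the public pysmi API for that step; the source and destination directories are placeholders, and this is not necessarily how the repository's own mib_compiler drives the compilation.

    # Hedged sketch: compile TEST-MIB (and the MIBs it imports) into pysnmp modules.
    from pysmi.reader import FileReader
    from pysmi.searcher import StubSearcher
    from pysmi.writer import PyFileWriter
    from pysmi.parser import SmiStarParser
    from pysmi.codegen import PySnmpCodeGen
    from pysmi.compiler import MibCompiler

    SRC_DIR = './mibs'           # placeholder: directory holding TEST-MIB.txt, SNMPv2-TC, ...
    DST_DIR = './compiled_mibs'  # placeholder: where generated Python modules are written

    compiler = MibCompiler(SmiStarParser(), PySnmpCodeGen(), PyFileWriter(DST_DIR))
    compiler.addSources(FileReader(SRC_DIR))
    # Base modules (SNMPv2-SMI, SNMPv2-TC, ...) already ship with pysnmp; stub them out.
    compiler.addSearchers(StubSearcher(*PySnmpCodeGen.baseMibs))

    results = compiler.compile('TEST-MIB')
    print(results)  # e.g. {'TEST-MIB': 'compiled', ...}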