diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index f5a14f0b59ce2355c2a9ca7f46c0860aeb83f198..de8a85e3ba87d0e659d0ecb96e6457e6c48fab46 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -42,17 +42,6 @@ stages:
     - . bootstrap/etc/lofar20rc.sh || true
 ##    Allow docker image script to execute
 #    - chmod u+x $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh
-# This suffers from only refs changes not working as expected:
-# https://gitlab.com/gitlab-org/gitlab-foss/-/issues/55012
-# Therefore we have to add `only: refs: - merge_requests` to all jobs that are
-# only supposed to run on merge requests with file changes. However,
-# two pipelines will spawn instead of one of which one tagged with 'detached`.
-.base_docker_images_except:
-  extends: .base_docker_images
-  except:
-    refs:
-      - tags
-      - master
 .base_docker_store_images:
   extends: .base_docker_images
   script:
@@ -66,15 +55,17 @@ docker_store_images_master_tag:
       - master
 docker_store_images_changes:
   extends: .base_docker_store_images
-  only:
-    refs:
-      - merge_requests
-    changes:
+  rules:
+#   https://stackoverflow.com/questions/68955071/how-to-disable-detached-pipelines-in-gitlab
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/.env
-  except:
-    refs:
-      - tags
-      - master
+      when: always
 docker_build_image_all:
   extends: .base_docker_images
   only:
@@ -110,43 +101,62 @@ docker_build_image_all:
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sst latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-unb2 latest
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-xst latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-temperature-manager latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh archiver-timescale latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-cm latest
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-es latest
+
 docker_build_image_elk:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/elk.yml
       - docker-compose/elk/*
       - docker-compose/elk-configure-host/*
+      when: always
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh elk $tag
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh elk-configure-host $tag
 docker_build_image_lofar_device_base:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/lofar-device-base.yml
       - docker-compose/lofar-device-base/*
+      when: always
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh lofar-device-base $tag
 docker_build_image_prometheus:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/prometheus.yml
       - docker-compose/prometheus/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh prometheus $tag
 docker_build_image_itango:
-  extends: .base_docker_images_except
+  extends: .base_docker_images
   only:
     refs:
       - merge_requests
@@ -157,247 +167,395 @@ docker_build_image_itango:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh itango $tag
 docker_build_image_grafana:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/grafana.yml
       - docker-compose/grafana/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh grafana $tag
 docker_build_image_jupyter:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/jupyter.yml
       - docker-compose/jupyter/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh jupyter $tag
 docker_build_image_apsct_sim:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/aspct-sim.yml
       - docker-compose/pypcc-sim-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh apsct-sim $tag
 docker_build_image_apspu_sim:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/apspu-sim.yml
       - docker-compose/pypcc-sim-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh apspu-sim $tag
 docker_build_image_recv_sim:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/recv-sim.yml
       - docker-compose/pypcc-sim-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh recv-sim $tag
 docker_build_image_sdptr_sim:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/sdptr-sim.yml
       - docker-compose/sdptr-sim/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh sdptr-sim $tag
 docker_build_image_unb2_sim:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/unb2-sim.yml
       - docker-compose/pypcc-sim-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh unb2-sim $tag
 docker_build_image_device_apsct:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-aspct.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-aspct $tag
 docker_build_image_device_apspu:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-apspu.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-apspu $tag
 docker_build_image_device_pdu:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-pdu.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-pdu $tag
 docker_build_image_device_tilebeam:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-tilebeam.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-tilebeam $tag
 docker_build_image_device_beamlet:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-beamlet.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-beamlet $tag
 docker_build_image_device_digitalbeam:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-digitalbeam.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-digitalbeam $tag
 docker_build_image_device_boot:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-boot.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-boot $tag
 docker_build_image_device_docker:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-docker.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-docker $tag
 docker_build_image_device_ovservation_control:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-observation_control.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-observation_control $tag
 docker_build_image_device_antennafield:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-antennafield.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-antennafield $tag
 docker_build_image_device_recv:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-recv.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-recv $tag
 docker_build_image_device_sdp:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-sdp.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sdp $tag
 docker_build_image_device_sst:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-sst.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-sst $tag
 docker_build_image_device_unb2:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-unb2.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-unb2 $tag
 docker_build_image_device_xst:
-  extends: .base_docker_images_except
-  only:
-    refs:
-      - merge_requests
-    changes:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
       - docker-compose/device-xst.yml
       - docker-compose/lofar-device-base/*
   script:
 #    Do not remove 'bash' or statement will be ignored by primitive docker shell
     - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-xst $tag
+docker_build_image_device_temperature_manager:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
+      - docker-compose/device-temperature-manager.yml
+      - docker-compose/lofar-device-base/*
+  script:
+#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh device-temperature-manager $tag
+docker_build_image_archiver_timescale:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
+      - docker-compose/archiver-timescale.yml
+      - docker-compose/timescaledb/*
+  script:
+#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh archiver-timescale $tag
+docker_build_image_hdbppts_cm:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
+      - docker-compose/archiver-timescale.yml
+      - docker-compose/tango-archiver-ts/*
+  script:
+#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-cm $tag
+docker_build_image_hdbppts_es:
+  extends: .base_docker_images
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+    - if: '$CI_COMMIT_TAG != null'
+      when: never
+    - if: '$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH'
+      when: never
+    - changes:
+      - docker-compose/archiver-timescale.yml
+      - docker-compose/tango-archiver-ts/*
+  script:
+#    Do not remove 'bash' or statement will be ignored by primitive docker shell
+    - bash $CI_PROJECT_DIR/sbin/tag_and_push_docker_image.sh hdbppts-es $tag
 newline_at_eof:
   stage: linting
   before_script:
diff --git a/.gitmodules b/.gitmodules
index 1c9e69fc593c305a941f8d35e16f2efb531cefb5..f1248450adb0a12584a247b8119bc9653e6498f0 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +1,7 @@
 [submodule "tangostationcontrol/tangostationcontrol/toolkit/libhdbpp-python"]
 	path = tangostationcontrol/tangostationcontrol/toolkit/libhdbpp-python
 	url = https://gitlab.com/tango-controls/hdbpp/libhdbpp-python.git
+[submodule "docker-compose/alerta-web"]
+	path = docker-compose/alerta-web
+	url = https://github.com/jjdmol/alerta-webui
+	branch = add-isa-18-2-states
diff --git a/CDB/LOFAR_ConfigDb.json b/CDB/LOFAR_ConfigDb.json
index 56263d3c25383e5066b71e4ffdef640b8f4b348e..9c243aab268d9ca13f37bfe3d7af9cfbba8222de 100644
--- a/CDB/LOFAR_ConfigDb.json
+++ b/CDB/LOFAR_ConfigDb.json
@@ -1,4 +1,14 @@
 {
+    "objects": {
+        "station": {
+            "name": [
+                "DevStation"
+            ],
+            "number": [
+                "999"
+            ]
+        }
+    },
     "servers": {
         "Docker": {
             "STAT": {
@@ -32,10 +42,18 @@
                 }
             }
         },
+        "TemperatureManager": {
+            "STAT": {
+                "TemperatureManager": {
+                    "STAT/TemperatureManager/1": {}
+                }
+            }
+        },
         "TileBeam": {
             "STAT": {
                 "TileBeam": {
-                    "STAT/TileBeam/1": {}
+                    "STAT/TileBeam/1": {
+                    }
                 }
             }
         },
diff --git a/CDB/stations/DTS_ConfigDb.json b/CDB/stations/DTS_ConfigDb.json
index 398ef7d63577ce62f61c2374b9335a905ebce566..b0cd5d91ed6795c579680df10fe53a82b1e93ccb 100644
--- a/CDB/stations/DTS_ConfigDb.json
+++ b/CDB/stations/DTS_ConfigDb.json
@@ -1,4 +1,14 @@
 {
+    "objects": {
+        "station": {
+            "name": [
+                "DTS"
+            ],
+            "number": [
+                "902"
+            ]
+        }
+    },
     "servers": {
         "boot": {
             "STAT": {
diff --git a/CDB/stations/DTS_Outside_ConfigDb.json b/CDB/stations/DTS_Outside_ConfigDb.json
new file mode 100644
index 0000000000000000000000000000000000000000..e1b6e19079df728ebba70204fea9768249501224
--- /dev/null
+++ b/CDB/stations/DTS_Outside_ConfigDb.json
@@ -0,0 +1,524 @@
+{
+    "objects": {
+        "station": {
+            "name": [
+                "DTSOutside"
+            ],
+            "number": [
+                "903"
+            ]
+        }
+    },
+    "servers": {
+        "boot": {
+            "STAT": {
+                "Boot": {
+                    "STAT/Boot/1": {
+                        "properties": {
+                            "Initialise_Hardware": [
+                                "True"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "APSCT": {
+            "STAT": {
+                "APSCT": {
+                    "STAT/APSCT/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.87.6.80"
+                            ],
+                            "OPC_Server_Port": [
+                                "4843"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "APSPU": {
+            "STAT": {
+                "APSPU": {
+                    "STAT/APSPU/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.87.6.80"
+                            ],
+                            "OPC_Server_Port": [
+                                "4842"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "Beamlet": {
+            "STAT": {
+                "Beamlet": {
+                    "STAT/Beamlet/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.99.0.250"
+                            ],
+                            "OPC_Server_Port": [
+                                "4840"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ],
+                            "FPGA_beamlet_output_hdr_eth_destination_mac_RW_default": [
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7"
+                            ],
+                            "FPGA_beamlet_output_hdr_ip_destination_address_RW_default": [
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "RECV": {
+            "STAT": {
+                "RECV": {
+                    "STAT/RECV/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.87.6.80"
+                            ],
+                            "OPC_Server_Port": [
+                                "4840"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ],
+                            "HBAT_reference_ETRS": [
+                                "3839371.416", "430339.901", "5057958.886",
+                                "3839368.919", "430335.979", "5057961.1",
+                                "3839365.645", "430339.299", "5057963.288",
+                                "3839368.142", "430343.221", "5057961.074",
+                                "3839374.094", "430299.513", "5057960.017",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0",
+                                "0", "0", "0"
+                            ],
+                            "HBAT_PQR_rotation_angle_deg": [
+                                "45.73",
+                                "45.73",
+                                "45.73",
+                                "45.73",
+                                "54.40",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0",
+                                "0"
+                            ],
+                            "HBAT_PQR_to_ETRS_rotation_matrix": [
+                               "-0.11660087", "-0.79095632", "0.60065992",
+                               " 0.99317077", "-0.09529842", "0.06730545",
+                               " 0.00400627", " 0.60440575", "0.79666658"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "SDP": {
+            "STAT": {
+                "SDP": {
+                    "STAT/SDP/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.99.0.250"
+                            ],
+                            "OPC_Server_Port": [
+                                "4840"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ],
+                            "FPGA_sdp_info_station_id_RW_default": [
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903",
+                                "903"
+                            ],
+                            "TR_fpga_mask_RW_default": [
+                                "True",
+                                "True",
+                                "True",
+                                "True",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False",
+                                "False"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "SST": {
+            "STAT": {
+                "SST": {
+                    "STAT/SST/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.99.0.250"
+                            ],
+                            "OPC_Server_Port": [
+                                "4840"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ],
+                            "FPGA_sst_offload_hdr_eth_destination_mac_RW_default": [
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7"
+                            ],
+                            "FPGA_sst_offload_hdr_ip_destination_address_RW_default": [
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "XST": {
+            "STAT": {
+                "XST": {
+                    "STAT/XST/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.99.0.250"
+                            ],
+                            "OPC_Server_Port": [
+                                "4840"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ],
+                            "FPGA_xst_offload_hdr_eth_destination_mac_RW_default": [
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7",
+                                "3c:ec:ef:86:2f:b7"
+                            ],
+                            "FPGA_xst_offload_hdr_ip_destination_address_RW_default": [
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "UNB2": {
+            "STAT": {
+                "UNB2": {
+                    "STAT/UNB2/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "10.87.6.80"
+                            ],
+                            "OPC_Server_Port": [
+                                "4841"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ]
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/CDB/stations/LTS_ConfigDb.json b/CDB/stations/LTS_ConfigDb.json
index 63b16a78b809ec51644094a41f6700f7f16ced22..f6cacc0d93adddef12a3868a3e1c93c70a083f6b 100644
--- a/CDB/stations/LTS_ConfigDb.json
+++ b/CDB/stations/LTS_ConfigDb.json
@@ -1,4 +1,14 @@
 {
+    "objects": {
+        "station": {
+            "name": [
+                "LTS"
+            ],
+            "number": [
+                "901"
+            ]
+        }
+    },
     "servers": {
         "boot": {
             "STAT": {
diff --git a/CDB/stations/simulators_ConfigDb.json b/CDB/stations/simulators_ConfigDb.json
index 59fcdee54cef6d9947419e839805e9851c539f65..6afb6b21adb00239edf53241dfe4b515cad0f35a 100644
--- a/CDB/stations/simulators_ConfigDb.json
+++ b/CDB/stations/simulators_ConfigDb.json
@@ -116,6 +116,16 @@
                 }
             }
         },
+        "TemperatureManager": {
+            "STAT": {
+                "TemperatureManager": {
+                    "STAT/TemperatureManager/1": {
+                        "properties": {
+                        }
+                    }
+                }
+            }
+        },
         "RECV": {
             "STAT": {
                 "RECV": {
diff --git a/bin/dump_ConfigDb.sh b/bin/dump_ConfigDb.sh
index c1f6dc214e32458af1f1d555332ecb40c2b71601..2532b8e275a3c4a609dc9b618fb143f8815f94a6 100755
--- a/bin/dump_ConfigDb.sh
+++ b/bin/dump_ConfigDb.sh
@@ -1,4 +1,7 @@
 #!/bin/bash
 
 # writes the JSON dump to stdout, Do not change -i into -it incompatible with gitlab ci!
-docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig python -m dsconfig.dump
+docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig bash -c '
+  python -m dsconfig.dump > /tmp/dsconfig-configdb-dump.json
+  /manage_object_properties.py -r > /tmp/dsconfig-objectdb-dump.json
+  /merge_json.py /tmp/dsconfig-objectdb-dump.json /tmp/dsconfig-configdb-dump.json'
diff --git a/docker-compose/alerta-web/Dockerfile b/docker-compose/alerta-server/Dockerfile
similarity index 82%
rename from docker-compose/alerta-web/Dockerfile
rename to docker-compose/alerta-server/Dockerfile
index 80431da39da9ddb7ff0c28997660163234eb6d57..04f9bce1233a38a09cea6814b2ce8ac54f30fd84 100644
--- a/docker-compose/alerta-web/Dockerfile
+++ b/docker-compose/alerta-server/Dockerfile
@@ -9,6 +9,9 @@ RUN bash -c 'source /venv/bin/activate; pip install /tmp/grafana-plugin'
 COPY lofar-plugin /tmp/lofar-plugin
 RUN bash -c 'source /venv/bin/activate; pip install /tmp/lofar-plugin'
 
+COPY lofar-routing-plugin /tmp/lofar-routing-plugin
+RUN bash -c 'source /venv/bin/activate; pip install /tmp/lofar-routing-plugin'
+
 COPY alertad.conf /app/alertad.conf
 COPY alerta.conf /app/alerta.conf
 COPY config.json /web/config.json
diff --git a/docker-compose/alerta-web/README.md b/docker-compose/alerta-server/README.md
similarity index 100%
rename from docker-compose/alerta-web/README.md
rename to docker-compose/alerta-server/README.md
diff --git a/docker-compose/alerta-web/alerta-secrets.json b/docker-compose/alerta-server/alerta-secrets.json
similarity index 100%
rename from docker-compose/alerta-web/alerta-secrets.json
rename to docker-compose/alerta-server/alerta-secrets.json
diff --git a/docker-compose/alerta-web/alerta.conf b/docker-compose/alerta-server/alerta.conf
similarity index 100%
rename from docker-compose/alerta-web/alerta.conf
rename to docker-compose/alerta-server/alerta.conf
diff --git a/docker-compose/alerta-web/alertad.conf b/docker-compose/alerta-server/alertad.conf
similarity index 75%
rename from docker-compose/alerta-web/alertad.conf
rename to docker-compose/alerta-server/alertad.conf
index dc7b6c2e295ae4230a9373ed26f148d6aad59cd0..b0088c6c2bf8f26fd9cec59a3e12680dcbb1029e 100644
--- a/docker-compose/alerta-web/alertad.conf
+++ b/docker-compose/alerta-server/alertad.conf
@@ -1,15 +1,22 @@
+import os
+
 DEBUG = True
 SECRET = "T=&7xvF2S&x7w_JAcq$h1x5ocfA)8H2i"
 
 # Allow non-admin views
 CUSTOMER_VIEWS = True
 
+# Use more advanced ANSI/ISA 18.2 alarm model,
+# which does not auto-close alarms and thus
+# allows for tracking alarms that came and went.
+ALARM_MODEL = "ISA_18_2"
+
 # Never timeout alerts
 ALERT_TIMEOUT = 0
 # Auto unack after a day
 ACK_TIMEOUT = 24 * 3600
 # Auto unshelve after 2 hours
-SHELVE_TIMEOUT = 2 * 3600
+SHELVE_TIMEOUT = 7 * 24 * 3600
 
 # Use custom date formats
 DATE_FORMAT_MEDIUM_DATE = "dd DD/MM HH:mm"
@@ -17,10 +24,31 @@ DATE_FORMAT_LONG_DATE   = "yyyy-MM-DD HH:mm:ss.sss"
 
 # Default overview settings
 COLUMNS = ['severity', 'status', 'createTime', 'lastReceiveTime', 'resource', 'grafanaDashboardHtml', 'grafanaPanelHtml', 'event', 'text']
-DEFAULT_FILTER = {'status': ['open']}
+DEFAULT_FILTER = {'status': ['UNACK', 'RTNUN']}
 SORT_LIST_BY = "createTime"
 AUTO_REFRESH_INTERVAL = 5000 # ms
 
+COLOR_MAP = {
+    'severity': {
+        'Critical': 'red',
+        'High': 'orange',
+        'Medium': '#FFF380', # corn yellow
+        'Low': 'dodgerblue',
+        'Advisory': 'lightblue',
+        'OK': '#00CC00',  # lime green
+        'Unknown': 'silver'
+    },
+    'text': 'black'
+}
+
+# Allow alerta-web to refer to alerta-server for the client
+CORS_ORIGINS = [
+    'http://localhost:8081',
+    'http://localhost:8082',
+    os.environ.get("BASE_URL", ""),
+    os.environ.get("DASHBOARD_URL", ""),
+]
+
 # ------------------------------------
 #    Plugin configuration
 # ------------------------------------
@@ -28,7 +56,7 @@ AUTO_REFRESH_INTERVAL = 5000 # ms
 PLUGINS = ['reject', 'blackout', 'acked_by', 'enhance', 'grafana', 'lofar', 'slack']
 
 # Slack plugin settings, see https://github.com/alerta/alerta-contrib/tree/master/plugins/slack
-import os, json
+import json
 
 with open("/run/secrets/alerta-secrets") as secrets_file:
     secrets = json.load(secrets_file)
diff --git a/docker-compose/alerta-web/config.json b/docker-compose/alerta-server/config.json
similarity index 100%
rename from docker-compose/alerta-web/config.json
rename to docker-compose/alerta-server/config.json
diff --git a/docker-compose/alerta-web/grafana-plugin/alerta_grafana.py b/docker-compose/alerta-server/grafana-plugin/alerta_grafana.py
similarity index 100%
rename from docker-compose/alerta-web/grafana-plugin/alerta_grafana.py
rename to docker-compose/alerta-server/grafana-plugin/alerta_grafana.py
diff --git a/docker-compose/alerta-web/grafana-plugin/setup.py b/docker-compose/alerta-server/grafana-plugin/setup.py
similarity index 100%
rename from docker-compose/alerta-web/grafana-plugin/setup.py
rename to docker-compose/alerta-server/grafana-plugin/setup.py
diff --git a/docker-compose/alerta-server/lofar-plugin/alerta_lofar.py b/docker-compose/alerta-server/lofar-plugin/alerta_lofar.py
new file mode 100644
index 0000000000000000000000000000000000000000..b227069c8805b0f71aa8438c474d5a9afe5129ac
--- /dev/null
+++ b/docker-compose/alerta-server/lofar-plugin/alerta_lofar.py
@@ -0,0 +1,69 @@
+import os
+import json
+import logging
+
+from alerta.plugins import PluginBase
+import alerta.models.alarms.isa_18_2 as isa_18_2
+
+LOG = logging.getLogger()
+
+
+class EnhanceLOFAR(PluginBase):
+    """
+    Plugin for enhancing alerts with LOFAR-specific information
+    """
+
+    @staticmethod
+    def _fix_severity(alert):
+        """
+          Force conversion of severity to ISA 18.2 model, to allow Alerta to parse the alert.
+
+          For example, the 'prometheus' webhook by default uses the 'warning' severity,
+          but also users might specify a non-existing severity level.
+        """
+
+        if alert.severity not in isa_18_2.SEVERITY_MAP:
+            # Save original severity
+            alert.attributes['unparsableSeverity'] = alert.severity
+
+            translation = {
+                "normal":   isa_18_2.OK,
+                "ok":       isa_18_2.OK,
+                "cleared":  isa_18_2.OK,
+                "warning":  isa_18_2.LOW,
+                "minor":    isa_18_2.MEDIUM,
+                "major":    isa_18_2.HIGH,
+                "critical": isa_18_2.CRITICAL,
+            }
+
+            alert.severity = translation.get(alert.severity.lower(), isa_18_2.MEDIUM)
+
+    def pre_receive(self, alert, **kwargs):
+        self._fix_severity(alert)
+
+        # Parse LOFAR-specific fields
+        for tag in alert.tags:
+            try:
+                key, value = tag.split("=", 1)
+            except ValueError:
+                continue
+
+            if key == "device":
+                alert.attributes['lofarDevice'] = value
+
+            if key == "name":
+                alert.attributes['lofarAttribute'] = value
+
+            if key == "station":
+                alert.resource = value
+
+        return alert
+
+    def post_receive(self, alert, **kwargs):
+        return
+
+    def status_change(self, alert, status, text, **kwargs):
+        return
+
+    def take_action(self, alert, action, text, **kwargs):
+        raise NotImplementedError
diff --git a/docker-compose/alerta-web/lofar-plugin/setup.py b/docker-compose/alerta-server/lofar-plugin/setup.py
similarity index 100%
rename from docker-compose/alerta-web/lofar-plugin/setup.py
rename to docker-compose/alerta-server/lofar-plugin/setup.py
diff --git a/docker-compose/alerta-server/lofar-routing-plugin/routing.py b/docker-compose/alerta-server/lofar-routing-plugin/routing.py
new file mode 100644
index 0000000000000000000000000000000000000000..bcd9f9e159c5f44bf12cacf17fb926b5db7bdb5a
--- /dev/null
+++ b/docker-compose/alerta-server/lofar-routing-plugin/routing.py
@@ -0,0 +1,72 @@
+import logging
+
+from alerta.app import alarm_model
+from alerta.models.enums import ChangeType
+
+LOG = logging.getLogger('alerta.plugins.routing')
+
+# For a description of this interface,
+# see https://docs.alerta.io/gettingstarted/tutorial-3-plugins.html?highlight=rules#step-3-route-alerts-to-plugins
+def rules(alert, plugins, config):
+    if alert.previous_severity is None:
+        # The alert still has to be parsed, and enriched, before it is
+        # merged into existing alerts.
+        return rules_prereceive(alert, plugins, config)
+    else:
+        # The alert has been processed. Check to which plugins we
+        # want to send it.
+        return rules_postreceive(alert, plugins, config)
+
+def rules_prereceive(alert, plugins, config):
+    """ Rules to determine which processing filters to use. """
+
+    # no filtering
+    return (plugins.values(), {})
+
+def _is_new_problem(alert) -> bool:
+    """ Return whether the state change denotes a newly identified issue
+        on a system that (as far as the operator knew) was fine before.
+
+        Returns True when detecting NORM -> UNACK transitions, and False
+        on any duplicates of this transition.
+
+        Note that RTNUN -> UNACK is thus not triggered on. """
+
+    if alert.status != 'UNACK':
+        # Only report problems (not ACKing, SHELVing, etc)
+        return False
+    elif alert.last_receive_time != alert.update_time:
+        # Ignore anything that didn't update the alert,
+        # to avoid triggering on alerts that repeat
+        # the current situation
+        return False
+    else:
+        # Only report if the previous status was NORM, to avoid
+        # triggering on (f.e.) RTNUN -> UNACK transitions.
+        for h in alert.history: # is sorted new -> old
+            if h.status == alert.status:
+                # ignore any update that didn't change the status
+                continue
+
+            return h.status == "NORM"
+
+        # ... or if there was no previous status (a brand new alert)
+        return True
+
+def rules_postreceive(alert, plugins, config):
+    """ Rules to determine which emission methods to use. """
+
+    # decide whether to notify the user on slack
+    send_to_slack = _is_new_problem(alert)
+
+    LOG.debug(f"Sending alert {alert.event} with status {alert.status} and severity {alert.previous_severity} => {alert.severity} to slack? {send_to_slack}")
+
+    # filter the plugin list based on these decisions
+    use_plugins = []
+    for name, plugin in plugins.items():
+        if name == 'slack' and not send_to_slack:
+            pass
+        else:
+            use_plugins.append(plugin)
+
+    return (use_plugins, {})
diff --git a/docker-compose/alerta-server/lofar-routing-plugin/setup.py b/docker-compose/alerta-server/lofar-routing-plugin/setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..038881e14b12d0f0c0ca941fd629a53ac730df75
--- /dev/null
+++ b/docker-compose/alerta-server/lofar-routing-plugin/setup.py
@@ -0,0 +1,24 @@
+
+from setuptools import setup, find_packages
+
+version = '1.0.0'
+
+setup(
+    name="alerta-routing",
+    version=version,
+    description='Alerta plugin to configure LOFAR custom alert routing',
+    url='https://git.astron.nl/lofar2.0/tango',
+    license='Apache License 2.0',
+    author='Jan David Mol',
+    author_email='mol@astron.nl',
+    packages=find_packages(),
+    py_modules=['routing'],
+    include_package_data=True,
+    zip_safe=True,
+    entry_points={
+        'alerta.routing': [
+            'rules = routing:rules'
+        ]
+    },
+    python_requires='>=3.5'
+)
diff --git a/docker-compose/alerta-web b/docker-compose/alerta-web
new file mode 160000
index 0000000000000000000000000000000000000000..9ee69dfbd0e33604169604b5a5cc506d560cb60b
--- /dev/null
+++ b/docker-compose/alerta-web
@@ -0,0 +1 @@
+Subproject commit 9ee69dfbd0e33604169604b5a5cc506d560cb60b
diff --git a/docker-compose/alerta-web/lofar-plugin/alerta_lofar.py b/docker-compose/alerta-web/lofar-plugin/alerta_lofar.py
deleted file mode 100644
index c4f618d2d6675feab78fce49cedc9f8030766c97..0000000000000000000000000000000000000000
--- a/docker-compose/alerta-web/lofar-plugin/alerta_lofar.py
+++ /dev/null
@@ -1,41 +0,0 @@
-import os
-import json
-import logging
-
-from alerta.plugins import PluginBase
-
-LOG = logging.getLogger()
-
-
-class EnhanceLOFAR(PluginBase):
-    """
-    Plugin for enhancing alerts with LOFAR-specific information
-    """
-
-    def pre_receive(self, alert, **kwargs):
-        # Parse LOFAR-specific fields
-        for tag in alert.tags:
-            try:
-                key, value = tag.split("=", 1)
-            except ValueError:
-                continue
-
-            if key == "device":
-                alert.attributes['lofarDevice'] = value
-
-            if key == "name":
-                alert.attributes['lofarAttribute'] = value
-
-            if key == "station":
-                alert.resource = value
-
-        return alert
-
-    def post_receive(self, alert, **kwargs):
-        return
-
-    def status_change(self, alert, status, text, **kwargs):
-        return
-
-    def take_action(self, alert, action, text, **kwargs):
-        raise NotImplementedError
diff --git a/docker-compose/alerta-web/rules.json b/docker-compose/alerta-web/rules.json
deleted file mode 100644
index ca8df8cf7b01a4bd014387e045a2492d35292300..0000000000000000000000000000000000000000
--- a/docker-compose/alerta-web/rules.json
+++ /dev/null
@@ -1 +0,0 @@
-{"test":[{"name":"test2","interval":"10s","rules":[{"expr":"","for":"20s","labels":{"severity":"major"},"annotations":{"__dashboardUid__":"nC8N_kO7k","__panelId__":"9","summary":"My test alert"},"grafana_alert":{"id":3,"orgId":1,"title":"FPGA processing error 2","condition":"B","data":[{"refId":"A","queryType":"","relativeTimeRange":{"from":600,"to":0},"datasourceUid":"ZqArtG97z","model":{"exemplar":false,"expr":"device_attribute{device=\"stat/sdp/1\",name=\"FPGA_error_R\"}","format":"time_series","group":[],"hide":false,"interval":"","intervalMs":1000,"legendFormat":"","maxDataPoints":43200,"metricColumn":"name","rawQuery":true,"rawSql":"SELECT\n  data_time AS \"time\",\n  x::text,\n  device,\n  name,\n  case when value then 1 else 0 end AS value\nFROM lofar_array_boolean\nWHERE\n  $__timeFilter(data_time) AND\n  name = 'fpga_error_r'\nORDER BY 1,2","refId":"A","select":[[{"params":["x"],"type":"column"}],[{"params":["value"],"type":"column"}]],"table":"lofar_array_boolean","timeColumn":"data_time","timeColumnType":"timestamptz","where":[{"name":"$__timeFilter","params":[],"type":"macro"},{"datatype":"text","name":"","params":["name","=","'fpga_error_r'"],"type":"expression"}]}},{"refId":"B","queryType":"","relativeTimeRange":{"from":0,"to":0},"datasourceUid":"-100","model":{"conditions":[{"evaluator":{"params":[0,0],"type":"gt"},"operator":{"type":"and"},"query":{"params":[]},"reducer":{"params":[],"type":"avg"},"type":"query"}],"datasource":{"type":"__expr__","uid":"__expr__"},"expression":"A","hide":false,"intervalMs":1000,"maxDataPoints":43200,"reducer":"last","refId":"B","settings":{"mode":"dropNN"},"type":"reduce"}}],"updated":"2022-04-04T14:18:48Z","intervalSeconds":10,"version":1,"uid":"waXdSCynk","namespace_uid":"9DkbdYy7z","namespace_id":6,"rule_group":"test2","no_data_state":"OK","exec_err_state":"Error"}}]},{"name":"test","interval":"10s","rules":[{"expr":"","for":"20s","labels":{"severity":"major"},"annotations":{"__dashboardUid__":"nC8N_kO7k","__pane
lId__":"9","summary":"My test alert"},"grafana_alert":{"id":2,"orgId":1,"title":"FPGA processing error","condition":"B","data":[{"refId":"A","queryType":"","relativeTimeRange":{"from":600,"to":0},"datasourceUid":"ZqArtG97z","model":{"exemplar":false,"expr":"device_attribute{device=\"stat/sdp/1\",name=\"FPGA_error_R\"}","format":"time_series","group":[],"hide":false,"interval":"","intervalMs":1000,"legendFormat":"","maxDataPoints":43200,"metricColumn":"name","rawQuery":true,"rawSql":"SELECT\n  data_time AS \"time\",\n  x::text,\n  device,\n  name,\n  case when value then 1 else 0 end AS value\nFROM lofar_array_boolean\nWHERE\n  $__timeFilter(data_time) AND\n  name = 'fpga_error_r'\nORDER BY 1,2","refId":"A","select":[[{"params":["x"],"type":"column"}],[{"params":["value"],"type":"column"}]],"table":"lofar_array_boolean","timeColumn":"data_time","timeColumnType":"timestamptz","where":[{"name":"$__timeFilter","params":[],"type":"macro"},{"datatype":"text","name":"","params":["name","=","'fpga_error_r'"],"type":"expression"}]}},{"refId":"B","queryType":"","relativeTimeRange":{"from":0,"to":0},"datasourceUid":"-100","model":{"conditions":[{"evaluator":{"params":[0,0],"type":"gt"},"operator":{"type":"and"},"query":{"params":[]},"reducer":{"params":[],"type":"avg"},"type":"query"}],"datasource":{"type":"__expr__","uid":"__expr__"},"expression":"A","hide":false,"intervalMs":1000,"maxDataPoints":43200,"reducer":"last","refId":"B","settings":{"mode":"dropNN"},"type":"reduce"}}],"updated":"2022-04-04T14:16:22Z","intervalSeconds":10,"version":1,"uid":"MIt4Ijs7k","namespace_uid":"9DkbdYy7z","namespace_id":6,"rule_group":"test","no_data_state":"OK","exec_err_state":"Error"}}]}]}
\ No newline at end of file
diff --git a/docker-compose/alerta.yml b/docker-compose/alerta.yml
index 2ae3be42c17e450007914facd2a686c7cce1d63e..f828f1413d034e93b8c855876d647439696c69f3 100644
--- a/docker-compose/alerta.yml
+++ b/docker-compose/alerta.yml
@@ -5,7 +5,7 @@ volumes:
 
 secrets:
   alerta-secrets:
-    file: alerta-web/alerta-secrets.json
+    file: alerta-server/alerta-secrets.json
 
 services:
   alerta-web:
@@ -14,7 +14,21 @@ services:
     networks:
       - control
     ports:
-      - "8081:8080"
+      - "8081:80"
+    depends_on:
+      - alerta-server
+    command: >
+      sh -c 'echo {\"endpoint\": \"http://\${HOSTNAME}:8082/api\"} > /usr/share/nginx/html/config.json &&
+             nginx -g "daemon off;"'
+    restart: always
+
+  alerta-server:
+    build: alerta-server
+    container_name: alerta-server
+    networks:
+      - control
+    ports:
+      - "8082:8080" # NOTE: This exposes an API and a web UI. Ignore the web UI as we replaced it with alerta-web
     depends_on:
       - alerta-db
     secrets:
diff --git a/docker-compose/device-temperature-manager.yml b/docker-compose/device-temperature-manager.yml
new file mode 100644
index 0000000000000000000000000000000000000000..cf1ac02fd36bbf0a7ef63ba05979e8702fad8985
--- /dev/null
+++ b/docker-compose/device-temperature-manager.yml
@@ -0,0 +1,42 @@
+#
+# Requires:
+#   - lofar-device-base.yml
+#
+version: '2'
+
+volumes:
+  iers-data: {}
+
+services:
+  device-temperature-manager:
+    image: device-temperature-manager
+    # build explicitly, as docker-compose does not understand a local image
+    # being shared among services.
+    build:
+        context: ..
+        dockerfile: docker-compose/lofar-device-base/Dockerfile
+        args:
+            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+    container_name: ${CONTAINER_NAME_PREFIX}device-temperature-manager
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "100m"
+        max-file: "10"
+    networks:
+      - control
+    ports:
+      - "5716:5716" # unique port for this DS
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
+    volumes:
+      - ..:/opt/lofar/tango:rw
+    environment:
+      - TANGO_HOST=${TANGO_HOST}
+    working_dir: /opt/lofar/tango
+    entrypoint:
+      - bin/start-ds.sh
+      # configure CORBA to _listen_ on 0:port, but tell others we're _reachable_ through ${HOSTNAME}:port, since CORBA
+      # can't know about our Docker port forwarding
+      - l2ss-temperature-manager TemperatureManager STAT -v -ORBendPoint giop:tcp:0:5716 -ORBendPointPublish giop:tcp:${HOSTNAME}:5716
+    restart: unless-stopped
diff --git a/docker-compose/dsconfig/Dockerfile b/docker-compose/dsconfig/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..9783411ff933a880dca5003b8d5bceab703ff54a
--- /dev/null
+++ b/docker-compose/dsconfig/Dockerfile
@@ -0,0 +1,5 @@
+ARG SOURCE_IMAGE
+FROM ${SOURCE_IMAGE}
+
+COPY manage_object_properties.py /
+COPY merge_json.py /
diff --git a/docker-compose/dsconfig/manage_object_properties.py b/docker-compose/dsconfig/manage_object_properties.py
new file mode 100755
index 0000000000000000000000000000000000000000..7c4a75bb7d97293fa9df3b94af81486393350ee8
--- /dev/null
+++ b/docker-compose/dsconfig/manage_object_properties.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python3
+"""
+
+   Import/export the object properties of the Tango Controls Database.
+
+"""
+
+from tango import Database
+
+def read_objects(db = None) -> dict:
+    """ Read and return all object properties. """
+
+    db = db or Database()
+
+    result = {}
+
+    objects = db.get_object_list("*").value_string
+
+    for obj in objects:
+        result[obj] = {}
+        properties = db.get_object_property_list(obj, "*").value_string
+
+        for prop in properties:
+            value = db.get_property(obj, prop)[prop]
+
+            result[obj][prop] = list(value)
+
+    return result
+
+def write_objects(objects: dict, db = None) -> None:
+    """ Write the given object properties. """
+
+    db = db or Database()
+
+    for obj, properties in objects.items():
+        db.put_property(obj, properties)
+
+if __name__ == "__main__":
+    import sys
+    import argparse
+    import json
+
+    parser = argparse.ArgumentParser("Import/export object properties of the Tango Database using the JSON file format")
+    parser.add_argument('-w', '--write', default=False, required=False, action='store_true', help='import objects from stdin')
+    parser.add_argument('-r', '--read', default=False, required=False, action='store_true', help='export all objects to stdout in JSON')
+    args = parser.parse_args()
+
+    if not args.read and not args.write:
+        parser.print_help()
+        sys.exit(1)
+
+    # import
+    if args.write:
+        objects = json.load(sys.stdin)
+        write_objects(objects["objects"])
+
+    # export
+    if args.read:
+        objects = read_objects()
+        print(json.dumps({"objects": objects}, indent=4))
diff --git a/docker-compose/dsconfig/merge_json.py b/docker-compose/dsconfig/merge_json.py
new file mode 100755
index 0000000000000000000000000000000000000000..c0b04d8466273862950f1a7060541d961d937d7d
--- /dev/null
+++ b/docker-compose/dsconfig/merge_json.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+
+""" Merge all JSON files given on the command line at top level. """
+
+import json
+
+if __name__ == "__main__":
+    import argparse
+
+    parser = argparse.ArgumentParser("Merge input JSON files at top level. Keys from later files override those from earlier files.")
+    parser.add_argument('files', metavar='FILE', type=str, nargs='+', help='JSON input files')
+    args = parser.parse_args()
+
+    result = {}
+
+    # read all provided files
+    for filename in args.files:
+        with open(filename) as f:
+            file_dict = json.load(f)
+
+            # add them to the result
+            result.update(file_dict)
+
+
+    # print result in JSON
+    print(json.dumps(result, indent=4))
diff --git a/docker-compose/grafana/alerting.json b/docker-compose/grafana/alerting.json
index d5193964ae1127c0f76cc60a05dfc8f0dd4e1bf4..bc5c76e7f8870efa52e60e21bf621ae0f1cd8418 100644
--- a/docker-compose/grafana/alerting.json
+++ b/docker-compose/grafana/alerting.json
@@ -15,7 +15,7 @@
             "type": "webhook",
             "disableResolveMessage": false,
             "settings": {
-              "url": "http://alerta-web:8080/api/webhooks/prometheus?api-key=demo-key"
+              "url": "http://alerta-server:8080/api/webhooks/prometheus?api-key=demo-key"
             },
             "secureFields": {}
           }
diff --git a/docker-compose/grafana/datasources/alertaui.yaml b/docker-compose/grafana/datasources/alertaui.yaml
index 8fa7ddcfe36d5b1fcaf04a79a7defe166c26bcf8..7a3b62425a71ddf39642fa5f0fd515f7032170f7 100644
--- a/docker-compose/grafana/datasources/alertaui.yaml
+++ b/docker-compose/grafana/datasources/alertaui.yaml
@@ -12,7 +12,7 @@ datasources:
     # <string> custom UID which can be used to reference this datasource in other parts of the configuration, if not specified will be generated automatically
     uid: alertaui
     # <string> url
-    url: http://alerta-web:8080/api
+    url: http://alerta-server:8080/api
     # <string> Deprecated, use secureJsonData.password
     password:
     # <string> database user, if used
diff --git a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
index d9cdd0e945bc0f0c54eacedc767c364e7336f89e..ba7ce483e7aed1ce1c1b47fcb1b34a5eead09501 100644
--- a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
+++ b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
@@ -13,6 +13,7 @@ beamlet = DeviceProxy("STAT/Beamlet/1")
 digitalbeam = DeviceProxy("STAT/DigitalBeam/1")
 antennafield = DeviceProxy("STAT/AntennaField/1")
 docker = DeviceProxy("STAT/Docker/1")
+temperaturemanager = DeviceProxy("STAT/TemperatureManager/1")
 
 # Put them in a list in case one wants to iterate
-devices = [apsct, apspu, recv, sdp, sst, xst, unb2, boot, tilebeam, beamlet, digitalbeam, antennafield, docker]
+devices = [apsct, apspu, recv, sdp, sst, xst, unb2, boot, tilebeam, beamlet, digitalbeam, antennafield, temperaturemanager, docker]
diff --git a/docker-compose/prometheus/prometheus.yml b/docker-compose/prometheus/prometheus.yml
index 32746772773146e4b356c8c019e41c5356fecfd1..63cfabb9a0561cb66bff714f4f4e176fd43e8ff7 100644
--- a/docker-compose/prometheus/prometheus.yml
+++ b/docker-compose/prometheus/prometheus.yml
@@ -6,11 +6,24 @@ global:
 scrape_configs:
   - job_name: tango
     static_configs:
-      - targets:
-        - "tango-prometheus-exporter:8000"
-
+      - targets: ["tango-prometheus-exporter:8000"]
+        labels:
+          "host": "localhost"
   - job_name: host
     scrape_interval: 60s
     static_configs:
-      - targets:
-        - "host.docker.internal:9100"
+      - targets: ["host.docker.internal:9100"]
+        labels:
+          "host": "localhost"
+  # scrape local services
+  - job_name: prometheus
+    static_configs:
+      - targets: ["localhost:9090"]
+        labels:
+          "host": "localhost"
+  - job_name: grafana
+    static_configs:
+      - targets: ["grafana:3000"]
+        labels:
+          "host": "localhost"
+
diff --git a/docker-compose/tango-prometheus-exporter/code/tango-prometheus-client.py b/docker-compose/tango-prometheus-exporter/code/tango-prometheus-client.py
index 3152ca29c8c0d78846508e8301e259e694579e75..86298311c3247eef342becd6d2679c8cea41e6a8 100644
--- a/docker-compose/tango-prometheus-exporter/code/tango-prometheus-client.py
+++ b/docker-compose/tango-prometheus-exporter/code/tango-prometheus-client.py
@@ -64,12 +64,12 @@ class ArchiverPolicy(object):
         return sorted(list(attributes))
 
 class CustomCollector(object):
-    def __init__(self, config, proxy_timeout=250):
+    def __init__(self, config, station, proxy_timeout=250):
+        self.station = station
         self.policy = ArchiverPolicy(config)
         self.proxy_timeout = proxy_timeout
 
-    @staticmethod
-    def _to_metric(dev, attr_info, x, y, idx, value):
+    def _to_metric(self, dev, attr_info, x, y, idx, value):
         """ Convert the given values to a (labels, value) pair, used to construct a Metric. """
 
         if attr_info.data_type in [ArgType.DevShort, ArgType.DevLong, ArgType.DevUShort, ArgType.DevULong, ArgType.DevLong64, ArgType.DevULong64, ArgType.DevInt, ArgType.DevFloat, ArgType.DevDouble]:
@@ -97,7 +97,7 @@ class CustomCollector(object):
             return None
 
         # (labels, value)
-        return ([dev.dev_name(), attr_info.name, str_value, data_type, f"{x:02}", f"{y:02}", f"{idx:03}"], float_value)
+        return ([self.station, dev.dev_name(), attr_info.name, str_value, data_type, f"{x:02}", f"{y:02}", f"{idx:03}"], float_value)
 
     def metrics_scalar(self, dev, attr_info, attr_value):
         """ Return all metrics for a given SCALAR attribute. """
@@ -182,8 +182,8 @@ class CustomCollector(object):
         logger.info("Start scraping")
         scrape_begin = time.time()
 
-        attribute_metrics = GaugeMetricFamily("device_attribute", 'Device attribute value', labels=['device', 'name', 'str_value', 'type', 'x', 'y', 'idx'])
-        scraping_metrics = GaugeMetricFamily("device_scraping", 'Device scraping duration', labels=['device'])
+        attribute_metrics = GaugeMetricFamily("device_attribute", 'Device attribute value', labels=['station', 'device', 'name', 'str_value', 'type', 'x', 'y', 'idx'])
+        scraping_metrics = GaugeMetricFamily("device_scraping", 'Device scraping duration', labels=['station', 'device'])
 
         for device_name in self.policy.devices():
             logger.debug(f"Processing device {device_name}")
@@ -203,17 +203,19 @@ class CustomCollector(object):
 
             logger.info(f"Done processing device {device_name}. Took {dev_scrape_end - dev_scrape_begin} seconds.")
 
-            scraping_metrics.add_metric([device_name], dev_scrape_end - dev_scrape_begin)
+            scraping_metrics.add_metric([self.station, device_name], dev_scrape_end - dev_scrape_begin)
 
         scrape_end = time.time()
         logger.info(f"Done scraping. Took {scrape_end - scrape_begin} seconds.")
 
-        scraping_metrics.add_metric(["total"], scrape_end - scrape_begin)
+        scraping_metrics.add_metric([self.station, "total"], scrape_end - scrape_begin)
         
         yield attribute_metrics
         yield scraping_metrics
 
 if __name__ == '__main__':
+    import sys
+
     parser = argparse.ArgumentParser()
     parser.add_argument('-c', '--config', type=str, required=True, help='configuration file')
     parser.add_argument('-t', '--timeout', type=int, required=False, default=250, help='device proxy timeout (ms)')
@@ -221,7 +223,15 @@ if __name__ == '__main__':
     args = parser.parse_args()
 
     config = ArchiverPolicy.load_config(args.config)
-    collector = CustomCollector(config, proxy_timeout=args.timeout)
+
+    db = Database()
+    try:
+        station = db.get_property("station","name")["name"][0]
+    except Exception as e:
+        logger.exception("Could not determine station name")
+        sys.exit(1)
+
+    collector = CustomCollector(config, station=station, proxy_timeout=args.timeout)
 
     logger.info("Starting server")
     start_http_server(args.port)
diff --git a/docker-compose/tango.yml b/docker-compose/tango.yml
index 5a6839f44a356113ae1fc525a0ff6e3290e777cd..c9cdac909bf4a863367f3541b1e77d5be659fd2a 100644
--- a/docker-compose/tango.yml
+++ b/docker-compose/tango.yml
@@ -70,7 +70,10 @@ services:
     restart: unless-stopped
 
   dsconfig:
-    image: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-dsconfig:${TANGO_DSCONFIG_VERSION}
+    build:
+        context: dsconfig
+        args:
+            SOURCE_IMAGE: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/tango-dsconfig:${TANGO_DSCONFIG_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}dsconfig
     networks:
       - control
diff --git a/sbin/run_integration_test.sh b/sbin/run_integration_test.sh
index fd18a0ee309d322c6de8e6bf12c8df9c183b235c..7e0cf3b27ba111077c5739ce5fd832b4b6ea6e05 100755
--- a/sbin/run_integration_test.sh
+++ b/sbin/run_integration_test.sh
@@ -20,7 +20,7 @@ sleep 1 # dsconfig container must be up and running...
 # shellcheck disable=SC2016
 echo '/usr/local/bin/wait-for-it.sh ${TANGO_HOST} --strict --timeout=300 -- true' | make run dsconfig bash -
 
-DEVICES="device-boot device-apsct device-apspu device-sdp device-recv device-sst device-unb2 device-xst device-beamlet device-digitalbeam device-tilebeam device-pdu device-antennafield"
+DEVICES="device-boot device-apsct device-apspu device-sdp device-recv device-sst device-unb2 device-xst device-beamlet device-digitalbeam device-tilebeam device-pdu device-antennafield device-temperature-manager"
 SIMULATORS="sdptr-sim recv-sim unb2-sim apsct-sim apspu-sim"
 
 # Build only the required images, please do not build everything that makes CI
diff --git a/sbin/update_ConfigDb.sh b/sbin/update_ConfigDb.sh
index 1255f1ea141a75940f2cd858dfc2b40818bd6ec2..f1401d9c6e40601036449553d2919c434c7f8bf1 100755
--- a/sbin/update_ConfigDb.sh
+++ b/sbin/update_ConfigDb.sh
@@ -11,6 +11,9 @@ fi
 # in the container won't be the same as on the host.
 docker cp "${file}" "${CONTAINER_NAME_PREFIX}"dsconfig:/tmp/dsconfig-update-settings.json || exit 1
 
+# update object properties, Do not change -i into -it this will break integration tests in gitlab ci!
+docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig /manage_object_properties.py --write < "${file}"
+
 # update settings, Do not change -i into -it this will break integration tests in gitlab ci!
 docker exec -i "${CONTAINER_NAME_PREFIX}"dsconfig json2tango --write --update /tmp/dsconfig-update-settings.json
 
diff --git a/tangostationcontrol/docs/source/alerting.rst b/tangostationcontrol/docs/source/alerting.rst
index 032bcd379f68d3fa719dc8956334a910bf6227ee..88cc07db4afde1abfff7f2ef7c2a0cf9d2668895 100644
--- a/tangostationcontrol/docs/source/alerting.rst
+++ b/tangostationcontrol/docs/source/alerting.rst
@@ -103,19 +103,32 @@ The following enhancements are useful to configure for the alerts:
 - You'll want to alert on a query, followed by a ``Reduce`` step with Function ``Last`` and Mode ``Drop Non-numeric Value``. This triggers the alert on the latest value(s), but keeps the individual array elements separated,
 - In ``Add details``, the ``Dashboard UID`` and ``Panel ID`` annotations are useful to configure to where you want the user to go, as Grafana will generate hyperlinks from them. To obtain a dashboard uid, go to ``Dashboards -> Browse`` and check out its URL. For the panel id, view a panel and check the URL,
 - In ``Add details``, the ``Summary`` annotation will be used as the alert description,
-- In ``Custom labels``, add ``severity = major`` to raise the severity of the alert (default: warning). See also the `supported values <https://docs.alerta.io/webui/configuration.html#severity-colors>`_.
+- In ``Custom labels``, add ``severity = High`` to raise the severity of the alert (default: Low). See also the `supported values <https://github.com/alerta/alerta/blob/master/alerta/models/alarms/isa_18_2.py#L14>`_.
 
 Alerta dashboard
 ``````````````````
 
-The Alerta dashboard (http://localhost:8081) provides an overview of received alerts, which stay in the list until the alert condition disappears, and the alert is explicitly acknowledged or deleted:
+The Alerta dashboard (http://localhost:8081) provides an overview of received alerts, according to the ISA 18.2 Alarm Model. It distinguishes the following states:
 
-- *Acknowledging* an alert silences it for a day,
-- *Shelving* an alert silences it for 2 hours, and removes it from more overviews,
+- ``NORM``: the situation is nominal (any past alarm condition has been acknowledged),
+- ``UNACK``: an alarm condition is active, which has not been acknowledged by an operator,
+- ``RTNUN``: an alarm condition came and went, but has not been acknowledged by an operator,
+- ``ACKED``: an alarm condition is active, and has been acknowledged by an operator.
+
+Furthermore, the following rarer states are known:
+
+- ``SHLVD``: the alert is put aside, regardless of its condition,
+- ``DSUPR``: the alert is intentionally suppressed,
+- ``OOSRV``: the alert concerns something out of service, and thus should be ignored.
+
+Any alerts stay in the displayed list until the alert condition disappears, *and* the alert is explicitly acknowledged, shelved, or deleted:
+
+- *Acknowledging* an alert silences it for a day, unless its severity rises,
+- *Shelving* an alert silences it for a week, regardless of what happens,
 - *Watching* an alert means receiving browser notifications on changes,
 - *Deleting* an alert removes it until Grafana sends it again (default: 10 minutes).
 
-See ``docker-compose/alerta-web/alertad.conf`` for these settings.
+See ``docker-compose/alerta-server/alertad.conf`` for these settings.
 
 Several installed plugins enhance the received events:
 
@@ -135,9 +148,9 @@ Our Alerta setup is configured to send alerts to Slack. To set this up, you need
 
 .. hint:: To obtain the ``OAuth Token`` later on, go to https://api.slack.com/apps, click on your App, and look under ``Install App``.
 
-Now, edit ``docker-compose/alerta-web/alerta-secrets.json``:
+Now, edit ``docker-compose/alerta-server/alerta-secrets.json``:
 
-.. literalinclude:: ../../../docker-compose/alerta-web/alerta-secrets.json
+.. literalinclude:: ../../../docker-compose/alerta-server/alerta-secrets.json
 
 The ``SLACK_TOKEN`` is the ``OAuth Token``, and the ``SLACK_CHANNEL`` is the channel in which to post the alerts.
 
diff --git a/tangostationcontrol/docs/source/devices/temperature-manager.rst b/tangostationcontrol/docs/source/devices/temperature-manager.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c4f919377d5fbcb79338b0ea28e24c4cbf35c975
--- /dev/null
+++ b/tangostationcontrol/docs/source/devices/temperature-manager.rst
@@ -0,0 +1,4 @@
+temperature-manager
+====================
+
+``temperature_manager == DeviceProxy("STAT/TemperatureManager/1")``
diff --git a/tangostationcontrol/docs/source/index.rst b/tangostationcontrol/docs/source/index.rst
index 263bcd064268839baa452e087f1c732a8ea92ffa..3808637151423688e7b25dd0a50d0e05a0bf3ae3 100644
--- a/tangostationcontrol/docs/source/index.rst
+++ b/tangostationcontrol/docs/source/index.rst
@@ -29,6 +29,7 @@ Even without having access to any LOFAR2.0 hardware, you can install the full st
    devices/recv
    devices/sdp
    devices/sst-xst
+   devices/temperature-manager
    devices/configure
    configure_station
    alerting
diff --git a/tangostationcontrol/setup.cfg b/tangostationcontrol/setup.cfg
index de930c9d399dd5412884f37b3957a698d330b163..1d52727f782ff41dd2e7e75b2d3137314bfdb82f 100644
--- a/tangostationcontrol/setup.cfg
+++ b/tangostationcontrol/setup.cfg
@@ -51,6 +51,7 @@ console_scripts =
     l2ss-statistics-writer = tangostationcontrol.statistics_writer.statistics_writer:main
     l2ss-unb2 = tangostationcontrol.devices.unb2:main
     l2ss-xst = tangostationcontrol.devices.sdp.xst:main
+    l2ss-temperature-manager = tangostationcontrol.devices.temperature_manager:main
 
 # The following entry points should eventually be removed / replaced
     l2ss-cold-start = tangostationcontrol.toolkit.lts_cold_start:main
diff --git a/tangostationcontrol/tangostationcontrol/clients/opcua_client.py b/tangostationcontrol/tangostationcontrol/clients/opcua_client.py
index f94ec4e16fa7d1baf117d8108bc3f896560df70f..9b8637f339858768e026da152975efa4af1cce88 100644
--- a/tangostationcontrol/tangostationcontrol/clients/opcua_client.py
+++ b/tangostationcontrol/tangostationcontrol/clients/opcua_client.py
@@ -9,7 +9,7 @@ from tangostationcontrol.clients.comms_client import AsyncCommClient
 import logging
 logger = logging.getLogger()
 
-__all__ = ["OPCUAConnection", "event_loop"]
+__all__ = ["OPCUAConnection", "ProtocolAttribute"]
 
 numpy_to_OPCua_dict = {
     numpy.bool_: asyncua.ua.VariantType.Boolean,
diff --git a/tangostationcontrol/tangostationcontrol/clients/snmp_client.py b/tangostationcontrol/tangostationcontrol/clients/snmp_client.py
index 7a7f45808cdc2d160cb9db3356d3a0e9beda4be0..83bdfb2e6da940cdb3d297e8fb521ce533fb8ab8 100644
--- a/tangostationcontrol/tangostationcontrol/clients/snmp_client.py
+++ b/tangostationcontrol/tangostationcontrol/clients/snmp_client.py
@@ -2,6 +2,8 @@
 from tangostationcontrol.clients.comms_client import CommClient
 
 from pysnmp import hlapi
+from pysnmp.smi import builder
+from os import path
 
 import numpy
 import logging
@@ -12,8 +14,9 @@ __all__ = ["SNMP_client"]
 
 snmp_to_numpy_dict = {
     hlapi.Integer32: numpy.int64,
+    hlapi.Integer: numpy.int64,
     hlapi.TimeTicks: numpy.int64,
-    str: str,
+    hlapi.OctetString: str,
     hlapi.ObjectIdentity: str,
     hlapi.Counter32: numpy.int64,
     hlapi.Gauge32: numpy.int64,
@@ -21,6 +24,27 @@ snmp_to_numpy_dict = {
 }
 
 
+class SNMP_comm:
+    """
+    Holds information for communicating with the SNMP server
+    """
+
+    def __init__(self, community, host, port):
+        self.port = port
+        self.engine = hlapi.SnmpEngine()
+        self.community = hlapi.CommunityData(community)
+        self.transport = hlapi.UdpTransportTarget((host, port))
+
+        # context data sets the version used. Default SNMPv2
+        self.ctx_data = hlapi.ContextData()
+
+    def getter(self, objs):
+        return next(hlapi.getCmd(self.engine, self.community, self.transport, self.ctx_data, *objs))
+
+    def setter(self, objs):
+        return next(hlapi.setCmd(self.engine, self.community, self.transport, self.ctx_data, *objs))
+
+
 class SNMP_client(CommClient):
     """
         messages to keep a check on the connection. On connection failure, reconnects once.
@@ -36,26 +60,23 @@ class SNMP_client(CommClient):
         super().__init__(fault_func, try_interval)
 
         logger.debug(f"setting up SNMP engine with host: {host} and community: {community}")
-        self.port = port
-
-        self.engine = hlapi.SnmpEngine()
-        self.community = hlapi.CommunityData(community)
-        self.transport = hlapi.UdpTransportTarget((host, port))
-
-        # context data sets the version used. Default SNMPv2
-        self.ctx_data = hlapi.ContextData()
+        self.SNMP_comm = SNMP_comm(community, host, port)
 
         # only sets up the engine, doesn't connect
         self.connected = True
 
+    def _process_annotation(self, annotation):
 
-    def _setup_annotation(self, annotation):
-        """
-        parses the annotation this attribute received for its initialisation.
-        """
+        try:
+            mib = annotation["mib"]
+            name = annotation["name"]
+
+            # SNMP has tables that require an index number to access them. regular non-table variable have an index of 0
+            idx = annotation.get('index', 0)
 
-        wrapper = annotation_wrapper(annotation)
-        return wrapper
+            return mib, name, idx
+        except KeyError:
+            raise ValueError(f"SNMP attribute annotation requires a dict argument with both a 'name' and 'mib' key. Instead got: {annotation}")        
 
     def setup_value_conversion(self, attribute):
         """
@@ -78,11 +99,11 @@ class SNMP_client(CommClient):
         """
 
         # process the annotation
-        wrapper = self._setup_annotation(annotation)
+        mib, name, idx = self._process_annotation(annotation)
 
         # get all the necessary data to set up the read/write functions from the attribute_wrapper
         dim_x, dim_y, dtype = self.setup_value_conversion(attribute)
-        snmp_attr = snmp_attribute(self, wrapper, dtype, dim_x, dim_y)
+        snmp_attr = snmp_attribute(self, mib, name, idx, dtype, dim_x, dim_y)
 
         # return the read/write functions
         def read_function():
@@ -93,145 +114,48 @@ class SNMP_client(CommClient):
 
         return read_function, write_function
 
+class snmp_attribute:
 
-class annotation_wrapper:
-    def __init__(self, annotation):
-        """
-        The SNMP client uses a dict and takes the following keys:
-
-        either
-            oids: Required. An oid string of the object
-        or
-            mib: the mib name
-            name: name of the value to read
-            index (optional) the index if the value thats being read from is a table.
-        """
-
-        # values start as None because we have a way too complicated interface
-        self.oids = None
-        self.mib = None
-        self.name = None
-        self.idx = None
-
-        # check if the 'oids' key is used and not the 'mib' and 'name' keys
-
-        if 'oids' in annotation and 'mib' not in annotation and 'name' not in annotation:
-            self.oids = annotation["oids"]
-
-            # checks to make sure this isn't present
-            if 'index' in annotation:
-                raise ValueError(f"SNMP attribute annotation doesn't support oid type declarations with an index present.")
-
+    def __init__(self, comm: SNMP_comm, mib, name, idx, dtype, dim_x, dim_y):
 
-        # check if the 'oids' key is NOT used but instead the 'mib' and 'name' keys
-        elif 'oids' not in annotation and 'mib' in annotation and 'name' in annotation:
-            self.mib = annotation["mib"]
-            self.name = annotation["name"]
+        self.comm = comm
+        self.mib = mib
+        self.name = name
+        self.idx = idx
+        self.dtype = dtype
 
-            # SNMP has tables that require an index number to access them. regular non-table variable have an index of 0
-            self.idx = annotation.get('index', 0)
+        self.len = self.get_len(dim_x, dim_y)
+        self.is_scalar = self.len == 1
 
-        else:
-            raise ValueError(
-                f"SNMP attribute annotation requires a dict argument with either a 'oids' key or both a 'name' and 'mib' key. Not both. Instead got: {annotation}")
+        self.objID = self.create_objID()
 
-    def create_objID(self, x, y):
-        is_scalar = (x + y) == 1
+    def get_len(self, dim_x, dim_y):
+        """Small helper function to not clutter the __init__"""
 
-        # if oids are used
-        if self.oids is not None:
-            # get a list of str of the oids
-            self.oids = self._get_oids(x, y, self.oids)
+        if dim_x == 0:
+            dim_x = 1
+        if dim_y == 0:
+            dim_y = 1
+        return dim_x * dim_y
 
-            # turn the list of oids in to a tuple of pysnmp object identities. These are used for the
-            objID = tuple(hlapi.ObjectIdentity(self.oids[i]) for i in range(len(self.oids)))
+    def create_objID(self):
 
-        # if mib + name is used
+        if self.is_scalar:
+            objID = hlapi.ObjectIdentity(self.mib, self.name, self.idx)
         else:
-
-            # only scalars can be used at the present time.
-            if not is_scalar:
-                # tuple(hlapi.ObjectIdentity(mib, name, idx) for i in range(len(oids)))
-
-                raise ValueError(f"MIB + name type attributes can only be scalars, got dimensions of: ({x}, {y})")
-            else:
-                objID = hlapi.ObjectIdentity(self.mib, self.name, self.idx)
+            objID = tuple(hlapi.ObjectIdentity(self.mib, self.name, self.idx + i) for i in range(self.len))
 
         return objID
 
-    def _get_oids(self, x, y, in_oid):
-        """
-        This function expands oids depending on dimensionality.
-        if its a scalar its left alone, but if its an array it creates a list of sequential oids if not already provided
-
-        scalar "1.1.1.1" -> stays the same
-        spectrum: "1.1.1.1" -> ["1.1.1.1.1", "1.1.1.1.2, ..."]
-        """
-
-        if x == 0:
-            x = 1
-        if y == 0:
-            y = 1
-
-        is_scalar = (x * y) == 1
-        nof_oids = x * y
-
-        # if scalar
-        if is_scalar:
-            if type(in_oid) is str:
-                # for ease of handling put single oid in a 1 element list
-                in_oid = [in_oid]
-
-            return in_oid
-
-        else:
-            # if we got a single str oid, make a list of sequential oids
-            if type(in_oid) is str:
-                return ["{}.{}".format(in_oid, i + 1) for i in range(nof_oids)]
-
-            # if its an already expanded list of all oids
-            elif type(in_oid) is list and len(in_oid) == nof_oids:
-                return in_oid
-
-            # if its a list of attributes with the wrong length.
-            else:
-                raise ValueError(
-                    "SNMP oids need to either be a single value or an array the size of the attribute dimensions. got: {} expected: {}x{}={}".format(
-                        len(in_oid), x, y, x * y))
-
-
-class snmp_attribute:
-
-    def __init__(self, client : SNMP_client, wrapper, dtype, dim_x, dim_y):
-
-        self.client = client
-        self.wrapper = wrapper
-        self.dtype = dtype
-        self.dim_x = dim_x
-        self.dim_y = dim_y
-        self.is_scalar = (self.dim_x + self.dim_y) == 1
-
-        self.objID = self.wrapper.create_objID(self.dim_x, self.dim_y)
-
-    def next_wrap(self, cmd):
-        """
-        This function exists to allow the next(cmd) call to be mocked for unit testing. As the
-        """
-        return next(cmd)
-
     def read_function(self):
         """
         Read function we give to the attribute wrapper
         """
-
-        # must be recreated for each read it seems
+        # the ObjectType tuple must be recreated for each read; reusing it breaks subsequent reads
         self.objs = tuple(hlapi.ObjectType(i) for i in self.objID)
 
-        # get the thingy to get the values
-        get_cmd = hlapi.getCmd(self.client.engine, self.client.community, self.client.trasport, self.client.ctx_data, *self.objs)
-
-        # dont ask me why 'next' is used to get all of the values
-        errorIndication, errorStatus, errorIndex, *varBinds = self.next_wrap(get_cmd)
+        # get all of the values
+        errorIndication, errorStatus, errorIndex, *varBinds = self.comm.getter(self.objs)
 
         # get all the values in a list converted to the correct type
         val_lst = self.convert(varBinds)
@@ -250,8 +174,7 @@ class snmp_attribute:
         else:
             write_obj = tuple(hlapi.ObjectType(self.objID[i], value[i]) for i in range(len(self.objID)))
 
-        set_cmd = hlapi.setCmd(self.client.engine, self.client.community, self.client.trasport, self.client.ctx_data, *write_obj)
-        errorIndication, errorStatus, errorIndex, *varBinds = self.next_wrap(set_cmd)
+        errorIndication, errorStatus, errorIndex, *varBinds = self.comm.setter(write_obj)
 
     def convert(self, varBinds):
         """
@@ -259,20 +182,43 @@ class snmp_attribute:
         """
 
         vals = []
+
         if not self.is_scalar:
             #just the first element of this single element list
             varBinds = varBinds[0]
 
         for varBind in varBinds:
-            # class 'DisplayString' doesnt want to play along for whatever reason
-            if "DisplayString" in str(type(varBind[1])):
-                vals.append(varBind[1].prettyPrint())
-            elif type(varBind[1]) == hlapi.IpAddress:
+
+            # Some MIBs use custom types, some don't. Custom types are merely wrapped base types.
+            varbind_types = varBind[1].__class__.__bases__ + (type(varBind[1]),)
+
+            snmp_type = None
+
+            # find if one of the base types is present.
+            for i in varbind_types:
+                if i in snmp_to_numpy_dict.keys():
+                    snmp_type = i
+
+            if snmp_type is None:
+                raise TypeError(f"Error: did not find a valid snmp type. Got: {varbind_types}, expected one of: '{snmp_to_numpy_dict.keys()}'")
+
+            if snmp_type is hlapi.IpAddress:
                 # IpAddress values get printed as their raw value but in hex (7F 20 20 01 for 127.0.0.1 for example)
                 vals.append(varBind[1].prettyPrint())
+
+            elif snmp_type is hlapi.Integer32 or snmp_type is hlapi.Integer and self.dtype == str:
+                # Integers can have 'named values', where a value can be translated to a specific name (basically a dict).
+                # Example: {1: "other", 2: "invalid", 3: "dynamic", 4: "static",}
+
+                if varBind[1].namedValues == {}:
+                    # An empty dict {} means no namedValue's are present.
+                    vals.append(snmp_to_numpy_dict[snmp_type](varBind[1]))
+                else:
+                    # append the named values string instead of the raw number.
+                    vals.append(varBind[1].prettyPrint())
             else:
                 # convert from the funky pysnmp types to numpy types and then append
-                vals.append(snmp_to_numpy_dict[type(varBind[1])](varBind[1]))
+                vals.append(snmp_to_numpy_dict[snmp_type](varBind[1]))
 
         if self.is_scalar:
             vals = vals[0]
@@ -280,3 +226,16 @@ class snmp_attribute:
         return vals
 
 
+class mib_loader:
+
+    def __init__(self, mib_dir: str):
+        self.mibBuilder = builder.MibBuilder()
+
+        if not path.isabs(mib_dir):
+            mib_dir = "/" + mib_dir
+
+        mib_source = builder.DirMibSource(mib_dir)
+        self.mibBuilder.addMibSources(mib_source)
+
+    def load_pymib(self, mib_name):
+        self.mibBuilder.loadModule(mib_name)
diff --git a/tangostationcontrol/tangostationcontrol/common/measures.py b/tangostationcontrol/tangostationcontrol/common/measures.py
index 58b3c90f456ef3060d1431a7809395e1e63be237..596e0ff4e9c229a6ea848e0ac40c2a3edddc27e4 100644
--- a/tangostationcontrol/tangostationcontrol/common/measures.py
+++ b/tangostationcontrol/tangostationcontrol/common/measures.py
@@ -37,6 +37,7 @@ import tarfile
 import datetime
 import os
 import sys
+import shutil
 
 # Where to store the measures table sets
 IERS_ROOTDIR = "/opt/IERS"
@@ -135,7 +136,7 @@ def download_measures() -> str:
             shutil.rmtree(iers_dir_final)
         except Exception as e:
             # move out of the way instead then
-            iers_dir_final.rename(iers_final.with_suffix("delete-me"))
+            iers_dir_final.rename(iers_dir_final.with_suffix("delete-me"))
 
     # update our name to reflect the correct timestamp
     iers_dir_download.rename(iers_dir_final)
diff --git a/tangostationcontrol/tangostationcontrol/devices/apsct.py b/tangostationcontrol/tangostationcontrol/devices/apsct.py
index 58c91bc5036e4de799f2b42c4cc1cbd5cd04bb22..243d1584065678a480577dcdea69eba654cb5207 100644
--- a/tangostationcontrol/tangostationcontrol/devices/apsct.py
+++ b/tangostationcontrol/tangostationcontrol/devices/apsct.py
@@ -21,7 +21,7 @@ import numpy
 from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper
 from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.common.lofar_logging import device_logging_to_python
-from tangostationcontrol.devices.device_decorators import *
+from tangostationcontrol.devices.device_decorators import only_in_states
 from tangostationcontrol.devices.opcua_device import opcua_device
 from tangostationcontrol.devices.lofar_device import lofar_device
 
diff --git a/tangostationcontrol/tangostationcontrol/devices/boot.py b/tangostationcontrol/tangostationcontrol/devices/boot.py
index 452dc1916fe1a13fb340bd4f3b2e568369448927..c14c7476021f18e313463bfe8ac82e86c46ccb25 100644
--- a/tangostationcontrol/tangostationcontrol/devices/boot.py
+++ b/tangostationcontrol/tangostationcontrol/devices/boot.py
@@ -21,7 +21,7 @@ from tango import AttrWriteType, DeviceProxy, DevState, DevSource
 # Additional import
 import numpy
 
-from tangostationcontrol.devices.device_decorators import *
+from tangostationcontrol.devices.device_decorators import only_in_states
 
 from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.devices.lofar_device import lofar_device
@@ -244,6 +244,7 @@ class Boot(lofar_device):
                        "STAT/Beamlet/1",
                        "STAT/TileBeam/1",   # Accesses RECV and Beamlet
                        "STAT/DigitalBeam/1",
+                       "STAT/TemperatureManager/1",
                        "STAT/AntennaField/1",
                       ],
     )
diff --git a/tangostationcontrol/tangostationcontrol/devices/observation.py b/tangostationcontrol/tangostationcontrol/devices/observation.py
index 7a99b13d3c64e96eaed4568dce169176fd9e78ce..6fcfafa22411a2f46b3c077afb4dbd06bd9ee14f 100644
--- a/tangostationcontrol/tangostationcontrol/devices/observation.py
+++ b/tangostationcontrol/tangostationcontrol/devices/observation.py
@@ -14,7 +14,7 @@ from time import time
 from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
 from tangostationcontrol.common.lofar_version import get_version
-from tangostationcontrol.devices.device_decorators import *
+from tangostationcontrol.devices.device_decorators import only_in_states, only_when_on, fault_on_error
 
 from json import loads
 
diff --git a/tangostationcontrol/tangostationcontrol/devices/observation_control.py b/tangostationcontrol/tangostationcontrol/devices/observation_control.py
index 9d1c459a0bc29f50d88d81d37ba7fd040de9a480..0d116c34fdf7f697fa89525991c67d2e0658d72b 100644
--- a/tangostationcontrol/tangostationcontrol/devices/observation_control.py
+++ b/tangostationcontrol/tangostationcontrol/devices/observation_control.py
@@ -17,7 +17,7 @@ from json import loads
 from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
 from tangostationcontrol.common.lofar_version import get_version
-from tangostationcontrol.devices.device_decorators import *
+from tangostationcontrol.devices.device_decorators import only_in_states, only_when_on, fault_on_error
 from tangostationcontrol.devices.observation import Observation
 
 import logging
@@ -145,7 +145,7 @@ class ObservationControl(Device):
     @log_exceptions()
     @DebugIt()
     def Fault(self):
-        stop_all_observations()
+        self.stop_all_observations()
         self.set_state(DevState.FAULT)
 
     @only_when_on()
diff --git a/tangostationcontrol/tangostationcontrol/devices/recv.py b/tangostationcontrol/tangostationcontrol/devices/recv.py
index 1254fbdc35e4975751c34836da248778a3468cd0..090d6349c929266bf5a49d65772c36cdc3b820cf 100644
--- a/tangostationcontrol/tangostationcontrol/devices/recv.py
+++ b/tangostationcontrol/tangostationcontrol/devices/recv.py
@@ -26,7 +26,7 @@ from tangostationcontrol.beam.geo import ETRS_to_ITRF, ITRF_to_GEO
 from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.common.lofar_logging import device_logging_to_python
 from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper
-from tangostationcontrol.devices.device_decorators import *
+from tangostationcontrol.devices.device_decorators import only_in_states
 from tangostationcontrol.devices.opcua_device import opcua_device
 from tangostationcontrol.devices.lofar_device import lofar_device
 
@@ -85,14 +85,14 @@ class RECV(opcua_device):
         doc='Maximum amount of time to wait after turning RCU(s) on or off',
         dtype='DevFloat',
         mandatory=False,
-        default_value=10.0
+        default_value=30.0
     )
 
     RCU_DTH_On_Off_timeout = device_property(
         doc='Maximum amount of time to wait after turning dithering on or off',
         dtype='DevFloat',
         mandatory=False,
-        default_value=20.0
+        default_value=30.0
     )
 
     # ----- Calibration values
diff --git a/tangostationcontrol/tangostationcontrol/devices/snmp_device.py b/tangostationcontrol/tangostationcontrol/devices/snmp_device.py
index 04d5a1425e19b0c5fbcb076f206bcd4ed122618a..b5bfd8395a75a9831bdaaf47fb17bbfdf947c36a 100644
--- a/tangostationcontrol/tangostationcontrol/devices/snmp_device.py
+++ b/tangostationcontrol/tangostationcontrol/devices/snmp_device.py
@@ -12,16 +12,14 @@
 """
 
 # PyTango imports
-from tango.server import run
-from tango.server import device_property
-from tango import AttrWriteType
-
-# Additional import
-from tangostationcontrol.clients.snmp_client import SNMP_client
-from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper
+from tangostationcontrol.common.entrypoint import entry
 from tangostationcontrol.devices.lofar_device import lofar_device
 
-import numpy
+from tango.server import device_property, command
+import os
+
+# Additional import
+from tangostationcontrol.clients.snmp_client import SNMP_client, mib_loader
 
 import logging
 logger = logging.getLogger()
@@ -39,6 +37,10 @@ class SNMP(lofar_device):
         - Type:'DevString'
         SNMP_host
         - Type:'DevULong'
+        SNMP_community
+        - Type:'DevString'
+        SNMP_rel_mib_dir
+        - Type:'DevString'
         SNMP_timeout
         - Type:'DevDouble'
         """
@@ -57,6 +59,11 @@ class SNMP(lofar_device):
         mandatory=True
     )
 
+    SNMP_rel_mib_dir = device_property(
+        dtype='DevString',
+        mandatory=False
+    )
+
     SNMP_timeout = device_property(
         dtype='DevDouble',
         mandatory=True
@@ -66,43 +73,14 @@ class SNMP(lofar_device):
     # Attributes
     # ----------
 
+    # example attributes. mib and name mandatory and index optional.
 
-    # octetstring
-    sysDescr_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "sysDescr"}, datatype=numpy.str)
-    sysName_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "sysName"}, datatype=numpy.str)
-
-    # get a table element with the oid
-    ifDescr31_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.2.2.1.2.31"}, datatype=numpy.str)
-
-    # get 10 table elements with the oid and dimension
-    ifDescr_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.2.2.1.2"}, dims=(10,), datatype=numpy.str)
-
-    #timeticks
-    sysUpTime_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "sysUpTime"}, datatype=numpy.int64)
-
-    # OID
-    sysObjectID_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "sysObjectID"}, datatype=numpy.int64)
-
-    # integer
-    sysServices_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "sysServices"}, datatype=numpy.int64)
-    tcpRtoAlgorithm_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "tcpRtoAlgorithm"}, datatype=numpy.int64)
-    snmpEnableAuthenTraps_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "snmpEnableAuthenTraps"}, datatype=numpy.int64)
-
-    #gauge
-    tcpCurrEstab_R = attribute_wrapper(comms_annotation={"mib": "RFC1213-MIB", "name": "tcpCurrEstab"}, datatype=numpy.int64)
-
-    #counter32
-    tcpActiveOpens_R = attribute_wrapper(comms_annotation={"mib": "RFC1213-MIB", "name": "tcpActiveOpens"}, datatype=numpy.int64)
-    snmpInPkts_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "snmpInPkts"}, datatype=numpy.int64)
-
-    #IP address
-    ipAdEntAddr_R = attribute_wrapper(comms_annotation={"mib": "RFC1213-MIB", "name": "ipAdEntAddr", "index": (127,0,0,1)}, datatype=numpy.str)
-    ipAdEntIfIndex_R = attribute_wrapper(comms_annotation={"mib": "RFC1213-MIB", "name": "ipAdEntIfIndex", "index": (10, 87, 6, 14)}, datatype=numpy.str)
-
-    #str RW attribute
-    sysContact_obj_R = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "sysContact"}, datatype=numpy.str)
-    sysContact_obj_RW = attribute_wrapper(comms_annotation={"mib": "SNMPv2-MIB", "name": "sysContact"}, datatype=numpy.str, access=AttrWriteType.READ_WRITE)
-
+    # Reads from a table and returns an array of table entries 1 to 10 (note, tables require an index key and start at 1)
+    # test_attr1_R = attribute_wrapper(comms_annotation={"mib": "TEST-MIB", "name": "test_attr1", "index": 1}, dims=(10,), datatype=numpy.str)
+    # indices can also be IP addresses sometimes. Gets a single scalar value
+    # test_attr2_R = attribute_wrapper(comms_annotation={"mib": "TEST-MIB", "name": "test_attr2", "index": (127,0,0,1)}, datatype=numpy.int64)
+    # if the attribute doesn't get the value from a table, then no index is needed, or the default of 0 can be supplied.
+    # test_attr3_R = attribute_wrapper(comms_annotation={"mib": "TEST-MIB", "name": "test_attr3"}, datatype=numpy.int64)
 
 
     # --------
@@ -125,6 +103,30 @@ class SNMP(lofar_device):
 
         self.snmp_manager.start()
 
+    @command(dtype_out=str)
+    def get_mib_dir(self):
+
+        if not os.path.isabs(self.SNMP_rel_mib_dir):
+            mib_path = os.path.dirname(__file__) + "/" + self.SNMP_rel_mib_dir
+        else:
+            # the configured path is already absolute; use it as-is
+            mib_path = self.SNMP_rel_mib_dir
+        return mib_path
+
+    def init_device(self):
+        super().init_device()
+
+        # create the mib_loader and set the mib path
+        loader = mib_loader(self.get_mib_dir())
+
+        for i in self.attr_list():
+            try:
+                # for all of the attributes attempt to load the pre-compiled MIB. Skips already loaded ones
+                loader.load_pymib(i.comms_annotation["mib"])
+            except Exception as e:
+                raise Exception(f"Failed to load MIB file: {i.comms_annotation.get('mib')} for attribute {i.get_name()}") from e
+
+
 
 # --------
 # Commands
@@ -136,8 +138,5 @@ class SNMP(lofar_device):
 # ----------
 def main(args=None, **kwargs):
     """Main function of the module."""
+    return entry((SNMP,), args=args, **kwargs)
 
-    from tangostationcontrol.common.lofar_logging import configure_logger
-    configure_logger()
-
-    return run((SNMP,), args=args, **kwargs)
diff --git a/tangostationcontrol/tangostationcontrol/devices/temperature_manager.py b/tangostationcontrol/tangostationcontrol/devices/temperature_manager.py
new file mode 100644
index 0000000000000000000000000000000000000000..c1db80b161524c04be7489e7017ed373ae70c1ff
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/devices/temperature_manager.py
@@ -0,0 +1,57 @@
+# -*- coding: utf-8 -*-
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+""" Over-temperature managing Device Server for LOFAR2.0
+
+"""
+
+# Additional import
+from tangostationcontrol.common.entrypoint import entry
+from tangostationcontrol.devices.lofar_device import lofar_device
+from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
+
+import logging
+
+logger = logging.getLogger()
+
+__all__ = ["TemperatureManager", "main"]
+
+
+@device_logging_to_python()
+class TemperatureManager(lofar_device):
+    # -----------------
+    # Device Properties
+    # -----------------
+
+    # ----------
+    # Attributes
+    # ----------
+
+    # --------
+    # overloaded functions
+    # --------
+
+    def init_device(self):
+        super().init_device()
+
+    @log_exceptions()
+    def configure_for_initialise(self):
+        super().configure_for_initialise()
+
+    @log_exceptions()
+    def configure_for_on(self):
+        super().configure_for_on()
+
+    @log_exceptions()
+    def configure_for_off(self):
+        super().configure_for_off()
+
+
+# ----------
+# Run server
+# ----------
+def main(**kwargs):
+    """Main function of the temperature manager module."""
+    return entry(TemperatureManager, **kwargs)
diff --git a/tangostationcontrol/tangostationcontrol/examples/load_from_disk/ini_device.py b/tangostationcontrol/tangostationcontrol/examples/load_from_disk/ini_device.py
index 032c9b01b2a5447111d6245ffcba1bd610b3b655..430addb9be61ae29ead58a17b8c4096d18d3bcea 100644
--- a/tangostationcontrol/tangostationcontrol/examples/load_from_disk/ini_device.py
+++ b/tangostationcontrol/tangostationcontrol/examples/load_from_disk/ini_device.py
@@ -19,7 +19,7 @@ import numpy
 # Additional import
 from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper
 from tangostationcontrol.devices.lofar_device import lofar_device
-from tangostationcontrol.examples.load_from_disk.ini_client import *
+from tangostationcontrol.examples.load_from_disk.ini_client import ini_client
 
 import logging
 logger = logging.getLogger()
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_temperature_manager.py b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_temperature_manager.py
new file mode 100644
index 0000000000000000000000000000000000000000..ea884acf1d7557b1e72f64ee71eee756145f083f
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/devices/test_device_temperature_manager.py
@@ -0,0 +1,16 @@
+
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+from .base import AbstractTestBases
+
+class TestTemperatureManager(AbstractTestBases.TestDeviceBase):
+
+    def setUp(self):
+        """Intentionally recreate the device object in each test"""
+        super().setUp("STAT/TemperatureManager/1")
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py
index 05cfe5bf41241d2cf142479026d91b07f332e384..c63a78363d343be702a2f2cd5addcf253ae1a82e 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/default/toolkit/test_archiver.py
@@ -8,13 +8,14 @@
 # See LICENSE.txt for more info.
 
 from tangostationcontrol.integration_test.base import BaseIntegrationTestCase
-from tangostationcontrol.toolkit.archiver import *
+from tangostationcontrol.toolkit.archiver import Archiver
 from tangostationcontrol.toolkit.retriever import RetrieverTimescale
 from tangostationcontrol.toolkit.archiver_util import attribute_fqdn
 from tangostationcontrol.integration_test.device_proxy import TestDeviceProxy
 
 import time
 from datetime import datetime
+from tango import DevState
 
 
 class TestArchiver(BaseIntegrationTestCase):
diff --git a/tangostationcontrol/tangostationcontrol/statistics_writer/udp_dev/udp_write_manager.py b/tangostationcontrol/tangostationcontrol/statistics_writer/udp_dev/udp_write_manager.py
index d8e234225237bac1796f11afe80045c3e09b15d8..9ba412576d1ffdee1c7819467e2292286869079e 100644
--- a/tangostationcontrol/tangostationcontrol/statistics_writer/udp_dev/udp_write_manager.py
+++ b/tangostationcontrol/tangostationcontrol/statistics_writer/udp_dev/udp_write_manager.py
@@ -7,7 +7,7 @@ from statistics_writer.udp_dev import udp_server as udp
 import netifaces as ni
 from packet import SSTPacket
 
-__all__ = ["statistics_writer"]
+__all__ = ["Statistics_Writer"]
 
 
 class Statistics_Writer:
diff --git a/tangostationcontrol/tangostationcontrol/test/beam/test_delays.py b/tangostationcontrol/tangostationcontrol/test/beam/test_delays.py
index 3b3900138c2a9f0a9394b02f6272745fa53bdb6d..b984660d677c81349c134d9be7c7441924253fc6 100644
--- a/tangostationcontrol/tangostationcontrol/test/beam/test_delays.py
+++ b/tangostationcontrol/tangostationcontrol/test/beam/test_delays.py
@@ -1,6 +1,6 @@
 import datetime
 
-from tangostationcontrol.beam.delays import *
+from tangostationcontrol.beam.delays import delay_calculator
 from tangostationcontrol.test import base
 
 
diff --git a/tangostationcontrol/tangostationcontrol/test/clients/SNMP_mib_loading/TEST-MIB.py b/tangostationcontrol/tangostationcontrol/test/clients/SNMP_mib_loading/TEST-MIB.py
new file mode 100644
index 0000000000000000000000000000000000000000..de190adc7e763c307b2e7548a200a3f22b3f5cf2
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/test/clients/SNMP_mib_loading/TEST-MIB.py
@@ -0,0 +1,7 @@
+Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, iso, Gauge32, MibIdentifier, Bits, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "iso", "Gauge32", "MibIdentifier", "Bits","Counter32")
+ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint")
+NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
+
+testNamedValue = MibScalar((9, 8, 7, 6, 5, 4, 3, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("A", 1), ("B", 2), ("C", 3), ("D", 4)))).setMaxAccess("readonly")
+mibBuilder.exportSymbols("TEST-MIB", testNamedValue=testNamedValue)
+
diff --git a/tangostationcontrol/tangostationcontrol/test/clients/test_attr_wrapper.py b/tangostationcontrol/tangostationcontrol/test/clients/test_attr_wrapper.py
index c55e9d1c09fcf4d78d52b520d0d5b8b7ee96e60e..13702c87caaac779f94c292e8dbe41ddf0e6e1bd 100644
--- a/tangostationcontrol/tangostationcontrol/test/clients/test_attr_wrapper.py
+++ b/tangostationcontrol/tangostationcontrol/test/clients/test_attr_wrapper.py
@@ -7,12 +7,12 @@
 """
 
 # External imports
-from tango import DevState, DevFailed
+from tango import DevState, DevFailed, AttrWriteType
 
 # Internal imports
 from tangostationcontrol.test.clients.test_client import test_client
-from tangostationcontrol.clients.attribute_wrapper import *
-from tangostationcontrol.devices.lofar_device import *
+from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper
+from tangostationcontrol.devices.lofar_device import lofar_device
 import tangostationcontrol.devices.lofar_device
 
 # Test imports
@@ -21,6 +21,7 @@ from tangostationcontrol.test import base
 
 import asyncio
 import mock
+import numpy
 
 scalar_dims = (1,)
 spectrum_dims = (4,)
diff --git a/tangostationcontrol/tangostationcontrol/test/clients/test_mib_loading.py b/tangostationcontrol/tangostationcontrol/test/clients/test_mib_loading.py
new file mode 100644
index 0000000000000000000000000000000000000000..6abf0f52e6ccda67f5ba482a8c12f811d5421fcb
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/test/clients/test_mib_loading.py
@@ -0,0 +1,53 @@
+from tangostationcontrol.test import base
+
+from tangostationcontrol.clients.snmp_client import mib_loader
+from pysnmp.smi import view
+import pysnmp.hlapi as pysnmp
+from pysnmp.smi.rfc1902 import ObjectIdentity
+
+from os import path
+
+
+class TestMibLoading(base.TestCase):
+
+    #name and directory of the pymib file
+    mib = "TEST-MIB"
+
+    # mib file is in a folder that is in the same folder as this test
+    rel_dir = "SNMP_mib_loading"
+
+    def test_content(self):
+        """
+        This file contains 1 variable named testNamedValue with oid "9.8.7.6.5.4.3.2.1" and named values: ("A", 1), ("B", 2), ("C", 3), ("D", 4)
+        In order to confirm that the mib is indeed loaded correctly this test has to get the oids, the values and the named values
+
+        """
+
+        abs_dir = path.dirname(__file__) + "/" + self.rel_dir
+        loader = mib_loader(abs_dir)
+        loader.load_pymib(self.mib)
+
+        # used to view mibs client side
+        mibView = view.MibViewController(loader.mibBuilder)
+
+        # The expected testNamedValue parameters as written in TEST-MIB.py
+        testNamedValue = "testNamedValue"
+        testNamedValue_oid = "9.8.7.6.5.4.3.2.1"
+        testNamedValue_named = "A"
+        testNamedValue_value = 1
+
+        # get testValue and set a value of 1
+        obj_T = pysnmp.ObjectType(ObjectIdentity(self.mib, testNamedValue), pysnmp.Integer32(1))
+        obj_T.resolveWithMib(mibView)
+
+        # get the oid
+        self.assertEqual(str(obj_T[0]), testNamedValue_oid)
+
+        # get the name format: mib::name
+        self.assertEqual(obj_T[0].prettyPrint(), f"{self.mib}::{testNamedValue}")
+
+        # get the namedValue
+        self.assertEqual(str(obj_T[1]), testNamedValue_named)
+
+        # get the numerical value
+        self.assertEqual(int(obj_T[1]), testNamedValue_value)
diff --git a/tangostationcontrol/tangostationcontrol/test/clients/test_snmp_client.py b/tangostationcontrol/tangostationcontrol/test/clients/test_snmp_client.py
index 4d3a5c22ab3b7ac61ccbdfd78671f0c9ed4cf56a..9db811338d77f5afab496dc5f5ec1e305cc98a94 100644
--- a/tangostationcontrol/tangostationcontrol/test/clients/test_snmp_client.py
+++ b/tangostationcontrol/tangostationcontrol/test/clients/test_snmp_client.py
@@ -5,7 +5,7 @@ from unittest import mock
 
 from tangostationcontrol.test import base
 
-from tangostationcontrol.clients.snmp_client import SNMP_client, snmp_attribute, annotation_wrapper
+from tangostationcontrol.clients.snmp_client import SNMP_client, snmp_attribute, SNMP_comm
 
 
 class server_imitator:
@@ -13,7 +13,7 @@ class server_imitator:
     snmp_to_numpy_dict = {
         hlapi.Integer32: numpy.int64,
         hlapi.TimeTicks: numpy.int64,
-        str: str,
+        hlapi.OctetString: str,
         hlapi.Counter32: numpy.int64,
         hlapi.Gauge32: numpy.int64,
         hlapi.IpAddress: str,
@@ -35,6 +35,8 @@ class server_imitator:
                 read_val = (None, snmp_type("1.3.6.1.2.1.1.1.0"))
             elif snmp_type is hlapi.IpAddress:
                 read_val = (None, snmp_type("1.1.1.1"))
+            elif snmp_type is hlapi.OctetString:
+                read_val = (None, snmp_type("1"))
             else:
                 read_val = (None, snmp_type(1))
 
@@ -48,12 +50,16 @@ class server_imitator:
                 read_val = []
                 for _i in range(dims[0]):
                     read_val.append((None, snmp_type(f"1.1.1.1")))
+            elif snmp_type is hlapi.OctetString:
+                read_val = []
+                for _i in range(dims[0]):
+                    read_val.append((None, snmp_type("1")))
             else:
                 read_val = []
                 for _i in range(dims[0]):
                     read_val.append((None, snmp_type(1)))
         else:
-            raise Exception("Image not yet supported :(")
+            raise Exception("Image not supported :(")
 
         return read_val
 
@@ -66,14 +72,14 @@ class server_imitator:
         if dims == self.dim_list["scalar"]:
             snmp_type_dict = {hlapi.ObjectIdentity:"1.3.6.1.2.1.1.1.0.1",
                             hlapi.IpAddress: "1.1.1.1",
-                            str: "1"}
+                            hlapi.OctetString: "1"}
             check_val = 1
             for k,v in snmp_type_dict.items():
                 if snmp_type is k:  check_val = v
         elif dims == self.dim_list["spectrum"]:
             snmp_type_dict = {hlapi.ObjectIdentity:["1.3.6.1.2.1.1.1.0.1"] * dims[0],
                             hlapi.IpAddress: ["1.1.1.1"] * dims[0],
-                            str: ["1"] * dims[0]}
+                            hlapi.OctetString: ["1"] * dims[0]}
             check_val = check_val = [1] * dims[0]
             for k,v in snmp_type_dict.items():
                 if snmp_type is k:  check_val = v
@@ -84,39 +90,6 @@ class server_imitator:
 
 class TestSNMP(base.TestCase):
 
-
-    def test_annotation_success(self):
-        """
-        unit test for the processing of annotation. Has 2 lists. 1 with things that should succeed and 1 with things that should fail.
-        """
-
-        client = SNMP_client(community='public', host='localhost', timeout=10, fault_func=None, try_interval=2)
-
-        test_list = [
-            # test name nad MIB type annotation
-            {"mib": "SNMPv2-MIB", "name": "sysDescr"},
-
-            # test name nad MIB type annotation with index
-            {"mib": "RFC1213-MIB", "name": "ipAdEntAddr", "index": (127, 0, 0, 1)},
-            {"mib": "random-MIB", "name": "aName", "index": 2},
-
-            #oid
-            {"oids": "1.3.6.1.2.1.2.2.1.2.31"}
-        ]
-
-
-        for i in test_list:
-            wrapper = client._setup_annotation(annotation=i)
-
-            if wrapper.oids is not None:
-                self.assertEqual(wrapper.oids, i["oids"])
-
-            else:
-                self.assertEqual(wrapper.mib, i["mib"], f"expected mib with: {i['mib']}, got: {wrapper.idx} from: {i}")
-                self.assertEqual(wrapper.name, i["name"], f"expected name with: {i['name']}, got: {wrapper.idx} from: {i}")
-                self.assertEqual(wrapper.idx, i.get('index', 0), f"expected idx with: {i.get('index', 0)}, got: {wrapper.idx} from: {i}")
-
-
     def test_annotation_fail(self):
         """
         unit test for the processing of annotation. Has 2 lists. 1 with things that should succeed and 1 with things that should fail.
@@ -125,56 +98,19 @@ class TestSNMP(base.TestCase):
         client = SNMP_client(community='public', host='localhost', timeout=10, fault_func=None, try_interval=2)
 
         fail_list = [
-            # OIDS cant use the index
-            {"oids": "1.3.6.1.2.1.2.2.1.2.31", "index": 2},
-            # mixed annotation is not allowed
-            {"oids": "1.3.6.1.2.1.2.2.1.2.31", "name": "thisShouldFail"},
             # no 'name'
             {"mib": "random-MIB", "index": 2},
+            # no MIB
+            {"name": "random-name", "index": 2},
         ]
 
         for i in fail_list:
             with self.assertRaises(ValueError):
-                client._setup_annotation(annotation=i)
-
-    def test_oids_scalar(self):
-
-        test_oid = "1.1.1.1"
-
-        server = server_imitator()
-
-        x, y = server.dim_list['scalar']
-
-        # we just need the object to call another function
-        wrapper = annotation_wrapper(annotation = {"oids": "Not None lol"})
-        # scalar
-        scalar_expected = [test_oid]
-        ret_oids = wrapper._get_oids(x, y, test_oid)
-        self.assertEqual(ret_oids, scalar_expected, f"Expected: {scalar_expected}, got: {ret_oids}")
-
-    def test_oids_spectrum(self):
-        """
-        Tests the "get_oids" function, which is for getting lists of sequential oids.
-
-        Results should basically be an incrementing list of oids with the final number incremented by 1 each time.
-        So "1.1" with dims of 3x1 might become ["1.1.1", "1.1.2", "1.1.3"]
-        """
-        server = server_imitator()
-
-        test_oid = "1.1.1.1"
-        x, y = server.dim_list['spectrum']
-
-        # we just need the object to call another function
-        wrapper = annotation_wrapper(annotation={"oids": "Not None lol"})
-
-        # spectrum
-        spectrum_expected = [test_oid + ".1", test_oid + ".2", test_oid + ".3", test_oid + ".4"]
-        ret_oids = wrapper._get_oids(x, y, test_oid)
-        self.assertListEqual(ret_oids, spectrum_expected, f"Expected: {spectrum_expected}, got: {ret_oids}")
+                client._process_annotation(annotation=i)
 
     @mock.patch('pysnmp.hlapi.ObjectIdentity')
     @mock.patch('pysnmp.hlapi.ObjectType')
-    @mock.patch('tangostationcontrol.clients.snmp_client.snmp_attribute.next_wrap')
+    @mock.patch('tangostationcontrol.clients.snmp_client.SNMP_comm.getter')
     def test_snmp_obj_get(self, m_next, m_obj_T, m_obj_i):
         """
         Attempts to read a fake SNMP variable and checks whether it got what it expected
@@ -186,21 +122,24 @@ class TestSNMP(base.TestCase):
             for i in server.snmp_to_numpy_dict:
                 m_next.return_value = (None, None, None, server.get_return_val(i, server.dim_list[j]))
 
-                m_client = mock.Mock()
+                def __fakeInit__(self):
+                    pass
 
+                with mock.patch.object(SNMP_comm, '__init__', __fakeInit__):
+                    m_comms = SNMP_comm()
 
-                wrapper = annotation_wrapper(annotation={"oids": "1.3.6.1.2.1.2.2.1.2.31"})
-                snmp_attr = snmp_attribute(client=m_client, wrapper=wrapper, dtype=server.snmp_to_numpy_dict[i], dim_x=server.dim_list[j][0], dim_y=server.dim_list[j][1])
+                    snmp_attr = snmp_attribute(comm=m_comms, mib="test", name="test", idx=0, dtype=server.snmp_to_numpy_dict[i], dim_x=server.dim_list[j][0], dim_y=server.dim_list[j][1])
 
-                val = snmp_attr.read_function()
+                    val = snmp_attr.read_function()
 
-                checkval = server.val_check(i, server.dim_list[j])
-                self.assertEqual(checkval, val, f"Expected: {checkval}, got: {val}")
+                    checkval = server.val_check(i, server.dim_list[j])
+                    self.assertEqual(checkval, val, f"During test {j} {i}; Expected: {checkval} of type {i}, got: {val} of type {type(val)}")
 
     @mock.patch('pysnmp.hlapi.ObjectIdentity')
+    @mock.patch('pysnmp.hlapi.ObjectType')
     @mock.patch('pysnmp.hlapi.setCmd')
-    @mock.patch('tangostationcontrol.clients.snmp_client.snmp_attribute.next_wrap')
-    def test_snmp_obj_set(self, m_next, m_nextCmd, m_obj_i):
+    @mock.patch('tangostationcontrol.clients.snmp_client.SNMP_comm.setter')
+    def test_snmp_obj_set(self, m_next, m_nextCmd, m_obj_T, m_obj_ID):
         """
         Attempts to write a value to an SNMP server, but instead intercepts it and compared whether the values is as expected.
         """
@@ -211,27 +150,53 @@ class TestSNMP(base.TestCase):
             for i in server.snmp_to_numpy_dict:
                 m_next.return_value = (None, None, None, server.get_return_val(i, server.dim_list[j]))
 
-                m_client = mock.Mock()
-                set_val = server.val_check(i, server.dim_list[j])
+                def __fakeInit__(self):
+                    pass
+
+                with mock.patch.object(SNMP_comm, '__init__', __fakeInit__):
+                    m_comms = SNMP_comm()
+                    
+                    set_val = server.val_check(i, server.dim_list[j])
+
+                    snmp_attr = snmp_attribute(comm=m_comms, mib="test", name="test", idx=0, dtype=server.snmp_to_numpy_dict[i], dim_x=server.dim_list[j][0], dim_y=server.dim_list[j][1])
+
+                    res_lst = []
+                    def test(*value):
+                        res_lst.append(value[1])
+                        return None, None, None, server.get_return_val(i, server.dim_list[j])
+
+                    hlapi.ObjectType = test
+
+                    snmp_attr.write_function(set_val)
+
+                    if len(res_lst) == 1:
+                        res_lst = res_lst[0]
 
-                wrapper = annotation_wrapper(annotation={"oids": "1.3.6.1.2.1.2.2.1.2.31"})
-                snmp_attr = snmp_attribute(client=m_client, wrapper=wrapper, dtype=server.snmp_to_numpy_dict[i], dim_x=server.dim_list[j][0], dim_y=server.dim_list[j][1])
+                    checkval = server.val_check(i, server.dim_list[j])
+                    self.assertEqual(checkval, res_lst, f"During test {j} {i}; Expected: {checkval}, got: {res_lst}")
 
-                res_lst = []
-                def test(*value):
-                    res_lst.append(value[1])
-                    return None, None, None, server.get_return_val(i, server.dim_list[j])
+    @mock.patch('tangostationcontrol.clients.snmp_client.SNMP_comm.getter')
+    def test_named_value(self, m_next):
+        # # {1: "other", 2: "invalid", 3: "dynamic", 4: "static",}
+        # test_val = hlapi.Integer.withNamedValues(enable=1, disable=0)
+        # test_val(1)
 
-                hlapi.ObjectType = test
+        m_comms = mock.Mock()
+        snmp_attr = snmp_attribute(comm=m_comms, mib="test", name="test", idx=0, dtype=str, dim_x=1, dim_y=0)
 
-                snmp_attr.write_function(set_val)
 
-                if len(res_lst) == 1:
-                    res_lst = res_lst[0]
+        # create a named integer with the values: 'enable' for 1 and 'disable' for 0
+        test_val = ((None, hlapi.Integer.withNamedValues(enable=1, disable=0)(1)),)
+        ret_val = snmp_attr.convert(test_val)
 
-                checkval = server.val_check(i, server.dim_list[j])
-                self.assertEqual(checkval, res_lst, f"Expected: {checkval}, got: {res_lst}")
+        # should return 'enable' since we supplied the value 1
+        self.assertEqual(ret_val, "enable", f"Expected: to get 'enable', got: {ret_val} of type {type(ret_val)}")
 
 
+        # create an unnamed integer with a value of 2
+        test_val = ((None, hlapi.Integer(2)),)
+        ret_val = snmp_attr.convert(test_val)
 
+        # check to make sure the value is indeed 2
+        self.assertEqual(ret_val, 2, f"Expected: to get {2}, got: {ret_val} of type {type(ret_val)}")
 
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_snmp_device.py b/tangostationcontrol/tangostationcontrol/test/devices/test_snmp_device.py
new file mode 100644
index 0000000000000000000000000000000000000000..6289b2a33162031b01998aeeb84cc2119fd78860
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_snmp_device.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+from tango.test_context import DeviceTestContext
+
+from tangostationcontrol.devices import snmp_device, lofar_device
+
+import mock
+from os import path
+
+from tangostationcontrol.test import base
+
+
+class TestSNMPDevice(base.TestCase):
+
+    # some dummy values for mandatory properties
+    snmp_properties = {'SNMP_community': 'localhost', 'SNMP_host': 161, 'SNMP_rel_mib_dir': "SNMP_mib_loading", 'SNMP_timeout': 5.0}
+
+    def setUp(self):
+        super(TestSNMPDevice, self).setUp()
+
+        # Patch DeviceProxy to allow making the proxies during initialisation
+        # that we otherwise avoid using
+        for device in [lofar_device]:
+            proxy_patcher = mock.patch.object(
+                device, 'DeviceProxy')
+            proxy_patcher.start()
+            self.addCleanup(proxy_patcher.stop)
+
+    def test_get_mib_dir(self):
+        with DeviceTestContext(snmp_device.SNMP, properties=self.snmp_properties, process=True) as proxy:
+
+            mib_dir = proxy.get_mib_dir()
+
+            self.assertEqual(mib_dir, f"{path.dirname(snmp_device.__file__)}/{self.snmp_properties['SNMP_rel_mib_dir']}")
diff --git a/tangostationcontrol/tangostationcontrol/test/toolkit/test_mib_compiler.py b/tangostationcontrol/tangostationcontrol/test/toolkit/test_mib_compiler.py
new file mode 100644
index 0000000000000000000000000000000000000000..8641f6483f04ef9e21c27b5bcaaaa4aff4f6587d
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/test/toolkit/test_mib_compiler.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+from tangostationcontrol.test import base
+from tangostationcontrol.toolkit.mib_compiler import mib_compiler
+
+import sys
+from os.path import isfile
+from os import getcwd
+from tempfile import TemporaryDirectory
+from unittest import mock
+
+class TestCompiler(base.TestCase):
+    def test_compile(self):
+
+        with TemporaryDirectory() as tmpdir:
+            new_sys_argv = [sys.argv[0], "--mibs", "TEST-MIB",
+                            "--source", f"{getcwd()}/tangostationcontrol/toolkit/mib_compiler/mibs",
+                            "--destination", f"{tmpdir}", "-v"]
+            with mock.patch.object(mib_compiler.sys, 'argv', new_sys_argv):
+                with self.assertRaises(SystemExit):
+                    mib_compiler.main()
+
+            # check if file was written
+            self.assertTrue(isfile(f"{tmpdir}/TEST-MIB.py"))
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/README.md b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..d99a55af6738b5556e72a8655e607a2a6641acd7
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/README.md
@@ -0,0 +1,28 @@
+# MIB Compiler
+
+The MIB compiler script 'compiles' .mib files to a custom python representation that pysnmp can load immediately. 
+
+In order to compile scripts there must be a valid mib file in the source directory as well as any potential imported files. 
+You can find out which mib files need to be imported by opening the file and looking at the `IMPORTS` section, where imported mib files are listed. 
+These mibs may also have subsequent mib files that need to be imported. Alternatively these imports may also be found in the verbose debug log. 
+
+This script will also generate pymib files for all the imported mib files.  
+
+`--mibs`: A list of mib files that need to be compiled.
+
+`--destination`: The output folder for the compiled mibs. This argument is optional. The default destination folder is `output_pymibs` in the current working directory.
+
+`--source`: A list of source folders and addresses from where the mib files are sourced. This argument is optional. The default source folder is `mibs` in the current working directory.
+It can be useful to also list a web address as source, as there exist various sites that host mib files. 
+
+`--debug`: enable verbose debugging. Useful for figuring out errors.
+
+example usage:
+To source the mib TEST-MIB from the default `mibs` location:
+`python3 mib_compiler.py --mibs TEST-MIB`
+
+To source the mib TEST-MIB from the default `mibs` location but to a custom output folder:
+`python3 mib_compiler.py --mibs TEST-MIB --destination home/user/output`
+
+To source the mib TEST-MIB and all its imports from the path `home/user/mibs` and web address `http://www.net-snmp.org/docs/mibs/`:
+`python3 mib_compiler.py --mibs TEST-MIB --source home/user/mibs http://www.net-snmp.org/docs/mibs/`
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mib_compiler.py b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mib_compiler.py
new file mode 100644
index 0000000000000000000000000000000000000000..a96f2c34670d900665dce697c048e1460866c89f
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mib_compiler.py
@@ -0,0 +1,66 @@
+import argparse
+import sys
+
+from pysnmp.smi import builder, compiler
+
+from pathlib import Path
+
+from pysmi import debug
+import logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger("mib_compiler")
+
+
+def mib_compile(mib_list : list, src, dst):
+
+    mibBuilder = builder.MibBuilder()
+
+    # set the compiler, the source path and set the www.net-snmp.org site as mib source as well.
+    compiler.addMibCompiler(mibBuilder, sources=src, destination=dst)
+
+    for i in mib_list:
+        # compile it
+        try:
+            mibBuilder.loadModules(i)
+            logger.debug(f"loaded {i}")
+        except Exception as e:
+            raise Exception(f"Something went wrong, try checking whether all the mib fills imported by the provided mib files are present in the source locations ({src}) \r\n (To do this enable debug options and scroll up) ") from e
+
+def main():
+    abs_path = str(Path().absolute()).replace("\\", "/")
+    out_path = f"{abs_path}/output_pymibs"
+    in_path = f"{abs_path}/mibs"
+
+    parser = argparse.ArgumentParser(
+        description='Compiles .mib files in to the easy to load pysnmp format')
+    parser.add_argument(
+        '-m', '--mibs', type=str, required=True, nargs='+', help='list of mib names to compile')
+    parser.add_argument(
+        '-d', '--destination', type=str,  required=False, default=out_path,
+        help='sets the output directory for the compiled mibs. (default: '
+             '%(default)s)')
+    parser.add_argument(
+        '-s', '--source', type=str, required=False, nargs='+',  default=in_path,
+        help='sets the input paths or addresses to read the .mib files from  (default: '
+             '%(default)s)')
+    parser.add_argument(
+        '-v', '--debug', dest='debug', action='store_true', default=False,
+        help='increase log output')
+
+    args = parser.parse_args()
+
+    # argparse arguments
+    mibs = args.mibs
+    destination = args.destination
+    source = args.source
+    debug_option = args.debug
+
+    if debug_option:
+        debug.setLogger(debug.Debug('compiler'))
+
+    mib_compile(mib_list=mibs, src=source, dst=destination)
+
+    sys.exit(1)
+
+if __name__ == "__main__":
+    main()
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-CONF b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-CONF
new file mode 100644
index 0000000000000000000000000000000000000000..24a1eed95d62f81ea88c3a78017696fa05400340
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-CONF
@@ -0,0 +1,322 @@
+SNMPv2-CONF DEFINITIONS ::= BEGIN
+
+IMPORTS ObjectName, NotificationName, ObjectSyntax
+                                               FROM SNMPv2-SMI;
+
+-- definitions for conformance groups
+
+OBJECT-GROUP MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  ObjectsPart
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+
+    VALUE NOTATION ::=
+                  value(VALUE OBJECT IDENTIFIER)
+
+    ObjectsPart ::=
+                  "OBJECTS" "{" Objects "}"
+    Objects ::=
+                  Object
+                | Objects "," Object
+    Object ::=
+
+                  value(ObjectName)
+
+    Status ::=
+                  "current"
+                | "deprecated"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    -- a character string as defined in [2]
+    Text ::= value(IA5String)
+END
+
+-- more definitions for conformance groups
+
+NOTIFICATION-GROUP MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  NotificationsPart
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+
+    VALUE NOTATION ::=
+                  value(VALUE OBJECT IDENTIFIER)
+
+    NotificationsPart ::=
+                  "NOTIFICATIONS" "{" Notifications "}"
+    Notifications ::=
+                  Notification
+                | Notifications "," Notification
+    Notification ::=
+                  value(NotificationName)
+
+    Status ::=
+                  "current"
+                | "deprecated"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    -- a character string as defined in [2]
+    Text ::= value(IA5String)
+END
+
+-- definitions for compliance statements
+
+MODULE-COMPLIANCE MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+                  ModulePart
+
+    VALUE NOTATION ::=
+                  value(VALUE OBJECT IDENTIFIER)
+
+    Status ::=
+                  "current"
+                | "deprecated"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    ModulePart ::=
+                  Modules
+    Modules ::=
+                  Module
+                | Modules Module
+    Module ::=
+                  -- name of module --
+                  "MODULE" ModuleName
+                  MandatoryPart
+                  CompliancePart
+
+    ModuleName ::=
+                  -- identifier must start with uppercase letter
+                  identifier ModuleIdentifier
+                  -- must not be empty unless contained
+                  -- in MIB Module
+                | empty
+    ModuleIdentifier ::=
+                  value(OBJECT IDENTIFIER)
+                | empty
+
+    MandatoryPart ::=
+                  "MANDATORY-GROUPS" "{" Groups "}"
+                | empty
+
+    Groups ::=
+
+                  Group
+                | Groups "," Group
+    Group ::=
+                  value(OBJECT IDENTIFIER)
+
+    CompliancePart ::=
+                  Compliances
+                | empty
+
+    Compliances ::=
+                  Compliance
+                | Compliances Compliance
+    Compliance ::=
+                  ComplianceGroup
+                | Object
+
+    ComplianceGroup ::=
+                  "GROUP" value(OBJECT IDENTIFIER)
+                  "DESCRIPTION" Text
+
+    Object ::=
+                  "OBJECT" value(ObjectName)
+                  SyntaxPart
+                  WriteSyntaxPart
+                  AccessPart
+                  "DESCRIPTION" Text
+
+    -- must be a refinement for object's SYNTAX clause
+    SyntaxPart ::= "SYNTAX" Syntax
+                | empty
+
+    -- must be a refinement for object's SYNTAX clause
+    WriteSyntaxPart ::= "WRITE-SYNTAX" Syntax
+                | empty
+
+    Syntax ::=    -- Must be one of the following:
+                       -- a base type (or its refinement),
+                       -- a textual convention (or its refinement), or
+                       -- a BITS pseudo-type
+                  type
+                | "BITS" "{" NamedBits "}"
+
+    NamedBits ::= NamedBit
+                | NamedBits "," NamedBit
+
+    NamedBit ::= identifier "(" number ")" -- number is nonnegative
+
+    AccessPart ::=
+                  "MIN-ACCESS" Access
+                | empty
+    Access ::=
+                  "not-accessible"
+                | "accessible-for-notify"
+                | "read-only"
+                | "read-write"
+                | "read-create"
+
+    -- a character string as defined in [2]
+    Text ::= value(IA5String)
+END
+
+-- definitions for capabilities statements
+
+AGENT-CAPABILITIES MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  "PRODUCT-RELEASE" Text
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+                  ModulePart
+
+    VALUE NOTATION ::=
+                  value(VALUE OBJECT IDENTIFIER)
+
+    Status ::=
+                  "current"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    ModulePart ::=
+                  Modules
+                | empty
+    Modules ::=
+                  Module
+                | Modules Module
+    Module ::=
+                  -- name of module --
+                  "SUPPORTS" ModuleName
+                  "INCLUDES" "{" Groups "}"
+                  VariationPart
+
+    ModuleName ::=
+
+                  -- identifier must start with uppercase letter
+                  identifier ModuleIdentifier
+    ModuleIdentifier ::=
+                  value(OBJECT IDENTIFIER)
+                | empty
+
+    Groups ::=
+                  Group
+                | Groups "," Group
+    Group ::=
+                  value(OBJECT IDENTIFIER)
+
+    VariationPart ::=
+                  Variations
+                | empty
+    Variations ::=
+                  Variation
+                | Variations Variation
+
+    Variation ::=
+                  ObjectVariation
+                | NotificationVariation
+
+    NotificationVariation ::=
+                  "VARIATION" value(NotificationName)
+                  AccessPart
+                  "DESCRIPTION" Text
+
+    ObjectVariation ::=
+                  "VARIATION" value(ObjectName)
+                  SyntaxPart
+                  WriteSyntaxPart
+                  AccessPart
+                  CreationPart
+                  DefValPart
+                  "DESCRIPTION" Text
+
+    -- must be a refinement for object's SYNTAX clause
+    SyntaxPart ::= "SYNTAX" Syntax
+                | empty
+
+    WriteSyntaxPart ::= "WRITE-SYNTAX" Syntax
+                | empty
+
+    Syntax ::=    -- Must be one of the following:
+                       -- a base type (or its refinement),
+                       -- a textual convention (or its refinement), or
+                       -- a BITS pseudo-type
+
+                  type
+                | "BITS" "{" NamedBits "}"
+
+    NamedBits ::= NamedBit
+                | NamedBits "," NamedBit
+
+    NamedBit ::= identifier "(" number ")" -- number is nonnegative
+
+    AccessPart ::=
+                  "ACCESS" Access
+                | empty
+
+    Access ::=
+                  "not-implemented"
+                -- only "not-implemented" for notifications
+                | "accessible-for-notify"
+                | "read-only"
+                | "read-write"
+                | "read-create"
+                -- following is for backward-compatibility only
+                | "write-only"
+
+    CreationPart ::=
+                  "CREATION-REQUIRES" "{" Cells "}"
+                | empty
+    Cells ::=
+                  Cell
+                | Cells "," Cell
+    Cell ::=
+                  value(ObjectName)
+
+    DefValPart ::= "DEFVAL" "{" Defvalue "}"
+                | empty
+
+    Defvalue ::=  -- must be valid for the object's syntax
+                  -- in this macro's SYNTAX clause, if present,
+                  -- or if not, in object's OBJECT-TYPE macro
+                  value(ObjectSyntax)
+                | "{" BitsValue "}"
+
+    BitsValue ::= BitNames
+                | empty
+
+    BitNames ::=  BitName
+                | BitNames "," BitName
+
+    BitName ::= identifier
+
+    -- a character string as defined in [2]
+    Text ::= value(IA5String)
+END
+
+END
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-SMI b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-SMI
new file mode 100644
index 0000000000000000000000000000000000000000..2132646cab00e28cf2f679fc1bb308ee2d12a1a1
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-SMI
@@ -0,0 +1,352 @@
+SNMPv2-SMI DEFINITIONS ::= BEGIN
+
+
+-- the path to the root
+
+org            OBJECT IDENTIFIER ::= { iso 3 }  --  "iso" = 1
+dod            OBJECT IDENTIFIER ::= { org 6 }
+internet       OBJECT IDENTIFIER ::= { dod 1 }
+
+directory      OBJECT IDENTIFIER ::= { internet 1 }
+
+mgmt           OBJECT IDENTIFIER ::= { internet 2 }
+mib-2          OBJECT IDENTIFIER ::= { mgmt 1 }
+transmission   OBJECT IDENTIFIER ::= { mib-2 10 }
+
+experimental   OBJECT IDENTIFIER ::= { internet 3 }
+
+private        OBJECT IDENTIFIER ::= { internet 4 }
+enterprises    OBJECT IDENTIFIER ::= { private 1 }
+
+security       OBJECT IDENTIFIER ::= { internet 5 }
+
+snmpV2         OBJECT IDENTIFIER ::= { internet 6 }
+
+-- transport domains
+snmpDomains    OBJECT IDENTIFIER ::= { snmpV2 1 }
+
+-- transport proxies
+snmpProxys     OBJECT IDENTIFIER ::= { snmpV2 2 }
+
+-- module identities
+snmpModules    OBJECT IDENTIFIER ::= { snmpV2 3 }
+
+-- Extended UTCTime, to allow dates with four-digit years
+-- (Note that this definition of ExtUTCTime is not to be IMPORTed
+--  by MIB modules.)
+ExtUTCTime ::= OCTET STRING(SIZE(11 | 13))
+    -- format is YYMMDDHHMMZ or YYYYMMDDHHMMZ
+    --   where: YY   - last two digits of year (only years
+    --                 between 1900-1999)
+    --          YYYY - last four digits of the year (any year)
+    --          MM   - month (01 through 12)
+    --          DD   - day of month (01 through 31)
+    --          HH   - hours (00 through 23)
+    --          MM   - minutes (00 through 59)
+    --          Z    - denotes GMT (the ASCII character Z)
+    --
+    -- For example, "9502192015Z" and "199502192015Z" represent
+    -- 8:15pm GMT on 19 February 1995. Years after 1999 must use
+    -- the four digit year format. Years 1900-1999 may use the
+    -- two or four digit format.
+
+-- definitions for information modules
+
+MODULE-IDENTITY MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  "LAST-UPDATED" value(Update ExtUTCTime)
+                  "ORGANIZATION" Text
+                  "CONTACT-INFO" Text
+                  "DESCRIPTION" Text
+                  RevisionPart
+
+    VALUE NOTATION ::=
+                  value(VALUE OBJECT IDENTIFIER)
+
+    RevisionPart ::=
+                  Revisions
+                | empty
+    Revisions ::=
+                  Revision
+                | Revisions Revision
+    Revision ::=
+                  "REVISION" value(Update ExtUTCTime)
+                  "DESCRIPTION" Text
+
+    -- a character string as defined in section 3.1.1
+    Text ::= value(IA5String)
+END
+
+
+OBJECT-IDENTITY MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+
+    VALUE NOTATION ::=
+                  value(VALUE OBJECT IDENTIFIER)
+
+    Status ::=
+                  "current"
+                | "deprecated"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    -- a character string as defined in section 3.1.1
+    Text ::= value(IA5String)
+END
+
+
+-- names of objects
+-- (Note that these definitions of ObjectName and NotificationName
+--  are not to be IMPORTed by MIB modules.)
+
+ObjectName ::=
+    OBJECT IDENTIFIER
+
+NotificationName ::=
+    OBJECT IDENTIFIER
+
+-- syntax of objects
+
+-- the "base types" defined here are:
+--   3 built-in ASN.1 types: INTEGER, OCTET STRING, OBJECT IDENTIFIER
+--   8 application-defined types: Integer32, IpAddress, Counter32,
+--              Gauge32, Unsigned32, TimeTicks, Opaque, and Counter64
+
+ObjectSyntax ::=
+    CHOICE {
+        simple
+            SimpleSyntax,
+
+          -- note that SEQUENCEs for conceptual tables and
+          -- rows are not mentioned here...
+
+        application-wide
+            ApplicationSyntax
+    }
+
+-- built-in ASN.1 types
+
+SimpleSyntax ::=
+    CHOICE {
+        -- INTEGERs with a more restrictive range
+        -- may also be used
+        integer-value               -- includes Integer32
+            INTEGER (-2147483648..2147483647),
+
+        -- OCTET STRINGs with a more restrictive size
+        -- may also be used
+        string-value
+            OCTET STRING (SIZE (0..65535)),
+
+        objectID-value
+            OBJECT IDENTIFIER
+    }
+
+-- indistinguishable from INTEGER, but never needs more than
+-- 32-bits for a two's complement representation
+Integer32 ::=
+        INTEGER (-2147483648..2147483647)
+
+
+-- application-wide types
+
+ApplicationSyntax ::=
+    CHOICE {
+        ipAddress-value
+            IpAddress,
+
+        counter-value
+            Counter32,
+
+        timeticks-value
+            TimeTicks,
+
+        arbitrary-value
+            Opaque,
+
+        big-counter-value
+            Counter64,
+
+        unsigned-integer-value  -- includes Gauge32
+            Unsigned32
+    }
+
+-- in network-byte order
+-- (this is a tagged type for historical reasons)
+IpAddress ::=
+    [APPLICATION 0]
+        IMPLICIT OCTET STRING (SIZE (4))
+
+-- this wraps
+Counter32 ::=
+    [APPLICATION 1]
+        IMPLICIT INTEGER (0..4294967295)
+
+-- this doesn't wrap
+Gauge32 ::=
+    [APPLICATION 2]
+        IMPLICIT INTEGER (0..4294967295)
+
+-- an unsigned 32-bit quantity
+-- indistinguishable from Gauge32
+Unsigned32 ::=
+    [APPLICATION 2]
+        IMPLICIT INTEGER (0..4294967295)
+
+-- hundredths of seconds since an epoch
+TimeTicks ::=
+    [APPLICATION 3]
+        IMPLICIT INTEGER (0..4294967295)
+
+-- for backward-compatibility only
+Opaque ::=
+    [APPLICATION 4]
+        IMPLICIT OCTET STRING
+
+-- for counters that wrap in less than one hour with only 32 bits
+Counter64 ::=
+    [APPLICATION 6]
+        IMPLICIT INTEGER (0..18446744073709551615)
+
+
+-- definition for objects
+
+OBJECT-TYPE MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  "SYNTAX" Syntax
+                  UnitsPart
+                  "MAX-ACCESS" Access
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+                  IndexPart
+                  DefValPart
+
+    VALUE NOTATION ::=
+                  value(VALUE ObjectName)
+
+    Syntax ::=   -- Must be one of the following:
+                       -- a base type (or its refinement),
+                       -- a textual convention (or its refinement), or
+                       -- a BITS pseudo-type
+                   type
+                | "BITS" "{" NamedBits "}"
+
+    NamedBits ::= NamedBit
+                | NamedBits "," NamedBit
+
+    NamedBit ::=  identifier "(" number ")" -- number is nonnegative
+
+    UnitsPart ::=
+                  "UNITS" Text
+                | empty
+
+    Access ::=
+                  "not-accessible"
+                | "accessible-for-notify"
+                | "read-only"
+                | "read-write"
+                | "read-create"
+
+    Status ::=
+                  "current"
+                | "deprecated"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    IndexPart ::=
+                  "INDEX"    "{" IndexTypes "}"
+                | "AUGMENTS" "{" Entry      "}"
+                | empty
+    IndexTypes ::=
+                  IndexType
+                | IndexTypes "," IndexType
+    IndexType ::=
+                  "IMPLIED" Index
+                | Index
+    Index ::=
+                    -- use the SYNTAX value of the
+                    -- correspondent OBJECT-TYPE invocation
+                  value(ObjectName)
+    Entry ::=
+                    -- use the INDEX value of the
+                    -- correspondent OBJECT-TYPE invocation
+                  value(ObjectName)
+
+    DefValPart ::= "DEFVAL" "{" Defvalue "}"
+                | empty
+
+    Defvalue ::=  -- must be valid for the type specified in
+                  -- SYNTAX clause of same OBJECT-TYPE macro
+                  value(ObjectSyntax)
+                | "{" BitsValue "}"
+
+    BitsValue ::= BitNames
+                | empty
+
+    BitNames ::=  BitName
+                | BitNames "," BitName
+
+    BitName ::= identifier
+
+    -- a character string as defined in section 3.1.1
+    Text ::= value(IA5String)
+END
+
+
+-- definitions for notifications
+
+NOTIFICATION-TYPE MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  ObjectsPart
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+
+    VALUE NOTATION ::=
+                  value(VALUE NotificationName)
+
+    ObjectsPart ::=
+                  "OBJECTS" "{" Objects "}"
+                | empty
+    Objects ::=
+                  Object
+                | Objects "," Object
+    Object ::=
+                  value(ObjectName)
+
+    Status ::=
+                  "current"
+                | "deprecated"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    -- a character string as defined in section 3.1.1
+    Text ::= value(IA5String)
+END
+
+-- definitions of administrative identifiers
+
+zeroDotZero    OBJECT-IDENTITY
+    STATUS     current
+    DESCRIPTION
+            "A value used for null identifiers."
+    ::= { 0 0 }
+
+END
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-TC b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-TC
new file mode 100644
index 0000000000000000000000000000000000000000..a68f9690d198b2533905c8ab9baa604c7a7a9a54
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/SNMPv2-TC
@@ -0,0 +1,786 @@
+SNMPv2-TC DEFINITIONS ::= BEGIN
+
+IMPORTS
+    TimeTicks         FROM SNMPv2-SMI;
+
+
+-- definition of textual conventions
+
+TEXTUAL-CONVENTION MACRO ::=
+BEGIN
+    TYPE NOTATION ::=
+                  DisplayPart
+                  "STATUS" Status
+                  "DESCRIPTION" Text
+                  ReferPart
+                  "SYNTAX" Syntax
+
+    VALUE NOTATION ::=
+                   value(VALUE Syntax)      -- adapted ASN.1
+
+    DisplayPart ::=
+                  "DISPLAY-HINT" Text
+                | empty
+
+    Status ::=
+                  "current"
+                | "deprecated"
+                | "obsolete"
+
+    ReferPart ::=
+                  "REFERENCE" Text
+                | empty
+
+    -- a character string as defined in [2]
+    Text ::= value(IA5String)
+
+    Syntax ::=   -- Must be one of the following:
+                       -- a base type (or its refinement), or
+                       -- a BITS pseudo-type
+                  type
+                | "BITS" "{" NamedBits "}"
+
+    NamedBits ::= NamedBit
+                | NamedBits "," NamedBit
+
+    NamedBit ::=  identifier "(" number ")" -- number is nonnegative
+
+END
+
+
+
+
+DisplayString ::= TEXTUAL-CONVENTION
+    DISPLAY-HINT "255a"
+    STATUS       current
+    DESCRIPTION
+            "Represents textual information taken from the NVT ASCII
+            character set, as defined in pages 4, 10-11 of RFC 854.
+
+            To summarize RFC 854, the NVT ASCII repertoire specifies:
+
+              - the use of character codes 0-127 (decimal)
+
+              - the graphics characters (32-126) are interpreted as
+                US ASCII
+
+              - NUL, LF, CR, BEL, BS, HT, VT and FF have the special
+                meanings specified in RFC 854
+
+              - the other 25 codes have no standard interpretation
+
+              - the sequence 'CR LF' means newline
+
+              - the sequence 'CR NUL' means carriage-return
+
+              - an 'LF' not preceded by a 'CR' means moving to the
+                same column on the next line.
+
+              - the sequence 'CR x' for any x other than LF or NUL is
+                illegal.  (Note that this also means that a string may
+                end with either 'CR LF' or 'CR NUL', but not with CR.)
+
+            Any object defined using this syntax may not exceed 255
+            characters in length."
+    SYNTAX       OCTET STRING (SIZE (0..255))
+
+PhysAddress ::= TEXTUAL-CONVENTION
+    DISPLAY-HINT "1x:"
+    STATUS       current
+    DESCRIPTION
+            "Represents media- or physical-level addresses."
+    SYNTAX       OCTET STRING
+
+
+MacAddress ::= TEXTUAL-CONVENTION
+    DISPLAY-HINT "1x:"
+    STATUS       current
+    DESCRIPTION
+            "Represents an 802 MAC address represented in the
+            `canonical' order defined by IEEE 802.1a, i.e., as if it
+            were transmitted least significant bit first, even though
+            802.5 (in contrast to other 802.x protocols) requires MAC
+            addresses to be transmitted most significant bit first."
+    SYNTAX       OCTET STRING (SIZE (6))
+
+TruthValue ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "Represents a boolean value."
+    SYNTAX       INTEGER { true(1), false(2) }
+
+TestAndIncr ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "Represents integer-valued information used for atomic
+            operations.  When the management protocol is used to specify
+            that an object instance having this syntax is to be
+            modified, the new value supplied via the management protocol
+            must precisely match the value presently held by the
+            instance.  If not, the management protocol set operation
+            fails with an error of `inconsistentValue'.  Otherwise, if
+            the current value is the maximum value of 2^31-1 (2147483647
+            decimal), then the value held by the instance is wrapped to
+            zero; otherwise, the value held by the instance is
+            incremented by one.  (Note that regardless of whether the
+            management protocol set operation succeeds, the variable-
+            binding in the request and response PDUs are identical.)
+
+            The value of the ACCESS clause for objects having this
+            syntax is either `read-write' or `read-create'.  When an
+            instance of a columnar object having this syntax is created,
+            any value may be supplied via the management protocol.
+
+            When the network management portion of the system is re-
+            initialized, the value of every object instance having this
+            syntax must either be incremented from its value prior to
+            the re-initialization, or (if the value prior to the re-
+            initialization is unknown) be set to a pseudo-randomly
+            generated value."
+    SYNTAX       INTEGER (0..2147483647)
+
+AutonomousType ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "Represents an independently extensible type identification
+            value.  It may, for example, indicate a particular sub-tree
+            with further MIB definitions, or define a particular type of
+            protocol or hardware."
+    SYNTAX       OBJECT IDENTIFIER
+
+
+InstancePointer ::= TEXTUAL-CONVENTION
+    STATUS       obsolete
+    DESCRIPTION
+            "A pointer to either a specific instance of a MIB object or
+            a conceptual row of a MIB table in the managed device.  In
+            the latter case, by convention, it is the name of the
+            particular instance of the first accessible columnar object
+            in the conceptual row.
+
+            The two uses of this textual convention are replaced by
+            VariablePointer and RowPointer, respectively."
+    SYNTAX       OBJECT IDENTIFIER
+
+
+VariablePointer ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "A pointer to a specific object instance.  For example,
+            sysContact.0 or ifInOctets.3."
+    SYNTAX       OBJECT IDENTIFIER
+
+
+RowPointer ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "Represents a pointer to a conceptual row.  The value is the
+            name of the instance of the first accessible columnar object
+            in the conceptual row.
+
+            For example, ifIndex.3 would point to the 3rd row in the
+            ifTable (note that if ifIndex were not-accessible, then
+            ifDescr.3 would be used instead)."
+    SYNTAX       OBJECT IDENTIFIER
+
+RowStatus ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "The RowStatus textual convention is used to manage the
+            creation and deletion of conceptual rows, and is used as the
+            value of the SYNTAX clause for the status column of a
+            conceptual row (as described in Section 7.7.1 of [2].)
+            The status column has six defined values:
+
+                 - `active', which indicates that the conceptual row is
+                 available for use by the managed device;
+
+                 - `notInService', which indicates that the conceptual
+                 row exists in the agent, but is unavailable for use by
+                 the managed device (see NOTE below); 'notInService' has
+                 no implication regarding the internal consistency of
+                 the row, availability of resources, or consistency with
+                 the current state of the managed device;
+
+                 - `notReady', which indicates that the conceptual row
+                 exists in the agent, but is missing information
+                 necessary in order to be available for use by the
+                 managed device (i.e., one or more required columns in
+                 the conceptual row have not been instanciated);
+
+                 - `createAndGo', which is supplied by a management
+                 station wishing to create a new instance of a
+                 conceptual row and to have its status automatically set
+                 to active, making it available for use by the managed
+                 device;
+
+                 - `createAndWait', which is supplied by a management
+                 station wishing to create a new instance of a
+                 conceptual row (but not make it available for use by
+                 the managed device); and,
+
+                 - `destroy', which is supplied by a management station
+                 wishing to delete all of the instances associated with
+                 an existing conceptual row.
+
+            Whereas five of the six values (all except `notReady') may
+            be specified in a management protocol set operation, only
+            three values will be returned in response to a management
+            protocol retrieval operation:  `notReady', `notInService' or
+            `active'.  That is, when queried, an existing conceptual row
+            has only three states:  it is either available for use by
+            the managed device (the status column has value `active');
+            it is not available for use by the managed device, though
+            the agent has sufficient information to attempt to make it
+            so (the status column has value `notInService'); or, it is
+            not available for use by the managed device, and an attempt
+            to make it so would fail because the agent has insufficient
+            information (the state column has value `notReady').
+
+                                     NOTE WELL
+
+                 This textual convention may be used for a MIB table,
+                 irrespective of whether the values of that table's
+                 conceptual rows are able to be modified while it is
+                 active, or whether its conceptual rows must be taken
+                 out of service in order to be modified.  That is, it is
+                 the responsibility of the DESCRIPTION clause of the
+                 status column to specify whether the status column must
+                 not be `active' in order for the value of some other
+                 column of the same conceptual row to be modified.  If
+                 such a specification is made, affected columns may be
+                 changed by an SNMP set PDU if the RowStatus would not
+                 be equal to `active' either immediately before or after
+                 processing the PDU.  In other words, if the PDU also
+                 contained a varbind that would change the RowStatus
+                 value, the column in question may be changed if the
+                 RowStatus was not equal to `active' as the PDU was
+                 received, or if the varbind sets the status to a value
+                 other than 'active'.
+
+
+            Also note that whenever any elements of a row exist, the
+            RowStatus column must also exist.
+
+            To summarize the effect of having a conceptual row with a
+            status column having a SYNTAX clause value of RowStatus,
+            consider the following state diagram:
+
+
+                                         STATE
+              +--------------+-----------+-------------+-------------
+              |      A       |     B     |      C      |      D
+              |              |status col.|status column|
+              |status column |    is     |      is     |status column
+    ACTION    |does not exist|  notReady | notInService|  is active
+--------------+--------------+-----------+-------------+-------------
+set status    |noError    ->D|inconsist- |inconsistent-|inconsistent-
+column to     |       or     |   entValue|        Value|        Value
+createAndGo   |inconsistent- |           |             |
+              |         Value|           |             |
+--------------+--------------+-----------+-------------+-------------
+set status    |noError  see 1|inconsist- |inconsistent-|inconsistent-
+column to     |       or     |   entValue|        Value|        Value
+createAndWait |wrongValue    |           |             |
+--------------+--------------+-----------+-------------+-------------
+set status    |inconsistent- |inconsist- |noError      |noError
+column to     |         Value|   entValue|             |
+active        |              |           |             |
+              |              |     or    |             |
+              |              |           |             |
+              |              |see 2   ->D|see 8     ->D|          ->D
+--------------+--------------+-----------+-------------+-------------
+set status    |inconsistent- |inconsist- |noError      |noError   ->C
+column to     |         Value|   entValue|             |
+notInService  |              |           |             |
+              |              |     or    |             |      or
+              |              |           |             |
+              |              |see 3   ->C|          ->C|see 6
+--------------+--------------+-----------+-------------+-------------
+set status    |noError       |noError    |noError      |noError   ->A
+column to     |              |           |             |      or
+destroy       |           ->A|        ->A|          ->A|see 7
+--------------+--------------+-----------+-------------+-------------
+set any other |see 4         |noError    |noError      |see 5
+column to some|              |           |             |
+value         |              |      see 1|          ->C|          ->D
+--------------+--------------+-----------+-------------+-------------
+
+            (1) goto B or C, depending on information available to the
+            agent.
+
+            (2) if other variable bindings included in the same PDU,
+            provide values for all columns which are missing but
+            required, and all columns have acceptable values, then
+            return noError and goto D.
+
+            (3) if other variable bindings included in the same PDU,
+            provide legal values for all columns which are missing but
+            required, then return noError and goto C.
+
+            (4) at the discretion of the agent, the return value may be
+            either:
+
+                 inconsistentName:  because the agent does not choose to
+                 create such an instance when the corresponding
+                 RowStatus instance does not exist, or
+
+                 inconsistentValue:  if the supplied value is
+                 inconsistent with the state of some other MIB object's
+                 value, or
+
+                 noError: because the agent chooses to create the
+                 instance.
+
+            If noError is returned, then the instance of the status
+            column must also be created, and the new state is B or C,
+            depending on the information available to the agent.  If
+            inconsistentName or inconsistentValue is returned, the row
+            remains in state A.
+
+            (5) depending on the MIB definition for the column/table,
+            either noError or inconsistentValue may be returned.
+
+            (6) the return value can indicate one of the following
+            errors:
+
+                 wrongValue: because the agent does not support
+                 notInService (e.g., an agent which does not support
+                 createAndWait), or
+
+                 inconsistentValue: because the agent is unable to take
+                 the row out of service at this time, perhaps because it
+                 is in use and cannot be de-activated.
+
+            (7) the return value can indicate the following error:
+
+                 inconsistentValue: because the agent is unable to
+                 remove the row at this time, perhaps because it is in
+                 use and cannot be de-activated.
+
+            (8) the transition to D can fail, e.g., if the values of the
+            conceptual row are inconsistent, then the error code would
+            be inconsistentValue.
+
+            NOTE: Other processing of (this and other varbinds of) the
+            set request may result in a response other than noError
+            being returned, e.g., wrongValue, noCreation, etc.
+
+
+                              Conceptual Row Creation
+
+            There are four potential interactions when creating a
+            conceptual row:  selecting an instance-identifier which is
+            not in use; creating the conceptual row; initializing any
+            objects for which the agent does not supply a default; and,
+            making the conceptual row available for use by the managed
+            device.
+
+            Interaction 1: Selecting an Instance-Identifier
+
+            The algorithm used to select an instance-identifier varies
+            for each conceptual row.  In some cases, the instance-
+            identifier is semantically significant, e.g., the
+            destination address of a route, and a management station
+            selects the instance-identifier according to the semantics.
+
+            In other cases, the instance-identifier is used solely to
+            distinguish conceptual rows, and a management station
+            without specific knowledge of the conceptual row might
+            examine the instances present in order to determine an
+            unused instance-identifier.  (This approach may be used, but
+            it is often highly sub-optimal; however, it is also a
+            questionable practice for a naive management station to
+            attempt conceptual row creation.)
+
+            Alternately, the MIB module which defines the conceptual row
+            might provide one or more objects which provide assistance
+            in determining an unused instance-identifier.  For example,
+            if the conceptual row is indexed by an integer-value, then
+            an object having an integer-valued SYNTAX clause might be
+            defined for such a purpose, allowing a management station to
+            issue a management protocol retrieval operation.  In order
+            to avoid unnecessary collisions between competing management
+            stations, `adjacent' retrievals of this object should be
+            different.
+
+            Finally, the management station could select a pseudo-random
+            number to use as the index.  In the event that this index
+            was already in use and an inconsistentValue was returned in
+            response to the management protocol set operation, the
+            management station should simply select a new pseudo-random
+            number and retry the operation.
+
+            A MIB designer should choose between the two latter
+            algorithms based on the size of the table (and therefore the
+            efficiency of each algorithm).  For tables in which a large
+            number of entries are expected, it is recommended that a MIB
+            object be defined that returns an acceptable index for
+            creation.  For tables with small numbers of entries, it is
+            recommended that the latter pseudo-random index mechanism be
+            used.
+
+            Interaction 2: Creating the Conceptual Row
+
+            Once an unused instance-identifier has been selected, the
+            management station determines if it wishes to create and
+            activate the conceptual row in one transaction or in a
+            negotiated set of interactions.
+
+            Interaction 2a: Creating and Activating the Conceptual Row
+
+            The management station must first determine the column
+            requirements, i.e., it must determine those columns for
+            which it must or must not provide values.  Depending on the
+            complexity of the table and the management station's
+            knowledge of the agent's capabilities, this determination
+            can be made locally by the management station.  Alternately,
+            the management station issues a management protocol get
+            operation to examine all columns in the conceptual row that
+            it wishes to create.  In response, for each column, there
+            are three possible outcomes:
+
+                 - a value is returned, indicating that some other
+                 management station has already created this conceptual
+                 row.  We return to interaction 1.
+
+                 - the exception `noSuchInstance' is returned,
+                 indicating that the agent implements the object-type
+                 associated with this column, and that this column in at
+                 least one conceptual row would be accessible in the MIB
+                 view used by the retrieval were it to exist. For those
+                 columns to which the agent provides read-create access,
+                 the `noSuchInstance' exception tells the management
+                 station that it should supply a value for this column
+                 when the conceptual row is to be created.
+
+                 - the exception `noSuchObject' is returned, indicating
+                 that the agent does not implement the object-type
+                 associated with this column or that there is no
+                 conceptual row for which this column would be
+                 accessible in the MIB view used by the retrieval.  As
+                 such, the management station can not issue any
+                 management protocol set operations to create an
+                 instance of this column.
+
+            Once the column requirements have been determined, a
+            management protocol set operation is accordingly issued.
+            This operation also sets the new instance of the status
+            column to `createAndGo'.
+
+            When the agent processes the set operation, it verifies that
+            it has sufficient information to make the conceptual row
+            available for use by the managed device.  The information
+            available to the agent is provided by two sources:  the
+            management protocol set operation which creates the
+            conceptual row, and, implementation-specific defaults
+            supplied by the agent (note that an agent must provide
+            implementation-specific defaults for at least those objects
+            which it implements as read-only).  If there is sufficient
+            information available, then the conceptual row is created, a
+            `noError' response is returned, the status column is set to
+            `active', and no further interactions are necessary (i.e.,
+            interactions 3 and 4 are skipped).  If there is insufficient
+            information, then the conceptual row is not created, and the
+            set operation fails with an error of `inconsistentValue'.
+            On this error, the management station can issue a management
+            protocol retrieval operation to determine if this was
+            because it failed to specify a value for a required column,
+            or, because the selected instance of the status column
+            already existed.  In the latter case, we return to
+            interaction 1.  In the former case, the management station
+            can re-issue the set operation with the additional
+            information, or begin interaction 2 again using
+            `createAndWait' in order to negotiate creation of the
+            conceptual row.
+
+                                     NOTE WELL
+
+                 Regardless of the method used to determine the column
+                 requirements, it is possible that the management
+                 station might deem a column necessary when, in fact,
+                 the agent will not allow that particular columnar
+                 instance to be created or written.  In this case, the
+                 management protocol set operation will fail with an
+                 error such as `noCreation' or `notWritable'.  In this
+                 case, the management station decides whether it needs
+                 to be able to set a value for that particular columnar
+                 instance.  If not, the management station re-issues the
+                 management protocol set operation, but without setting
+                 a value for that particular columnar instance;
+                 otherwise, the management station aborts the row
+                 creation algorithm.
+
+            Interaction 2b: Negotiating the Creation of the Conceptual
+            Row
+
+            The management station issues a management protocol set
+            operation which sets the desired instance of the status
+            column to `createAndWait'.  If the agent is unwilling to
+            process a request of this sort, the set operation fails with
+            an error of `wrongValue'.  (As a consequence, such an agent
+            must be prepared to accept a single management protocol set
+            operation, i.e., interaction 2a above, containing all of the
+            columns indicated by its column requirements.)  Otherwise,
+            the conceptual row is created, a `noError' response is
+            returned, and the status column is immediately set to either
+            `notInService' or `notReady', depending on whether it has
+            sufficient information to (attempt to) make the conceptual
+            row available for use by the managed device.  If there is
+            sufficient information available, then the status column is
+            set to `notInService'; otherwise, if there is insufficient
+            information, then the status column is set to `notReady'.
+            Regardless, we proceed to interaction 3.
+
+            Interaction 3: Initializing non-defaulted Objects
+
+            The management station must now determine the column
+            requirements.  It issues a management protocol get operation
+            to examine all columns in the created conceptual row.  In
+            the response, for each column, there are three possible
+            outcomes:
+
+                 - a value is returned, indicating that the agent
+                 implements the object-type associated with this column
+                 and had sufficient information to provide a value.  For
+                 those columns to which the agent provides read-create
+                 access (and for which the agent allows their values to
+                 be changed after their creation), a value return tells
+                 the management station that it may issue additional
+                 management protocol set operations, if it desires, in
+                 order to change the value associated with this column.
+
+                 - the exception `noSuchInstance' is returned,
+                 indicating that the agent implements the object-type
+                 associated with this column, and that this column in at
+                 least one conceptual row would be accessible in the MIB
+                 view used by the retrieval were it to exist. However,
+                 the agent does not have sufficient information to
+                 provide a value, and until a value is provided, the
+                 conceptual row may not be made available for use by the
+                 managed device.  For those columns to which the agent
+                 provides read-create access, the `noSuchInstance'
+                 exception tells the management station that it must
+                 issue additional management protocol set operations, in
+                 order to provide a value associated with this column.
+
+                 - the exception `noSuchObject' is returned, indicating
+                 that the agent does not implement the object-type
+                 associated with this column or that there is no
+                 conceptual row for which this column would be
+                 accessible in the MIB view used by the retrieval.  As
+                 such, the management station can not issue any
+                 management protocol set operations to create an
+                 instance of this column.
+
+            If the value associated with the status column is
+            `notReady', then the management station must first deal with
+            all `noSuchInstance' columns, if any.  Having done so, the
+            value of the status column becomes `notInService', and we
+            proceed to interaction 4.
+
+            Interaction 4: Making the Conceptual Row Available
+
+            Once the management station is satisfied with the values
+            associated with the columns of the conceptual row, it issues
+            a management protocol set operation to set the status column
+            to `active'.  If the agent has sufficient information to
+            make the conceptual row available for use by the managed
+            device, the management protocol set operation succeeds (a
+            `noError' response is returned).  Otherwise, the management
+            protocol set operation fails with an error of
+            `inconsistentValue'.
+
+                                     NOTE WELL
+
+                 A conceptual row having a status column with value
+                 `notInService' or `notReady' is unavailable to the
+                 managed device.  As such, it is possible for the
+                 managed device to create its own instances during the
+                 time between the management protocol set operation
+                 which sets the status column to `createAndWait' and the
+                 management protocol set operation which sets the status
+                 column to `active'.  In this case, when the management
+                 protocol set operation is issued to set the status
+                 column to `active', the values held in the agent
+                 supersede those used by the managed device.
+
+            If the management station is prevented from setting the
+            status column to `active' (e.g., due to management station
+            or network failure) the conceptual row will be left in the
+            `notInService' or `notReady' state, consuming resources
+            indefinitely.  The agent must detect conceptual rows that
+            have been in either state for an abnormally long period of
+            time and remove them.  It is the responsibility of the
+            DESCRIPTION clause of the status column to indicate what an
+            abnormally long period of time would be.  This period of
+            time should be long enough to allow for human response time
+            (including `think time') between the creation of the
+            conceptual row and the setting of the status to `active'.
+            In the absence of such information in the DESCRIPTION
+            clause, it is suggested that this period be approximately 5
+            minutes in length.  This removal action applies not only to
+            newly-created rows, but also to previously active rows which
+            are set to, and left in, the notInService state for a
+            prolonged period exceeding that which is considered normal
+            for such a conceptual row.
+
+                             Conceptual Row Suspension
+
+            When a conceptual row is `active', the management station
+            may issue a management protocol set operation which sets the
+            instance of the status column to `notInService'.  If the
+            agent is unwilling to do so, the set operation fails with an
+            error of `wrongValue' or `inconsistentValue'.  Otherwise,
+            the conceptual row is taken out of service, and a `noError'
+            response is returned.  It is the responsibility of the
+            DESCRIPTION clause of the status column to indicate under
+            what circumstances the status column should be taken out of
+            service (e.g., in order for the value of some other column
+            of the same conceptual row to be modified).
+
+
+                              Conceptual Row Deletion
+
+            For deletion of conceptual rows, a management protocol set
+            operation is issued which sets the instance of the status
+            column to `destroy'.  This request may be made regardless of
+            the current value of the status column (e.g., it is possible
+            to delete conceptual rows which are either `notReady',
+            `notInService' or `active'.)  If the operation succeeds,
+            then all instances associated with the conceptual row are
+            immediately removed."
+    SYNTAX       INTEGER {
+                     -- the following two values are states:
+                     -- these values may be read or written
+                     active(1),
+                     notInService(2),
+
+                     -- the following value is a state:
+                     -- this value may be read, but not written
+                     notReady(3),
+
+                     -- the following three values are
+                     -- actions: these values may be written,
+                     --   but are never read
+                     createAndGo(4),
+                     createAndWait(5),
+                     destroy(6)
+                 }
+
+TimeStamp ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "The value of the sysUpTime object at which a specific
+            occurrence happened.  The specific occurrence must be
+            defined in the description of any object defined using this
+            type.
+
+            If sysUpTime is reset to zero as a result of a re-
+            initialization of the network management (sub)system, then
+            the values of all TimeStamp objects are also reset.
+            However, after approximately 497 days without a re-
+            initialization, the sysUpTime object will reach 2^^32-1 and
+            then increment around to zero; in this case, existing values
+            of TimeStamp objects do not change.  This can lead to
+            ambiguities in the value of TimeStamp objects."
+    SYNTAX       TimeTicks
+
+
+TimeInterval ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "A period of time, measured in units of 0.01 seconds."
+    SYNTAX       INTEGER (0..2147483647)
+
+DateAndTime ::= TEXTUAL-CONVENTION
+    DISPLAY-HINT "2d-1d-1d,1d:1d:1d.1d,1a1d:1d"
+    STATUS       current
+    DESCRIPTION
+            "A date-time specification.
+
+            field  octets  contents                  range
+            -----  ------  --------                  -----
+              1      1-2   year*                     0..65536
+              2       3    month                     1..12
+              3       4    day                       1..31
+              4       5    hour                      0..23
+              5       6    minutes                   0..59
+              6       7    seconds                   0..60
+                           (use 60 for leap-second)
+              7       8    deci-seconds              0..9
+              8       9    direction from UTC        '+' / '-'
+              9      10    hours from UTC*           0..13
+             10      11    minutes from UTC          0..59
+
+            * Notes:
+            - the value of year is in network-byte order
+            - daylight saving time in New Zealand is +13
+
+            For example, Tuesday May 26, 1992 at 1:30:15 PM EDT would be
+            displayed as:
+
+                             1992-5-26,13:30:15.0,-4:0
+
+            Note that if only local time is known, then timezone
+            information (fields 8-10) is not present."
+    SYNTAX       OCTET STRING (SIZE (8 | 11))
+
+
+StorageType ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+            "Describes the memory realization of a conceptual row.  A
+            row which is volatile(2) is lost upon reboot.  A row which
+            is either nonVolatile(3), permanent(4) or readOnly(5), is
+            backed up by stable storage.  A row which is permanent(4)
+            can be changed but not deleted.  A row which is readOnly(5)
+            cannot be changed nor deleted.
+
+            If the value of an object with this syntax is either
+            permanent(4) or readOnly(5), it cannot be written.
+            Conversely, if the value is either other(1), volatile(2) or
+            nonVolatile(3), it cannot be modified to be permanent(4) or
+            readOnly(5).  (All illegal modifications result in a
+            'wrongValue' error.)
+
+            Every usage of this textual convention is required to
+            specify the columnar objects which a permanent(4) row must
+            at a minimum allow to be writable."
+    SYNTAX       INTEGER {
+                     other(1),       -- eh?
+                     volatile(2),    -- e.g., in RAM
+                     nonVolatile(3), -- e.g., in NVRAM
+                     permanent(4),   -- e.g., partially in ROM
+                     readOnly(5)     -- e.g., completely in ROM
+                 }
+
+TDomain ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+          "Denotes a kind of transport service.
+
+          Some possible values, such as snmpUDPDomain, are defined in
+          the SNMPv2-TM MIB module.  Other possible values are defined
+          in other MIB modules."
+    REFERENCE    "The SNMPv2-TM MIB module is defined in RFC 1906."
+    SYNTAX       OBJECT IDENTIFIER
+
+
+TAddress ::= TEXTUAL-CONVENTION
+    STATUS       current
+    DESCRIPTION
+          "Denotes a transport service address.
+
+          A TAddress value is always interpreted within the context of a
+          TDomain value.  Thus, each definition of a TDomain value must
+          be accompanied by a definition of a textual convention for use
+          with that TDomain.  Some possible textual conventions, such as
+          SnmpUDPAddress for snmpUDPDomain, are defined in the SNMPv2-TM
+          MIB module.  Other possible textual conventions are defined in
+          other MIB modules."
+    REFERENCE    "The SNMPv2-TM MIB module is defined in RFC 1906."
+    SYNTAX       OCTET STRING (SIZE (1..255))
+
+
+END
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/TEST-MIB.txt b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/TEST-MIB.txt
new file mode 100644
index 0000000000000000000000000000000000000000..d0ea67e2730c46edcc175a5326cf4d44e29a4dc3
--- /dev/null
+++ b/tangostationcontrol/tangostationcontrol/toolkit/mib_compiler/mibs/TEST-MIB.txt
@@ -0,0 +1,36 @@
+TEST-MIB DEFINITIONS ::= BEGIN
+
+--
+-- A simple MIB objects for testing
+--
+
+IMPORTS
+    MODULE-IDENTITY, OBJECT-TYPE, Integer32, org FROM SNMPv2-SMI
+    ;
+
+testMib MODULE-IDENTITY
+    LAST-UPDATED "202004060000Z"
+    ORGANIZATION "astron"
+    CONTACT-INFO "astron"
+    DESCRIPTION "Test mib"
+    ::= { org 2 }
+
+--
+-- top level structure
+--
+testVal       OBJECT IDENTIFIER ::= { testMib 1 }
+
+--
+-- Example scalars
+--
+
+testValue OBJECT-TYPE
+    SYNTAX      Integer32
+    MAX-ACCESS  read-write
+    STATUS      current
+    DESCRIPTION
+	"This is simply a test value."
+    DEFVAL { 1 }
+    ::= { testVal 1 }
+
+END
diff --git a/tangostationcontrol/tox.ini b/tangostationcontrol/tox.ini
index 40f6e5cf38bace12fc00a2ccba1c0515678906c3..a788f870f7a1a3482913d01f0a39c3e25f6b3394 100644
--- a/tangostationcontrol/tox.ini
+++ b/tangostationcontrol/tox.ini
@@ -77,5 +77,5 @@ commands =
 
 [flake8]
 filename = *.py,.stestr.conf,.txt
-select = W292,B601,B602,T100,M001,F401,B001,B002,B003,B004,B005,B006,B007,B008,B009,B010,B011,B012,B013,B014.B015,B016,B017,B018
-exclude=.tox,.egg-info,libhdbpp-python
+select = W292,B601,B602,T100,M001,F401,B001,B002,B003,B004,B005,B006,B007,B008,B009,B010,B011,B012,B013,B014,B015,B016,B017,B018,F402,F403,F404,F405,F811,F812,F821,F822,F823,F831
+exclude=.tox,.egg-info,libhdbpp-python,SNMP_mib_loading