diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 4f7dac6a327ee188433624ae348a8691d0eb4bf9..ca3a617c1b052564c46e2a5e426fe9a1e86787d6 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -9,29 +9,43 @@ variables:
 cache:
   paths:
     - .cache/pip
-    - devices/.tox
 stages:
   - building
   - linting
   - static-analysis
   - unit-tests
   - integration-tests
-linting:
+newline_at_eof:
+  stage: linting
+  before_script:
+    - pip3 install -r devices/test-requirements.txt
+  script:
+    - flake8 --filename *.sh,*.conf,*.md,*.yml --select=W292 --exclude .tox,.egg-info,docker
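+    # W292 is flake8's "no newline at end of file" check; here it is applied
+    # to shell, config, markdown and YAML files rather than Python sources.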
+python_linting:
   stage: linting
   script:
     - cd devices
     - tox -e pep8
-static-analysis:
+bandit:
   stage: static-analysis
-  allow_failure: true
   script:
     - cd devices
     - tox -e bandit
+shellcheck:
+  stage: static-analysis
+  allow_failure: true
+  before_script:
+    - sudo apt-get update
+    - sudo apt-get install -y shellcheck
+  script:
+    - shellcheck **/*.sh
 unit_test:
   stage: unit-tests
   before_script:
     - sudo apt-get update
     - sudo apt-get install -y git
+    - pip3 install -r devices/test-requirements.txt
+    - pip3 install -r docker-compose/itango/lofar-requirements.txt
   script:
     - cd devices
     - tox -e py37
diff --git a/CDB/LOFAR_ConfigDb.json b/CDB/LOFAR_ConfigDb.json
index 197686104afa47aeb018b0c5caade4d286a4fe9b..c7fc852c35cb9fcff76b24ab0e10664e17489156 100644
--- a/CDB/LOFAR_ConfigDb.json
+++ b/CDB/LOFAR_ConfigDb.json
@@ -1,5 +1,12 @@
 {
     "servers": {
+        "docker_device": {
+            "LTS": {
+                "Docker": {
+                    "LTS/Docker/1": {}
+                }
+            }
+        },
         "Femto": {
             "CS999": {
                 "Femto": {
@@ -14,10 +21,10 @@
                 }
             }
         },
-        "PCC": {
+        "RECV": {
             "LTS": {
-                "PCC": {
-                    "LTS/PCC/1": {
+                "RECV": {
+                    "LTS/RECV/1": {
                         "attribute_properties": {
                             "Ant_mask_RW": {
                                 "archive_period": [
@@ -742,9 +749,12 @@
                 "SST": {
                     "LTS/SST/1": {
                         "properties": {
-                            "Statistics_Client_Port": [
+                            "Statistics_Client_UDP_Port": [
                                 "5001"
                             ],
+                            "Statistics_Client_TCP_Port": [
+                                "5101"
+                            ],
                             "OPC_Server_Name": [
                                 "dop36.astron.nl"
                             ],
@@ -755,22 +765,22 @@
                                 "5.0"
                             ],
                             "FPGA_sst_offload_hdr_eth_destination_mac_RW_default": [
-                                "6c:2b:59:97:cb:de",
-                                "6c:2b:59:97:cb:de",
-                                "6c:2b:59:97:cb:de",
-                                "6c:2b:59:97:cb:de",
-                                "6c:2b:59:97:cb:de",
-                                "6c:2b:59:97:cb:de",
-                                "6c:2b:59:97:cb:de",
-                                "6c:2b:59:97:cb:de",
-                                "6c:2b:59:97:cb:de",
-                                "6c:2b:59:97:cb:de",
-                                "6c:2b:59:97:cb:de",
-                                "6c:2b:59:97:cb:de",
-                                "6c:2b:59:97:cb:de",
-                                "6c:2b:59:97:cb:de",
-                                "6c:2b:59:97:cb:de",
-                                "6c:2b:59:97:cb:de"
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd"
                             ],
                             "FPGA_sst_offload_hdr_ip_destination_address_RW_default": [
                                 "10.99.250.250",
@@ -813,6 +823,104 @@
                 }
             }
         },
+        "XST": {
+            "LTS": {
+                "XST": {
+                    "LTS/XST/1": {
+                        "properties": {
+                            "Statistics_Client_UDP_Port": [
+                                "5002"
+                            ],
+                            "Statistics_Client_TCP_Port": [
+                                "5102"
+                            ],
+                            "OPC_Server_Name": [
+                                "dop36.astron.nl"
+                            ],
+                            "OPC_Server_Port": [
+                                "4840"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ],
+                            "FPGA_xst_offload_hdr_eth_destination_mac_RW_default": [
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd"
+                            ],
+                            "FPGA_xst_offload_hdr_ip_destination_address_RW_default": [
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250"
+                            ],
+                            "FPGA_xst_offload_hdr_udp_destination_port_RW_default": [
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "UNB2": {
+            "LTS": {
+                "UNB2": {
+                    "LTS/UNB2/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "despi.astron.nl"
+                            ],
+                            "OPC_Server_Port": [
+                                "4842"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
         "StatsCrosslet": {
             "CS997": {
                 "StatsCrosslet": {
diff --git a/CDB/integration_ConfigDb.json b/CDB/integration_ConfigDb.json
index b2f9cca6dc8db917942f35bb8be25e4cb88bdb93..a73e1f0f10a3d834d9af63ab81a75936f0183843 100644
--- a/CDB/integration_ConfigDb.json
+++ b/CDB/integration_ConfigDb.json
@@ -1,12 +1,12 @@
 {
     "servers": {
-        "PCC": {
+        "RECV": {
             "LTS": {
-                "PCC": {
-                    "LTS/PCC/1": {
+                "RECV": {
+                    "LTS/RECV/1": {
                         "properties": {
                             "OPC_Server_Name": [
-                                "pypcc-sim"
+                                "recv-sim"
                             ],
                             "OPC_Server_Port": [
                                 "4842"
@@ -32,6 +32,54 @@
                             ],
                             "OPC_Time_Out": [
                                 "5.0"
+                            ],
+                            "FPGA_sdp_info_station_id_RW_default": [
+                                "901",
+                                "901",
+                                "901",
+                                "901",
+                                "901",
+                                "901",
+                                "901",
+                                "901",
+                                "901",
+                                "901",
+                                "901",
+                                "901",
+                                "901",
+                                "901",
+                                "901",
+                                "901"
+                            ],
+                            "polled_attr": [
+                                "fpga_temp_r",
+                                "1000",
+                                "state",
+                                "1000",
+                                "status",
+                                "1000",
+                                "fpga_mask_rw",
+                                "1000",
+                                "fpga_scrap_r",
+                                "1000",
+                                "fpga_scrap_rw",
+                                "1000",
+                                "fpga_status_r",
+                                "1000",
+                                "fpga_version_r",
+                                "1000",
+                                "fpga_weights_r",
+                                "1000",
+                                "fpga_weights_rw",
+                                "1000",
+                                "tr_busy_r",
+                                "1000",
+                                "tr_reload_rw",
+                                "1000",
+                                "tr_tod_r",
+                                "1000",
+                                "tr_uptime_r",
+                                "1000"
                             ]
                         }
                     }
@@ -43,15 +91,170 @@
                 "SST": {
                     "LTS/SST/1": {
                         "properties": {
-                            "SST_Client_Port": [
+                            "Statistics_Client_UDP_Port": [
                                 "5001"
                             ],
+                            "Statistics_Client_TCP_Port": [
+                                "5101"
+                            ],
                             "OPC_Server_Name": [
                                 "sdptr-sim"
                             ],
                             "OPC_Server_Port": [
                                 "4840"
                             ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ],
+                            "FPGA_sst_offload_hdr_eth_destination_mac_RW_default": [
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd"
+                            ],
+                            "FPGA_sst_offload_hdr_ip_destination_address_RW_default": [
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250"
+                            ],
+                            "FPGA_sst_offload_hdr_udp_destination_port_RW_default": [
+                                "5001",
+                                "5001",
+                                "5001",
+                                "5001",
+                                "5001",
+                                "5001",
+                                "5001",
+                                "5001",
+                                "5001",
+                                "5001",
+                                "5001",
+                                "5001",
+                                "5001",
+                                "5001",
+                                "5001",
+                                "5001"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "XST": {
+            "LTS": {
+                "XST": {
+                    "LTS/XST/1": {
+                        "properties": {
+                            "Statistics_Client_UDP_Port": [
+                                "5002"
+                            ],
+                            "Statistics_Client_TCP_Port": [
+                                "5102"
+                            ],
+                            "OPC_Server_Name": [
+                                "sdptr-sim"
+                            ],
+                            "OPC_Server_Port": [
+                                "4840"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ],
+                            "FPGA_xst_offload_hdr_eth_destination_mac_RW_default": [
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd",
+                                "6c:2b:59:97:be:dd"
+                            ],
+                            "FPGA_xst_offload_hdr_ip_destination_address_RW_default": [
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250",
+                                "10.99.250.250"
+                            ],
+                            "FPGA_xst_offload_hdr_udp_destination_port_RW_default": [
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002",
+                                "5002"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+         "UNB2": {
+            "LTS": {
+                "UNB2": {
+                    "LTS/UNB2/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "despi.astron.nl"
+                            ],
+                            "OPC_Server_Port": [
+                                "4842"
+                            ],
                             "OPC_Time_Out": [
                                 "5.0"
                             ]
diff --git a/CDB/jasper_ConfigDb.json b/CDB/jasper_ConfigDb.json
index d31074cc3537624d9f3e73f9e19baa388494706d..b8ce969d8a47e9b5ebba6402f29a84579c88bebd 100644
--- a/CDB/jasper_ConfigDb.json
+++ b/CDB/jasper_ConfigDb.json
@@ -14,10 +14,10 @@
                 }
             }
         },
-        "PCC": {
+        "RECV": {
             "LTS": {
-                "PCC": {
-                    "LTS/PCC/1": {
+                "RECV": {
+                    "LTS/RECV/1": {
                         "attribute_properties": {
                             "Ant_mask_RW": {
                                 "archive_period": [
diff --git a/CDB/pypcc-sim-config.json b/CDB/recv-sim-config.json
similarity index 71%
rename from CDB/pypcc-sim-config.json
rename to CDB/recv-sim-config.json
index c5288f56b6ee567093fedfde627aaece3e148e39..e9585345e783b54e8bd21bd6e46f90692b8ee095 100644
--- a/CDB/pypcc-sim-config.json
+++ b/CDB/recv-sim-config.json
@@ -1,23 +1,23 @@
-{
-    "servers": {
-        "PCC": {
-            "LTS": {
-                "PCC": {
-                    "LTS/PCC/1": {
-                        "properties": {
-                            "OPC_Server_Name": [
-                                "pypcc-sim"
-                            ],
-                            "OPC_Server_Port": [
-                                "4842"
-                            ],
-                            "OPC_Time_Out": [
-                                "5.0"
-                            ]
-                        }
-                    }
-                }
-            }
-        }
-    }
-}
+{
+    "servers": {
+        "RECV": {
+            "LTS": {
+                "RECV": {
+                    "LTS/RECV/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "recv-sim"
+                            ],
+                            "OPC_Server_Port": [
+                                "4843"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ]
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/CDB/sdp-sim-config.json b/CDB/sdp-sim-config.json
index 64b841e1dacf36e1de9b3e20ea068d36f0011478..f733a85a6c570ccdc25646d894bace08c78e9acf 100644
--- a/CDB/sdp-sim-config.json
+++ b/CDB/sdp-sim-config.json
@@ -24,9 +24,25 @@
                 "SST": {
                     "LTS/SST/1": {
                         "properties": {
-                            "Statistics_Client_Port": [
-                                "5001"
+                            "OPC_Server_Name": [
+                                "sdptr-sim"
+                            ],
+                            "OPC_Server_Port": [
+                                "4840"
                             ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ]
+                        }
+                    }
+                }
+            }
+        },
+        "XST": {
+            "LTS": {
+                "XST": {
+                    "LTS/XST/1": {
+                        "properties": {
                             "OPC_Server_Name": [
                                 "sdptr-sim"
                             ],
diff --git a/CDB/test_ConfigDb.json b/CDB/test_ConfigDb.json
index 879d73f275d0b7c275a01219cffcea92501be870..b73683f9f1df2b8af3f0f712e9f601bbb292ce50 100644
--- a/CDB/test_ConfigDb.json
+++ b/CDB/test_ConfigDb.json
@@ -1,9 +1,9 @@
 {
     "servers": {
-        "PCC": {
+        "RECV": {
             "1": {
-                "PCC": {
-                    "LTS/PCC/1": {
+                "RECV": {
+                    "LTS/RECV/1": {
                         "properties": {
                             "OPC_Server_Name": [
                                 "ltspi.astron.nl"
diff --git a/CDB/thijs_ConfigDb.json b/CDB/thijs_ConfigDb.json
index 37ae6d7b66acb4bbb0be1fd36bfc78e2f93eba8e..95fa70578a94531454684fdc5ee5bb6df7e8e3a7 100644
--- a/CDB/thijs_ConfigDb.json
+++ b/CDB/thijs_ConfigDb.json
@@ -1,9 +1,9 @@
 {
     "servers": {
-        "PCC": {
+        "RECV": {
             "1": {
-                "PCC": {
-                    "LTS/PCC/1": {
+                "RECV": {
+                    "LTS/RECV/1": {
                         "properties": {
                             "OPC_Server_Name": [
                                 "host.docker.internal"
@@ -94,9 +94,12 @@
                 "SST": {
                     "LTS/SST/1": {
                         "properties": {
-                            "Statistics_Client_Port": [
+                            "Statistics_Client_UDP_Port": [
                                 "5001"
                             ],
+                            "Statistics_Client_TCP_Port": [
+                                "5101"
+                            ],
                             "OPC_Server_Name": [
                                 "dop36.astron.nl"
                             ],
diff --git a/CDB/thomas_ConfigDb.json b/CDB/thomas_ConfigDb.json
index 33c19e162b8e15001759de58dfca22a82c2dd249..93256085f0acbb13bd111e414c548ae8724d6eaa 100644
--- a/CDB/thomas_ConfigDb.json
+++ b/CDB/thomas_ConfigDb.json
@@ -1,9 +1,9 @@
 {
     "servers": {
-        "PCC": {
+        "RECV": {
             "LTS": {
-                "PCC": {
-                    "LTS/PCC/1": {
+                "RECV": {
+                    "LTS/RECV/1": {
                         "properties": {
                             "OPC_Server_Name": [
                                 "okeanos"
diff --git a/CDB/thomas_arm64_ConfigDb.json b/CDB/thomas_arm64_ConfigDb.json
index 4d010b690433d631ddadc7c14babbb31ec71c6ac..298794f42247cee40ea88fc507e587f16e695adc 100644
--- a/CDB/thomas_arm64_ConfigDb.json
+++ b/CDB/thomas_arm64_ConfigDb.json
@@ -1,9 +1,9 @@
 {
     "servers": {
-        "PCC": {
+        "RECV": {
             "LTS": {
-                "PCC": {
-                    "LTS/PCC/1": {
+                "RECV": {
+                    "LTS/RECV/1": {
                         "properties": {
                             "OPC_Server_Name": [
                                 "arm2"
diff --git a/CDB/unb2-sim-config.json b/CDB/unb2-sim-config.json
new file mode 100644
index 0000000000000000000000000000000000000000..a98fa27492e3835867b214cfd2789caf949de460
--- /dev/null
+++ b/CDB/unb2-sim-config.json
@@ -0,0 +1,23 @@
+{
+    "servers": {
+        "UNB2": {
+            "LTS": {
+                "UNB2": {
+                    "LTS/UNB2/1": {
+                        "properties": {
+                            "OPC_Server_Name": [
+                                "unb2-sim"
+                            ],
+                            "OPC_Server_Port": [
+                                "4844"
+                            ],
+                            "OPC_Time_Out": [
+                                "5.0"
+                            ]
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/CDB/windows_ConfigDb.json b/CDB/windows_ConfigDb.json
index c84fb3855372ba588de5bdef470d665b46ea6a99..ac8b7ef7f50b35f8f245ceaea5b5d525d7fd755e 100644
--- a/CDB/windows_ConfigDb.json
+++ b/CDB/windows_ConfigDb.json
@@ -1,9 +1,9 @@
 {
     "servers": {
-        "PCC": {
+        "RECV": {
             "1": {
-                "PCC": {
-                    "LTS/PCC/1": {
+                "RECV": {
+                    "LTS/RECV/1": {
                         "properties": {
                             "OPC_Server_Name": [
                                 "host.docker.internal"
diff --git a/README.md b/README.md
index b7b4398a9581bf0771fa2e8a669f1e53c92b75d2..192b3edb7713088120b672065296575c255adfa6 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,3 @@
 # Tango Station Control
 
-Station Control software related to Tango devices.
\ No newline at end of file
+Station Control software related to Tango devices.
diff --git a/bootstrap/sbin/rebuild_system_from_scratch.sh b/bootstrap/sbin/rebuild_system_from_scratch.sh
index 8335ba864b09c3008e1af310e1394d57dc6293fa..0af4d0b19d6fd85f48040265055235399c107a9e 100755
--- a/bootstrap/sbin/rebuild_system_from_scratch.sh
+++ b/bootstrap/sbin/rebuild_system_from_scratch.sh
@@ -112,7 +112,7 @@ function start_support_images()
 function start_lofar_images()
 {
     (cd ${HOME_DIR}/docker-compose
-    make start device-pcc
+    make start device-recv
     make start device-sdp)
 }
 
diff --git a/devices/clients/README.md b/devices/clients/README.md
index 3613344461e8abb64e5a68a1d30c68b3927d22b4..083420b38dc611fd8096110ca42d46c375d3db60 100644
--- a/devices/clients/README.md
+++ b/devices/clients/README.md
@@ -1,4 +1,4 @@
 this folder contains all the comms_client implementations for organisation
 
 ### How to add a new client
-soon™
\ No newline at end of file
+soon™
diff --git a/devices/clients/attribute_wrapper.py b/devices/clients/attribute_wrapper.py
index 12e5c83516e2c68c2216aca5ba9b39a1fa6f4f8c..4cb389824750cb9d01fc836e8d65caf3656d59a4 100644
--- a/devices/clients/attribute_wrapper.py
+++ b/devices/clients/attribute_wrapper.py
@@ -29,7 +29,14 @@ class attribute_wrapper(attribute):
         # see also https://pytango.readthedocs.io/en/stable/server_api/server.html?highlight=devlong#module-tango.server for
         # more details about type conversion Python/numpy -> PyTango
         if "numpy" not in str(datatype) and datatype != str:
-            raise TypeError("Attribute needs to be a Tango-supported numpy or str type, but has type \"%s\"" % (datatype,))
+            raise ValueError("Attribute needs to be a Tango-supported numpy or str type, but has type \"%s\"" % (datatype,))
+
+        """
+        Numpy has a depracated string type called numpy.str_.
+        this behaves differently from numpy.str (which is literally just an str.
+        """
+        if datatype == numpy.str_:
+            raise Exception("numpy.str_ type not supported, please use numpy.str instead")
 
         self.comms_id = comms_id # store data that can be used to identify the comms interface to use. not used by the wrapper itself
         self.comms_annotation = comms_annotation  # store data that can be used by the comms interface. not used by the wrapper itself
@@ -37,10 +44,7 @@ class attribute_wrapper(attribute):
         self.init_value = init_value
         is_scalar = dims == (1,)
 
-        # tango doesn't recognise numpy.str_, for consistencies sake we convert it here and hide this from the top level
-        # NOTE: discuss, idk if this is an important detail somewhere else
-        if datatype is numpy.str_ or datatype is numpy.str:
-            datatype = str
         self.numpy_type = datatype  # tango changes our attribute to their representation (E.g numpy.int64 becomes "DevLong64")
 
diff --git a/devices/clients/docker_client.py b/devices/clients/docker_client.py
new file mode 100644
index 0000000000000000000000000000000000000000..c5b0e8b81f69e7f83ae381468b6bcd738f9ec296
--- /dev/null
+++ b/devices/clients/docker_client.py
@@ -0,0 +1,71 @@
+import logging
+import docker
+
+from .comms_client import CommClient
+
+logger = logging.getLogger()
+
+class DockerClient(CommClient):
+    """
+      Controls & queries running docker containers.
+    """
+
+    def start(self):
+        super().start()
+
+    def __init__(self, base_url, fault_func, streams):
+        super().__init__(fault_func, streams)
+
+        self.base_url = base_url
+
+    def connect(self):
+        """
+        Function used to connect to the client.
+        """
+        if not self.connected:
+            self.client = docker.DockerClient(self.base_url)
+
+        return super().connect()
+
+    def ping(self):
+        return True
+
+    def disconnect(self):
+        self.client = None
+
+        return super().disconnect()
+
+    def setup_value_conversion(self, attribute):
+        """
+        gives the client access to the attribute_wrapper object in order to access all data it could potentially need.
+        No value conversion is needed for the docker client, so this is a no-op.
+        """
+        return
+
+    def setup_attribute(self, annotation, attribute):
+        """
+        MANDATORY function: is used by the attribute wrapper to get read/write functions. must return the read and write functions
+        """
+
+        container_name = annotation["container"]
+
+        # get all the necessary data to set up the read/write functions from the attribute_wrapper
+        self.setup_value_conversion(attribute)
+
+        def read_function():
+            try:
+                container = self.client.containers.get(container_name)
+            except docker.errors.NotFound:
+                return False
+
+            return container.status == 'running'
+
+        def write_function(value):
+            container = self.client.containers.get(container_name)
+
+            if value:
+                container.start()
+            else:
+                container.stop()
+
+        return read_function, write_function
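+
+# Usage sketch (the container name is illustrative, not part of this module):
+# wrapping an attribute with annotation {"container": "some-container"} yields
+# a read function that reports whether that container is running, and a write
+# function that starts (True) or stops (False) it.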
diff --git a/devices/clients/opcua_client.py b/devices/clients/opcua_client.py
index 68ed862839c0841e80632fa758a472d9f841b0d0..7d915cbd00aee72da2a13b7bbb7365306457cf4d 100644
--- a/devices/clients/opcua_client.py
+++ b/devices/clients/opcua_client.py
@@ -18,11 +18,10 @@ numpy_to_OPCua_dict = {
     numpy.uint32: opcua.ua.VariantType.UInt32,
     numpy.int64: opcua.ua.VariantType.Int64,
     numpy.uint64: opcua.ua.VariantType.UInt64,
-    numpy.datetime_data: opcua.ua.VariantType.DateTime, # is this the right type, does it even matter?
     numpy.float32: opcua.ua.VariantType.Float,
     numpy.double: opcua.ua.VariantType.Double,
     numpy.float64: opcua.ua.VariantType.Double,
-    str: opcua.ua.VariantType.String
+    numpy.str: opcua.ua.VariantType.String
 }
 
 # <class 'numpy.bool_'>
@@ -112,7 +111,8 @@ class OPCUAConnection(CommClient):
         ping the client to make sure the connection with the client is still functional.
         """
         try:
-            self.client.send_hello()
+            # self.client.send_hello()  # <-- this crashes when communicating with open62541 v1.2.2+
+            pass
         except Exception as e:
             raise Exception("Lost connection to server %s: %s", self._servername(), e)
 
diff --git a/devices/clients/statistics_client.py b/devices/clients/statistics_client.py
index 5d45ac472b52ac2f024dfd4a338cb3d03f4d3c77..eb37e9dc24b7cc80e557d9c5b2b060d73e652564 100644
--- a/devices/clients/statistics_client.py
+++ b/devices/clients/statistics_client.py
@@ -1,12 +1,13 @@
 from queue import Queue
-from threading import Thread
 import logging
 import numpy
-import queue
 
 from .comms_client import CommClient
+from .tcp_replicator import TCPReplicator
 from .udp_receiver import UDPReceiver
 
+from devices.sdp.statistics_collector import StatisticsConsumer
+
 logger = logging.getLogger()
 
 
@@ -19,19 +20,19 @@ class StatisticsClient(CommClient):
     def start(self):
         super().start()
 
-    def __init__(self, statistics_collector_class, host, port, fault_func, streams, try_interval=2, queuesize=1024):
+    def __init__(self, collector, udp_options, tcp_options, fault_func, streams, try_interval=2, queuesize=1024):
         """
         Create the statistics client and connect() to it and get the object node.
 
-        statistics_collector_class: a subclass of StatisticsCollector that specialises in processing the received packets.
+        collector: a subclass of StatisticsCollector that specialises in processing the received packets.
         host: hostname to listen on
         port: port number to listen on
         """
-        self.host = host
-        self.port = port
-        self.poll_timeout = 0.1
+
+        self.udp_options = udp_options
+        self.tcp_options = tcp_options
         self.queuesize = queuesize
-        self.statistics_collector_class = statistics_collector_class
+        self.collector = collector
 
         super().__init__(fault_func, streams, try_interval)
 
@@ -41,9 +42,10 @@ class StatisticsClient(CommClient):
             fault_func()
             return
 
-    def queue_fill_percentage(self):
+    @staticmethod
+    def _queue_fill_percentage(queue: Queue):
         try:
-            return 100 * self.queue.qsize() / self.queue.maxsize if self.queue.maxsize else 0
+            return 100 * queue.qsize() / queue.maxsize if queue.maxsize else 0
         except NotImplementedError:
             # some platforms don't have qsize(), nothing we can do here
             return 0
@@ -53,9 +55,13 @@ class StatisticsClient(CommClient):
         Function used to connect to the client.
         """
         if not self.connected:
-            self.queue = Queue(maxsize=self.queuesize)
-            self.udp = UDPReceiver(self.host, self.port, self.queue, self.poll_timeout)
-            self.statistics = self.statistics_collector_class(self.queue)
+            self.collector_queue = Queue(maxsize=self.queuesize)
+
+            self.tcp = TCPReplicator(self.tcp_options, self.queuesize)
+            self.statistics = StatisticsConsumer(self.collector_queue, self.collector)
+
+            self.udp = UDPReceiver([self.collector_queue, self.tcp],
+                                   self.udp_options)
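+
+            # Note on the wiring above: UDPReceiver forwards every received
+            # packet to both the collector_queue (drained by the
+            # StatisticsConsumer into the collector) and the TCPReplicator
+            # (which fans it out to any connected TCP clients).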
 
         return super().connect()
 
@@ -66,23 +72,32 @@ class StatisticsClient(CommClient):
         if not self.udp.is_alive():
             raise Exception("UDP thread died unexpectedly")
 
+        if not self.tcp.is_alive():
+            raise Exception("TCPReplicator thread died unexpectedly")
+
     def disconnect(self):
         # explicit disconnect, instead of waiting for the GC to kick in after "del" below
         try:
             self.statistics.disconnect()
         except Exception:
-            # nothing we can do, but we should continue cleaning up
-            logger.log_exception("Could not disconnect statistics processing class")
+            logger.exception("Could not disconnect statistics processing class")
 
         try:
             self.udp.disconnect()
         except Exception:
             # nothing we can do, but we should continue cleaning up
-            logger.log_exception("Could not disconnect UDP receiver class")
-        
+            logger.exception("Could not disconnect UDP receiver class")
+
+        try:
+            self.tcp.disconnect()
+        except Exception:
+            logger.exception("Could not disconnect TCPReplicator class")
+
+        del self.tcp
         del self.udp
         del self.statistics
-        del self.queue
+        del self.collector_queue
 
         return super().disconnect()
 
@@ -106,16 +121,34 @@ class StatisticsClient(CommClient):
         # redirect to right object. this works as long as the parameter names are unique among them.
         if annotation["type"] == "statistics":
             def read_function():
-                return self.statistics.parameters[parameter]
+                return self.collector.parameters[parameter]
         elif annotation["type"] == "udp":
             def read_function():
                 return self.udp.parameters[parameter]
         elif annotation["type"] == "queue":
-            if parameter == "fill_percentage":
+            if parameter == "collector_fill_percentage":
                 def read_function():
-                    return numpy.uint64(self.queue_fill_percentage())
+                    return numpy.uint64(self._queue_fill_percentage(self.collector_queue))
+            elif parameter == "replicator_fill_percentage":
+                def read_function():
+                    return numpy.uint64(self._queue_fill_percentage(self.tcp.queue))
             else:
                 raise ValueError("Unknown queue parameter requested: %s" % parameter)
+        elif annotation["type"] == "replicator":
+            if parameter == "clients":
+                def read_function():
+                    return numpy.array(self.tcp.clients(), dtype=numpy.str)
+            elif parameter == "nof_bytes_sent":
+                def read_function():
+                    return numpy.uint64(self.tcp.nof_bytes_sent)
+            elif parameter == "nof_packets_sent":
+                def read_function():
+                    return numpy.uint64(self.tcp.nof_packets_sent)
+            elif parameter == "nof_tasks_pending":
+                def read_function():
+                    return numpy.uint64(self.tcp.nof_tasks_pending)
+            else:
+                raise ValueError("Unknown replicator parameter requested: %s" % parameter)
 
         def write_function(value):
             """
diff --git a/devices/clients/statistics_client_thread.py b/devices/clients/statistics_client_thread.py
new file mode 100644
index 0000000000000000000000000000000000000000..3da8f76ac135fd4fb631f1de98518ff74f9ec2f9
--- /dev/null
+++ b/devices/clients/statistics_client_thread.py
@@ -0,0 +1,45 @@
+from abc import ABC
+from abc import abstractmethod
+import logging
+
+logger = logging.getLogger()
+
+
+class StatisticsClientThread(ABC):
+
+    # Maximum time to wait for the Thread to get unstuck, if we want to stop
+    DISCONNECT_TIMEOUT = 10
+
+    @property
+    @abstractmethod
+    def _options(self) -> dict:
+        """Implement me to return reasonable defaults
+
+        Don't create the variable inside this property, instead create a class
+        variable inside the child class and return that."""
+        pass
+
+    def _parse_options(self, options: dict) -> dict:
+        """Parse the arguments"""
+
+        # Parse options if any otherwise return defaults
+        if not options:
+            return self._options
+
+        # Shallow copy the options; native data types and strings are immutable.
+        temp_options = self._options.copy()
+
+        # Find all matching keys in the options arguments and override
+        for option, value in options.items():
+            if option in temp_options:
+                temp_options[option] = value
+
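+        # Example (hypothetical values): if _options == {"tcp_port": 6666},
+        # then _parse_options({"tcp_port": 5101, "bogus": 1}) returns
+        # {"tcp_port": 5101}; keys absent from the defaults, such as "bogus",
+        # are silently ignored.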
+        return temp_options
+
+    def __del__(self):
+        self.disconnect()
+
+    @abstractmethod
+    def disconnect(self):
+        """Should call join with DISCONNECT_TIMEOUT, only if still alive"""
+        pass
diff --git a/devices/clients/tcp_replicator.py b/devices/clients/tcp_replicator.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ac6e492d977cf14452d4f97bd213c0d12af7cbb
--- /dev/null
+++ b/devices/clients/tcp_replicator.py
@@ -0,0 +1,355 @@
+from queue import Empty
+from queue import Queue
+from threading import Condition
+from threading import Semaphore
+from threading import Thread
+import asyncio
+import logging
+
+from clients.statistics_client_thread import StatisticsClientThread
+
+logger = logging.getLogger()
+
+
+class TCPReplicator(Thread, StatisticsClientThread):
+    """TCP replicator intended to fan out incoming UDP packets
+
+    There are three different processing layers in this class, several
+    methods can be called from the context of the thread that spawned this
+    class (main thread). These include: __init__, transmit, join.
+
+    When constructed, start() is called and the thread launches; this calls run()
+    from the context of the new thread. That thread creates the new event
+    loop, as this can only be done from the context of the thread that will
+    use the event loop. A semaphore is used to prevent a potential race
+    between this new thread setting up the event loop and the main thread trying
+    to tear it down by calling join. The constructor waits on this semaphore,
+    which will always be released either by _server_start_callback or by the
+    finally clause in run.
+
+    The final layer is the event loop itself, it handles instances of the
+    TCPServerProtocol. These can be found in the _connected_clients list.
+    However, only async tasks are allowed to call methods on these objects!
+    The async methods are _transmit, _disconnect, _stop_event_loop,
+    _process_queue and _run_server.
+
+    _process_queue takes elements of the queue and transmits them across clients.
+    It uses an asyncio.Queue to process elements, given to the replicator through
+    the put method.
+
+    To cleanly shutdown this loop in _stop_event_loop, we insert a None magic marker
+    into the queue, causing the _process_task to return.
+
+    Disconnecting the clients and stopping the server is handled in _disconnect.
+
+    """
+
+    """Default options for TCPReplicator
+    we kindly ask to not change this static variable at runtime.
+    """
+    _default_options = {
+        "tcp_bind": '0.0.0.0',
+        "tcp_port": 6666,
+        "tcp_buffer_size": 128000000,  # In bytes
+    }
+
+    def __init__(self, options: dict = None, queuesize=0):
+        super().__init__()
+
+        self.queuesize = queuesize
+
+        # statistics
+        self.nof_packets_sent = 0
+        self.nof_bytes_sent = 0
+
+        """Reserve asyncio event loop attribute but don't create it yet.
+        This event loop is created inside the new Thread, the result is that
+        the thread owns the event loop! EVENT LOOPS ARE NOT THREAD SAFE ALL
+        CALLS TO THE EVENT LOOP OBJECT MUST USE THE call_soon_threadsafe
+        FUNCTION!!
+        """
+        self._loop = None
+
+        # Used to maintain a reference to the server object so we can stop
+        # listening cleanly
+        self._server = None
+
+        # Maintain a reference to the current _process_queue task so we can
+        # cleanly cancel it. This reduces a lot of logging chatter.
+        self._process_task = None
+
+        # Create and acquire lock to prevent leaving the constructor without
+        # starting the thread.
+        self.initialization_semaphore = Semaphore()
+        self.initialization_semaphore.acquire()
+
+        # Create condition to orchestrate clean disconnecting and shutdown
+        # They are actually the same object, just with different names for
+        # clarity.
+        self.disconnect_condition = Condition()
+        self.shutdown_condition = self.disconnect_condition
+
+        # Connected clients the event loop is managing
+        self._connected_clients = []
+
+        # Parse the configured options
+        self.options = self._parse_options(options)
+
+        # We start ourselves immediately to reduce amount of possible states.
+        self.start()
+
+        # Wait until we can hold the semaphore, this indicates the thread has
+        # initialized or encountered an exception.
+        with self.initialization_semaphore:
+            if not self.is_alive():
+                raise RuntimeError("TCPReplicator failed to initialize")
+
+            logging.debug("TCPReplicator initialization completed")
+
+    @property
+    def _options(self) -> dict:
+        return TCPReplicator._default_options
+
+    class TCPServerProtocol(asyncio.Protocol):
+        """TCP protocol used for connected clients"""
+
+        def __init__(self, options: dict, connected_clients: list):
+            self.options = options
+
+            # Make connected_clients reflect the TCPReplicator connected_clients
+            self.connected_clients = connected_clients
+
+        def connection_made(self, transport):
+            """Setup client connection and add entry to connected_clients"""
+            peername = transport.get_extra_info('peername')
+            logger.debug('TCP connection from {}'.format(peername))
+            self.transport = transport
+            # Set the TCP buffer limit
+            self.transport.set_write_buffer_limits(
+                high=self.options['tcp_buffer_size'])
+            self.connected_clients.append(self)
+
+        def pause_writing(self):
+            """Called when TCP buffer for the specific connection is full
+
+            Upon encountering a full TCP buffer, we deem the client too slow
+            and forcefully close its connection.
+            """
+            self.transport.abort()
+
+        def connection_lost(self, exc):
+            """Called when connection is lost
+
+            Used to remove entries from connected_clients
+            """
+            peername = self.transport.get_extra_info('peername')
+            logger.debug('TCP connection lost from {}'.format(peername))
+            self.connected_clients.remove(self)
+
+        def eof_received(self):
+            """After eof_received, connection_lost is still called"""
+            pass
+
+    def run(self):
+        """Run is launched from constructor of TCPReplicator
+
+        It manages an asyncio event loop to orchestrate our TCPServerProtocol.
+        """
+        try:
+            logger.info("Starting TCPReplicator thread for {}:{}".format(self.options["tcp_bind"], self.options["tcp_port"]))
+
+            # Create the event loop, must be done in the new thread
+            self._loop = asyncio.new_event_loop()
+
+            # Create the input queue
+            self.queue = asyncio.Queue(maxsize=self.queuesize, loop=self._loop)
+
+            # When wanting to debug event loop behavior, uncomment this
+            # self._loop.set_debug(True)
+
+            self._process_task = self._loop.create_task(self._process_queue())
+
+            # Schedule the task to create the server
+            server_task = self._loop.create_task(self._run_server(
+                self.options, self._connected_clients))
+
+            # Callback monitors server startup and releases
+            # initialization_semaphore. If server fails to start this callback
+            # call self._loop.stop()
+            server_task.add_done_callback(self._server_start_callback)
+
+            # Keep running event loop until self._loop.stop() is called.
+            # Calling this will lose control flow to the event loop
+            # indefinitely, upon self._loop.stop() control flow is returned
+            # here.
+            self._loop.run_forever()
+
+            # Stop must have been called, close the event loop
+            with self.shutdown_condition:
+                logger.debug("Closing TCPReplicator event loop")
+                self._loop.close()
+                self.shutdown_condition.notify()
+        except Exception as e:
+            # Log the exception as thread exceptions won't be returned to us
+            # on the main thread.
+            logger.exception("TCPReplicator thread encountered fatal exception")
+
+            # We will lose the exception and the original stacktrace of the
+            # thread. Once we use a threadpool it will be much easier to
+            # retrieve this so I propose to not bother implementing it now.
+            # For the pattern to do this see anyway:
+            # https://stackoverflow.com/a/6894023
+
+            # Due to the exception, the run method will return, making
+            # is_alive() return False.
+        finally:
+            # Always release the lock upon error so the constructor can return
+            if self.initialization_semaphore.acquire(blocking=False) is False:
+                self.initialization_semaphore.release()
+
+    def transmit(self, data: bytes):
+        """Transmit data to connected clients"""
+
+        if not isinstance(data, (bytes, bytearray)):
+            raise TypeError("Data must be byte-like object")
+
+        self._loop.call_soon_threadsafe(
+            self._loop.create_task, self._transmit(data))
+
+    def join(self, timeout=None):
+        logging.info("Received shutdown request on TCPReplicator thread for {}:{}".format(self.options["tcp_bind"], self.options["tcp_port"]))
+
+        self._clean_shutdown()
+
+        # Only call join at the end otherwise Thread will falsely assume
+        # all child 'processes' have stopped
+        super().join(timeout)
+
+    def disconnect(self):
+        if not self.is_alive():
+            return
+
+        # TODO(Corne): Prevent duplicate code across TCPReplicator, UDPReceiver
+        #              and StatisticsCollector.
+        self.join(self.DISCONNECT_TIMEOUT)
+
+        if self.is_alive():
+            # there is nothing we can do except wait (stall) longer, which
+            # could be indefinitely.
+            logger.error(
+                f"UDP thread for {self.host}:{self.port} did not shutdown after"
+                f"{self.DISCONNECT_TIMEOUT} seconds, just leaving it dangling."
+                f"Please attach a debugger to thread ID {self.ident}.")
+
+    async def _run_server(self, options: dict, connected_clients: list):
+        """Retrieve the event loop created in run() and launch the server"""
+        loop = asyncio.get_event_loop()
+
+        self._server = await loop.create_server(
+            lambda: TCPReplicator.TCPServerProtocol(options, connected_clients),
+            options['tcp_bind'], options['tcp_port'], reuse_address=True)
+
+    def put(self, packet):
+        """ Put a packet in the queue to be scheduled for transmission. """
+
+        # Check here whether our queue has clogged up, since we'll schedule
+        # self.queue.put asynchronously.
+        if self.queue.full():
+            raise asyncio.QueueFull("asyncio queue full")
+
+        # if we cannot process fast enough, our task list may clog up instead.
+        # just use the same limit here, as the task list will be dominated by the
+        # packet transmission count.
+        if self.queuesize > 0 and self.nof_tasks_pending > self.queuesize:
+            raise asyncio.QueueFull("asyncio loop task list full")
+
+        self._loop.call_soon_threadsafe(
+            self._loop.create_task, self.queue.put(packet))
+
+    async def _process_queue(self):
+        """ Take packets from the queue and transmit them across our clients. """
+        while True:
+            packet = await self.queue.get()
+
+            if packet is None:
+                # Magic marker from caller to terminate
+                break
+
+            self._loop.create_task(self._transmit(packet))
+
+    async def _transmit(self, data):
+        for client in self._connected_clients:
+            client.transport.write(data)
+
+            self.nof_packets_sent += 1
+            self.nof_bytes_sent += len(data)
+
+    async def _disconnect(self):
+        with self.disconnect_condition:
+            self._server.close()
+            await self._server.wait_closed()
+
+            for client in self._connected_clients:
+                peername = client.transport.get_extra_info('peername')
+                logger.debug('Disconnecting client {}'.format(peername))
+                client.transport.abort()
+
+            self.disconnect_condition.notify()
+
+    async def _stop_event_loop(self):
+        with self.shutdown_condition:
+
+            # Stop the current _process_queue task if it exists
+            if self._process_task:
+                # insert magic marker, if the caller hasn't already
+                await self.queue.put(None)
+
+                # wait for task to finish
+                await self._process_task
+
+            # Calling stop() will return control flow to self._loop.run_*()
+            self._loop.stop()
+
+    def _server_start_callback(self, future):
+        # Server started without exception release initialization semaphore
+        if not future.exception():
+            self.initialization_semaphore.release()
+            return
+
+        logger.warning("TCPReplicator server raised unexpected exception")
+        # Stop the loop so run() can fallthrough from self._loop.run_*
+        self._loop.stop()
+        # Raise the original exceptions captured from the start_server task
+        raise future.exception()
+
+    def _clean_shutdown(self):
+        """Disconnect clients, stop the event loop and wait for it to close"""
+
+        # The event loop is not running anymore, we can't send tasks to shut
+        # it down further.
+        if not self._loop.is_running():
+            return
+
+        # Shutdown server and disconnect clients
+        with self.disconnect_condition:
+            self._loop.call_soon_threadsafe(
+                self._loop.create_task, self._disconnect())
+            self.disconnect_condition.wait()
+
+        # Stop and close the event loop
+        with self.shutdown_condition:
+            logging.debug("Stopping TCPReplicator event loop")
+            self._loop.call_soon_threadsafe(
+                self._loop.create_task, self._stop_event_loop())
+            self.shutdown_condition.wait()
+
+    def clients(self):
+        """ Return the list of connected clients. """
+
+        return ["%s:%s" % client.transport.get_extra_info('peername') for client in self._connected_clients]
+
+    @property
+    def nof_tasks_pending(self):
+        """ Return the number of pending tasks in our event loop. """
+
+        return len(asyncio.all_tasks(self._loop))
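+
+# A minimal consumer sketch (host and port are assumptions; the TCP port is
+# configured through Statistics_Client_TCP_Port elsewhere in this change).
+# Note that TCP is a byte stream: packets are replicated back-to-back, so a
+# real consumer must recover packet boundaries from the packet headers.
+#
+#   import socket
+#
+#   sock = socket.create_connection(("localhost", 5101))
+#   while True:
+#       data = sock.recv(9000)  # read up to the maximum packet size
+#       if not data:
+#           break               # replicator closed the connection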
diff --git a/devices/clients/udp_receiver.py b/devices/clients/udp_receiver.py
index c8bc44eb1965b0fa769528b381dbaee5b2fcd5d0..8a9d1429945cdd5c41c47bf45edc5034c1cafa0c 100644
--- a/devices/clients/udp_receiver.py
+++ b/devices/clients/udp_receiver.py
@@ -1,29 +1,53 @@
+from queue import Full
 from queue import Queue
 from threading import Thread
-import numpy
 import logging
+import numpy
 import socket
 import time
+from typing import List  # not needed for Python 3.9+, where we can use the type "list[Queue]" directly
+
+from clients.statistics_client_thread import StatisticsClientThread
 
 logger = logging.getLogger()
 
 
-class UDPReceiver(Thread):
+class UDPReceiver(Thread, StatisticsClientThread):
     """
     Receives UDP packets and distributes them across the given queues.
     """
 
-    # How long to wait for a stuck Thread
-    DISCONNECT_TIMEOUT = 10.0
+    # Default options for UDPReceiver
+    _default_options = {
+        "udp_host": None,
+        "udp_port": None,
+        "poll_timeout": 0.1,
+    }
+
+    def __init__(self, queues: List[Queue], options: dict = None):
+        self.queues = queues
+
+        # these options are mandatory and have no usable default
+        if options is None or 'udp_host' not in options:
+            raise KeyError("udp_host")
 
-    def __init__(self, host, port, queue, poll_timeout=0.1):
-        self.queue = queue
-        self.host = host
-        self.port = port
+        if options is None or 'udp_port' not in options:
+            raise KeyError("udp_port")
+
+        self.options = self._parse_options(options)
+
+        self.host = self.options['udp_host']
+        self.port = self.options['udp_port']
+        self.poll_timeout = self.options['poll_timeout']
 
         self.parameters = {
             # Number of packets we received
             "nof_packets_received":  numpy.uint64(0),
+            # Number of bytes we received
+            "nof_bytes_received":    numpy.uint64(0),
             # Number of packets we had to drop due to a full queue
             "nof_packets_dropped":   numpy.uint64(0),
             # Packets are at most 9000 bytes, the largest payload (well, MTU) of an Ethernet Jumbo frame
@@ -48,13 +72,17 @@ class UDPReceiver(Thread):
 
         # Make sure we can stop receiving packets even if none arrive. 
         # Without this, the recvmsg() call blocks indefinitely if no packet arrives.
-        self.sock.settimeout(poll_timeout)
+        self.sock.settimeout(self.poll_timeout)
 
         self.stream_on = True
         super().__init__()
 
         self.start()
 
+    @property
+    def _options(self) -> dict:
+        return UDPReceiver._default_options
+
     def run(self):
         # all variables are manually defined and are updated each time
         logger.info("Starting UDP thread for {}:{}".format(self.host, self.port))
@@ -64,15 +92,17 @@ class UDPReceiver(Thread):
                 packet, _, _, _ = self.sock.recvmsg(9000)
 
                 self.parameters["nof_packets_received"]  += numpy.uint64(1)
+                self.parameters["nof_bytes_received"]    += numpy.uint64(len(packet))
                 self.parameters["last_packet"]           = numpy.frombuffer(packet, dtype=numpy.uint8)
                 self.parameters["last_packet_timestamp"] = numpy.uint64(int(time.time()))
 
-                # Forward packet to processing thread
-                self.queue.put(packet)
+                # Forward packet to processing threads
+                for queue in self.queues:
+                    queue.put(packet)
             except socket.timeout:
                 # timeout -- expected, allows us to check whether to stop
                 pass
-            except queue.Full:
+            except Full:
                 # overflow -- just discard
                 self.parameters["nof_packets_dropped"] += numpy.uint64(1)
 
@@ -88,10 +118,12 @@ class UDPReceiver(Thread):
             # happens if timeout is hit
             return
 
-        # shutdown the socket so that others can listen on this port
-        self.sock.shutdown(socket.SHUT_RDWR)
+        # close the socket so that others can listen on this port
+        self.sock.close()
 
     def disconnect(self):
+        # TODO(Corne): Prevent duplicate code across TCPReplicator, UDPReceiver
+        #              and StatisticsCollector.
         if not self.is_alive():
             return
 
@@ -101,6 +133,3 @@ class UDPReceiver(Thread):
         if self.is_alive():
             # there is nothing we can do except wait (stall) longer, which could be indefinitely.
             logger.error(f"UDP thread for {self.host}:{self.port} did not shut down after {self.DISCONNECT_TIMEOUT} seconds, just leaving it dangling. Please attach a debugger to thread ID {self.ident}.")
-
-    def __del__(self):
-        self.disconnect()
diff --git a/devices/common/baselines.py b/devices/common/baselines.py
new file mode 100644
index 0000000000000000000000000000000000000000..b9b0ca8038c0d881d602df37f99203d733f283fc
--- /dev/null
+++ b/devices/common/baselines.py
@@ -0,0 +1,59 @@
+"""
+  Baseline calculation functions.
+"""
+
+import math
+
+def nr_baselines(nr_inputs: int) -> int:
+    """ Return the number of baselines (unique pairs, auto-correlations included) that exist between a given number of inputs. """
+    return nr_inputs * (nr_inputs + 1) // 2
+
+"""
+
+ Baselines are ordered like:
+   0-0, 1-0, 1-1, 2-0, 2-1, 2-2, ...
+
+ if 
+   b = baseline
+   x = stat1 (major)
+   y = stat2 (minor)
+   x >= y
+ then
+   b_xy = x * (x + 1) / 2 + y
+ let
+   u := b_x0
+ then
+     u            = x * (x + 1) / 2
+     8u           = 4x^2 + 4x
+     8u + 1       = 4x^2 + 4x + 1 = (2x + 1)^2
+     sqrt(8u + 1) = 2x + 1
+                x = (sqrt(8u + 1) - 1) / 2
+
+ Let us define
+   x'(b) = (sqrt(8b + 1) - 1) / 2
+ which increases monotonically and is a continuation of y(b).
+
+ Because y simply increases by 1 when b increases enough, we
+ can just take the floor function to obtain the discrete y(b):
+   x(b) = floor(x'(b))
+        = floor(sqrt(8b + 1) - 1) / 2)
+
+"""
+
+def baseline_index(major: int, minor: int) -> int:
+    """ Provide a total ordering of baselines: give the unique array index for the baseline (major,minor),
+        with major >= minor. """
+
+    if major < minor:
+        raise ValueError(f"major < minor: {major} < {minor}. Since we do not store the conjugates this will lead to processing errors.")
+
+    return major * (major + 1) // 2 + minor
+
+def baseline_from_index(index: int) -> tuple:
+    """ Return the (major,minor) input pair given a baseline index. """
+
+    # invert x(b) = floor((sqrt(8b + 1) - 1) / 2); subtracting just under 1
+    # guards against sqrt() rounding to just below an exact integer
+    major = int((math.sqrt(float(8 * index + 1)) - 0.99999) / 2)
+    minor = index - baseline_index(major,0)
+
+    return (major,minor)
+
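+# Illustrative self-check (not exercised by the device code): the functions
+# must agree with the documented ordering 0-0, 1-0, 1-1, 2-0, 2-1, 2-2, ...
+if __name__ == "__main__":
+    assert nr_baselines(3) == 6
+    assert [baseline_from_index(b) for b in range(6)] == [(0, 0), (1, 0), (1, 1), (2, 0), (2, 1), (2, 2)]
+    assert all(baseline_index(*baseline_from_index(b)) == b for b in range(nr_baselines(192)))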
diff --git a/devices/common/lofar_logging.py b/devices/common/lofar_logging.py
index aed0353461d75ae6ad46b4b10ad51289fb08b553..c605d8cf927f890083dafc3ec85a16c1dab70d9d 100644
--- a/devices/common/lofar_logging.py
+++ b/devices/common/lofar_logging.py
@@ -4,6 +4,7 @@ from tango.server import Device
 import sys
 import traceback
 import socket
+import time
 
 from .lofar_git import get_version
 
@@ -33,6 +34,38 @@ class TangoLoggingHandler(logging.Handler):
 
         self.flush()
 
+class LogSuppressErrorSpam(logging.Formatter):
+    """
+       Suppress specific errors from spamming the logs by only letting them through periodically.
+    """
+
+    def __init__(self, error_suppress_interval = 3600):
+        """ Suppress subsequent errors for `error_suppress_interval` seconds. """
+
+        super().__init__()
+
+        # last time we logged an error
+        self.last_error_log_time = 0
+
+        # suppression interval at which we report errors
+        self.error_suppress_interval = error_suppress_interval
+
+    def is_error_to_suppress(self, record):
+        # Errors caused by not being able to connect to the ELK stack, e.g. because it is down.
+        return record.name == "LogProcessingWorker" and record.msg == "An error occurred while sending events: %s"
+
+    def filter(self, record):
+        if self.is_error_to_suppress(record):
+            # filter out error if it occurred within our suppression interval
+            now = time.time()
+
+            if now - self.last_error_log_time < self.error_suppress_interval:
+                return False
+
+            self.last_error_log_time = now
+
+        return True
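+
+# Illustrative behaviour sketch (hypothetical record, not part of the module):
+# the first matching error passes, repeats within the interval are suppressed.
+#
+#   f = LogSuppressErrorSpam(error_suppress_interval=3600)
+#   rec = logging.LogRecord("LogProcessingWorker", logging.ERROR, __file__, 0,
+#                           "An error occurred while sending events: %s", ("x",), None)
+#   assert f.filter(rec) is True    # first occurrence is let through
+#   assert f.filter(rec) is False   # repeat within the hour is suppressed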
+
 class LogAnnotator(logging.Formatter):
     """ Annotates log records with:
 
@@ -91,6 +124,7 @@ def configure_logger(logger: logging.Logger=None, log_extra=None):
         # configure log messages
         formatter = LogstashFormatter(extra=log_extra, tags=["python", "lofar"])
         handler.setFormatter(formatter)
+        handler.addFilter(LogSuppressErrorSpam())
         handler.addFilter(LogAnnotator())
 
         # install the handler
@@ -103,6 +137,7 @@ def configure_logger(logger: logging.Logger=None, log_extra=None):
     # Log to Tango
     try:
         handler = TangoLoggingHandler()
+        handler.addFilter(LogSuppressErrorSpam())
         handler.addFilter(LogAnnotator())
         logger.addHandler(handler)
     except Exception:
@@ -120,6 +155,7 @@ def configure_logger(logger: logging.Logger=None, log_extra=None):
 
     formatter = logging.Formatter(fmt = '%(asctime)s.%(msecs)d %(levelname)s - HOST="{}" DEVICE="%(tango_device)s" PID="%(process)d" TNAME="%(threadName)s" FILE="%(pathname)s" LINE="%(lineno)d" FUNC="%(funcName)s" MSG="%(message)s"'.format(hostname), datefmt = '%Y-%m-%dT%H:%M:%S')
     handler.setFormatter(formatter)
+    handler.addFilter(LogSuppressErrorSpam())
     handler.addFilter(LogAnnotator())
 
     logger.addHandler(handler)
diff --git a/devices/devices/abstract_device.py b/devices/devices/abstract_device.py
new file mode 100644
index 0000000000000000000000000000000000000000..5b65c3a6c02fd487920e02efed2394de275f1a9d
--- /dev/null
+++ b/devices/devices/abstract_device.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the XXX project
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+"""Abstract Device Meta for LOFAR2.0
+
+"""
+
+from abc import ABCMeta
+import logging
+
+from tango.server import DeviceMeta
+
+logger = logging.getLogger()
+
+
+class AbstractDeviceMetas(DeviceMeta, ABCMeta):
+    """Collects meta classes to allow hardware_device to be both a Device and an ABC. """
+
+    def __new__(mcs, name, bases, namespace, **kwargs):
+        # invoke both metaclass constructors, so the resulting class is
+        # registered with both the ABC machinery and the Tango Device machinery
+        cls = ABCMeta.__new__(mcs, name, bases, namespace, **kwargs)
+        cls = DeviceMeta.__new__(type(cls), name, bases, namespace)
+        return cls
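+
+# Usage sketch: pass this class as the metaclass of any class that must be
+# both a Tango Device and an ABC, as hardware_device does elsewhere in this
+# change:
+#
+#   class hardware_device(Device, metaclass=AbstractDeviceMetas):
+#       ...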
diff --git a/devices/devices/docker_device.py b/devices/devices/docker_device.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ff0ec366c436a2dfc75d4cd479219a04c6938d3
--- /dev/null
+++ b/devices/devices/docker_device.py
@@ -0,0 +1,167 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the Docker project
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+""" Docker Device Server for LOFAR2.0
+
+"""
+
+# TODO(Corne): Remove sys.path.append hack once packaging is in place!
+import os, sys
+currentdir = os.path.dirname(os.path.realpath(__file__))
+parentdir = os.path.dirname(currentdir)
+sys.path.append(parentdir)
+
+# PyTango imports
+from tango import DebugIt
+from tango.server import run, command
+from tango.server import device_property, attribute
+from tango import AttrWriteType
+import numpy
+# Additional import
+
+from device_decorators import *
+
+from clients.docker_client import DockerClient
+from clients.attribute_wrapper import attribute_wrapper
+from devices.hardware_device import hardware_device
+from common.lofar_logging import device_logging_to_python, log_exceptions
+from common.lofar_git import get_version
+
+__all__ = ["Docker", "main"]
+
+@device_logging_to_python()
+class Docker(hardware_device):
+    """
+
+    **Properties:**
+
+    - Device Property
+        Docker_Base_URL
+            - Type:'DevString'
+    """
+
+    # -----------------
+    # Device Properties
+    # -----------------
+
+    Docker_Base_URL = device_property(
+        dtype='DevString',
+        mandatory=False,
+        default_value="unix:///var/run/docker.sock"
+    )
+
+    # ----------
+    # Attributes
+    # ----------
+    version_R = attribute(dtype=str, access=AttrWriteType.READ, fget=lambda self: get_version())
+    archiver_maria_db_R = attribute_wrapper(comms_annotation={"container": "archiver-maria-db"}, datatype=numpy.bool_)
+    archiver_maria_db_RW = attribute_wrapper(comms_annotation={"container": "archiver-maria-db"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    databaseds_R = attribute_wrapper(comms_annotation={"container": "databaseds"}, datatype=numpy.bool_)
+    databaseds_RW = attribute_wrapper(comms_annotation={"container": "databaseds"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    device_recv_R = attribute_wrapper(comms_annotation={"container": "device-recv"}, datatype=numpy.bool_)
+    device_recv_RW = attribute_wrapper(comms_annotation={"container": "device-recv"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    device_sdp_R = attribute_wrapper(comms_annotation={"container": "device-sdp"}, datatype=numpy.bool_)
+    device_sdp_RW = attribute_wrapper(comms_annotation={"container": "device-sdp"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    device_sst_R = attribute_wrapper(comms_annotation={"container": "device-sst"}, datatype=numpy.bool_)
+    device_sst_RW = attribute_wrapper(comms_annotation={"container": "device-sst"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    device_xst_R = attribute_wrapper(comms_annotation={"container": "device-xst"}, datatype=numpy.bool_)
+    device_xst_RW = attribute_wrapper(comms_annotation={"container": "device-xst"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    device_unb2_R = attribute_wrapper(comms_annotation={"container": "device-unb2"}, datatype=numpy.bool_)
+    device_unb2_RW = attribute_wrapper(comms_annotation={"container": "device-unb2"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    device_docker_R = attribute_wrapper(comms_annotation={"container": "device-docker"}, datatype=numpy.bool_)
+    # device_docker_RW is not available, as we cannot start our own container
+    dsconfig_R = attribute_wrapper(comms_annotation={"container": "dsconfig"}, datatype=numpy.bool_)
+    dsconfig_RW = attribute_wrapper(comms_annotation={"container": "dsconfig"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    elk_R = attribute_wrapper(comms_annotation={"container": "elk"}, datatype=numpy.bool_)
+    elk_RW = attribute_wrapper(comms_annotation={"container": "elk"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    grafana_R = attribute_wrapper(comms_annotation={"container": "grafana"}, datatype=numpy.bool_)
+    grafana_RW = attribute_wrapper(comms_annotation={"container": "grafana"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    hdbpp_cm_R = attribute_wrapper(comms_annotation={"container": "hdbpp-cm"}, datatype=numpy.bool_)
+    hdbpp_cm_RW = attribute_wrapper(comms_annotation={"container": "hdbpp-cm"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    hdbpp_es_R = attribute_wrapper(comms_annotation={"container": "hdbpp-es"}, datatype=numpy.bool_)
+    hdbpp_es_RW = attribute_wrapper(comms_annotation={"container": "hdbpp-es"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    itango_R = attribute_wrapper(comms_annotation={"container": "itango"}, datatype=numpy.bool_)
+    itango_RW = attribute_wrapper(comms_annotation={"container": "itango"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    jupyter_R = attribute_wrapper(comms_annotation={"container": "jupyter"}, datatype=numpy.bool_)
+    jupyter_RW = attribute_wrapper(comms_annotation={"container": "jupyter"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    prometheus_R = attribute_wrapper(comms_annotation={"container": "prometheus"}, datatype=numpy.bool_)
+    prometheus_RW = attribute_wrapper(comms_annotation={"container": "prometheus"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    tangodb_R = attribute_wrapper(comms_annotation={"container": "tangodb"}, datatype=numpy.bool_)
+    tangodb_RW = attribute_wrapper(comms_annotation={"container": "tangodb"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    tango_prometheus_exporter_R = attribute_wrapper(comms_annotation={"container": "tango-prometheus-exporter"}, datatype=numpy.bool_)
+    tango_prometheus_exporter_RW = attribute_wrapper(comms_annotation={"container": "tango-prometheus-exporter"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+    tango_rest_R = attribute_wrapper(comms_annotation={"container": "tango-rest"}, datatype=numpy.bool_)
+    tango_rest_RW = attribute_wrapper(comms_annotation={"container": "tango-rest"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
+
+    @log_exceptions()
+    def delete_device(self):
+        """Hook to delete resources allocated in init_device.
+
+        This method allows for any memory or other resources allocated in the
+        init_device method to be released.  This method is called by the device
+        destructor and by the device Init command (a Tango built-in).
+        """
+        self.debug_stream("Shutting down...")
+
+        self.Off()
+        self.debug_stream("Shut down.  Good bye.")
+
+    # --------
+    # overloaded functions
+    # --------
+    @log_exceptions()
+    def configure_for_off(self):
+        """ user code here. is called when the state is set to OFF """
+        # Stop the Docker client
+        try:
+            self.docker_client.stop()
+        except Exception as e:
+            self.warn_stream("Exception while stopping Docker client in configure_for_off function: {}. Exception ignored".format(e))
+
+    @log_exceptions()
+    def configure_for_initialise(self):
+        """ user code here. is called when the state is set to INIT """
+
+        # set up the Docker client
+        self.docker_client = DockerClient(self.Docker_Base_URL, self.Fault, self)
+
+        # map an access helper class
+        for i in self.attr_list():
+            try:
+                i.set_comm_client(self.docker_client)
+            except Exception as e:
+                # use the pass function instead of setting read/write fails
+                i.set_pass_func()
+                self.warn_stream("error while setting the attribute {} read/write function. {}".format(i, e))
+
+        self.docker_client.start()
+
+    # --------
+    # Commands
+    # --------
+
+
+# ----------
+# Run server
+# ----------
+def main(args=None, **kwargs):
+    """Main function of the Docker module."""
+
+    from common.lofar_logging import configure_logger
+    configure_logger()
+
+    return run((Docker,), args=args, **kwargs)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/devices/devices/hardware_device.py b/devices/devices/hardware_device.py
index c0e7df614d95e40f9816f9332f2832c8f3d4166c..f8f6ca50d7e02f5a8694c2ec4f9135dd874cd516 100644
--- a/devices/devices/hardware_device.py
+++ b/devices/devices/hardware_device.py
@@ -1,13 +1,13 @@
 # -*- coding: utf-8 -*-
 #
-# This file is part of the PCC project
+# This file is part of the XXX project
 #
 #
 #
 # Distributed under the terms of the APACHE license.
 # See LICENSE.txt for more info.
 
-""" PCC Device Server for LOFAR2.0
+"""Hardware Device Server for LOFAR2.0
 
 """
 
@@ -20,18 +20,15 @@ from tango import DevState, DebugIt, Attribute, DeviceProxy
 
 from clients.attribute_wrapper import attribute_wrapper
 from common.lofar_logging import log_exceptions
-import logging
+from devices.abstract_device import AbstractDeviceMetas
+from devices.device_decorators import only_in_states, fault_on_error
 
+import logging
 
 __all__ = ["hardware_device"]
 
-from devices.device_decorators import only_in_states, fault_on_error
-
 logger = logging.getLogger()
 
-class AbstractDeviceMetas(DeviceMeta, ABCMeta):
-    ''' Collects meta classes to allow hardware_device to be both a Device and an ABC. '''
-    pass
 
 #@log_exceptions()
 class hardware_device(Device, metaclass=AbstractDeviceMetas):
@@ -95,6 +92,9 @@ class hardware_device(Device, metaclass=AbstractDeviceMetas):
         self.set_state(DevState.INIT)
         self.setup_value_dict()
 
+        # reload our class & device properties from the Tango database
+        self.get_device_properties()
+
         self.configure_for_initialise()
 
         self.set_state(DevState.STANDBY)
diff --git a/devices/devices/pcc.py b/devices/devices/recv.py
similarity index 96%
rename from devices/devices/pcc.py
rename to devices/devices/recv.py
index 73b105abc21f9cc8c7c15a564a67c9e0758e77cd..6f1de6aedc9e6db463c2edcd7a1a8bdf3daf7c2e 100644
--- a/devices/devices/pcc.py
+++ b/devices/devices/recv.py
@@ -1,13 +1,13 @@
 # -*- coding: utf-8 -*-
 #
-# This file is part of the PCC project
+# This file is part of the RECV project
 #
 #
 #
 # Distributed under the terms of the APACHE license.
 # See LICENSE.txt for more info.
 
-""" PCC Device Server for LOFAR2.0
+""" RECV Device Server for LOFAR2.0
 
 """
 
@@ -33,10 +33,10 @@ from devices.hardware_device import hardware_device
 from common.lofar_logging import device_logging_to_python, log_exceptions
 from common.lofar_git import get_version
 
-__all__ = ["PCC", "main"]
+__all__ = ["RECV", "main"]
 
 @device_logging_to_python()
-class PCC(hardware_device):
+class RECV(hardware_device):
     """
 
     **Properties:**
@@ -108,7 +108,7 @@ class PCC(hardware_device):
     RCU_Pwr_dig_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_Pwr_dig_R"], datatype=numpy.bool_, dims=(32,))
     RCU_temperature_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_temperature_R"], datatype=numpy.float64, dims=(32,))
     RCU_translator_busy_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_translator_busy_R"], datatype=numpy.bool_)
-    RCU_version_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_version_R"], datatype=numpy.str_, dims=(32,))
+    RCU_version_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_version_R"], datatype=numpy.str, dims=(32,))
 
     @log_exceptions()
     def delete_device(self):
@@ -156,7 +156,7 @@ class PCC(hardware_device):
             except Exception as e:
                 # use the pass function instead of setting read/write fails
                 i.set_pass_func()
-                self.warn_stream("error while setting the PCC attribute {} read/write function. {}".format(i, e))
+                self.warn_stream("error while setting the RECV attribute {} read/write function. {}".format(i, e))
 
         self.OPCua_client.start()
 
@@ -247,12 +247,12 @@ class PCC(hardware_device):
 # Run server
 # ----------
 def main(args=None, **kwargs):
-    """Main function of the PCC module."""
+    """Main function of the RECV module."""
 
     from common.lofar_logging import configure_logger
     configure_logger()
 
-    return run((PCC,), args=args, **kwargs)
+    return run((RECV,), args=args, **kwargs)
 
 
 if __name__ == '__main__':
diff --git a/devices/devices/sdp/sdp.py b/devices/devices/sdp/sdp.py
index 9bc94e6c811ac4e63a88c035ef62eba3998df895..75e027b571cefe0bdfa68621b37f45dd26d98aae 100644
--- a/devices/devices/sdp/sdp.py
+++ b/devices/devices/sdp/sdp.py
@@ -99,12 +99,15 @@ class SDP(hardware_device):
     version_R = attribute(dtype=str, access=AttrWriteType.READ, fget=lambda self: get_version())
 
     # SDP will switch from FPGA_mask_RW to TR_FPGA_mask_RW, offer both for now as it's a critical flag
-    FPGA_firmware_version_R = attribute_wrapper(comms_annotation=["2:FPGA_firmware_version_R"], datatype=numpy.str_, dims=(16,))
-    FPGA_hardware_version_R = attribute_wrapper(comms_annotation=["2:FPGA_hardware_version_R"], datatype=numpy.str_, dims=(16,))
+    FPGA_firmware_version_R = attribute_wrapper(comms_annotation=["2:FPGA_firmware_version_R"], datatype=numpy.str, dims=(16,))
+    FPGA_global_node_index_R = attribute_wrapper(comms_annotation=["2:FPGA_global_node_index_R"], datatype=numpy.uint32, dims=(16,))
+    FPGA_hardware_version_R = attribute_wrapper(comms_annotation=["2:FPGA_hardware_version_R"], datatype=numpy.str, dims=(16,))
     FPGA_processing_enable_R = attribute_wrapper(comms_annotation=["2:FPGA_processing_enable_R"], datatype=numpy.bool_, dims=(16,))
     FPGA_processing_enable_RW = attribute_wrapper(comms_annotation=["2:FPGA_processing_enable_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
     FPGA_scrap_R = attribute_wrapper(comms_annotation=["2:FPGA_scrap_R"], datatype=numpy.int32, dims=(8192,))
     FPGA_scrap_RW = attribute_wrapper(comms_annotation=["2:FPGA_scrap_RW"], datatype=numpy.int32, dims=(8192,), access=AttrWriteType.READ_WRITE)
+    FPGA_signal_input_mean_R = attribute_wrapper(comms_annotation=["2:FPGA_signal_input_mean_R"], datatype=numpy.double, dims=(12, 16))
+    FPGA_signal_input_rms_R = attribute_wrapper(comms_annotation=["2:FPGA_signal_input_rms_R"], datatype=numpy.double, dims=(12, 16))
     FPGA_sdp_info_antenna_band_index_R = attribute_wrapper(comms_annotation=["2:FPGA_sdp_info_antenna_band_index_R"], datatype=numpy.uint32, dims=(16,))
     FPGA_sdp_info_block_period_R = attribute_wrapper(comms_annotation=["2:FPGA_sdp_info_block_period_R"], datatype=numpy.uint32, dims=(16,))
     FPGA_sdp_info_f_adc_R = attribute_wrapper(comms_annotation=["2:FPGA_sdp_info_f_adc_R"], datatype=numpy.uint32, dims=(16,))
@@ -116,7 +119,7 @@ class SDP(hardware_device):
     FPGA_sdp_info_station_id_R = attribute_wrapper(comms_annotation=["2:FPGA_sdp_info_station_id_R"], datatype=numpy.uint32, dims=(16,))
     FPGA_sdp_info_station_id_RW = attribute_wrapper(comms_annotation=["2:FPGA_sdp_info_station_id_RW"], datatype=numpy.uint32, dims=(16,), access=AttrWriteType.READ_WRITE)
     FPGA_subband_weights_R = attribute_wrapper(comms_annotation=["2:FPGA_subband_weights_R"], datatype=numpy.uint32, dims=(12 * 512, 16))
-    FPGA_subband_weights_RW = attribute_wrapper(comms_annotation=["2:FPGA_subband_weights_R"], datatype=numpy.uint32, dims=(12 * 512, 16))
+    FPGA_subband_weights_RW = attribute_wrapper(comms_annotation=["2:FPGA_subband_weights_RW"], datatype=numpy.uint32, dims=(12 * 512, 16), access=AttrWriteType.READ_WRITE)
     FPGA_temp_R = attribute_wrapper(comms_annotation=["2:FPGA_temp_R"], datatype=numpy.float_, dims=(16,))
     FPGA_weights_R = attribute_wrapper(comms_annotation=["2:FPGA_weights_R"], datatype=numpy.int16, dims=(12 * 488 * 2, 16))
     FPGA_weights_RW = attribute_wrapper(comms_annotation=["2:FPGA_weights_RW"], datatype=numpy.int16, dims=(12 * 488 * 2, 16), access=AttrWriteType.READ_WRITE)
@@ -131,10 +134,13 @@ class SDP(hardware_device):
     TR_fpga_mask_R = attribute_wrapper(comms_annotation=["2:TR_fpga_mask_R"], datatype=numpy.bool_, dims=(16,))
     TR_fpga_mask_RW = attribute_wrapper(comms_annotation=["2:TR_fpga_mask_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
     TR_fpga_communication_error_R = attribute_wrapper(comms_annotation=["2:TR_fpga_communication_error_R"], datatype=numpy.bool_, dims=(16,))
-    TR_software_version_R = attribute_wrapper(comms_annotation=["2:TR_software_version_R"], datatype=numpy.str_)
-    TR_start_time_R = attribute_wrapper(comms_annotation=["2:TR_start_time_R"], datatype=numpy.int32)
-    TR_tod_R = attribute_wrapper(comms_annotation=["2:TR_tod_R"], datatype=numpy.uint64)
-    TR_tod_pps_delta_R = attribute_wrapper(comms_annotation=["2:TR_tod_pps_delta_R"], datatype=numpy.float_)
+    TR_sdp_config_first_fpga_nr_R = attribute_wrapper(comms_annotation=["2:TR_sdp_config_first_fpga_nr_R"], datatype=numpy.uint32)
+    TR_sdp_config_nof_beamsets_R = attribute_wrapper(comms_annotation=["2:TR_sdp_config_nof_beamsets_R"], datatype=numpy.uint32)
+    TR_sdp_config_nof_fpgas_R = attribute_wrapper(comms_annotation=["2:TR_sdp_config_nof_fpgas_R"], datatype=numpy.uint32)
+    TR_software_version_R = attribute_wrapper(comms_annotation=["2:TR_software_version_R"], datatype=numpy.str)
+    TR_start_time_R = attribute_wrapper(comms_annotation=["2:TR_start_time_R"], datatype=numpy.int64)
+    TR_tod_R = attribute_wrapper(comms_annotation=["2:TR_tod_R"], datatype=numpy.int64, dims=(2,))
+    TR_tod_pps_delta_R = attribute_wrapper(comms_annotation=["2:TR_tod_pps_delta_R"], datatype=numpy.double)
 
     def always_executed_hook(self):
         """Method always executed before any TANGO command is executed."""
@@ -162,7 +168,7 @@ class SDP(hardware_device):
 
         # Stop keep-alive
         try:
-            self.opcua_connection.stop()
+            self.OPCua_client.stop()
         except Exception as e:
             self.warn_stream("Exception while stopping OPC ua connection in configure_for_off function: {}. Exception ignored".format(e))
 
diff --git a/devices/devices/sdp/sst.py b/devices/devices/sdp/sst.py
index 97c7463e2ac3728be2418e639d75140c421a11c1..3b2f36236a841adb0511b284cbeb4a0fbc6ee296 100644
--- a/devices/devices/sdp/sst.py
+++ b/devices/devices/sdp/sst.py
@@ -27,12 +27,6 @@ from tango import AttrWriteType
 from clients.attribute_wrapper import attribute_wrapper
 from clients.opcua_client import OPCUAConnection
 from clients.statistics_client import StatisticsClient
-
-from devices.hardware_device import hardware_device
-
-from common.lofar_git import get_version
-from common.lofar_logging import device_logging_to_python, log_exceptions
-
 from devices.sdp.statistics import Statistics
 from devices.sdp.statistics_collector import SSTCollector
 
@@ -76,25 +70,27 @@ class SST(Statistics):
     # FPGA control points for SSTs
     FPGA_sst_offload_enable_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_sst_offload_enable_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
     FPGA_sst_offload_enable_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_sst_offload_enable_R"], datatype=numpy.bool_, dims=(16,))
-    FPGA_sst_offload_hdr_eth_destination_mac_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_sst_offload_hdr_eth_destination_mac_RW"], datatype=numpy.str_, dims=(16,), access=AttrWriteType.READ_WRITE)
-    FPGA_sst_offload_hdr_eth_destination_mac_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_sst_offload_hdr_eth_destination_mac_R"], datatype=numpy.str_, dims=(16,))
-    FPGA_sst_offload_hdr_ip_destination_address_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_sst_offload_hdr_ip_destination_address_RW"], datatype=numpy.str_, dims=(16,), access=AttrWriteType.READ_WRITE)
-    FPGA_sst_offload_hdr_ip_destination_address_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_sst_offload_hdr_ip_destination_address_R"], datatype=numpy.str_, dims=(16,))
+    FPGA_sst_offload_hdr_eth_destination_mac_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_sst_offload_hdr_eth_destination_mac_RW"], datatype=numpy.str, dims=(16,), access=AttrWriteType.READ_WRITE)
+    FPGA_sst_offload_hdr_eth_destination_mac_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_sst_offload_hdr_eth_destination_mac_R"], datatype=numpy.str, dims=(16,))
+    FPGA_sst_offload_hdr_ip_destination_address_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_sst_offload_hdr_ip_destination_address_RW"], datatype=numpy.str, dims=(16,), access=AttrWriteType.READ_WRITE)
+    FPGA_sst_offload_hdr_ip_destination_address_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_sst_offload_hdr_ip_destination_address_R"], datatype=numpy.str, dims=(16,))
     FPGA_sst_offload_hdr_udp_destination_port_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_sst_offload_hdr_udp_destination_port_RW"], datatype=numpy.uint16, dims=(16,), access=AttrWriteType.READ_WRITE)
     FPGA_sst_offload_hdr_udp_destination_port_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_sst_offload_hdr_udp_destination_port_R"], datatype=numpy.uint16, dims=(16,))
     FPGA_sst_offload_weighted_subbands_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_sst_offload_weighted_subbands_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
     FPGA_sst_offload_weighted_subbands_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_sst_offload_weighted_subbands_R"], datatype=numpy.bool_, dims=(16,))
 
     # number of packets with valid payloads
-    nof_valid_payloads_R    = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_valid_payloads"}, dims=(SSTCollector.MAX_INPUTS,), datatype=numpy.uint64)
+    nof_valid_payloads_R    = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_valid_payloads"}, dims=(SSTCollector.MAX_FPGAS,), datatype=numpy.uint64)
     # number of packets with invalid payloads
-    nof_payload_errors_R    = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_payload_errors"}, dims=(SSTCollector.MAX_INPUTS,), datatype=numpy.uint64)
+    nof_payload_errors_R    = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_payload_errors"}, dims=(SSTCollector.MAX_FPGAS,), datatype=numpy.uint64)
     # latest SSTs
     sst_R                   = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "sst_values"}, dims=(SSTCollector.MAX_SUBBANDS, SSTCollector.MAX_INPUTS), datatype=numpy.uint64)
     # reported timestamp for each row in the latest SSTs
     sst_timestamp_R         = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "sst_timestamps"}, dims=(SSTCollector.MAX_INPUTS,), datatype=numpy.uint64)
     # integration interval for each row in the latest SSTs
     integration_interval_R  = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "integration_intervals"}, dims=(SSTCollector.MAX_INPUTS,), datatype=numpy.float32)
+    # whether the subband data was calibrated by the SDP (that is, were subband weights applied)
+    subbands_calibrated_R   = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "subbands_calibrated"}, dims=(SSTCollector.MAX_INPUTS,), datatype=numpy.bool_)
 
     # --------
     # Overloaded functions
diff --git a/devices/devices/sdp/statistics.py b/devices/devices/sdp/statistics.py
index 5d10aae8b866acc0b30598856cb63b1ecc6d233a..7d0b970b089ff29931bfc088f8b4b208d347402c 100644
--- a/devices/devices/sdp/statistics.py
+++ b/devices/devices/sdp/statistics.py
@@ -21,7 +21,6 @@ sys.path.append(parentdir)
 from abc import ABCMeta, abstractmethod
 
 # PyTango imports
-from tango.server import run
 from tango.server import device_property, attribute
 from tango import AttrWriteType
 # Additional import
@@ -34,6 +33,9 @@ from devices.hardware_device import hardware_device
 
 from common.lofar_git import get_version
 from common.lofar_logging import device_logging_to_python, log_exceptions
+import logging
+
+logger = logging.getLogger()
 
 import numpy
 
@@ -66,7 +68,12 @@ class Statistics(hardware_device, metaclass=ABCMeta):
         mandatory=True
     )
 
-    Statistics_Client_Port = device_property(
+    Statistics_Client_UDP_Port = device_property(
+        dtype='DevUShort',
+        mandatory=True
+    )
+
+    Statistics_Client_TCP_Port = device_property(
         dtype='DevUShort',
         mandatory=True
     )
@@ -77,8 +84,9 @@ class Statistics(hardware_device, metaclass=ABCMeta):
 
     version_R = attribute(dtype = str, access = AttrWriteType.READ, fget = lambda self: get_version())
 
-    # number of UDP packets that were received
+    # number of UDP packets and bytes that were received
     nof_packets_received_R  = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "udp", "parameter": "nof_packets_received"}, datatype=numpy.uint64)
+    nof_bytes_received_R  = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "udp", "parameter": "nof_bytes_received"}, datatype=numpy.uint64)
     # number of UDP packets that were dropped because we couldn't keep up with processing
     nof_packets_dropped_R   = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "udp", "parameter": "nof_packets_dropped"}, datatype=numpy.uint64)
     # last packet we processed
@@ -87,11 +95,17 @@ class Statistics(hardware_device, metaclass=ABCMeta):
     last_packet_timestamp_R = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "udp", "parameter": "last_packet_timestamp"}, datatype=numpy.uint64)
 
     # queue fill percentage, as reported by the consumer
-    queue_fill_percentage_R = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "queue", "parameter": "fill_percentage"}, datatype=numpy.uint64)
+    queue_collector_fill_percentage_R = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "queue", "parameter": "collector_fill_percentage"}, datatype=numpy.uint64)
+    queue_replicator_fill_percentage_R = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "queue", "parameter": "replicator_fill_percentage"}, datatype=numpy.uint64)
+
+    replicator_clients_R = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "replicator", "parameter": "clients"}, dims=(128,), datatype=numpy.str)
+    replicator_nof_bytes_sent_R = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "replicator", "parameter": "nof_bytes_sent"}, datatype=numpy.uint64)
+
+    replicator_nof_packets_sent_R = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "replicator", "parameter": "nof_packets_sent"}, datatype=numpy.uint64)
+    replicator_nof_tasks_pending_R = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "replicator", "parameter": "nof_tasks_pending"}, datatype=numpy.uint64)
 
     # number of UDP packets that were processed
     nof_packets_processed_R = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_packets"}, datatype=numpy.uint64)
-
     # number of invalid (non-SST) packets received
     nof_invalid_packets_R   = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_invalid_packets"}, datatype=numpy.uint64)
     # last packet that could not be parsed
@@ -108,19 +122,32 @@ class Statistics(hardware_device, metaclass=ABCMeta):
         try:
             self.statistics_client.stop()
         except Exception as e:
-            self.warn_stream("Exception while stopping statistics_client in configure_for_off function: {}. Exception ignored".format(e))
+            logger.exception("Exception while stopping statistics_client in configure_for_off. Exception ignored")
 
         try:
             self.OPCUA_client.stop()
         except Exception as e:
-            self.warn_stream("Exception while stopping OPC UA connection in configure_for_off function: {}. Exception ignored".format(e))
+            logger.exception("Exception while stopping OPC UA connection in configure_for_off. Exception ignored")
 
     @log_exceptions()
     def configure_for_initialise(self):
         """ user code here. is called when the sate is set to INIT """
         """Initialises the attributes and properties of the statistics device."""
 
-        self.statistics_client = StatisticsClient(self.STATISTICS_COLLECTOR_CLASS, "0.0.0.0", self.Statistics_Client_Port, self.Fault, self)
+        # Options for UDPReceiver
+        udp_options = {
+            "udp_port": self.Statistics_Client_UDP_Port,
+            "udp_host": "0.0.0.0"
+        }
+
+        # Options for TCPReplicator
+        tcp_options = {
+            "tcp_port": self.Statistics_Client_TCP_Port
+            # tcp_host has default value
+        }
+
+        self.statistics_collector = self.STATISTICS_COLLECTOR_CLASS()
+        self.statistics_client = StatisticsClient(self.statistics_collector, udp_options, tcp_options, self.Fault, self)
 
         self.OPCUA_client = OPCUAConnection("opc.tcp://{}:{}/".format(self.OPC_Server_Name, self.OPC_Server_Port), "http://lofar.eu", self.OPC_Time_Out, self.Fault, self)
 
diff --git a/devices/devices/sdp/statistics_collector.py b/devices/devices/sdp/statistics_collector.py
index f3aac3c1982b03b169eaddedce52b50c939ddc45..1bd8f3c12135a818526c48ecbff80408f290b7c9 100644
--- a/devices/devices/sdp/statistics_collector.py
+++ b/devices/devices/sdp/statistics_collector.py
@@ -3,31 +3,21 @@ from threading import Thread
 import logging
 import numpy
 
-from .statistics_packet import SSTPacket
+from .statistics_packet import SSTPacket, XSTPacket
+from common.baselines import nr_baselines, baseline_index, baseline_from_index
+from clients.statistics_client_thread import StatisticsClientThread
 
 logger = logging.getLogger()
 
-class StatisticsCollector(Thread):
-    """ Base class to process statistics packets from a queue, asynchronously. """
-
-    # Maximum number of antenna inputs we support (used to determine array sizes)
-    MAX_INPUTS = 192
-
-    # Maximum number of subbands we support (used to determine array sizes)
-    MAX_SUBBANDS = 512
-
-    # Maximum time to wait for the Thread to get unstuck, if we want to stop
-    DISCONNECT_TIMEOUT = 10.0
+class StatisticsCollector:
+    """ Base class to process statistics packets into parameter matrices. """
 
-    def __init__(self, queue: Queue):
-        self.queue = queue
-        self.last_packet = None
+    # Maximum number of FPGAs we receive data from (used for diagnostics)
+    MAX_FPGAS = 16
 
+    def __init__(self):
         self.parameters = self._default_parameters()
 
-        super().__init__()
-        self.start()
-
     def _default_parameters(self):
         return {
             "nof_packets":           numpy.uint64(0),
@@ -39,48 +29,18 @@ class StatisticsCollector(Thread):
             "last_invalid_packet":   numpy.zeros((9000,), dtype=numpy.uint8),
         }
 
-    def run(self):
-        logger.info("Starting statistics thread")
-
-        while True:
-            self.last_packet = self.queue.get()
-
-            # This is the exception/slow path, but python doesn't allow us to optimise that
-            if self.last_packet is None:
-                # None is the magic marker to stop processing
-                break
-
-            self.parameters["nof_packets"] += numpy.uint64(1)
-
-            try:
-                self.process_packet(self.last_packet)
-            except Exception as e:
-                logger.exception("Could not parse statistics UDP packet")
-
-                self.parameters["last_invalid_packet"] = numpy.frombuffer(self.last_packet, dtype=numpy.uint8)
-                self.parameters["nof_invalid_packets"] += numpy.uint64(1)
-
-        logger.info("Stopped statistics thread")
-
-    def join(self, timeout=0):
-        # insert magic marker
-        self.queue.put(None)
-        logger.info("Sent shutdown to statistics thread")
-
-        super().join(timeout)
-
-    def disconnect(self):
-        if not self.is_alive():
-            return
+    def process_packet(self, packet):
+        self.parameters["nof_packets"] += numpy.uint64(1)
 
-        # try to get the thread shutdown, but don't stall forever
-        self.join(self.DISCONNECT_TIMEOUT)
+        try:
+            self.parse_packet(packet)
+        except Exception as e:
+            self.parameters["last_invalid_packet"] = numpy.frombuffer(packet, dtype=numpy.uint8)
+            self.parameters["nof_invalid_packets"] += numpy.uint64(1)
 
-        if self.is_alive():
-            # there is nothing we can do except wait (stall) longer, which could be indefinitely.
-            logger.error(f"Statistics thread did not shut down after {self.DISCONNECT_TIMEOUT} seconds, just leaving it dangling. Please attach a debugger to thread ID {self.ident}.")
+            raise ValueError("Could not parse statistics packet") from e
 
-    def process_packet(self, packet):
+    def parse_packet(self, packet):
         """ Update any information based on this packet. """
 
         raise NotImplementedError
@@ -100,20 +60,21 @@ class SSTCollector(StatisticsCollector):
 
         defaults.update({
             # Number of packets received so far that we could parse correctly and do not have a payload error
-            "nof_valid_payloads":    numpy.zeros((self.MAX_INPUTS,), dtype=numpy.uint64),
+            "nof_valid_payloads":    numpy.zeros((self.MAX_FPGAS,), dtype=numpy.uint64),
 
             # Packets that reported a payload error
-            "nof_payload_errors":    numpy.zeros((self.MAX_INPUTS,), dtype=numpy.uint64),
+            "nof_payload_errors":    numpy.zeros((self.MAX_FPGAS,), dtype=numpy.uint64),
 
             # Last value array we've constructed out of the packets
             "sst_values":            numpy.zeros((self.MAX_INPUTS, self.MAX_SUBBANDS), dtype=numpy.uint64),
             "sst_timestamps":        numpy.zeros((self.MAX_INPUTS,), dtype=numpy.float64),
             "integration_intervals": numpy.zeros((self.MAX_INPUTS,), dtype=numpy.float32),
+            "subbands_calibrated":   numpy.zeros((self.MAX_INPUTS,), dtype=numpy.bool_),
         })
 
         return defaults
 
-    def process_packet(self, packet):
+    def parse_packet(self, packet):
         fields = SSTPacket(packet)
 
         # determine which input this packet contains data for
@@ -125,13 +86,179 @@ class SSTCollector(StatisticsCollector):
 
         if fields.payload_error:
             # cannot trust the data if a payload error is reported
-            self.parameters["nof_payload_errors"][input_index] += numpy.uint64(1)
+            self.parameters["nof_payload_errors"][fields.gn_index] += numpy.uint64(1)
 
             # don't raise, as packet is valid
             return
 
         # process the packet
-        self.parameters["nof_valid_payloads"][input_index]    += numpy.uint64(1)
+        self.parameters["nof_valid_payloads"][fields.gn_index]    += numpy.uint64(1)
         self.parameters["sst_values"][input_index][:fields.nof_statistics_per_packet] = fields.payload
         self.parameters["sst_timestamps"][input_index]        = numpy.float64(fields.timestamp().timestamp())
         self.parameters["integration_intervals"][input_index] = fields.integration_interval()
+        self.parameters["subbands_calibrated"][input_index]   = fields.subband_calibrated_flag
+
+class XSTCollector(StatisticsCollector):
+    """ Class to process XST statistics packets. """
+
+    # Maximum number of antenna inputs we support (used to determine array sizes)
+    MAX_INPUTS = 192
+
+    # Maximum number of baselines we can receive
+    MAX_BASELINES = nr_baselines(MAX_INPUTS)
+
+    # Expected block size is BLOCK_LENGTH x BLOCK_LENGTH
+    BLOCK_LENGTH = 12
+
+    # Expected number of blocks: enough to cover all baselines without the conjugates (that is, the top-left triangle of the matrix).
+    MAX_BLOCKS = nr_baselines(MAX_INPUTS // BLOCK_LENGTH)
+
+    # Maximum number of subbands we support (used to determine array sizes)
+    MAX_SUBBANDS = 512
+
+    # Complex values are (real, imag). A bit silly, but we don't want magical constants.
+    VALUES_PER_COMPLEX = 2
+
+    def _default_parameters(self):
+        defaults = super()._default_parameters()
+
+        defaults.update({
+            # Number of packets received so far that we could parse correctly and do not have a payload error
+            "nof_valid_payloads":    numpy.zeros((self.MAX_FPGAS,), dtype=numpy.uint64),
+
+            # Packets that reported a payload error
+            "nof_payload_errors":    numpy.zeros((self.MAX_FPGAS,), dtype=numpy.uint64),
+
+            # Last value array we've constructed out of the packets
+            "xst_blocks":            numpy.zeros((self.MAX_BLOCKS, self.BLOCK_LENGTH * self.BLOCK_LENGTH * self.VALUES_PER_COMPLEX), dtype=numpy.int64),
+            "xst_timestamps":        numpy.zeros((self.MAX_BLOCKS,), dtype=numpy.float64),
+            "xst_subbands":          numpy.zeros((self.MAX_BLOCKS,), dtype=numpy.uint16),
+            "integration_intervals": numpy.zeros((self.MAX_BLOCKS,), dtype=numpy.float32),
+        })
+
+        return defaults
+
+    def parse_packet(self, packet):
+        fields = XSTPacket(packet)
+
+        if fields.payload_error:
+            # cannot trust the data if a payload error is reported
+            self.parameters["nof_payload_errors"][fields.gn_index] += numpy.uint64(1)
+
+            # don't raise, as packet is valid
+            return
+
+        # the blocks must be of size BLOCK_LENGTH x BLOCK_LENGTH
+        if fields.nof_signal_inputs != self.BLOCK_LENGTH:
+            raise ValueError("Packet describes a block of {0} x {0} baselines, but we can only parse blocks of {1} x {1} baselines".format(fields.nof_signal_inputs, self.BLOCK_LENGTH))
+
+        # check whether set of baselines in this packet are not out of bounds
+        for antenna in (0,1):
+            if fields.first_baseline[antenna] + fields.nof_signal_inputs >= self.MAX_INPUTS:
+                # packet describes an input that is out of bounds for us
+                raise ValueError("Packet describes {0} x {0} baselines starting at {1}, but we are limited to describing MAX_INPUTS={2}".format(fields.nof_signal_inputs, fields.first_baseline, self.MAX_INPUTS))
+
+            # the blocks of baselines need to be tightly packed, and thus be provided at exact intervals
+            if fields.first_baseline[antenna] % self.BLOCK_LENGTH != 0:
+                raise ValueError("Packet describes baselines starting at %s, but we require a multiple of BLOCK_LENGTH=%d" % (fields.first_baseline, self.BLOCK_LENGTH))
+
+        # the payload contains complex values for the block of baselines of size BLOCK_LENGTH x BLOCK_LENGTH
+        # starting at baseline first_baseline.
+        #
+        # we honour this format, as we want to keep the metadata together with these blocks. we do need to put the blocks in a linear
+        # and tight order, however, so we calculate a block index.
+        block_index = baseline_index(fields.first_baseline[0] // self.BLOCK_LENGTH, fields.first_baseline[1] // self.BLOCK_LENGTH)
+
+        # process the packet
+        self.parameters["nof_valid_payloads"][fields.gn_index] += numpy.uint64(1)
+
+        self.parameters["xst_blocks"][block_index][:fields.nof_statistics_per_packet] = fields.payload
+        self.parameters["xst_timestamps"][block_index]        = numpy.float64(fields.timestamp().timestamp())
+        self.parameters["xst_subbands"][block_index]          = numpy.uint16(fields.subband_index)
+        self.parameters["integration_intervals"][block_index] = fields.integration_interval()
+
+    def xst_values(self):
+        """ xst_blocks, but as a matrix[MAX_INPUTS][MAX_INPUTS] of complex values. """
+
+        matrix = numpy.zeros((self.MAX_INPUTS, self.MAX_INPUTS), dtype=numpy.complex64)
+        xst_blocks = self.parameters["xst_blocks"]
+
+        for block_index in range(self.MAX_BLOCKS):
+            # convert real/imag int to complex float values. this works as real/imag come in pairs
+            block = xst_blocks[block_index].astype(numpy.float32).view(numpy.complex64)
+
+            # reshape into [a][b]
+            block = block.reshape(self.BLOCK_LENGTH, self.BLOCK_LENGTH)
+
+            # compute destination in matrix
+            first_baseline = baseline_from_index(block_index)
+            first_baseline = (first_baseline[0] * self.BLOCK_LENGTH, first_baseline[1] * self.BLOCK_LENGTH)
+
+            # copy block into matrix
+            matrix[first_baseline[0]:first_baseline[0]+self.BLOCK_LENGTH, first_baseline[1]:first_baseline[1]+self.BLOCK_LENGTH] = block
+
+        return matrix
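+
+# Usage sketch (illustrative; "a" and "b" are hypothetical input numbers):
+#
+#   collector = XSTCollector()
+#   collector.process_packet(packet)           # for each received XST packet
+#   visibility = collector.xst_values()[a, b]  # complex value for baseline (a, b), a >= b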
+
+
+class StatisticsConsumer(Thread, StatisticsClientThread):
+    """ Thread that takes statistics packets from a queue and feeds them to a StatisticsCollector, asynchronously. """
+
+    # Maximum time to wait for the Thread to get unstuck, if we want to stop
+    DISCONNECT_TIMEOUT = 10.0
+
+    # No default options required, for now
+    _default_options = {}
+
+    def __init__(self, queue: Queue, collector: StatisticsCollector):
+        self.queue = queue
+        self.collector = collector
+        self.last_packet = None
+
+        super().__init__()
+        self.start()
+
+    @property
+    def _options(self) -> dict:
+        return StatisticsConsumer._default_options
+
+    def run(self):
+        logger.info("Starting statistics thread")
+
+        while True:
+            self.last_packet = self.queue.get()
+
+            # This is the exception/slow path, but python doesn't allow us to optimise that
+            if self.last_packet is None:
+                # None is the magic marker to stop processing
+                break
+
+            try:
+                self.collector.process_packet(self.last_packet)
+            except ValueError as e:
+                logger.exception("Could not parse statistics packet")
+
+                # continue processing
+
+        logger.info("Stopped statistics thread")
+
+    def join(self, timeout=0):
+        # insert magic marker
+        self.queue.put(None)
+        logger.info("Sent shutdown to statistics thread")
+
+        super().join(timeout)
+
+    def disconnect(self):
+        # TODO(Corne): Prevent duplicate code across TCPReplicator, UDPReceiver
+        #              and StatisticsConsumer.
+        if not self.is_alive():
+            return
+
+        # try to get the thread shutdown, but don't stall forever
+        self.join(self.DISCONNECT_TIMEOUT)
+
+        if self.is_alive():
+            # there is nothing we can do except wait (stall) longer, which could be indefinitely.
+            logger.error(f"Statistics thread did not shut down after {self.DISCONNECT_TIMEOUT} seconds, just leaving it dangling. Please attach a debugger to thread ID {self.ident}.")
diff --git a/devices/devices/sdp/statistics_packet.py b/devices/devices/sdp/statistics_packet.py
index 6843c99e62c79b2c9afa119aaf0b3b51709269f7..9bac227071dfbdec9ea0b0fd1fa63fa36176a8d9 100644
--- a/devices/devices/sdp/statistics_packet.py
+++ b/devices/devices/sdp/statistics_packet.py
@@ -117,9 +117,9 @@ class StatisticsPacket(object):
         self.nyquist_zone_index = get_bit_value(self.source_info, 13, 14)
         self.t_adc = get_bit_value(self.source_info, 12)
         self.fsub_type = get_bit_value(self.source_info, 11)
-        self.payload_error = get_bit_value(self.source_info, 10)
-        self.beam_repositioning_flag = get_bit_value(self.source_info, 9)
-        self.subband_calibrated_flag = get_bit_value(self.source_info, 8)
+        self.payload_error = (get_bit_value(self.source_info, 10) != 0)
+        self.beam_repositioning_flag = (get_bit_value(self.source_info, 9) != 0)
+        self.subband_calibrated_flag = (get_bit_value(self.source_info, 8) != 0)
         # self.source_info 5-7 are reserved
         self.gn_index = get_bit_value(self.source_info, 0, 4)
 
@@ -210,6 +210,17 @@ class StatisticsPacket(object):
 
         return header
 
+    def payload(self, signed=False) -> numpy.ndarray:
+        """ The payload of this packet, as a linear array. """
+
+        # derive which and how many elements to read from the packet header
+        bytecount_to_struct_type = {1: 'b', 2: 'h', 4: 'i', 8: 'q'} if signed else {1: 'B', 2: 'H', 4: 'I', 8: 'Q'}
+        format_str = ">{}{}".format(self.nof_statistics_per_packet,
+                                    bytecount_to_struct_type[self.nof_bytes_per_statistic])
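+        # e.g. 1024 signed 4-byte statistics would produce ">1024i":
+        # big-endian, 1024 32-bit signed integers (counts are illustrative,
+        # the real values come from the packet header)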
+
+        return numpy.array(
+            struct.unpack(format_str, self.packet[self.header_size:self.header_size + struct.calcsize(format_str)]))
+
 
 class SSTPacket(StatisticsPacket):
     """
@@ -245,16 +256,8 @@ class SSTPacket(StatisticsPacket):
         return header
 
     @property
-    def payload(self) -> numpy.array:
-        """ The payload of this packet, interpreted as SST data. """
-
-        # derive which and how many elements to read from the packet header
-        bytecount_to_unsigned_struct_type = {1: 'B', 2: 'H', 4: 'I', 8: 'Q'}
-        format_str = ">{}{}".format(self.nof_statistics_per_packet,
-                                    bytecount_to_unsigned_struct_type[self.nof_bytes_per_statistic])
-
-        return numpy.array(
-            struct.unpack(format_str, self.packet[self.header_size:self.header_size + struct.calcsize(format_str)]))
+    def payload(self):
+        return super().payload(signed=False)
 
 
 class XSTPacket(StatisticsPacket):
@@ -263,9 +266,10 @@ class XSTPacket(StatisticsPacket):
 
        The following fields are exposed as properties & functions.
 
-
        subband_index:                      subband number for which this packet contains statistics.
-       baseline:                           antenna pair for which this packet contains statistics.
+       first_baseline:                     first antenna pair for which this packet contains statistics.
+
+       payload[nof_signal_inputs][nof_signal_inputs]: the baselines, starting from first_baseline.
     """
 
     def __init__(self, packet):
@@ -281,16 +285,20 @@ class XSTPacket(StatisticsPacket):
         super().unpack_data_id()
 
         self.subband_index = get_bit_value(self.data_id, 16, 24)
-        self.baseline = (get_bit_value(self.data_id, 8, 15), get_bit_value(self.data_id, 0, 7))
+        self.first_baseline = (get_bit_value(self.data_id, 8, 15), get_bit_value(self.data_id, 0, 7))
 
     def header(self):
         header = super().header()
 
         header["data_id"]["subband_index"] = self.subband_index
-        header["data_id"]["baseline"] = self.baseline
+        header["data_id"]["first_baseline"] = self.first_baseline
 
         return header
 
+    @property
+    def payload(self):
+        return super().payload(signed=True)
+
 
 class BSTPacket(StatisticsPacket):
     """
diff --git a/devices/devices/sdp/xst.py b/devices/devices/sdp/xst.py
new file mode 100644
index 0000000000000000000000000000000000000000..caeeb5d3488369ecaf17208d1b33c2b7e6c76511
--- /dev/null
+++ b/devices/devices/sdp/xst.py
@@ -0,0 +1,150 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the XST project
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+""" XST Device Server for LOFAR2.0
+
+"""
+
+# TODO(Corne): Remove sys.path.append hack once packaging is in place!
+import os, sys
+currentdir = os.path.dirname(os.path.realpath(__file__))
+parentdir = os.path.dirname(currentdir)
+parentdir = os.path.dirname(parentdir)
+sys.path.append(parentdir)
+
+# PyTango imports
+from tango.server import run
+from tango.server import device_property, attribute
+from tango import AttrWriteType
+# Additional import
+
+from clients.attribute_wrapper import attribute_wrapper
+from clients.opcua_client import OPCUAConnection
+from clients.statistics_client import StatisticsClient
+
+from devices.hardware_device import hardware_device
+
+from common.lofar_git import get_version
+from common.lofar_logging import device_logging_to_python, log_exceptions
+
+from devices.sdp.statistics import Statistics
+from devices.sdp.statistics_collector import XSTCollector
+
+import numpy
+
+__all__ = ["XST", "main"]
+
+class XST(Statistics):
+
+    STATISTICS_COLLECTOR_CLASS = XSTCollector
+
+    # -----------------
+    # Device Properties
+    # -----------------
+
+    FPGA_xst_offload_hdr_eth_destination_mac_RW_default = device_property(
+        dtype='DevVarStringArray',
+        mandatory=True
+    )
+
+    FPGA_xst_offload_hdr_ip_destination_address_RW_default = device_property(
+        dtype='DevVarStringArray',
+        mandatory=True
+    )
+
+    FPGA_xst_offload_hdr_udp_destination_port_RW_default = device_property(
+        dtype='DevVarUShortArray',
+        mandatory=True
+    )
+
+    FPGA_xst_processing_enable_RW_default = device_property(
+        dtype='DevVarBooleanArray',
+        mandatory=False,
+        default_value=[True] * 16
+    )
+
+    FPGA_xst_subband_select_RW_default = device_property(
+        dtype='DevVarULongArray',
+        mandatory=False,
+        default_value=[[0,102,0,0,0,0,0,0]] * 16
+    )
+
+    # ----------
+    # Attributes
+    # ----------
+
+    # FPGA control points for XSTs
+    FPGA_xst_integration_interval_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_integration_interval_RW"], datatype=numpy.double, dims=(8,16), access=AttrWriteType.READ_WRITE)
+    FPGA_xst_integration_interval_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_integration_interval_R"], datatype=numpy.double, dims=(8,16))
+    FPGA_xst_offload_enable_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_offload_enable_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
+    FPGA_xst_offload_enable_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_offload_enable_R"], datatype=numpy.bool_, dims=(16,))
+    FPGA_xst_offload_hdr_eth_destination_mac_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_offload_hdr_eth_destination_mac_RW"], datatype=numpy.str, dims=(16,), access=AttrWriteType.READ_WRITE)
+    FPGA_xst_offload_hdr_eth_destination_mac_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_offload_hdr_eth_destination_mac_R"], datatype=numpy.str, dims=(16,))
+    FPGA_xst_offload_hdr_ip_destination_address_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_offload_hdr_ip_destination_address_RW"], datatype=numpy.str, dims=(16,), access=AttrWriteType.READ_WRITE)
+    FPGA_xst_offload_hdr_ip_destination_address_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_offload_hdr_ip_destination_address_R"], datatype=numpy.str, dims=(16,))
+    FPGA_xst_offload_hdr_udp_destination_port_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_offload_hdr_udp_destination_port_RW"], datatype=numpy.uint16, dims=(16,), access=AttrWriteType.READ_WRITE)
+    FPGA_xst_offload_hdr_udp_destination_port_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_offload_hdr_udp_destination_port_R"], datatype=numpy.uint16, dims=(16,))
+    FPGA_xst_processing_enable_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_processing_enable_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
+    FPGA_xst_processing_enable_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_processing_enable_R"], datatype=numpy.bool_, dims=(16,))
+    FPGA_xst_subband_select_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_subband_select_RW"], datatype=numpy.uint32, dims=(8,16), access=AttrWriteType.READ_WRITE)
+    FPGA_xst_subband_select_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_subband_select_R"], datatype=numpy.uint32, dims=(8,16))
+
+    # number of packets with valid payloads
+    nof_valid_payloads_R    = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_valid_payloads"}, dims=(XSTCollector.MAX_FPGAS,), datatype=numpy.uint64)
+    # number of packets with invalid payloads
+    nof_payload_errors_R    = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_payload_errors"}, dims=(XSTCollector.MAX_FPGAS,), datatype=numpy.uint64)
+    # latest XSTs
+    xst_blocks_R            = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "xst_blocks"}, dims=(XSTCollector.BLOCK_LENGTH * XSTCollector.BLOCK_LENGTH * XSTCollector.VALUES_PER_COMPLEX, XSTCollector.MAX_BLOCKS), datatype=numpy.int64)
+    # reported timestamp for each row in the latest XSTs
+    xst_timestamp_R         = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "xst_timestamps"}, dims=(XSTCollector.MAX_BLOCKS,), datatype=numpy.uint64)
+    # which subband the XSTs describe
+    xst_subbands_R          = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "xst_subbands"}, dims=(XSTCollector.MAX_BLOCKS,), datatype=numpy.uint16)
+    # integration interval for each row in the latest XSTs
+    integration_interval_R  = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "integration_intervals"}, dims=(XSTCollector.MAX_BLOCKS,), datatype=numpy.float32)
+
+    # xst_R, but as a matrix of input x input
+    xst_real_R              = attribute(max_dim_x=XSTCollector.MAX_INPUTS, max_dim_y=XSTCollector.MAX_INPUTS, dtype=((numpy.float32,),))
+    xst_imag_R              = attribute(max_dim_x=XSTCollector.MAX_INPUTS, max_dim_y=XSTCollector.MAX_INPUTS, dtype=((numpy.float32,),))
+    xst_power_R             = attribute(max_dim_x=XSTCollector.MAX_INPUTS, max_dim_y=XSTCollector.MAX_INPUTS, dtype=((numpy.float32,),))
+    xst_phase_R             = attribute(max_dim_x=XSTCollector.MAX_INPUTS, max_dim_y=XSTCollector.MAX_INPUTS, dtype=((numpy.float32,),))
+
+    def read_xst_real_R(self):
+        return numpy.real(self.statistics_client.collector.xst_values())
+
+    def read_xst_imag_R(self):
+        return numpy.imag(self.statistics_client.collector.xst_values())
+
+    def read_xst_power_R(self):
+        return numpy.abs(self.statistics_client.collector.xst_values())
+
+    def read_xst_phase_R(self):
+        return numpy.angle(self.statistics_client.collector.xst_values())
+
+    # --------
+    # Overloaded functions
+    # --------
+
+    # --------
+    # Commands
+    # --------
+
+# ----------
+# Run server
+# ----------
+def main(args=None, **kwargs):
+    """Main function of the XST Device module."""
+
+    from common.lofar_logging import configure_logger
+    configure_logger()
+
+    return run((XST,), args=args, **kwargs)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/devices/devices/unb2.py b/devices/devices/unb2.py
index 2df8528a621811ac80ca88a08f954ee09acbb3a9..7c2575991605354de5bba608906fb9ea248f021b 100644
--- a/devices/devices/unb2.py
+++ b/devices/devices/unb2.py
@@ -195,7 +195,7 @@ class UNB2(hardware_device):
     @log_exceptions()
     def configure_for_initialise(self):
         """ user code here. is called when the sate is set to INIT """
-        """Initialises the attributes and properties of the PCC."""
+        """Initialises the attributes and properties of theRECV."""
 
         # set up the OPC ua client
         self.OPCua_client = OPCUAConnection("opc.tcp://{}:{}/".format(self.OPC_Server_Name, self.OPC_Server_Port), "http://lofar.eu", self.OPC_Time_Out, self.Fault, self)
diff --git a/devices/examples/load_from_disk/ini_client.py b/devices/examples/load_from_disk/ini_client.py
index dcc66a85ac7fae4cbe3d00a47fe46a809618938b..cd227f23458c672c08b3acf08ba65fa9a48b581d 100644
--- a/devices/examples/load_from_disk/ini_client.py
+++ b/devices/examples/load_from_disk/ini_client.py
@@ -25,7 +25,7 @@ ini_to_numpy_dict = {
     int: numpy.int64,
     float: numpy.float64,
     bool: numpy.bool_,
-    str: numpy.str_
+    str: numpy.str
 }
 
 import os
@@ -171,9 +171,9 @@ def data_handler(string, dtype):
 
         value = dtype(value)
 
-    elif dtype is numpy.str_:
+    elif dtype is numpy.str:
         for i in string.split(","):
-            val = numpy.str_(i)
+            val = numpy.str(i)
             value.append(val)
 
         value = numpy.array(value)
diff --git a/devices/examples/load_from_disk/ini_device.py b/devices/examples/load_from_disk/ini_device.py
index e4aaef9063b16d94b63822d742bcd10bbef8d35f..07b2f419ab6b4cd5d78eb84a66c3906e169da99d 100644
--- a/devices/examples/load_from_disk/ini_device.py
+++ b/devices/examples/load_from_disk/ini_device.py
@@ -80,8 +80,8 @@ class ini_device(hardware_device):
     bool_scalar_R = attribute_wrapper(comms_annotation={"section": "scalar", "name": "bool_scalar_R"}, datatype=numpy.bool_)
     int_scalar_RW = attribute_wrapper(comms_annotation={"section": "scalar", "name": "int_scalar_RW"}, datatype=numpy.int64, access=AttrWriteType.READ_WRITE)
     int_scalar_R = attribute_wrapper(comms_annotation={"section": "scalar", "name": "int_scalar_R"}, datatype=numpy.int64)
-    str_scalar_RW = attribute_wrapper(comms_annotation={"section": "scalar", "name": "str_scalar_RW"}, datatype=numpy.str_, access=AttrWriteType.READ_WRITE)
-    str_scalar_R = attribute_wrapper(comms_annotation={"section": "scalar", "name": "str_scalar_R"}, datatype=numpy.str_)
+    str_scalar_RW = attribute_wrapper(comms_annotation={"section": "scalar", "name": "str_scalar_RW"}, datatype=numpy.str, access=AttrWriteType.READ_WRITE)
+    str_scalar_R = attribute_wrapper(comms_annotation={"section": "scalar", "name": "str_scalar_R"}, datatype=numpy.str)
 
     double_spectrum_RW = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "double_spectrum_RW"}, datatype=numpy.double, dims=(4,), access=AttrWriteType.READ_WRITE)
     double_spectrum_R = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "double_spectrum_R"}, datatype=numpy.double, dims=(4,))
@@ -89,8 +89,8 @@ class ini_device(hardware_device):
     bool_spectrum_R = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "bool_spectrum_R"}, datatype=numpy.bool_, dims=(4,))
     int_spectrum_RW = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "int_spectrum_RW"}, datatype=numpy.int64, dims=(4,), access=AttrWriteType.READ_WRITE)
     int_spectrum_R = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "int_spectrum_R"}, datatype=numpy.int64, dims=(4,))
-    str_spectrum_RW = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "str_spectrum_RW"}, datatype=numpy.str_, dims=(4,), access=AttrWriteType.READ_WRITE)
-    str_spectrum_R = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "str_spectrum_R"}, datatype=numpy.str_, dims=(4,))
+    str_spectrum_RW = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "str_spectrum_RW"}, datatype=numpy.str, dims=(4,), access=AttrWriteType.READ_WRITE)
+    str_spectrum_R = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "str_spectrum_R"}, datatype=numpy.str, dims=(4,))
 
     double_image_RW = attribute_wrapper(comms_annotation={"section": "image", "name": "double_image_RW"}, datatype=numpy.double, dims=(3, 2), access=AttrWriteType.READ_WRITE)
     double_image_R = attribute_wrapper(comms_annotation={"section": "image", "name": "double_image_R"}, datatype=numpy.double, dims=(3, 2))
@@ -98,15 +98,15 @@ class ini_device(hardware_device):
     bool_image_R = attribute_wrapper(comms_annotation={"section": "image", "name": "bool_image_R"}, datatype=numpy.bool_, dims=(3, 2))
     int_image_RW = attribute_wrapper(comms_annotation={"section": "image", "name": "int_image_RW"}, datatype=numpy.int64, dims=(3, 2), access=AttrWriteType.READ_WRITE)
     int_image_R = attribute_wrapper(comms_annotation={"section": "image", "name": "int_image_R"}, datatype=numpy.int64, dims=(3, 2))
-    str_image_RW = attribute_wrapper(comms_annotation={"section": "image", "name": "str_image_RW"}, datatype=numpy.str_, dims=(3, 2), access=AttrWriteType.READ_WRITE)
-    str_image_R = attribute_wrapper(comms_annotation={"section": "image", "name": "str_image_R"}, datatype=numpy.str_, dims=(3, 2))
+    str_image_RW = attribute_wrapper(comms_annotation={"section": "image", "name": "str_image_RW"}, datatype=numpy.str, dims=(3, 2), access=AttrWriteType.READ_WRITE)
+    str_image_R = attribute_wrapper(comms_annotation={"section": "image", "name": "str_image_R"}, datatype=numpy.str, dims=(3, 2))
 
     # --------
     # overloaded functions
     # --------
     def configure_for_initialise(self):
         """ user code here. is called when the sate is set to INIT """
-        """Initialises the attributes and properties of the PCC."""
+        """Initialises the attributes and properties of the Hardware."""
 
         # set up the OPC ua client
         self.ini_client = ini_client("example.ini", self.Fault, self)
diff --git a/devices/examples/snmp/snmp.py b/devices/examples/snmp/snmp.py
index b54c4fe9033d7ec52236f3df74b57874bac1204f..2a912ce1443bbd8e83b662d4ed9764627d947943 100644
--- a/devices/examples/snmp/snmp.py
+++ b/devices/examples/snmp/snmp.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 #
-# This file is part of the PCC project
+# This file is part of the RECV project
 #
 #
 #
@@ -70,15 +70,15 @@ class SNMP(hardware_device):
     # Attributes
     # ----------
 
-    sys_description_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.1.0"}, datatype=numpy.str_)
-    sys_objectID_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.2.0", "type": "OID"}, datatype=numpy.str_)
+    sys_description_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.1.0"}, datatype=numpy.str)
+    sys_objectID_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.2.0", "type": "OID"}, datatype=numpy.str)
     sys_uptime_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.3.0", "type": "TimeTicks"}, datatype=numpy.int64)
-    sys_name_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.5.0"}, datatype=numpy.str_)
-    ip_route_mask_127_0_0_1_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.4.21.1.11.127.0.0.1", "type": "IpAddress"}, datatype=numpy.str_)
+    sys_name_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.5.0"}, datatype=numpy.str)
+    ip_route_mask_127_0_0_1_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.4.21.1.11.127.0.0.1", "type": "IpAddress"}, datatype=numpy.str)
     TCP_active_open_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.6.5.0", "type": "Counter32"}, datatype=numpy.int64)
 
-    sys_contact_RW = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.4.0"}, datatype=numpy.str_, access=AttrWriteType.READ_WRITE)
-    sys_contact_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.4.0"}, datatype=numpy.str_)
+    sys_contact_RW = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.4.0"}, datatype=numpy.str, access=AttrWriteType.READ_WRITE)
+    sys_contact_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.1.4.0"}, datatype=numpy.str)
 
     TCP_Curr_estab_R = attribute_wrapper(comms_annotation={"oids": "1.3.6.1.2.1.6.9.0", "type": "Gauge"}, datatype=numpy.int64)
 
@@ -116,7 +116,7 @@ class SNMP(hardware_device):
 # Run server
 # ----------
 def main(args=None, **kwargs):
-    """Main function of the PCC module."""
+    """Main function of the module."""
 
     from common.lofar_logging import configure_logger
     import logging
diff --git a/devices/examples/snmp/snmp_client.py b/devices/examples/snmp/snmp_client.py
index 635450172387148734e5e3b42ed0f82f067a0048..96ac67140b9bdbdba7ab4d4fb8651b5e9674c219 100644
--- a/devices/examples/snmp/snmp_client.py
+++ b/devices/examples/snmp/snmp_client.py
@@ -12,11 +12,11 @@ __all__ = ["SNMP_client"]
 snmp_to_numpy_dict = {
     snmp.types.INTEGER: numpy.int64,
     snmp.types.TimeTicks: numpy.int64,
-    snmp.types.OCTET_STRING: numpy.str_,
-    snmp.types.OID: numpy.str_,
+    snmp.types.OCTET_STRING: numpy.str,
+    snmp.types.OID: numpy.str,
     snmp.types.Counter32: numpy.int64,
     snmp.types.Gauge32: numpy.int64,
-    snmp.types.IpAddress: numpy.str_,
+    snmp.types.IpAddress: numpy.str,
 }
 
 snmp_types = {
@@ -24,9 +24,9 @@ snmp_types = {
     "Gauge": numpy.int64,
     "TimeTick": numpy.int64,
     "Counter32": numpy.int64,
-    "OctetString": numpy.str_,
-    "IpAddress": numpy.str_,
-    "OID": numpy.str_,
+    "OctetString": numpy.str,
+    "IpAddress": numpy.str,
+    "OID": numpy.str,
 }
 
 
diff --git a/devices/integration_test/README.md b/devices/integration_test/README.md
index 3292bfa0049b5c2312f8e0536e00cc581433ed61..a94aa174badfe5b44ccab770dd8437106c432ad3 100644
--- a/devices/integration_test/README.md
+++ b/devices/integration_test/README.md
@@ -7,7 +7,7 @@ container will be build by the makefiles but should only be started by the
 dedicated integration test script. This script will ensure that other containers
 are running and are in the required state.
 
-* Launch pypcc-sim and sdptr-sim simulators.
+* Launch recv-sim and sdptr-sim simulators.
 * Reconfigure dsconfig to use these simulators.
 * Create and start the integration-test container.
 
@@ -23,4 +23,4 @@ $LOFAR20_DIR/sbin/run_integration_test.sh
 ## Limitations
 
 Our makefile will always launch the new container upon creation, resulting in
-the integration tests actually being run twice.
\ No newline at end of file
+the integration tests actually being run twice.
diff --git a/devices/integration_test/client/test_tcp_replicator.py b/devices/integration_test/client/test_tcp_replicator.py
new file mode 100644
index 0000000000000000000000000000000000000000..ca45c4c52ab7f5e379c484b964a05225950fc9e1
--- /dev/null
+++ b/devices/integration_test/client/test_tcp_replicator.py
@@ -0,0 +1,172 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+import logging
+import time
+import socket
+import sys
+
+from clients.tcp_replicator import TCPReplicator
+
+from integration_test import base
+
+import timeout_decorator
+
+logger = logging.getLogger()
+
+
+class TestTCPReplicator(base.IntegrationTestCase):
+
+    def setUp(self):
+
+        super(TestTCPReplicator, self).setUp()
+
+    def test_start_stop(self):
+        """Test start and stopping the server gracefully"""
+
+        test_options = {
+            "tcp_port": 56565,  # Pick some port with low change of collision
+        }
+
+        replicator = TCPReplicator(test_options)
+        self.assertTrue(replicator.is_alive())
+
+    def test_start_except(self):
+        """Test start and stopping the server gracefully"""
+
+        test_options = {
+            "tcp_port": 56566,  # Pick some port with low change of collision
+        }
+
+        replicator = TCPReplicator(test_options)
+        self.assertTrue(replicator.is_alive())
+
+        self.assertRaises(RuntimeError, TCPReplicator, test_options)
+
+    def test_start_transmit_empty_stop(self):
+        """Test transmitting without clients"""
+
+        test_options = {
+            "tcp_port": 56567,  # Pick some port with low change of collision
+        }
+
+        replicator = TCPReplicator(test_options)
+        self.assertTrue(replicator.is_alive())
+
+        replicator.transmit("Hello World!".encode('utf-8'))
+
+    def test_start_connect_close(self):
+        test_options = {
+            "tcp_port": 56568,  # Pick some port with low change of collision
+        }
+
+        replicator = TCPReplicator(test_options)
+        self.assertTrue(replicator.is_alive())
+
+        time.sleep(2)
+
+        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        s.connect(("127.0.0.1", test_options['tcp_port']))
+
+        time.sleep(2)
+
+        replicator.join()
+
+        self.assertEqual(b'', s.recv(9000))
+
+    @timeout_decorator.timeout(15)
+    def test_start_connect_receive(self):
+        test_options = {
+            "tcp_port": 56569,  # Pick some port with low change of collision
+        }
+
+        m_data = "hello world".encode("utf-8")
+
+        replicator = TCPReplicator(test_options)
+        self.assertTrue(replicator.is_alive())
+
+        time.sleep(2)
+
+        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        s.connect(("127.0.0.1", test_options['tcp_port']))
+
+        time.sleep(2)
+
+        replicator.transmit(m_data)
+
+        data = s.recv(sys.getsizeof(m_data))
+        s.close()
+
+        self.assertEqual(m_data, data)
+
+    @timeout_decorator.timeout(15)
+    def test_start_connect_receive_multiple(self):
+        test_options = {
+            "tcp_port": 56570,  # Pick some port with low change of collision
+        }
+
+        m_data = "hello world".encode("utf-8")
+
+        replicator = TCPReplicator(test_options)
+        self.assertTrue(replicator.is_alive())
+
+        time.sleep(2)
+
+        s1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        s1.connect(("127.0.0.1", test_options['tcp_port']))
+
+        s2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        s2.connect(("127.0.0.1", test_options['tcp_port']))
+
+        time.sleep(3)
+
+        replicator.transmit(m_data)
+
+        data1 = s1.recv(sys.getsizeof(m_data))
+        s1.close()
+
+        data2 = s2.recv(sys.getsizeof(m_data))
+        s2.close()
+
+        self.assertEqual(m_data, data1)
+        self.assertEqual(m_data, data2)
+
+    @timeout_decorator.timeout(15)
+    def test_start_connect_receive_multiple_queue(self):
+        test_options = {
+            "tcp_port": 56571,  # Pick some port with low change of collision
+        }
+
+        m_data = "hello world".encode("utf-8")
+
+        replicator = TCPReplicator(test_options)
+        self.assertTrue(replicator.is_alive())
+
+        time.sleep(2)
+
+        s1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        s1.connect(("127.0.0.1", test_options['tcp_port']))
+
+        s2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        s2.connect(("127.0.0.1", test_options['tcp_port']))
+
+        time.sleep(3)
+
+        replicator.put(m_data)
+
+        data1 = s1.recv(sys.getsizeof(m_data))
+        s1.close()
+
+        data2 = s2.recv(sys.getsizeof(m_data))
+        s2.close()
+
+        self.assertEqual(m_data, data1)
+        self.assertEqual(m_data, data2)
diff --git a/devices/integration_test/client/test_apsct_sim.py b/devices/integration_test/client/test_unb2_sim.py
similarity index 65%
rename from devices/integration_test/client/test_apsct_sim.py
rename to devices/integration_test/client/test_unb2_sim.py
index 775c34cd207699f7febb435000314c65db97b66a..678930cd5f092c94f9242a01a58d139993f2504f 100644
--- a/devices/integration_test/client/test_apsct_sim.py
+++ b/devices/integration_test/client/test_unb2_sim.py
@@ -12,16 +12,15 @@ from opcua import Client
 from integration_test import base
 
 
-class TestAPSCTSim(base.IntegrationTestCase):
+class TestUNB2Sim(base.IntegrationTestCase):
 
     def setUp(self):
-        super(TestAPSCTSim, self).setUp()
+        super(TestUNB2Sim, self).setUp()
 
     def test_opcua_connection(self):
-        """Check if we can connect to apsct-sim"""
+        """Check if we can connect to unb2-sim"""
 
-        #TODO(Corne): Replace to APSCT name once simulator name has changed
-        client = Client("opc.tcp://pypcc-sim:4842")
+        client = Client("opc.tcp://recv-sim:4842")
         root_node = None
 
         try:
diff --git a/devices/integration_test/devices/test_device_pcc.py b/devices/integration_test/devices/test_device_recv.py
similarity index 73%
rename from devices/integration_test/devices/test_device_pcc.py
rename to devices/integration_test/devices/test_device_recv.py
index b3b7a4672dbb18790d19144aeb35bcacd68e4bfb..3a010a000c03d3c039f8f93a68c0f6437bc30db1 100644
--- a/devices/integration_test/devices/test_device_pcc.py
+++ b/devices/integration_test/devices/test_device_recv.py
@@ -15,14 +15,14 @@ from tango._tango import DevState
 from integration_test import base
 
 
-class TestDevicePCC(base.IntegrationTestCase):
+class TestDeviceRECV(base.IntegrationTestCase):
 
     def setUp(self):
-        super(TestDevicePCC, self).setUp()
+        super(TestDeviceRECV, self).setUp()
 
     def tearDown(self):
         """Turn device Off in teardown to prevent blocking tests"""
-        d = DeviceProxy("LTS/PCC/1")
+        d = DeviceProxy("LTS/RECV/1")
 
         try:
             d.Off()
@@ -30,26 +30,26 @@ class TestDevicePCC(base.IntegrationTestCase):
             """Failing to turn Off devices should not raise errors here"""
             print(f"Failed to turn device off in teardown {e}")
 
-    def test_device_proxy_pcc(self):
+    def test_device_proxy_recv(self):
         """Test if we can successfully create a DeviceProxy and fetch state"""
 
-        d = DeviceProxy("LTS/PCC/1")
+        d = DeviceProxy("LTS/RECV/1")
 
         self.assertEqual(DevState.OFF, d.state())
 
-    def test_device_pcc_initialize(self):
+    def test_device_recv_initialize(self):
         """Test if we can transition to standby"""
 
-        d = DeviceProxy("LTS/PCC/1")
+        d = DeviceProxy("LTS/RECV/1")
 
         d.initialise()
 
         self.assertEqual(DevState.STANDBY, d.state())
 
-    def test_device_pcc_on(self):
+    def test_device_recv_on(self):
         """Test if we can transition to on"""
 
-        d = DeviceProxy("LTS/PCC/1")
+        d = DeviceProxy("LTS/RECV/1")
 
         d.initialise()
 
diff --git a/devices/integration_test/devices/test_device_sst.py b/devices/integration_test/devices/test_device_sst.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6b71d328305f2dafed46f9e4f3ea9209df9601d
--- /dev/null
+++ b/devices/integration_test/devices/test_device_sst.py
@@ -0,0 +1,140 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+import socket
+import sys
+import time
+
+from tango import DeviceProxy
+from tango._tango import DevState
+
+from integration_test import base
+
+
+class TestDeviceSST(base.IntegrationTestCase):
+
+    def setUp(self):
+        """Intentionally recreate the device object in each test"""
+        super(TestDeviceSST, self).setUp()
+
+    def tearDown(self):
+        """Turn device Off in teardown to prevent blocking tests"""
+        d = DeviceProxy("LTS/SST/1")
+
+        try:
+            d.Off()
+        except Exception as e:
+            """Failing to turn Off devices should not raise errors here"""
+            print(f"Failed to turn device off in teardown {e}")
+
+    def test_device_proxy_sst(self):
+        """Test if we can successfully create a DeviceProxy and fetch state"""
+
+        d = DeviceProxy("LTS/SST/1")
+
+        self.assertEqual(DevState.OFF, d.state())
+
+    def test_device_sst_initialize(self):
+        """Test if we can transition to standby"""
+
+        d = DeviceProxy("LTS/SST/1")
+
+        d.initialise()
+
+        self.assertEqual(DevState.STANDBY, d.state())
+
+    def test_device_sst_on(self):
+        """Test if we can transition to on"""
+
+        port_property = {"Statistics_Client_TCP_Port": "4999"}
+
+        d = DeviceProxy("LTS/SST/1")
+
+        self.assertEqual(DevState.OFF, d.state(),
+                         "Prerequisite could not be met "
+                         "this test can not continue")
+
+        d.put_property(port_property)
+
+        d.initialise()
+
+        self.assertEqual(DevState.STANDBY, d.state())
+
+        d.on()
+
+        self.assertEqual(DevState.ON, d.state())
+
+    def test_device_sst_send_udp(self):
+        port_property = {"Statistics_Client_TCP_Port": "4998"}
+
+        d = DeviceProxy("LTS/SST/1")
+
+        self.assertEqual(DevState.OFF, d.state(),
+                         "Prerequisite could not be met "
+                         "this test can not continue")
+
+        d.put_property(port_property)
+
+        d.initialise()
+
+        self.assertEqual(DevState.STANDBY, d.state())
+
+        d.on()
+
+        self.assertEqual(DevState.ON, d.state())
+
+        s1 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+        s1.connect(("device-sst", 5001))
+
+        # TODO(Corne): Change me into an actual SST packet
+        s1.send("Hello World!".encode("UTF-8"))
+
+        s1.close()
+
+    def test_device_sst_connect_tcp_receive(self):
+        port_property = {"Statistics_Client_TCP_Port": "5101"}
+
+        m_data = "Hello World!".encode("UTF-8")
+
+        d = DeviceProxy("LTS/SST/1")
+
+        self.assertEqual(DevState.OFF, d.state(),
+                         "Prerequisite could not be met "
+                         "this test can not continue")
+
+        d.put_property(port_property)
+
+        d.initialise()
+
+        self.assertEqual(DevState.STANDBY, d.state())
+
+        d.on()
+
+        self.assertEqual(DevState.ON, d.state())
+
+        time.sleep(2)
+
+        s1 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+        s1.connect(("device-sst", 5001))
+
+        s2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        s2.connect(("device-sst", 5101))
+
+        time.sleep(2)
+
+        # TODO(Corne): Change me into an actual SST packet
+        s1.send(m_data)
+
+        time.sleep(2)
+
+        data = s2.recv(sys.getsizeof(m_data))
+
+        s1.close()
+        s2.close()
+
+        self.assertEqual(m_data, data)
diff --git a/devices/integration_test/devices/test_device_unb2.py b/devices/integration_test/devices/test_device_unb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..97f31ab6ee162f8183db963e92f4f03b9ee7f617
--- /dev/null
+++ b/devices/integration_test/devices/test_device_unb2.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+import time
+
+from tango import DeviceProxy
+from tango._tango import DevState
+
+from integration_test import base
+
+
+class TestDeviceUNB2(base.IntegrationTestCase):
+
+    def setUp(self):
+        """Intentionally recreate the device object in each test"""
+        super(TestDeviceUNB2, self).setUp()
+
+    def tearDown(self):
+        """Turn device Off in teardown to prevent blocking tests"""
+        d = DeviceProxy("LTS/UNB2/1")
+
+        try:
+            d.Off()
+        except Exception as e:
+            """Failing to turn Off devices should not raise errors here"""
+            print(f"Failed to turn device off in teardown {e}")
+
+    def test_device_proxy_unb2(self):
+        """Test if we can successfully create a DeviceProxy and fetch state"""
+
+        d = DeviceProxy("LTS/UNB2/1")
+
+        self.assertEqual(DevState.OFF, d.state())
+
+    def test_device_unb2_initialize(self):
+        """Test if we can transition to standby"""
+
+        d = DeviceProxy("LTS/UNB2/1")
+
+        d.initialise()
+
+        self.assertEqual(DevState.STANDBY, d.state())
+
+    def test_device_unb2_on(self):
+        """Test if we can transition to on"""
+
+        d = DeviceProxy("LTS/UNB2/1")
+
+        d.initialise()
+
+        d.on()
+
+        self.assertEqual(DevState.ON, d.state())
diff --git a/devices/setup.cfg b/devices/setup.cfg
index 586aa190649d3c54b04ce586cdbaa4565570b1b1..55b29032e6aefc1787179c054b701b7fc51323ac 100644
--- a/devices/setup.cfg
+++ b/devices/setup.cfg
@@ -1,11 +1,11 @@
 [metadata]
 name = TangoStationControl
 summary = LOFAR 2.0 Station Control
-description-file =
+description_file =
     README.md
-description-content-type = text/x-rst; charset=UTF-8
+description_content_type = text/x-rst; charset=UTF-8
 author = ASTRON
-home-page = https://astron.nl
+home_page = https://astron.nl
 project_urls =
     Bug Tracker = https://support.astron.nl/jira/projects/L2SS/issues/
     Source Code = https://git.astron.nl/lofar2.0/tango
@@ -27,4 +27,4 @@ package_dir=./
 [entry_points]
 console_scripts =
     SDP = SDP:main
-    PCC = PCC:main
+    RECV = RECV:main
diff --git a/devices/statistics_writer/README.md b/devices/statistics_writer/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..62e940bacb6512eb702cc4fdd816b8ba61153958
--- /dev/null
+++ b/devices/statistics_writer/README.md
@@ -0,0 +1,59 @@
+# TCP to HDF5 statistics writer
+The TCP to HDF5 statistics writer can be started with `statistics_writer.py`. This script imports
+`receiver.py` and `hdf5_writer.py`. `receiver.py` only takes care of receiving packets;
+`statistics_writer.py` reads packets from the receiver and feeds them to the writer.
+Any source that can deliver statistics packets can be used by this code.
+`hdf5_writer.py` takes care of processing the packets it receives, filling statistics matrices
+and writing those matrices (as well as a bunch of metadata) to HDF5.
+
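+A minimal sketch of wiring an alternative packet source to the writer, mirroring what `statistics_writer.py` does,
+here reading from a capture file instead of a live TCP stream (the file path is an example):
+
+```
+from receiver import file_receiver
+from hdf5_writer import hdf5_writer
+
+receiver = file_receiver("test/devices_test_SDP_SST_statistics_packets.bin")
+writer = hdf5_writer(new_file_time_interval=3600, file_location=".", statistics_mode="SST")
+
+try:
+    while True:
+        writer.next_packet(receiver.get_packet())
+except EOFError:
+    pass  # end of the capture file
+finally:
+    writer.close_writer()
+```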
+
+### TCP Statistics writer
+
+The TCP statistics writer can be started with the `statistics_writer.py` script.
+This script can be called with the following arguments:
+  ```
+  --host        the address to connect to
+  --port        the port to use
+  --file        the file to read from (as opposed to a host and port)
+  --interval    the time between creating new files, in seconds
+  --output_dir  the folder to write all the files to
+  --mode        the statistics type to decode: "SST", "XST" or "BST"
+  --debug       takes no arguments; when used, prints a lot of extra data to help with debugging
+  ```
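+
+  For example, a hypothetical invocation that decodes SST packets from a live TCP stream (the host is a placeholder;
+  5101 is the default SST port):
+
+  ```
+  python3 statistics_writer.py --host 127.0.0.1 --port 5101 --mode SST --output_dir ./data
+  ```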
+
+
+## HDF5 structure
+Statistics packets are collected by the StatisticsCollector into a matrix. Once the matrix is complete or a packet
+with a newer timestamp arrives, the matrix is written to file, along with the header of its first packet,
+nof_payload_errors and nof_valid_payloads.
+The file is named after the mode and the timestamp of the statistics packets. For example: `SST_1970-01-01-00-00-00.h5`.
+
+
+```
+File
+|
+|------ {mode_timestamp}  |- {statistics matrix}
+|                         |- {first packet header}
+|                         |- {nof_valid_payloads}
+|                         |- {nof_payload_errors}
+|
+|------ {mode_timestamp}  |- {statistics matrix}
+|                         |- {first packet header}
+|                         |- {nof_valid_payloads}
+|                         |- {nof_payload_errors}
+|
+...
+```
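+
+A minimal sketch (assuming an SST file and the group and dataset names written by `hdf5_writer.py`) of reading such a
+file back with `h5py`:
+
+```
+import h5py
+
+with h5py.File("SST_1970-01-01-00-00-00.h5", "r") as file:
+    for group_key in file.keys():
+        group = file[group_key]
+        # each group holds the statistics matrix plus the packet counters
+        print(group_key, group["sst_values"][:].shape, group["nof_valid_payloads"][:])
+        # the header of the first packet of the matrix is stored as group attributes
+        print(dict(group.attrs))
+```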
+
+### Explorer
+There is an HDF5 explorer that will walk through specified HDF5 files.
+It is called `hdf5_explorer.py` and can be called with a `--file` argument,
+e.g. `python3 hdf5_explorer.py --file data/SST_1970-01-01-00-00-00.h5`. This allows for easy manual checking
+of the structure and content of HDF5 files, which is useful for testing and debugging.
+It can also be used as an example of how to read the HDF5 statistics data files, and
+provides a number of example functions that go through the file in various ways.
+
+### Test server
+There is a test server that continuously sends out the same statistics packet.
+It is called `test_server.py` and takes `--host`, `--port`, `--file` and `--interval` as optional arguments.
+It defaults to address `127.0.0.1`, port `65433` and file `devices_test_SDP_SST_statistics_packets.bin`.
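+
+For example, a hypothetical end-to-end test, run from the `test` directory, that feeds the writer from this server
+using the defaults above:
+
+```
+python3 test_server.py &
+python3 ../statistics_writer.py --host 127.0.0.1 --port 65433 --mode SST
+```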
+
diff --git a/devices/statistics_writer/hdf5_writer.py b/devices/statistics_writer/hdf5_writer.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a5eb5ee3fe4d5d05094874d65293ea7fbc68314
--- /dev/null
+++ b/devices/statistics_writer/hdf5_writer.py
@@ -0,0 +1,223 @@
+# imports for working with datetime objects
+from datetime import datetime, timedelta
+import pytz
+
+# python hdf5
+import h5py
+
+import numpy
+import logging
+
+# import statistics classes with workaround
+import sys
+sys.path.append("..")
+from devices.sdp.statistics_packet import SSTPacket, XSTPacket, BSTPacket, StatisticsPacket
+import devices.sdp.statistics_collector as statistics_collector
+
+
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger("hdf5_writer")
+
+__all__ = ["hdf5_writer"]
+
+class hdf5_writer:
+
+    SST_MODE = "SST"
+    XST_MODE = "XST"
+    BST_MODE = "BST"
+
+
+    def __init__(self, new_file_time_interval, file_location, statistics_mode):
+
+        # all variables that deal with the matrix that's currently being decoded
+        self.current_matrix = None
+        self.current_timestamp = datetime.min.replace(tzinfo=pytz.UTC)
+
+        # the header of the first packet of a new matrix is written as metadata.
+        # Assumes all subsequent headers of the same matrix are identical (minus index)
+        self.statistics_header = None
+
+        # file handling
+        self.file_location = file_location
+        self.new_file_time_interval = timedelta(seconds=new_file_time_interval)
+        self.last_file_time = datetime.min.replace(tzinfo=pytz.UTC)
+        self.file = None
+
+        # parameters that are configured depending on the mode the statistics writer is in (SST,XST,BST)
+        self.decoder = None
+        self.collector = None
+        self.store_function = None
+        self.mode = statistics_mode.upper()
+        self.config_mode()
+
+    def next_packet(self, packet):
+        """
+        All statistics packets come with the timestamp at which they were measured. The values for a single timestamp
+        are spread across multiple packets; as long as the timestamp is the same, they belong in the same matrix. This
+        code handles collecting the matrix from those multiple packets, as well as storing matrices and starting new
+        ones.
+
+        The code receives new packets and checks their statistics timestamp. If the timestamp is newer than the
+        current one, it will close the current matrix, store it, and start a new one.
+        """
+
+        # process the packet
+        statistics_packet = self.decoder(packet)
+
+        if not self.statistics_header:
+            self.statistics_header = statistics_packet.header()
+
+        # grab the timestamp
+        statistics_timestamp = statistics_packet.timestamp()
+
+        # ignore packets with no timestamp, as they indicate FPGA processing was disabled
+        # and are useless anyway.
+        if statistics_packet.block_serial_number == 0:
+            logger.warning(f"Received statistics with no timestamp. Packet dropped.")
+            return
+
+        # check if the statistics timestamp is unexpectedly older than the current one
+        if statistics_timestamp < self.current_timestamp:
+            logger.warning(f"Received statistics with earlier timestamp than is currently being processed ({statistics_timestamp}). Packet dropped.")
+            return
+
+        # if this statistics packet has a new timestamp it means we need to start a new matrix
+        if statistics_timestamp > self.current_timestamp:
+            self.start_new_matrix(statistics_timestamp)
+            self.current_timestamp = statistics_timestamp
+
+        self.process_packet(packet)
+
+    def start_new_matrix(self, timestamp):
+        """
+        Is called when a statistics packet with a newer timestamp is received.
+        Writes the current matrix to the hdf5 file,
+        creates a new hdf5 file if needed and
+        updates the current timestamp and statistics matrix collector.
+        """
+        logger.info(f"starting new matrix with timestamp: {timestamp}")
+
+        # write the finished matrix (unless this is the very first packet, when there is no matrix to write yet)
+        if self.current_matrix is not None:
+            try:
+                self.write_matrix()
+            except Exception as e:
+                time = self.current_timestamp.strftime("%Y-%m-%d-%H-%M-%S-%f")[:-3]
+                logger.exception(f"Exception while attempting to write matrix to HDF5. Matrix: {time} dropped")
+
+        # only start a new file if its time AND we are done with the previous matrix.
+        if timestamp >= self.new_file_time_interval + self.last_file_time:
+            self.start_new_hdf5(timestamp)
+
+        # create a new and empty current_matrix
+        self.current_matrix = self.collector()
+        self.statistics_header = None
+
+    def write_matrix(self):
+        """
+        Writes the finished matrix to the hdf5 file.
+        """
+        logger.info("writing matrix to file")
+
+        # create the new hdf5 group based on the timestamp of packets
+        current_group = self.file.create_group("{}_{}".format(self.mode, self.current_timestamp.strftime("%Y-%m-%d-%H-%M-%S-%f")[:-3]))
+
+        # store the statistics values for the current group
+        self.store_function(current_group)
+
+        # might be optional, but they're easy to add.
+        current_group.create_dataset(name="nof_payload_errors", data=self.current_matrix.parameters["nof_payload_errors"])
+        current_group.create_dataset(name="nof_valid_payloads", data=self.current_matrix.parameters["nof_valid_payloads"])
+
+        # get the statistics header
+        header = self.statistics_header
+
+        # can't store datetime objects, convert to string instead
+        header["timestamp"] = header["timestamp"].isoformat(timespec="milliseconds")
+
+        # store the header of the first packet received for this matrix as attributes
+        for k,v in header.items():
+            if type(v) == dict:
+                for subk, subv in v.items():
+                    current_group.attrs[f"{k}_{subk}"] = subv
+            else:
+                current_group.attrs[k] = v
+
+    def write_sst_matrix(self, current_group):
+        # store the SST values
+        current_group.create_dataset(name="sst_values", data=self.current_matrix.parameters["sst_values"].astype(numpy.float32), compression="gzip")
+
+    def write_xst_matrix(self, current_group):
+        # requires a function call to transform the xst_blocks in to the right structure
+        current_group.create_dataset(name="xst_values", data=self.current_matrix.xst_values().astype(numpy.cfloat), compression="gzip")
+
+    def write_bst_matrix(self, current_group):
+        raise NotImplementedError("BST values not implemented")
+
+
+    def process_packet(self, packet):
+        """
+        Adds the newly received statistics packet to the statistics matrix.
+        """
+        logger.debug("Processing packet")
+        self.current_matrix.process_packet(packet)
+
+    def start_new_hdf5(self, timestamp):
+
+        if self.file is not None:
+            try:
+                self.file.close()
+            except Exception as e:
+                logger.exception(f"Error while attempting to close hdf5 file to disk. file {self.file} likely empty, please verify integrity.")
+
+        current_time = str(timestamp.strftime("%Y-%m-%d-%H-%M-%S"))
+        logger.info(f"creating new file: {self.file_location}/{self.mode}_{current_time}.h5")
+
+        try:
+            self.file = h5py.File(f"{self.file_location}/{self.mode}_{current_time}.h5", 'w')
+        except Exception:
+            logger.exception("Error while creating new file")
+            raise
+
+        self.last_file_time = timestamp
+
+    def config_mode(self):
+        """
+        Configures the object for the correct statistics type to be used.
+
+        decoder:            the class to decode a single packet
+        collector:          the class to collect statistics packets
+        store_function:     the function to write the mode specific data to file
+        """
+        logger.debug(f"attempting to configure {self.mode} mode")
+
+        if self.mode == self.SST_MODE:
+            self.decoder = SSTPacket
+            self.collector = statistics_collector.SSTCollector
+            self.store_function = self.write_sst_matrix
+
+        elif self.mode == self.XST_MODE:
+            self.decoder = XSTPacket
+            self.collector = statistics_collector.XSTCollector
+            self.store_function = self.write_xst_matrix
+
+        elif self.mode == self.BST_MODE:
+            self.store_function = self.write_bst_matrix
+            raise NotImplementedError("BST collector has not yet been implemented")
+
+        else:
+            raise ValueError("invalid statistics mode specified '{}', please use 'SST', 'XST' or 'BST' ".format(self.mode))
+
+    def close_writer(self):
+        """
+        Function that can be used to stop the writer without data loss.
+        """
+        logger.debug("closing hdf5 file")
+        if self.file is not None:
+            if self.current_matrix is not None:
+                # Write matrix if one exists
+                # only creates file if there is a matrix to actually write
+                try:
+                    self.write_matrix()
+                finally:
+                    self.file.close()
+                    logger.debug(f"{self.file} closed")
diff --git a/devices/statistics_writer/receiver.py b/devices/statistics_writer/receiver.py
new file mode 100644
index 0000000000000000000000000000000000000000..919357764a2196cb7955e4ec77f2487b81d24d59
--- /dev/null
+++ b/devices/statistics_writer/receiver.py
@@ -0,0 +1,65 @@
+import socket
+
+import sys
+sys.path.append("..")
+from devices.sdp.statistics_packet import StatisticsPacket
+import os
+
+class receiver:
+    """ Reads data from a file descriptor. """
+
+    HEADER_LENGTH = 32
+
+    def __init__(self, fd):
+        self.fd = fd
+
+    def get_packet(self) -> bytes:
+        """ Read exactly one statistics packet from the TCP connection. """
+
+        # read only the header, to compute the size of the packet
+        header = self.read_data(self.HEADER_LENGTH)
+        packet = StatisticsPacket(header)
+
+        # read the rest of the packet (payload)
+        payload_length = packet.expected_size() - len(header)
+        payload = self.read_data(payload_length)
+
+        # add payload to the header, and return the full packet
+        return header + payload
+
+    def read_data(self, data_length: int) -> bytes:
+        """ Read exactly data_length bytes from the TCP connection. """
+
+        data = b''
+        while len(data) < data_length:
+            # try to read the remainder.
+            # NOTE: os.read() may return less data than requested, and returns 0
+            # if there is nothing left to read (end of stream)
+            more_data = os.read(self.fd, data_length - len(data))
+            if not more_data:
+                # connection got dropped
+                raise EOFError("End of stream")
+
+            data += more_data
+
+        return data
+
+class tcp_receiver(receiver):
+    def __init__(self, HOST, PORT):
+        self.host = HOST
+        self.port = PORT
+
+        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        self.sock.connect((self.host, self.port))
+
+        super().__init__(fd=self.sock.fileno())
+
+class file_receiver(receiver):
+    def __init__(self, filename):
+        self.filename = filename
+        self.fileno = os.open(filename, os.O_RDONLY)
+
+        super().__init__(fd=self.fileno)
+
+    def __del__(self):
+        os.close(self.fileno)
diff --git a/devices/statistics_writer/statistics_writer.py b/devices/statistics_writer/statistics_writer.py
new file mode 100644
index 0000000000000000000000000000000000000000..444ee2323e950a0428513cb4506d8b2b2376fc27
--- /dev/null
+++ b/devices/statistics_writer/statistics_writer.py
@@ -0,0 +1,70 @@
+import argparse
+from receiver import tcp_receiver, file_receiver
+from hdf5_writer import hdf5_writer
+
+import sys
+import signal
+
+import logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger("statistics_writer")
+
+parser = argparse.ArgumentParser(description='Converts a stream of statistics packets into HDF5 files.')
+parser.add_argument('--host', type=str, help='the host to connect to')
+parser.add_argument('--port', type=int, default=0, help='the port to connect to, or 0 to use default port for the selected mode (default: %(default)s)')
+parser.add_argument('--file', type=str, help='the file to read from')
+
+parser.add_argument('--mode', type=str, choices=['SST', 'XST', 'BST'], default='SST', help='sets the statistics type to be decoded options (default: %(default)s)')
+parser.add_argument('--interval', type=float, default=3600, nargs="?", help='The time between creating new files in seconds (default: %(default)s)')
+parser.add_argument('--output_dir', type=str, default=".", nargs="?", help='specifies the folder to write all the files (default: %(default)s)')
+parser.add_argument('--debug', dest='debug', action='store_true', default=False, help='increase log output')
+
+
+# main program: read statistics packets from the input and write them to HDF5 files
+if __name__ == "__main__":
+    args = parser.parse_args()
+
+    # argparse arguments
+    host = args.host
+    port = args.port
+    filename = args.file
+    output_dir = args.output_dir
+    interval = args.interval
+    mode = args.mode
+    debug = args.debug
+
+    if port == 0:
+        default_ports = { "SST": 5101, "XST": 5102, "BST": 5103 }
+        port = default_ports[mode]
+
+    if debug:
+        logger.setLevel(logging.DEBUG)
+        logger.debug("Setting loglevel to DEBUG")
+
+    # create the receiver (file or TCP stream) that is given to the writer
+    if filename:
+        receiver = file_receiver(filename)
+    elif host and port:
+        receiver = tcp_receiver(host, port)
+    else:
+        logger.fatal("Must provide either a host and port, or a file to receive input from")
+        sys.exit(1)
+
+    # create the writer
+    writer = hdf5_writer(new_file_time_interval=interval, file_location=output_dir, statistics_mode=mode)
+
+    # start looping
+    try:
+        while True:
+            packet = receiver.get_packet()
+            writer.next_packet(packet)
+    except KeyboardInterrupt:
+        # user abort, don't complain
+        logger.warning("Received keyboard interrupt. Stopping.")
+    except EOFError:
+        # done processing all input, don't complain
+        logger.info("End of input.")
+    finally:
+        writer.close_writer()
diff --git a/devices/statistics_writer/test/devices_test_SDP_SST_statistics_packets.bin b/devices/statistics_writer/test/devices_test_SDP_SST_statistics_packets.bin
new file mode 100644
index 0000000000000000000000000000000000000000..e94347b86a0a03b940eb84980ec8f6d3b6d4e2d7
Binary files /dev/null and b/devices/statistics_writer/test/devices_test_SDP_SST_statistics_packets.bin differ
diff --git a/devices/statistics_writer/test/hdf5_explorer.py b/devices/statistics_writer/test/hdf5_explorer.py
new file mode 100644
index 0000000000000000000000000000000000000000..102c36b79f7beeb6a34ffba9b95a495a85a76f6e
--- /dev/null
+++ b/devices/statistics_writer/test/hdf5_explorer.py
@@ -0,0 +1,95 @@
+import h5py
+import numpy
+
+import argparse
+
+parser = argparse.ArgumentParser(description='Select a file to explore')
+parser.add_argument('--file', type=str, help='the name and path of the file')
+
+import logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger("hdf5_explorer")
+logger.setLevel(logging.DEBUG)
+
+
+class statistics_data:
+    """
+    Example class, not used by anything.
+    This class takes the file and the statistics name as its __init__ arguments and then stores
+    the datasets in them.
+    """
+
+class explorer:
+    """
+    This class serves both as a tool to test and verify the content of HDF5 files as well as provide an example
+    of how you can go through HDF5 files.
+    """
+
+
+    def __init__(self, filename):
+        self.file = h5py.File(filename, 'r')
+
+    def print_all_statistics_full(self):
+        """
+        Explores the file with knowledge of the file structure. Assumes all top level groups are statistics
+        and that all statistics groups are made up of datasets.
+        Prints the groups, the datasets and the content of the datasets.
+
+        Can easily be modified to store the data in whatever structure is needed, instead of just logging it all.
+        """
+
+        for group_key in self.file.keys():
+            dataset = list(self.file[group_key])
+
+            #print group name
+            logger.debug(f" \n\ngroup: {group_key}")
+
+            # Go through all the datasets
+            for i in dataset:
+                data = self.file.get(f"{group_key}/{i}")
+                logger.debug(f" dataset: {i}")
+                logger.debug(f" Data: {numpy.array(data)}")
+
+            # go through all the attributes in the group (This is the header info)
+            attr_keys = self.file[group_key].attrs.keys()
+            for i in attr_keys:
+                attr = self.file[group_key].attrs[i]
+
+                logger.debug(f" {i}: {attr}")
+
+    def print_all_statistics_top_level(self):
+        """
+        Explores the file with knowledge of the file structure. Assumes all top level groups are statistics
+        and that all statistics groups are made up of datasets.
+        This function prints only the top level groups, AKA all the statistics collected. Useful when dealing with
+        potentially hundreds of statistics.
+        """
+        # List all groups
+        logger.debug("Listing all statistics stored in this file:")
+
+        for group_key in self.file.keys():
+            logger.debug(group_key)
+
+
+# walk through the given HDF5 file and print its contents
+if __name__ == "__main__":
+    args = parser.parse_args()
+    Explorer = explorer(args.file)
+
+    """
+    Print the entire files content
+    """
+    Explorer.print_all_statistics_full()
+
+    """
+    Print only the names of all the statistics in this file
+    """
+    logger.debug("--------------Top level groups--------------")
+    Explorer.print_all_statistics_top_level()
+
+
+
+
+
+
+
diff --git a/devices/statistics_writer/test/test_server.py b/devices/statistics_writer/test/test_server.py
new file mode 100644
index 0000000000000000000000000000000000000000..eec9ec3eed992b03ee809ca37de012bad43bd213
--- /dev/null
+++ b/devices/statistics_writer/test/test_server.py
@@ -0,0 +1,52 @@
+import argparse
+import logging
+import socket
+import time
+
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger("statistics_test_server")
+logger.setLevel(logging.DEBUG)
+
+parser = argparse.ArgumentParser(description='Select what hostname to use and what port to use')
+parser.add_argument('--port', type=int, help='port to use', default=65433)
+parser.add_argument('--host',  help='host to use', default='127.0.0.1')
+parser.add_argument('--file',  help='file to use as data', default='devices_test_SDP_SST_statistics_packets.bin')
+parser.add_argument('--interval', type=int, help='time between sending the entire file contents', default=1)
+
+args = parser.parse_args()
+HOST = args.host
+PORT = args.port
+FILE = args.file
+INTERVAL = args.interval
+
+
+while True:
+    try:
+        with open(FILE, "rb") as f:
+            data = f.read()
+    except Exception as e:
+        logger.error(f"Could not read file '{FILE}', are you sure it is a valid path? Exception: {e}")
+        exit(1)
+
+    try:
+        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+            logger.debug(f"Starting TCP test server on {HOST} {PORT}")
+            logger.debug("To interrupt the script, press Ctrl-C twice within a second")
+
+            s.bind((HOST, PORT))
+            s.listen()
+            conn, addr = s.accept()
+
+            with conn:
+                logger.debug(f'Connected by: {addr}')
+
+                while True:
+                    time.sleep(INTERVAL)
+                    conn.sendall(data)
+
+    except Exception as e:
+        logger.warning(f"Exception occurred: {e}")
+
+        # press Ctrl-C twice within a second to quit the program
+        time.sleep(1)
diff --git a/devices/statistics_writer/udp_dev/udp_client.py b/devices/statistics_writer/udp_dev/udp_client.py
new file mode 100644
index 0000000000000000000000000000000000000000..cef6a079d17dc0fb45d71f181ee2be908e9bd091
--- /dev/null
+++ b/devices/statistics_writer/udp_dev/udp_client.py
@@ -0,0 +1,62 @@
+import socket
+import sys
+import netifaces as ni
+from datetime import datetime
+import time
+
+class UDP_Client:
+
+    def __init__(self, server_ip:str, server_port:int):
+        self.server_ip = server_ip
+        self.server_port = server_port
+        self.server_data = None
+        self.server_addr = None # tuple of address info
+
+    def run(self):
+        # Create socket for server
+        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, 0)
+        print("Do Ctrl+c to exit the program !!")
+        print('\n\n*** This Client keeps sending the same SST packet with an interval of 1s ***')
+
+        # Let's send data through the UDP protocol
+        try:
+            while True:
+                time.sleep(1)
+
+                with open("../../test/SDP_SST_statistics_packet.bin", "rb") as f:
+                    send_data = f.read()
+                s.sendto(send_data, (self.server_ip, self.server_port))
+                print("\n\n 1. Client sent SST packet at: ", datetime.now())
+                self.server_data, self.server_addr = s.recvfrom(4096)
+                print("\n\n 2. Client received: ", self.server_data.decode('utf-8'), "\n\n")
+        finally:
+            # close the socket
+            s.close()
+
+if __name__ == '__main__':
+
+    if len(sys.argv) == 3:
+        if sys.argv[1] == 'localhost':
+            server_ip = ni.ifaddresses('eth0')[ni.AF_INET][0]['addr']
+        else:
+            server_ip = sys.argv[1]
+        server_port = int(sys.argv[2])
+    else:
+        print("Usage: python3 udp_client.py <server_ip> <server_port>")
+        exit(1)
+
+    client = UDP_Client(server_ip, server_port)
+    client.run()
diff --git a/devices/statistics_writer/udp_dev/udp_server.py b/devices/statistics_writer/udp_dev/udp_server.py
new file mode 100644
index 0000000000000000000000000000000000000000..45624761519287b13bbce5c73cf8d8cb7dff9201
--- /dev/null
+++ b/devices/statistics_writer/udp_dev/udp_server.py
@@ -0,0 +1,50 @@
+import socket
+import netifaces as ni
+from datetime import datetime
+
+class UDP_Server:
+
+    def __init__(self, ip:str, port:int, buffer_size:int = 8192):
+        self.ip = ip
+        self.port = port
+        self.buffer_size = buffer_size
+        self.recv_data = None
+        self.recv_addr = None
+
+    def run(self):
+        # Create a UDP socket
+        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+        # Bind the socket to the port
+        server_address = (self.ip, self.port)
+        s.bind(server_address)
+        print("Do Ctrl+c to exit the program !!")
+        print("\n\n####### Server is listening on %s - port %s #######" % (self.ip,self.port))
+
+        # receive a single packet, acknowledge it, and then close the socket
+        self.recv_data, self.recv_addr = s.recvfrom(self.buffer_size)
+        print("\n\n 1. Server received a packet at: ", datetime.now(), "\n\n")
+
+        # server response
+        send_data = 'Packet received. Waiting for the next one.'
+        s.sendto(send_data.encode('utf-8'), self.recv_addr)
+        print("\n\n 2. Server sent: ", send_data, "\n\n")
+
+        # close the socket
+        s.close()
+
+    def get_recv_data(self):
+        return self.recv_data
+
+if __name__ == '__main__':
+    local_ip = ni.ifaddresses('eth0')[ni.AF_INET][0]['addr']
+    server = UDP_Server(local_ip, 5600)
+    server.run()
diff --git a/devices/statistics_writer/udp_dev/udp_write_manager.py b/devices/statistics_writer/udp_dev/udp_write_manager.py
new file mode 100644
index 0000000000000000000000000000000000000000..0c11f6a82dc11f8151eb771b90033feb38ef9c42
--- /dev/null
+++ b/devices/statistics_writer/udp_dev/udp_write_manager.py
@@ -0,0 +1,81 @@
+from datetime import datetime
+import time
+import os
+import h5py
+import numpy as np
+from statistics_writer.udp_dev import udp_server as udp
+import netifaces as ni
+from statistics_packet import SSTPacket
+
+__all__ = ["statistics_writer"]
+
+
+class Statistics_Writer:
+
+    def __init__(self, new_file_time_interval):
+
+        self.new_file_time_interval = new_file_time_interval
+        self.packet_cnt = 0
+
+        # Define ip and port of the receiver
+        self.local_ip = ni.ifaddresses('eth0')[ni.AF_INET][0]['addr']
+        self.server = udp.UDP_Server(self.local_ip, 5600)
+
+        # Create the data directory if it does not exist yet
+        # (use the same 'data' directory that new_hdf5 writes to)
+        os.makedirs('data', exist_ok=True)
+
+        # create initial file
+        self.last_file_time = time.time()
+        self.file = None
+        self.new_hdf5()
+
+    def write_packet(self, raw_data):
+        # create new file if the file was created more than the allowed time ago
+        if time.time() >= self.new_file_time_interval + self.last_file_time:
+            self.new_hdf5()
+
+        self.packet_cnt += 1
+
+        # create dataset with the raw data in it
+        self.write_raw(raw_data)
+        self.write_metadata(raw_data)
+
+    def new_hdf5(self):
+
+        if self.file is not None:
+            self.file.close()
+
+        timestamp = datetime.now()
+        current_time = str(timestamp.strftime("%Y-%m-%d-%H-%M-%S"))
+        print("creating new file: data/{}.h5".format(current_time))
+        self.file = h5py.File("data/{}.h5".format(current_time), 'w')
+        self.last_file_time = time.time()
+
+    def write_metadata(self, packet):
+        # decode packet
+        self.sst = SSTPacket(packet)
+        header = self.sst.header()
+        header_bytes = bytes(str(header), "utf-8")
+        header_bytes = np.frombuffer(header_bytes, dtype=np.uint8)
+        self.file.create_dataset('packet_{}_header'.format(self.packet_cnt), data=header_bytes)
+
+    def write_raw(self, packet):
+        # create dataset with the raw data in it
+        data = np.frombuffer(packet, dtype=np.uint8)
+        self.file.create_dataset('packet_{}_raw'.format(self.packet_cnt), data=data)
+
+
+if __name__ == "__main__":
+    # create a data dumper that creates a new file every 10s (for testing)
+    test = Statistics_Writer(new_file_time_interval=10)
+
+    # simple loop to write data every second
+    while True:
+        test.server.run()
+        data = test.server.get_recv_data()
+        test.write_packet(data)
+
diff --git a/devices/test-requirements.txt b/devices/test-requirements.txt
index c97375e938b0466da884581c339f2c5735472c62..20ed449cd8f17f9110ebe1b70774916abe8c00cb 100644
--- a/devices/test-requirements.txt
+++ b/devices/test-requirements.txt
@@ -2,14 +2,17 @@
 # order of appearance. Changing the order has an impact on the overall
 # integration process, which may cause wedges in the gate later.
 
+bandit>=1.6.0 # Apache-2.0
+coverage>=5.2.0 # Apache-2.0
 doc8>=0.8.0 # Apache-2.0
 flake8>=3.8.0 # MIT
-bandit>=1.6.0 # Apache-2.0
+flake8-breakpoint>=1.1.0 # MIT
+flake8-debugger>=4.0.0 # MIT
+flake8-mock>=0.3 # GPL
 hacking>=3.2.0,<3.3.0 # Apache-2.0
-coverage>=5.2.0 # Apache-2.0
 python-subunit>=1.4.0 # Apache-2.0/BSD
 Pygments>=2.6.0
 stestr>=3.0.0 # Apache-2.0
 testscenarios>=0.5.0 # Apache-2.0/BSD
 testtools>=2.4.0 # MIT
-
+timeout-decorator>=0.5 # MIT
diff --git a/devices/test/SDP_SST_statistics_packet.bin b/devices/test/SDP_SST_statistics_packet.bin
index ade2d62c32eb6cbf4fb9b5ec2d7c0368ab0af408..a45b77587a8104cbeb756d85cbb757f02abf39bf 100644
Binary files a/devices/test/SDP_SST_statistics_packet.bin and b/devices/test/SDP_SST_statistics_packet.bin differ
diff --git a/devices/test/SDP_SST_statistics_packets.bin b/devices/test/SDP_SST_statistics_packets.bin
new file mode 100644
index 0000000000000000000000000000000000000000..e94347b86a0a03b940eb84980ec8f6d3b6d4e2d7
Binary files /dev/null and b/devices/test/SDP_SST_statistics_packets.bin differ
diff --git a/devices/test/SDP_XST_statistics_packets.bin b/devices/test/SDP_XST_statistics_packets.bin
new file mode 100644
index 0000000000000000000000000000000000000000..97c08e3bfb47bf56c30288b5e62cc60c7034b417
Binary files /dev/null and b/devices/test/SDP_XST_statistics_packets.bin differ
diff --git a/devices/test/clients/test_attr_wrapper.py b/devices/test/clients/test_attr_wrapper.py
index a293923acbf21774e9f221b650353f3410104a88..453e19c19d67b56eb339462cc1da7e0e8414451b 100644
--- a/devices/test/clients/test_attr_wrapper.py
+++ b/devices/test/clients/test_attr_wrapper.py
@@ -38,8 +38,8 @@ def dev_init(device):
 class TestAttributeTypes(base.TestCase):
 
     class str_scalar_device(hardware_device):
-        scalar_R = attribute_wrapper(comms_annotation="str_scalar_R", datatype=str)
-        scalar_RW = attribute_wrapper(comms_annotation="str_scalar_RW", datatype=str, access=AttrWriteType.READ_WRITE)
+        scalar_R = attribute_wrapper(comms_annotation="str_scalar_R", datatype=numpy.str)
+        scalar_RW = attribute_wrapper(comms_annotation="str_scalar_RW", datatype=numpy.str, access=AttrWriteType.READ_WRITE)
 
         def configure_for_initialise(self):
             dev_init(self)
@@ -122,8 +122,8 @@ class TestAttributeTypes(base.TestCase):
             dev_init(self)
 
     class str_spectrum_device(hardware_device):
-        spectrum_R = attribute_wrapper(comms_annotation="str_spectrum_R", datatype=str, dims=spectrum_dims)
-        spectrum_RW = attribute_wrapper(comms_annotation="str_spectrum_RW", datatype=str, access=AttrWriteType.READ_WRITE, dims=spectrum_dims)
+        spectrum_R = attribute_wrapper(comms_annotation="str_spectrum_R", datatype=numpy.str, dims=spectrum_dims)
+        spectrum_RW = attribute_wrapper(comms_annotation="str_spectrum_RW", datatype=numpy.str, access=AttrWriteType.READ_WRITE, dims=spectrum_dims)
 
         def configure_for_initialise(self):
             dev_init(self)
@@ -206,8 +206,8 @@ class TestAttributeTypes(base.TestCase):
             dev_init(self)
 
     class str_image_device(hardware_device):
-        image_R = attribute_wrapper(comms_annotation="str_image_R", datatype=str, dims=(2,3))
-        image_RW = attribute_wrapper(comms_annotation="str_image_RW", datatype=str, access=AttrWriteType.READ_WRITE, dims=(2,3))
+        image_R = attribute_wrapper(comms_annotation="str_image_R", datatype=numpy.str, dims=(2,3))
+        image_RW = attribute_wrapper(comms_annotation="str_image_RW", datatype=numpy.str, access=AttrWriteType.READ_WRITE, dims=(2,3))
 
         def configure_for_initialise(self):
             dev_init(self)
@@ -333,20 +333,20 @@ class TestAttributeTypes(base.TestCase):
 
             if test_type == "scalar":
 
-                if dtype is str or dtype is numpy.str_:
+                if dtype is numpy.str:
                     val = str_scalar_val
                 else:
                     val = dtype(1)
                 proxy.scalar_RW = val
             elif test_type == "spectrum":
-                if dtype is str or dtype is numpy.str_:
+                if dtype is numpy.str:
                     val = str_spectrum_val
                 else:
                     val = numpy.full(spectrum_dims, dtype=dtype, fill_value=1)
                 print(val)
                 proxy.spectrum_RW = val
             elif test_type == "image":
-                if dtype is str or dtype is numpy.str_:
+                if dtype is numpy.str:
                     val = str_image_val
                 else:
                     val = numpy.full(image_dims, dtype=dtype, fill_value=1)
@@ -408,7 +408,7 @@ class TestAttributeTypes(base.TestCase):
                 proxy.on()
 
                 if test_type == "scalar":
-                    if dtype is str or dtype is numpy.str_:
+                    if dtype is numpy.str:
                         val = str_scalar_val
                     else:
                         val = dtype(1)
@@ -416,7 +416,7 @@ class TestAttributeTypes(base.TestCase):
                     result_R = proxy.scalar_R
                     result_RW = proxy.scalar_RW
                 elif test_type == "spectrum":
-                    if dtype is str or dtype is numpy.str_:
+                    if dtype is numpy.str:
                         val = str_spectrum_val
                     else:
                         val = numpy.full(spectrum_dims, dtype=dtype, fill_value=1)
@@ -424,7 +424,7 @@ class TestAttributeTypes(base.TestCase):
                     result_R = proxy.spectrum_R
                     result_RW = proxy.spectrum_RW
                 elif test_type == "image":
-                    if dtype is str or dtype is numpy.str_:
+                    if dtype is numpy.str:
                         val = str_image_val
                     else:
                         val = numpy.full(image_dims, dtype=dtype, fill_value=1)
@@ -434,7 +434,7 @@ class TestAttributeTypes(base.TestCase):
                     result_R = proxy.image_R
                     result_RW = proxy.image_RW
 
-                    if dtype != str:
+                    if dtype != numpy.str:
                         self.assertEqual(result_R.shape, image_dims, "not the correct dimensions")
 
                         result_R = result_R.reshape(-1)
@@ -450,7 +450,7 @@ class TestAttributeTypes(base.TestCase):
                     self.assertTrue(comparison, " Value could not be handled by the atrribute_wrappers internal RW storer. attempted to write: {}".format(val))
                     comparison = result_R == val
                     self.assertTrue(comparison, " value in the clients R attribute not equal to what was written. read: {}, wrote {}".format(result_R, val))
-                elif dtype != str:
+                elif dtype != numpy.str:
                     comparison = result_RW == val
                     equal_arrays = comparison.all()
                     self.assertTrue(equal_arrays, " Value could not be handled by the atrribute_wrappers internal RW storer. attempted to write: {}".format(val))
diff --git a/devices/test/clients/test_client.py b/devices/test/clients/test_client.py
index 1d8c85f5e597a31d00bc1af105e0465b9c8a8a11..2c5a2df9c42431f28e6e8a8c3180b8902c4a4597 100644
--- a/devices/test/clients/test_client.py
+++ b/devices/test/clients/test_client.py
@@ -84,6 +84,7 @@ class test_client(CommClient):
 
         def write_function(write_value):
             self.streams.debug_stream("from write_function, writing {} array of type {}".format(dims, dtype))
+
             self.value = write_value
             return
 
diff --git a/devices/test/clients/test_opcua_client.py b/devices/test/clients/test_opcua_client.py
new file mode 100644
index 0000000000000000000000000000000000000000..df9296c417857683955aa73ee3cbc0b7985ade76
--- /dev/null
+++ b/devices/test/clients/test_opcua_client.py
@@ -0,0 +1,246 @@
+import numpy
+from clients.opcua_client import OPCUAConnection
+from clients import opcua_client
+
+import opcua
+import io
+
+from unittest import mock
+import unittest
+
+from test import base
+
+
+class attr_props:
+    def __init__(self, numpy_type):
+        self.numpy_type = numpy_type
+
+
+attr_test_types = [
+    attr_props(numpy_type=str),
+    attr_props(numpy_type=numpy.bool_),
+    attr_props(numpy_type=numpy.float32),
+    attr_props(numpy_type=numpy.float64),
+    attr_props(numpy_type=numpy.double),
+    attr_props(numpy_type=numpy.uint8),
+    attr_props(numpy_type=numpy.uint16),
+    attr_props(numpy_type=numpy.uint32),
+    attr_props(numpy_type=numpy.uint64),
+    attr_props(numpy_type=numpy.int16),
+    attr_props(numpy_type=numpy.int32),
+    attr_props(numpy_type=numpy.int64)
+]
+
+scalar_shape = (1,)
+spectrum_shape = (4,)
+image_shape = (2, 3)
+dimension_tests = [scalar_shape, spectrum_shape, image_shape]
+
+
+class TestOPCua(base.TestCase):
+    @mock.patch.object(OPCUAConnection, "check_nodes")
+    @mock.patch.object(OPCUAConnection, "connect")
+    @mock.patch.object(opcua_client, "Client")
+    def test_opcua_connection(self, m_opc_client, m_connect, m_check):
+        """
+        This tests verifies whether the correct connection steps happen. It checks whether we can init an OPCUAConnection object
+        Whether we can set the namespace, and the OPCua client.
+        """
+
+        m_get_namespace = mock.Mock()
+        m_get_namespace.get_namespace_index.return_value = 42
+        m_opc_client.return_value = m_get_namespace
+
+        test_client = OPCUAConnection("opc.tcp://localhost:4874/freeopcua/server/", "http://lofar.eu", 5, mock.Mock(), mock.Mock())
+
+        """Verify that construction of OPCUAConnection calls self.connect"""
+        m_connect.assert_called_once()  # the connect function in the opcua client
+        m_check.assert_called_once()  # debug function that prints out all nodes
+        m_opc_client.assert_called_once()  # makes sure the actual freeOPCua client object is created only once
+
+        m_get_namespace.get_namespace_index.assert_called_once_with("http://lofar.eu")
+        self.assertEqual(42, test_client.name_space_index)
+
+
+    @mock.patch.object(OPCUAConnection, "check_nodes")
+    @mock.patch.object(OPCUAConnection, "connect")
+    @mock.patch.object(opcua_client, "Client")
+    @mock.patch.object(opcua_client, 'ProtocolAttribute')
+    def test_opcua_attr_setup(self, m_protocol_attr, m_opc_client, m_connect, m_check):
+        """
+        This tests covers the correct creation of read/write functions.
+        In normal circumstances called by he attribute wrapper.
+        Will be given 'comms_annotation', for OPCua that will be a node path and can access the attributes type and dimensions
+
+        Test succeeds if there are no errors.
+        """
+
+        for i in attr_test_types:
+            class mock_attr:
+                def __init__(self, dtype, x, y):
+                    self.numpy_type = dtype
+                    self.dim_x = x
+                    self.dim_y = y
+
+            for j in dimension_tests:
+                if len(j) == 1:
+                    dim_x = j[0]
+                    dim_y = 0
+                else:
+                    dim_x = j[1]
+                    dim_y = j[0]
+
+                # create a fake attribute with only the required variables in it.
+                m_attribute = mock_attr(i.numpy_type, dim_x, dim_y)
+
+                # pretend like there is a running OPCua server with a node that has this name
+                m_annotation = ["2:PCC", f"2:testNode_{str(i.numpy_type)}_{str(dim_x)}_{str(dim_y)}"]
+
+                test = OPCUAConnection("opc.tcp://localhost:4874/freeopcua/server/", "http://lofar.eu", 5, mock.Mock(), mock.Mock())
+                test.setup_attribute(m_annotation, m_attribute)
+
+                # success if there are no errors.
+
+
+
+    def test_protocol_attr(self):
+        """
+        This tests finding an OPCua node and returning a valid object with read/write functions.
+        (This step is normally initiated by the attribute_wrapper)
+        """
+
+        # for all datatypes
+        for i in attr_test_types:
+            # for all dimensions
+            for j in dimension_tests:
+
+                node = mock.Mock()
+
+                # handle scalars slightly differently
+                if len(j) == 1:
+                    dims = (j[0], 0)
+                else:
+                    dims = (j[1], j[0])
+
+                ua_type = opcua_client.numpy_to_OPCua_dict[i.numpy_type]
+                test = opcua_client.ProtocolAttribute(node, dims[0], dims[1], ua_type)
+                print(test.dim_y, test.dim_x, test.ua_type)
+
+                """
+                Part of the test already includes simply not throwing an exception, but for the sake coverage these asserts have also
+                been added.
+                """
+                self.assertTrue(test.dim_y == dims[1], f"Dimensionality error, ProtocolAttribute.dim_y got: {test.dim_y} expected: {dims[1]}")
+                self.assertTrue(test.dim_x == dims[0], f"Dimensionality error, ProtocolAttribute.dim_y got: {test.dim_x} expected: {dims[0]}")
+                self.assertTrue(test.ua_type == ua_type, f"type error. Got: {test.ua_type} expected: {ua_type}")
+                self.assertTrue(hasattr(test, "write_function"), f"No write function found")
+                self.assertTrue(hasattr(test, "read_function"), f"No read function found")
+
+    def test_read(self):
+        """
+        This tests the read functions.
+        """
+
+        for j in dimension_tests:
+            for i in attr_test_types:
+                def get_test_value():
+                    return numpy.zeros(j, i.numpy_type)
+
+                def get_flat_value():
+                    return get_test_value().flatten()
+
+                m_node = mock.Mock()
+
+                if len(j) == 1:
+                    test = opcua_client.ProtocolAttribute(m_node, j[0], 0, opcua_client.numpy_to_OPCua_dict[i.numpy_type])
+                else:
+                    test = opcua_client.ProtocolAttribute(m_node, j[1], j[0], opcua_client.numpy_to_OPCua_dict[i.numpy_type])
+                m_node.get_value = get_flat_value
+                val = test.read_function()
+
+                comp = val == get_test_value()
+                self.assertTrue(comp.all(), "Read value unequal to expected value: \n\t{} \n\t{}".format(val, get_test_value()))
+
+    def test_type_map(self):
+        for numpy_type, opcua_type in opcua_client.numpy_to_OPCua_dict.items():
+            # derive a default value that can get lost in a type translation
+            if numpy_type in [str, numpy.str]:
+                default_value = "foo"
+            elif numpy_type == numpy.bool_:
+                default_value = True
+            else:
+                # integer or float type
+                # integers: numpy will drop the decimals for us
+                # floats: make sure we choose a value that has an exact binary representation
+                default_value = 42.25
+
+            # apply our mapping
+            v = opcua.ua.uatypes.Variant(value=numpy_type(default_value), varianttype=opcua_type)
+
+            try:
+                # try to convert it to binary to force opcua to parse the value as the type
+                binary = opcua.ua.ua_binary.variant_to_binary(v)
+
+                # reinterpret the resulting binary to obtain what opcua made of our value
+                binary_stream = io.BytesIO(binary)
+                reparsed_v = opcua.ua.ua_binary.variant_from_binary(binary_stream)
+            except Exception as e:
+                raise Exception(f"Conversion {numpy_type} -> {opcua_type} failed.") from e
+
+            # did the value get lost in translation?
+            self.assertEqual(v.Value, reparsed_v.Value, msg=f"Conversion {numpy_type} -> {opcua_type} failed.")
+
+            # does the OPC-UA type have the same datasize (and thus, precision?)
+            if numpy_type not in [str, numpy.str]:
+                self.assertEqual(numpy_type().itemsize, getattr(opcua.ua.ua_binary.Primitives, opcua_type.name).size, msg=f"Conversion {numpy_type} -> {opcua_type} failed: precision mismatch")
+
+
+
+    def test_write(self):
+        """
+        Test the writing of values by instantiating a ProtocolAttribute attribute, and calling the write function.
+        but the opcua function that writes to the server has been changed to the compare_values function.
+        This allows the code to compare what values we want to write and what values would be given to a server.
+        """
+
+        # for all dimensionalities
+        for j in dimension_tests:
+
+            #for all datatypes
+            for i in attr_test_types:
+
+                # get numpy array of the test value
+                def get_test_value():
+                    return numpy.zeros(j, i.numpy_type)
+
+                # get opcua Varianttype array of the test value
+                def get_mock_value(value):
+                    return opcua.ua.uatypes.Variant(value=value, varianttype=opcua_client.numpy_to_OPCua_dict[i.numpy_type])
+
+                m_node = mock.Mock()
+
+                # create the protocolattribute
+                if len(j) == 1:
+                    test = opcua_client.ProtocolAttribute(m_node, j[0], 0, opcua_client.numpy_to_OPCua_dict[i.numpy_type])
+                else:
+                    test = opcua_client.ProtocolAttribute(m_node, j[1], j[0], opcua_client.numpy_to_OPCua_dict[i.numpy_type])
+
+                test.node.get_data_value = mock.Mock()
+
+                # comparison function that replaces `set_data_value` inside the attributes write function
+                def compare_values(val):
+                    # test values
+                    val = val.tolist() if type(val) == numpy.ndarray else val
+                    if j != dimension_tests[0]:
+                        comp = val._value == get_mock_value(get_test_value().flatten())._value
+                        self.assertTrue(comp.all(),
+                                        "Array attempting to write unequal to expected array: \n\t got: {} \n\texpected: {}".format(val,get_mock_value(get_test_value())))
+                    else:
+                        comp = val == get_mock_value(get_test_value())
+                        self.assertTrue(comp, "value attempting to write unequal to expected value: \n\tgot: {} \n\texpected: {}".format(val, get_mock_value(get_test_value())))
+
+                # replace `set_data_value`, usually responsible for communicating with the server, with the `compare_values` function.
+                m_node.set_data_value = compare_values
+
+                # call the write function with the test values
+                test.write_function(get_test_value())
diff --git a/devices/test/clients/test_statistics_client_thread.py b/devices/test/clients/test_statistics_client_thread.py
new file mode 100644
index 0000000000000000000000000000000000000000..fd7ce0701f9d792863909b9f8ee4a9d39a2b1fd1
--- /dev/null
+++ b/devices/test/clients/test_statistics_client_thread.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+import logging
+from unittest import mock
+
+from clients.statistics_client_thread import StatisticsClientThread
+
+from test import base
+
+logger = logging.getLogger()
+
+
+class TestStatisticsClientThread(base.TestCase):
+
+    def setUp(self):
+        super(TestStatisticsClientThread, self).setUp()
+
+    class DummySCThread(StatisticsClientThread):
+
+        def disconnect(self):
+            pass
+
+        @property
+        def _options(self) -> dict:
+            return {}
+
+    @mock.patch.object(DummySCThread, "disconnect")
+    def test_del_disconnect(self, m_disconnect):
+        """Ensure that __del__ calls disconnect() of child class"""
+
+        t_test = TestStatisticsClientThread.DummySCThread()
+        del t_test
+
+        m_disconnect.assert_called_once_with()
diff --git a/devices/test/clients/test_tcp_replicator.py b/devices/test/clients/test_tcp_replicator.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9babed0eb7af7a58544b3ff7535c3113ed12ca3
--- /dev/null
+++ b/devices/test/clients/test_tcp_replicator.py
@@ -0,0 +1,219 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+import logging
+import time
+from queue import Queue
+from unittest import mock
+
+from clients.tcp_replicator import TCPReplicator
+from clients import tcp_replicator
+
+from test import base
+
+import timeout_decorator
+
+logger = logging.getLogger()
+
+
+class TestTCPReplicator(base.TestCase):
+
+    @staticmethod
+    async def dummy_task():
+        pass
+
+    def setUp(self):
+        super(TestTCPReplicator, self).setUp()
+
+        self.m_server = mock.Mock()
+        self.m_server.wait_closed.return_value = self.dummy_task()
+
+        async def dummy_create_server():
+            return self.m_server
+
+        # Create reusable test fixture for unit tests
+        self.m_tcp_replicator = TCPReplicator
+
+        # Patch _run_server and force match spec
+        event_loop_patcher = mock.patch.object(
+            tcp_replicator.asyncio, 'get_event_loop')
+        self.m_event_loop = event_loop_patcher.start()
+        self.m_event_loop.return_value.create_server.return_value = \
+            dummy_create_server()
+        self.addCleanup(event_loop_patcher.stop)
+
+        # Stash _process_queue before mocking
+        self.t_process_queue = TCPReplicator._process_queue
+
+        # Patch _process_queue and force match spec
+        process_queue_patcher = mock.patch.object(
+            self.m_tcp_replicator, '_process_queue',
+            autospec=True, return_value=self.dummy_task())
+        self.m_process_queue = process_queue_patcher.start()
+        self.addCleanup(process_queue_patcher.stop)
+
+    def test_parse_options(self):
+        """Validate option parsing"""
+
+        # Perform string copy of current tcp_bind value
+        t_tcp_bind = str(TCPReplicator._default_options['tcp_bind'])
+
+        test_options = {
+            "random": 12346,    # I should be ignored
+            "tcp_bind": '0.0.0.0',  # I should get set
+        }
+
+        replicator = self.m_tcp_replicator(options=test_options)
+        self.assertTrue(replicator.is_alive())
+
+        # Ensure replicator initialization does not modify static variable
+        self.assertEqual(t_tcp_bind, TCPReplicator._default_options['tcp_bind'])
+
+        # Ensure options are correctly updated upon initialization
+        self.assertEqual(test_options['tcp_bind'], replicator.options['tcp_bind'])
+
+        # Ensure non-existent keys don't propagate into options
+        self.assertFalse('random' in replicator.options)
+
+    def test_connected_clients(self):
+        """Validate shared list behavior between TCPServerProtocol and thread"""
+
+        m_client = mock.Mock()
+
+        # Create both a TCPReplicator and TCPServerProtocol separately
+        replicator = self.m_tcp_replicator()
+        self.assertTrue(replicator.is_alive())
+        protocol = TCPReplicator.TCPServerProtocol(
+            replicator._options, replicator._connected_clients)
+
+        # Add a mocked client to replicators list
+        replicator._connected_clients.append(m_client)
+
+        # Ensure the mocked client appears in the protocols list
+        self.assertTrue(m_client in protocol.connected_clients)
+
+    def test_start_stop(self):
+        """Verify threading behavior, being able to start and stop the thread"""
+
+        replicator = self.m_tcp_replicator()
+        self.assertTrue(replicator.is_alive())
+
+        # Give the thread 5 seconds to stop
+        replicator.join(5)
+
+        # Thread should now be dead
+        self.assertFalse(replicator.is_alive())
+
+    @timeout_decorator.timeout(5)
+    def test_start_except_eventloop(self):
+        """Verify exception handling inside run() for eventloop creation"""
+
+        m_loop = mock.Mock()
+        m_loop.create_task.side_effect = RuntimeError("Test Error")
+
+        # Signal to _clean_shutdown that the exception has caused the loop to
+        # stop
+        m_loop.is_running.return_value = False
+
+        m_replicator_import = tcp_replicator
+
+        with mock.patch.object(m_replicator_import, 'asyncio') as run_patcher:
+            run_patcher.new_event_loop.return_value = m_loop
+
+            # Constructor should raise an exception if the thread dies early
+            self.assertRaises(RuntimeError, self.m_tcp_replicator)
+
+    @timeout_decorator.timeout(5)
+    def test_start_except_server(self):
+        """Verify exception handling inside run() for starting server"""
+
+        self.m_event_loop.return_value.create_server.side_effect =\
+            RuntimeError("Test Error")
+
+        # Constructor should raise an exception if the thread dies early
+        self.assertRaises(RuntimeError, self.m_tcp_replicator)
+
+    @timeout_decorator.timeout(5)
+    def test_start_stop_delete(self):
+        """Verify that deleting the TCPReplicator object safely halts thread"""
+
+        replicator = self.m_tcp_replicator()
+        self.assertTrue(replicator.is_alive())
+
+        del replicator
+
+    def test_transmit(self):
+        """Test that clients are getting data written to their transport"""
+
+        m_data = "Hello World!".encode('utf-8')
+
+        m_client = mock.Mock()
+
+        replicator = self.m_tcp_replicator()
+        self.assertTrue(replicator.is_alive())
+
+        replicator._connected_clients.append(m_client)
+
+        replicator.transmit(m_data)
+
+        # TODO(Corne): Find suitable primitive to synchronize async task update
+        #              with main thread.
+        time.sleep(6)
+
+        m_client.transport.write.assert_called_once_with(m_data)
+
+    def test_queue_start(self):
+        replicator = self.m_tcp_replicator()
+
+        self.m_process_queue.assert_called_once_with(replicator)
+
+    def test_transmit_queue(self):
+        m_data = "Hello World!".encode('utf-8')
+
+        m_client = mock.Mock()
+
+        replicator = self.m_tcp_replicator()
+        self.assertTrue(replicator.is_alive())
+
+        # Patch _process_queue back into object and jump start it
+        replicator._process_queue = self.t_process_queue
+        replicator._loop.call_soon_threadsafe(
+            replicator._loop.create_task, replicator._process_queue(replicator))
+
+        replicator._connected_clients.append(m_client)
+
+        replicator.put(m_data)
+
+        # TODO(Corne): Find suitable primitive to synchronize async task update
+        #              with main thread.
+        time.sleep(6)
+
+        m_client.transport.write.assert_called_once_with(m_data)
+
+    def test_disconnect(self):
+        m_client = mock.Mock()
+
+        replicator = self.m_tcp_replicator()
+        self.assertTrue(replicator.is_alive())
+
+        replicator._connected_clients.append(m_client)
+
+        replicator.join(5)
+
+        m_client.transport.abort.assert_called_once_with()
diff --git a/devices/test/common/test_baselines.py b/devices/test/common/test_baselines.py
new file mode 100644
index 0000000000000000000000000000000000000000..206b4ca0eccefe1012519c8236d158e52f1cdc38
--- /dev/null
+++ b/devices/test/common/test_baselines.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+from common import baselines
+
+from test import base
+
+
+class TestBaselines(base.TestCase):
+    def test_nr_baselines(self):
+        # no baselines if no antennas
+        self.assertEqual(0, baselines.nr_baselines(0))
+        # one antenna only has autocorrelation
+        self.assertEqual(1, baselines.nr_baselines(1))
+        # two antennas each have autocorrelations + a baseline between each other
+        self.assertEqual(3, baselines.nr_baselines(2))
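+        # in general, nr_baselines(n) == n * (n + 1) // 2: n autocorrelations plus n * (n - 1) // 2 unique pairs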
+
+    def test_baseline_indices(self):
+        """ Test whether baseline_from_index and baseline_index line up. """
+
+        for major in range(192):
+            for minor in range(major + 1):
+                idx = baselines.baseline_index(major, minor)
+                self.assertEqual((major, minor), baselines.baseline_from_index(idx), msg=f'baseline_index({major},{minor}) resulted in {idx}, and should match baseline_from_index({idx})')
diff --git a/devices/test/devices/test_abstract_device.py b/devices/test/devices/test_abstract_device.py
new file mode 100644
index 0000000000000000000000000000000000000000..f54383c9a1b85f5c9e442f51d7f04d061951f772
--- /dev/null
+++ b/devices/test/devices/test_abstract_device.py
@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+#
+# This file is part of the LOFAR 2.0 Station Software
+#
+#
+#
+# Distributed under the terms of the APACHE license.
+# See LICENSE.txt for more info.
+
+import abc
+from unittest import mock
+
+from tango import DevFailed
+from tango import server
+from tango.server import attribute
+
+from tango.test_context import DeviceTestContext
+
+from devices.abstract_device import AbstractDeviceMetas
+
+from test import base
+
+
+class TestAbstractDevice(base.TestCase):
+
+    class AbstractExample(object, metaclass=abc.ABCMeta):
+        """A pure abc.ABCMeta metaclass with an abstract method
+
+        This is an abstract class that inherits object with the abc.ABCMeta as
+        metaclass
+        """
+
+        @abc.abstractmethod
+        def example_method(self):
+            raise NotImplementedError
+
+    class TestHardwareDevice(server.Device, metaclass=AbstractDeviceMetas):
+        """This is your overarching abstract class with a combined metaclass
+
+        Device is an object with DeviceMeta as metaclass
+        We use HardwareDeviceMetas as metaclass
+
+        Our metaclass contract is now fulfilled.
+        """
+
+        @attribute(dtype=float)
+        def call_example_method(self):
+            return self.example_method()
+
+        @abc.abstractmethod
+        def example_method(self):
+            raise NotImplementedError
+
+    class ConcreteHardwareDevice(TestHardwareDevice):
+
+        def example_method(self):
+            return 12
+
+    def setUp(self):
+        super(TestAbstractDevice, self).setUp()
+
+    def test_instance_tango(self):
+
+        try:
+            with DeviceTestContext(self.TestHardwareDevice, process=True) as proxy:
+                # Calling this method raises the NotImplementedError exception
+                proxy.call_example_method()
+        except Exception as e:
+            self.assertIsInstance(e, DevFailed)
+
+        with DeviceTestContext(self.ConcreteHardwareDevice, process=True) as proxy:
+            self.assertEqual(12, proxy.call_example_method)
+
+    @mock.patch.object(server, 'get_worker')
+    @mock.patch.object(server, 'LatestDeviceImpl')
+    def test_instance_error(self, m_implement, m_worker):
+        # Creating this object should raise a TypeError, but it does not:
+        # combining metaclasses in this way does not have the desired result.
+        # This is a known limitation of this approach.
+        m_device = self.TestHardwareDevice(mock.Mock(), mock.Mock())
+
+        # Raising the NotImplementedError works as expected, however.
+        self.assertRaises(NotImplementedError, m_device.example_method)
+
+        # Creating this object of a class that has a pure metaclass does raise
+        # the expected error.
+        self.assertRaises(TypeError, self.AbstractExample)
+
diff --git a/devices/test/devices/test_statistics_collector.py b/devices/test/devices/test_statistics_collector.py
new file mode 100644
index 0000000000000000000000000000000000000000..a3568b8e56452259b8754be3a76e862a20845fcb
--- /dev/null
+++ b/devices/test/devices/test_statistics_collector.py
@@ -0,0 +1,80 @@
+from devices.sdp.statistics_collector import XSTCollector
+from devices.sdp.statistics_packet import XSTPacket
+
+from test import base
+
+class TestXSTCollector(base.TestCase):
+    def test_valid_packet(self):
+        collector = XSTCollector()
+
+        # a valid packet as obtained from SDP, with 64-bit BE 1+1j as payload
+        packet = b'X\x05\x00\x00\x00\x00\x00\x00\x10\x08\x00\x02\xfa\xef\x00f\x00\x00\x0c\x08\x01 \x14\x00\x00\x01!\xd9&z\x1b\xb3' + 288 * b'\x00\x00\x00\x00\x00\x00\x00\x01'
+
+        # parse it ourselves to extract info nicely
+        fields = XSTPacket(packet)
+        fpga_index = fields.gn_index
+
+        # this should not throw
+        collector.process_packet(packet)
+
+        # counters should now be updated
+        self.assertEqual(1, collector.parameters["nof_packets"])
+        self.assertEqual(0, collector.parameters["nof_invalid_packets"])
+
+        self.assertEqual(1, collector.parameters["nof_valid_payloads"][fpga_index])
+        self.assertEqual(0, collector.parameters["nof_payload_errors"][fpga_index])
+
+        # check whether the data ended up in the right block, and the rest is still zero
+        xst_values = collector.xst_values()
+
+        for baseline_a in range(collector.MAX_INPUTS):
+            for baseline_b in range(collector.MAX_INPUTS):
+                if baseline_b > baseline_a:
+                    # only scan top-left triangle
+                    continue
+
+                baseline_a_was_in_packet = (fields.first_baseline[0] <= baseline_a < fields.first_baseline[0] + fields.nof_signal_inputs)
+                baseline_b_was_in_packet = (fields.first_baseline[1] <= baseline_b < fields.first_baseline[1] + fields.nof_signal_inputs)
+
+                if baseline_a_was_in_packet and baseline_b_was_in_packet:
+                    self.assertEqual(1+1j, xst_values[baseline_a][baseline_b], msg=f'element [{baseline_a}][{baseline_b}] did not end up in XST matrix.')
+                else:
+                    self.assertEqual(0+0j, xst_values[baseline_a][baseline_b], msg=f'element [{baseline_a}][{baseline_b}] was not in packet, but was written to the XST matrix.')
+
+    def test_invalid_packet(self):
+        collector = XSTCollector()
+
+        # an invalid packet
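+        # (its first byte is 'S' instead of the XST marker 'X')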
+        packet = b'S\x05\x00\x00\x00\x00\x00\x00\x10\x08\x00\x02\xfa\xef\x00f\x00\x00\x0c\x08\x01 \x14\x00\x00\x01!\xd9&z\x1b\xb3' + 288 * b'\x00\x00\x00\x00\x00\x00\x00\x01'
+
+        # this should throw
+        with self.assertRaises(ValueError):
+            collector.process_packet(packet)
+
+        # counters should now be updated
+        self.assertEqual(1, collector.parameters["nof_packets"])
+        self.assertEqual(1, collector.parameters["nof_invalid_packets"])
+
+        self.assertListEqual([0] * collector.MAX_FPGAS, list(collector.parameters["nof_valid_payloads"]))
+        self.assertListEqual([0] * collector.MAX_FPGAS, list(collector.parameters["nof_payload_errors"]))
+
+    def test_payload_error(self):
+        collector = XSTCollector()
+
+        # a valid packet with a payload error
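+        # (relative to the valid packet above, byte 8 of the header differs: 0x14 instead of 0x10, which appears to set the payload-error flag)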
+        packet = b'X\x05\x00\x00\x00\x00\x00\x00\x14\x08\x00\x02\xfa\xef\x00f\x00\x00\x0c\x08\x01 \x14\x00\x00\x01!\xd9&z\x1b\xb3' + 288 * b'\x00\x00\x00\x00\x00\x00\x00\x01'
+
+        # parse it ourselves to extract info nicely
+        fields = XSTPacket(packet)
+        fpga_index = fields.gn_index
+
+        # this should not throw
+        collector.process_packet(packet)
+
+        # counters should now be updated
+        self.assertEqual(1, collector.parameters["nof_packets"])
+        self.assertEqual(0, collector.parameters["nof_invalid_packets"])
+
+        self.assertEqual(0, collector.parameters["nof_valid_payloads"][fpga_index])
+        self.assertEqual(1, collector.parameters["nof_payload_errors"][fpga_index])
+
diff --git a/devices/toolkit/README.md b/devices/toolkit/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..e3fd6c9af3c0c73ed20dc1558588adf12dd07918
--- /dev/null
+++ b/devices/toolkit/README.md
@@ -0,0 +1,42 @@
+# Tango Archiving Framework
+
+The Archiver class in archiver.py defines the methods to manage the archiving of device attributes that Tango allows.
+
+The main components (and the relative Docker containers) are:
+
+- Configuration Manager (container: hdbpp-cm): Device server that assists in adding, modifying, moving, deleting an Attribute to/from the archiving system
+- Event Subscriber (container: hdbpp-es): The EventSubscriber TANGO device server is the archiving system engine. In typical usage, it will subscribe to archive events on request of the ConfigurationManager device. The EventSubscriber is designed to start archiving all the already configured Attributes, even if the ConfigurationManager is not running. Moreover, being a TANGO device, the EventSubscriber configuration can be managed with Jive.
+- Archiving DBMS (container: archiver-maria-db): A dedicated database devoted to storing attribute values.
+- (Optional) HDB++ Viewer (container: hdbpp-viewer): Standalone Java application designed to monitor signals coming from the database
+
+## Archiver creation
+When an Archiver object is created, we can define three of its properties (see the sketch after this list):
+- the ConfigurationManager name (Tango namespace)
+- at least one EventSubscriber name (Tango namespace)
+- the default archiving context for the subscribers. This means that a default archiving strategy will be applied to
+all the attributes. Of course, this strategy can be tuned individually for each attribute if needed.
+The available archiving strategies are ['ALWAYS','RUN','SHUTDOWN','SERVICE']:
+- ALWAYS: always stored
+- RUN: stored during run
+- SHUTDOWN: stored during shutdown
+- SERVICE: stored during maintenance activities
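+
+A minimal sketch of creating an Archiver, assuming archiver.py is importable as `toolkit.archiver` (the device names below are the defaults of its constructor):
+
+```python
+from toolkit.archiver import Archiver
+
+archiver = Archiver(
+    cm_name='archiving/hdbpp/confmanager01',
+    es_name='archiving/hdbpp/eventsubscriber01',
+    context='RUN',
+)
+```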
+
+## Add an attribute
+When adding an attribute to the archiving framework, we must define the following properties:
+- the EventSubscriber name that will take charge of the attribute
+- the archiving strategy (4 options defined above)
+- the attribute polling period (it should have been already defined in TangoDB)
+- the archive event period (MOST IMPORTANT, it defines the rate at which an attribute is archived in the DBMS)
+
+It is important to understand that, when an attribute is successfully added to the EventSubscriber list, the archiving begins without an explicit 'Start' command; rather, it follows the archiving strategy already defined.
+
+The 'Start' command is used instead during a session when an attribute has been paused/stopped for any reason, or has raised some kind of issue.
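+
+A sketch of adding a single attribute; the attribute name is illustrative, and the periods are in ms:
+
+```python
+archiver.add_attribute_to_archiver(
+    'LTS/RECV/1/temperature',   # illustrative domain/family/member/attribute name
+    polling_period=1000,        # must match the polling period already defined in the TangoDB
+    event_period=5000,          # how often a value is pushed to the DBMS
+    strategy='RUN',
+)
+```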
+
+## Difference between Stop and Remove an attribute
+When stopping an attribute's archiving, the framework does not remove it from the list.
+This means that archiving is stopped for the current session, but if the device is restarted, the attribute's archiving will be restarted as well.
+In order to permanently stop the archiving, the attribute must be removed from the attribute list, as sketched below.
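+
+A sketch of the difference, with the same illustrative attribute name:
+
+```python
+# pause archiving for the current session only; it resumes when the device restarts
+archiver.stop_archiving_attribute('LTS/RECV/1/temperature')
+
+# permanently stop archiving by removing the attribute from the list
+archiver.remove_attribute_from_archiver('LTS/RECV/1/temperature')
+```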
+
+## Update an attribute
+If we want to update the archiving properties of an attribute (e.g. the archive event period), there is a dedicated method.
+It must be noted that the update is not instantaneous because, following the framework architecture, the attribute must first be removed from the EventSubscriber list and then re-added with the new properties.
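+
+A sketch, again with an illustrative attribute name:
+
+```python
+# removes and re-adds the attribute under the hood, so expect a delay of a couple of seconds
+archiver.update_archiving_attribute(
+    'LTS/RECV/1/temperature',
+    polling_period=1000,
+    event_period=10000,
+    strategy='RUN',
+)
+```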
diff --git a/devices/toolkit/archiver.py b/devices/toolkit/archiver.py
index 94ce98ce41cc5983834059cf30e08ff7ebf3a8b5..3df98c383cb717bd092268ac8e3632853d64ded7 100644
--- a/devices/toolkit/archiver.py
+++ b/devices/toolkit/archiver.py
@@ -1,9 +1,12 @@
 #! /usr/bin/env python3
 
+import traceback
 from clients.attribute_wrapper import attribute_wrapper
-from tango import DeviceProxy
+from tango import DeviceProxy, AttributeProxy
 from datetime import datetime, timedelta
 
+import time
 from sqlalchemy import create_engine, and_
 from sqlalchemy.orm import sessionmaker
 from .archiver_base import *
@@ -12,32 +15,216 @@ class Archiver():
     """
     The Archiver class implements the basic operations to perform attributes archiving
     """
-    def __init__(self, cm_name: str = 'archiving/hdbpp/confmanager01', es_name: str = 'archiving/hdbpp/eventsubscriber01'):
+    def __init__(self, cm_name: str = 'archiving/hdbpp/confmanager01', es_name: str = 'archiving/hdbpp/eventsubscriber01', context: str = 'RUN'):
         self.cm_name = cm_name
         self.cm = DeviceProxy(cm_name)
+        try:
+            cm_state = self.cm.state() # ping the device server
+            if cm_state == 'FAULT':
+                print('Configuration Manager is in FAULT state')
+                print(self.cm.status())
+                return
+        except Exception:
+            print(traceback.format_exc())
+            return
         self.es_name = es_name
         self.es = DeviceProxy(es_name)
+        self.cm.write_attribute('Context', context)    # Set the default archiving context for all the subscribers
 
-    def add_attribute_to_archiver(self, attribute: str, polling_period: float = 1000, event_period: float = 1000, strategy: str = 'ALWAYS'):
+    def add_attribute_to_archiver(self, attribute_name: str, polling_period: int = 1000, event_period: int = 1000, strategy: str = 'RUN'):
         """
         Takes as input the attribute name, polling period (ms), event period (ms) and archiving strategy, 
         and adds the selected attribute to the subscriber's list of archiving attributes.
         The ConfigurationManager and EventSubscriber devices must be already up and running.
         The archiving-DBMS must be already properly configured.
         """
-        self.cm.write_attribute('SetAttributeName', attribute)
-        self.cm.write_attribute('SetArchiver', self.es_name)
-        self.cm.write_attribute('SetStrategy', strategy)
-        self.cm.write_attribute('SetPollingPeriod', int(polling_period))
-        self.cm.write_attribute('SetPeriodEvent', int(event_period))
-        self.cm.AttributeAdd()
+        if len(attribute_name.split('/')) != 4:
+            raise AttributeFormatException
+        try:
+            self.cm.write_attribute('SetAttributeName', attribute_name)
+            self.cm.write_attribute('SetArchiver', self.es_name)
+            self.cm.write_attribute('SetStrategy', strategy)
+            self.cm.write_attribute('SetPollingPeriod', polling_period)
+            self.cm.write_attribute('SetPeriodEvent', event_period)
+            self.cm.AttributeAdd()
+            print('Attribute %s added to archiving list!' % attribute_name)
+        except Exception as e:
+            if 'already archived' not in str(e).lower():
+                print(traceback.format_exc())
+            else:
+                print('Attribute %s already in archiving list!' % attribute_name)
 
-    def remove_attribute_from_archiver(self, attribute: str):
+    def add_attributes_to_archiver(self, device_name, global_archive_period: int = None, exclude: list = ['Status', 'State']):
+        """
+        Add sequentially all the attributes of the selected device to the event subscriber list, if not already present
+        """
+        d = DeviceProxy(device_name)
+        attrs_list = list(d.get_attribute_list()) # cast to list, otherwise removal is not allowed
+        for a in exclude:
+            if a in attrs_list:
+                attrs_list.remove(a)
+        for a in attrs_list:
+            attr_fullname = str(device_name + '/' + a).lower()
+            attr_proxy = AttributeProxy(attr_fullname)
+            if attr_proxy.is_polled():   # an attribute that is not polled is also not archived
+                try:
+                    if self.es.AttributeList is None or not self.cm.AttributeSearch(a):
+                        polling_period = attr_proxy.get_poll_period()
+                        archive_period = global_archive_period or int(attr_proxy.get_property('archive_period')['archive_period'][0])
+                        self.add_attribute_to_archiver(attr_fullname, polling_period=polling_period,
+                            event_period=archive_period)
+                except Exception:
+                    print(traceback.format_exc())
+
+    def remove_attribute_from_archiver(self, attribute_name: str):
         """
         Stops the data archiving of the attribute passed as input, and remove it from the subscriber's list. 
         """
-        self.cm.AttributeStop(attribute)
-        self.cm.AttributeRemove(attribute)       
+        if len(attribute_name.split('/')) != 4:
+            raise AttributeFormatException
+        try:
+            self.cm.AttributeStop(attribute_name)
+            self.cm.AttributeRemove(attribute_name)
+            print('Attribute %s removed!' % attribute_name)
+        except Exception as e:
+            if 'attribute not found' not in str(e).lower():
+                print(traceback.format_exc())
+            else:
+                print('Attribute %s not found!' % attribute_name)
+    
+    def remove_attributes_by_device(self, device_name: str):
+        """
+        Stops the data archiving of all the attributes of the selected device, and removes them from the
+        subscriber's list
+        """
+        d = DeviceProxy(device_name)
+        attrs_list = d.get_attribute_list()
+        for a in attrs_list:
+            try:
+                attr_fullname = str(device_name + '/' + a).lower()
+                self.remove_attribute_from_archiver(attr_fullname)
+            except Exception:
+                print(traceback.format_exc())
+
+    def start_archiving_attribute(self, attribute_name: str):
+        """
+        Starts the archiving of the attribute passed as input.
+        The attribute must already be present in the subscriber's list
+        """
+        if len(attribute_name.split('/')) != 4:
+            raise AttributeFormatException
+        try:
+            self.cm.AttributeStart(attribute_name)
+        except Exception as e:
+            if 'attribute not found' not in str(e).lower():
+                print(traceback.format_exc())
+            else:
+                print('Attribute %s not found!' % attribute_name)
+    
+    def stop_archiving_attribute(self, attribute_name: str):
+        """
+        Stops the archiving of the attribute passed as input.
+        The attribute must already be present in the subscriber's list
+        """
+        if len(attribute_name.split('/')) != 4:
+            raise AttributeFormatException
+        try:
+            self.cm.AttributeStop(attribute_name)
+        except Exception as e:
+            if 'attribute not found' not in str(e).lower():
+                print(traceback.format_exc())
+            else:
+                print('Attribute %s not found!' % attribute_name)
+    
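+    # Example (hypothetical session): pause and later resume archiving of a
+    # subscribed attribute without removing it from the subscriber's list:
+    #   archiver.stop_archiving_attribute('lts/recv/1/rcu_temperature_r')
+    #   archiver.start_archiving_attribute('lts/recv/1/rcu_temperature_r')
+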
+    def check_and_add_attribute_in_archiving_list(self, attribute_name: str):
+        """
+        Check whether an attribute is in the archiving list, and add it to the
+        event subscriber if it is not present yet.
+        """
+        if len(attribute_name.split('/')) != 4:
+            raise AttributeFormatException
+        try:
+            if self.es.AttributeList is None or not self.cm.AttributeSearch(attribute_name):
+                self.add_attribute_to_archiver(attribute_name)
+        except Exception:
+            print(traceback.format_exc())
+        return attribute_name
+    
+    def update_archiving_attribute(self, attribute_name: str, polling_period: int = 1000, event_period: int = 1000, strategy: str = 'RUN'):
+        """
+        Update the archiving properties of an attribute already in a subscriber's
+        list, by removing it and re-adding it with the new properties.
+        """
+        try:
+            self.remove_attribute_from_archiver(attribute_name)
+            time.sleep(1)
+            self.add_attribute_to_archiver(attribute_name, polling_period, event_period, strategy)
+            time.sleep(1)
+            self.start_archiving_attribute(attribute_name)
+        except Exception:
+            print(traceback.format_exc())
+    
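+    # Example (hypothetical session): re-subscribe an attribute with a faster
+    # 500 ms polling and event period:
+    #   archiver.update_archiving_attribute('lts/recv/1/rcu_temperature_r', polling_period=500, event_period=500)
+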
+    def get_subscriber_attributes(self, es_name: str = None):
+        """
+        Return the list of attributes managed by the given event subscriber
+        (by default, the one this object is attached to).
+        """
+        if es_name is not None:
+            es = DeviceProxy(es_name)
+        else:
+            es = self.es
+        attrs = es.AttributeList or []
+        return attrs
+    
+    def get_subscriber_errors(self, es_name: str = None):
+        """
+        Return a dictionary of the attributes currently in error, mapping
+        AttributeName -> AttributeError.
+        """
+        if es_name is not None:
+            es = DeviceProxy(es_name)
+        else:
+            es = self.es
+        try:
+            attrs = es.AttributeList or []
+            errs = es.AttributeErrorList or []
+            return dict((a, e) for a, e in zip(attrs, errs) if e)
+        except Exception:
+            print('No attribute errors in the subscriber')
+            return {}
+    
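+    # Example (hypothetical session): report the attributes currently in error:
+    #   for attr, err in archiver.get_subscriber_errors().items():
+    #       print(attr, '->', err)
+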
+    def get_attribute_errors(self, attribute_name: str):
+        """
+        Return the error related to the given attribute, or None if the
+        attribute is not in error.
+        """
+        if len(attribute_name.split('/')) != 4:
+            raise AttributeFormatException
+        errs_dict = self.get_subscriber_errors()
+        for e in errs_dict:
+            if attribute_name in e:
+                return errs_dict.get(e)
+        return None
+    
+    def get_subscriber_load(self, use_freq: bool = True, es_name: str = None):
+        """
+        Return the estimated load of an archiver, either as the frequency of
+        recorded events or as the number of subscribed attributes.
+        """
+        if es_name is not None:
+            es = DeviceProxy(es_name)
+        else:
+            es = self.es
+        if use_freq:
+            return str(es.AttributeRecordFreq) + ' events/period'
+        else:
+            return len(es.AttributeList or [])
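+    # Example (hypothetical session): inspect the subscriber's load:
+    #   archiver.get_subscriber_load()                # e.g. '12.0 events/period'
+    #   archiver.get_subscriber_load(use_freq=False)  # number of subscribed attributes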
+    
+class AttributeFormatException(Exception):
+    """
+    Exception that handles wrong attribute naming
+    """
+    def __init__(self, message="Wrong Tango attribute format! Try: domain/family/member/attribute (e.g. LTS/RECV/1/temperature)"):
+        self.message = message
+        super().__init__(self.message)
 
 class Retriever():
     """
diff --git a/devices/toolkit/lts_cold_start.py b/devices/toolkit/lts_cold_start.py
index fb558ff2ce849ab9f844331c117aee122af014fe..47d3243e2064dc39fba8127e33da842acba19416 100644
--- a/devices/toolkit/lts_cold_start.py
+++ b/devices/toolkit/lts_cold_start.py
@@ -60,10 +60,10 @@ def lts_cold_start():
     # Define the LOFAR2.0 specific log format
     configure_logging()
 
-    # Get a reference to the PCC device, do not
+    # Get a reference to the RECV device, do not
     # force a restart of the already running Tango
     # device.
-    pcc = startup("LTS/PCC/1")
+    recv = startup("LTS/RECV/1")
 
     # Getting CLK, RCU & RCU ADCs into proper shape for use by real people.
     #
@@ -86,51 +86,51 @@ def lts_cold_start():
     #
     #
     # Steps 1.1 & 1.2
-    pcc.CLK_off()
+    recv.CLK_off()
     # 2021-04-30, Thomas
     # This should be refactored into a function.
     timeout = 10.0
-    while pcc.CLK_translator_busy_R is True:
+    while recv.CLK_translator_busy_R is True:
         logging.debug("Waiting on \"CLK_translator_busy_R\" to become \"True\"...")
         timeout = timeout - 1.0
         if timeout < 1.0:
-            # Switching the PCC clock off should never take longer than
+            # Switching the RECV clock off should never take longer than
             # 10 seconds.  Here we ran into a timeout.
             # Clean up and raise an exception.
-            pcc.off()
-            raise Exception("After calling \"CLK_off\" a timeout occured while waiting for \"CLK_translator_busy_R\" to become \"True\".  Please investigate the reason why the PCC translator never set \"CLK_translator_busy_R\" to \"True\".  Aborting start-up procedure.")
+            recv.off()
+            raise Exception("After calling \"CLK_off\" a timeout occured while waiting for \"CLK_translator_busy_R\" to become \"True\".  Please investigate the reason why the RECV translator never set \"CLK_translator_busy_R\" to \"True\".  Aborting start-up procedure.")
         sleep(1.0)
 
     # Steps 1.3 & 1.4
-    pcc.CLK_on()
+    recv.CLK_on()
     # Per Paulus this should never take longer than 2 seconds.
     # 2021-04-30, Thomas
     # This should be refactored into a function.
     timeout = 2.0
-    while pcc.CLK_translator_busy_R is True:
+    while recv.CLK_translator_busy_R is True:
         logging.debug("After calling \"CLK_on()\"  Waiting on \"CLK_translator_busy_R\" to become \"True\"...")
         timeout = timeout - 1.0
         if timeout < 1.0:
-            # Switching the PCC clock on should never take longer than
+            # Switching the RECV clock on should never take longer than
             # a couple of seconds.  Here we ran into a timeout.
             # Clean up and raise an exception.
-            pcc.off()
-            raise Exception("After calling \"CLK_on\" a timeout occured while waiting for \"CLK_translator_busy_R\" to become \"True\".  Please investigate the reason why the PCC translator never set \"CLK_translator_busy_R\" to \"True\".  Aborting start-up procedure.")
+            recv.off()
+            raise Exception("After calling \"CLK_on\" a timeout occured while waiting for \"CLK_translator_busy_R\" to become \"True\".  Please investigate the reason why the RECV translator never set \"CLK_translator_busy_R\" to \"True\".  Aborting start-up procedure.")
         sleep(1.0)
 
     # 1.5 Check if CLK_PLL_locked_R == True
     # 2021-04-30, Thomas
     # This should be refactored into a function.
-    clk_locked = pcc.CLK_PLL_locked_R
+    clk_locked = recv.CLK_PLL_locked_R
     if clk_locked is True:
        logging.info("CLK signal is locked.")
     else:
         # CLK signal is not locked
-        clk_i2c_status = pcc.CLK_I2C_STATUS_R
+        clk_i2c_status = recv.CLK_I2C_STATUS_R
         exception_text = "CLK I2C is not working.  Please investigate!  Maybe power cycle subrack to restart CLK board and translator.  Aborting start-up procedure."
         if i2c_status <= 0:
             exception_text = "CLK signal is not locked.  Please investigate!  The subrack probably do not receive clock input or the CLK PCB is broken.  Aborting start-up procedure."
-        pcc.off()
+        recv.off()
         raise Exception(exception_text)
     # Step 1.6
     # Done.
@@ -150,40 +150,40 @@ def lts_cold_start():
     #
     # Step 2.1
     # We have only 8 RCUs in LTS.
-    pcc.RCU_mask_RW = [True, ] * 8
+    recv.RCU_mask_RW = [True, ] * 8
     # Steps 2.2 & 2.3
-    pcc.RCU_off()
+    recv.RCU_off()
     # 2021-04-30, Thomas
     # This should be refactored into a function.
     timeout = 10.0
-    while pcc.RCU_translator_busy_R is True:
+    while recv.RCU_translator_busy_R is True:
         logging.debug("Waiting on \"RCU_translator_busy_R\" to become \"True\"...")
         timeout = timeout - 1.0
         if timeout < 1.0:
             # Switching the RCUs off should never take longer than
             # 10 seconds.  Here we ran into a timeout.
             # Clean up and raise an exception.
-            pcc.off()
-            raise Exception("After calling \"RCU_off\" a timeout occured while waiting for \"RCU_translator_busy_R\" to become \"True\".  Please investigate the reason why the PCC translator never set \"RCU_translator_busy_R\" to \"True\".  Aborting start-up procedure.")
+            recv.off()
+            raise Exception("After calling \"RCU_off\" a timeout occured while waiting for \"RCU_translator_busy_R\" to become \"True\".  Please investigate the reason why the RECV translator never set \"RCU_translator_busy_R\" to \"True\".  Aborting start-up procedure.")
         sleep(1.0)
 
     # Steps 2.4 & 2.5
     # We leave the RCU mask as it is because it got already set for the
     # RCU_off() call.
-    pcc.RCU_on()
+    recv.RCU_on()
     # Per Paulus this should never take longer than 5 seconds.
     # 2021-04-30, Thomas
     # This should be refactored into a function.
     timeout = 5.0
-    while pcc.RCU_translator_busy_R is True:
+    while recv.RCU_translator_busy_R is True:
         logging.debug("After calling \"RCU_on()\"  Waiting on \"RCU_translator_busy_R\" to become \"True\"...")
         timeout = timeout - 1.0
         if timeout < 1.0:
             # Switching the RCUs on should never take longer than
             # a couple of seconds.  Here we ran into a timeout.
             # Clean up and raise an exception.
-            pcc.off()
-            raise Exception("After calling \"RCU_on\" a timeout occured while waiting for \"RCU_translator_busy_R\" to become \"True\".  Please investigate the reason why the PCC translator never set \"RCU_translator_busy_R\" to \"True\".  Aborting start-up procedure.")
+            recv.off()
+            raise Exception("After calling \"RCU_on\" a timeout occured while waiting for \"RCU_translator_busy_R\" to become \"True\".  Please investigate the reason why the RECV translator never set \"RCU_translator_busy_R\" to \"True\".  Aborting start-up procedure.")
         sleep(1.0)
     # Step 2.6
     # Done.
@@ -196,9 +196,9 @@ def lts_cold_start():
     #
     #
     # Steps 3.1 & 3.2
-    rcu_mask = pcc.RCU_mask_RW
-    adc_locked = numpy.array(pcc.RCU_ADC_lock_R)
-    for rcu, i2c_status in enumerate(pcc.RCU_I2C_STATUS_R):
+    rcu_mask = recv.RCU_mask_RW
+    adc_locked = numpy.array(recv.RCU_ADC_lock_R)
+    for rcu, i2c_status in enumerate(recv.RCU_I2C_STATUS_R):
         if i2c_status == 0:
             rcu_mask[rcu] = True
             logging.info("RCU #{} is available.".format(rcu))
@@ -209,7 +209,7 @@ def lts_cold_start():
             # The RCU's I2C bus is not working.
             rcu_mask[rcu] = False
             logging.error("RCU #{}'s I2C is not working.  Please investigate!  Disabling RCU #{} to avoid damage.".format(rcu, rcu))
-    pcc.RCU_mask_RW = rcu_mask
+    recv.RCU_mask_RW = rcu_mask
     # Step 3.3
     # Done
 
diff --git a/devices/toolkit/startup.py b/devices/toolkit/startup.py
index e1cc092b01b3714d80f0b8ca827856bde451c78b..66a8d2c496fc7e86d0d13086336e900fc1a1bfaf 100644
--- a/devices/toolkit/startup.py
+++ b/devices/toolkit/startup.py
@@ -7,7 +7,7 @@ logger = logging.getLogger()
 def startup(device: str, force_restart: bool) -> tango.DeviceProxy:
     '''
     Start a LOFAR Tango device:
-    pcc = startup(device = 'LTS/PCC/1', force_restart = False)
+    recv = startup(device = 'LTS/RECV/1', force_restart = False)
     '''
     proxy = tango.DeviceProxy(device)
     state = proxy.state()
diff --git a/devices/tox.ini b/devices/tox.ini
index 4869bad0462b461a28babab5ae50375b957b44fa..59d2347f3ff42ccb084033aea67d478fd63513cb 100644
--- a/devices/tox.ini
+++ b/devices/tox.ini
@@ -13,7 +13,8 @@ setenv =
    OS_STDOUT_CAPTURE=1
    OS_STDERR_CAPTURE=1
    OS_TEST_TIMEOUT=60
-deps = -r{toxinidir}/test-requirements.txt
+deps =
+    -r{toxinidir}/test-requirements.txt
     -r{toxinidir}/../docker-compose/lofar-device-base/lofar-requirements.txt
 commands = stestr run {posargs}
 
@@ -38,9 +39,9 @@ commands =
 ;             It thus matters what interfaces Docker will bind our
 ;             containers to, not what our containers listen on.
 commands =
-    bandit -r devices/ clients/ common/ examples/ util/ -n5 -ll -s B104
+    bandit -r devices/ -n5 -ll -s B104
 
 [flake8]
 filename = *.py,.stestr.conf,.txt
-select = W292
+select = W292,B601,B602,T100,M001
 exclude=.tox,.egg-info
diff --git a/docker-compose/Makefile b/docker-compose/Makefile
index 09eb760123bc4687207609c3ad94c740a72c317c..b964653b1f1d0764c254b9e68c0d97ea8f3ef396 100644
--- a/docker-compose/Makefile
+++ b/docker-compose/Makefile
@@ -29,6 +29,12 @@ ifeq (start,$(firstword $(MAKECMDGOALS)))
     SERVICE_TARGET = true
 else ifeq (stop,$(firstword $(MAKECMDGOALS)))
     SERVICE_TARGET = true
+else ifeq (restart,$(firstword $(MAKECMDGOALS)))
+    SERVICE_TARGET = true
+else ifeq (build,$(firstword $(MAKECMDGOALS)))
+    SERVICE_TARGET = true
+else ifeq (build-nocache,$(firstword $(MAKECMDGOALS)))
+    SERVICE_TARGET = true
 else ifeq (attach,$(firstword $(MAKECMDGOALS)))
     SERVICE_TARGET = true
     ifndef NETWORK_MODE
@@ -115,10 +121,11 @@ DOCKER_COMPOSE_ARGS := DISPLAY=$(DISPLAY) \
     TANGO_LOFAR_CONTAINER_DIR=${TANGO_LOFAR_CONTAINER_DIR} MYSQL_HOST=$(MYSQL_HOST) \
     CONTAINER_NAME_PREFIX=$(CONTAINER_NAME_PREFIX) \
     COMPOSE_IGNORE_ORPHANS=true \
-    CONTAINER_EXECUTION_UID=$(shell id -u)
+    CONTAINER_EXECUTION_UID=$(shell id -u) \
+    DOCKER_GID=$(shell getent group docker | cut -d: -f 3)
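+# DOCKER_GID is the host's docker group id; it is passed on so that containers
+# which mount /var/run/docker.sock (see device-docker.yml) can control their
+# sibling containers.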
 
 
-.PHONY: up down minimal start stop status clean pull help
+.PHONY: up down minimal start stop restart build build-nocache status clean pull help
 .DEFAULT_GOAL := help
 
 pull: ## pull the images from the Docker hub
@@ -126,8 +133,13 @@ pull: ## pull the images from the Docker hub
 
 build: ## rebuild images
 	# docker-compose does not support build dependencies, so manage those here
-	$(DOCKER_COMPOSE_ARGS) docker-compose -f lofar-device-base.yml -f networks.yml build
-	$(DOCKER_COMPOSE_ARGS) docker-compose $(COMPOSE_FILE_ARGS) build
+	$(DOCKER_COMPOSE_ARGS) docker-compose -f lofar-device-base.yml -f networks.yml build --progress=plain
+	$(DOCKER_COMPOSE_ARGS) docker-compose $(COMPOSE_FILE_ARGS) build --progress=plain $(SERVICE)
+
+build-nocache: ## rebuild images from scratch
+	# docker-compose does not support build dependencies, so manage those here
+	$(DOCKER_COMPOSE_ARGS) docker-compose -f lofar-device-base.yml -f networks.yml build --progress=plain
+	$(DOCKER_COMPOSE_ARGS) docker-compose $(COMPOSE_FILE_ARGS) build --no-cache --progress=plain $(SERVICE)
 
 up: minimal  ## start the base TANGO system and prepare all services
 	$(DOCKER_COMPOSE_ARGS) docker-compose $(COMPOSE_FILE_ARGS) up --no-start
@@ -152,6 +164,11 @@ start: up ## start a service (usage: make start <servicename>)
 
 stop:  ## stop a service (usage: make stop <servicename>)
 	$(DOCKER_COMPOSE_ARGS) docker-compose $(COMPOSE_FILE_ARGS) stop $(SERVICE)
+	$(DOCKER_COMPOSE_ARGS) docker-compose $(COMPOSE_FILE_ARGS) rm -f $(SERVICE)
+
+restart: ## restart a service (usage: make restart <servicename>)
+	make stop $(SERVICE) # cannot use dependencies, as that would allow start and stop to run in parallel.
+	make start $(SERVICE)
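+# Note: 'make restart <servicename>' stops and removes the service's container,
+# then re-creates and starts it (see the 'stop' and 'start' targets above).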
 
 attach:  ## attach a service to an existing Tango network
 	$(DOCKER_COMPOSE_ARGS) docker-compose $(ATTACH_COMPOSE_FILE_ARGS) up -d $(SERVICE)
@@ -162,8 +179,9 @@ status:  ## show the container status
 images:  ## show the container images
 	$(DOCKER_COMPOSE_ARGS) docker-compose $(COMPOSE_FILE_ARGS) images
 
-clean: down  ## clear all TANGO database entries
+clean: down  ## clear all TANGO database entries, and all containers
 	docker volume rm $(BASEDIR)_tangodb
+	$(DOCKER_COMPOSE_ARGS) docker-compose $(COMPOSE_FILE_ARGS) rm -f
 
 help:   ## show this help.
 	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
diff --git a/docker-compose/archiver.yml b/docker-compose/archiver.yml
index 8006ece3b86f0013a5eedc1e066dc4c2f07b73af..5204b52c0ffd05fcee800d0f7faebc9345628a48 100644
--- a/docker-compose/archiver.yml
+++ b/docker-compose/archiver.yml
@@ -6,6 +6,8 @@ services:
     container_name: archiver-maria-db
     networks:
       - control
+    ports:
+      - "3307:3306/tcp"
     depends_on:
       - databaseds
     environment:
diff --git a/docker-compose/device-docker.yml b/docker-compose/device-docker.yml
new file mode 100644
index 0000000000000000000000000000000000000000..7ed5ecf40dd02a0a9d39941c144dc9958e2ed794
--- /dev/null
+++ b/docker-compose/device-docker.yml
@@ -0,0 +1,44 @@
+#
+# Docker compose file that launches the Docker device server, which monitors
+# and controls the Docker containers that run on the station.
+#
+# Defines:
+#   - device-docker: the Docker device server
+#
+# Requires:
+#   - lofar-device-base.yml
+#
+version: '2'
+
+services:
+  device-docker:
+    image: device-docker
+    # build explicitly, as docker-compose does not understand a local image
+    # being shared among services.
+    build:
+        context: lofar-device-base
+        args:
+            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+    container_name: ${CONTAINER_NAME_PREFIX}device-docker
+    networks:
+      - control
+    ports:
+      - "5705:5705" # unique port for this DS
+    volumes:
+      - ${TANGO_LOFAR_CONTAINER_MOUNT}
+      - /var/run/docker.sock:/var/run/docker.sock:rw # we want to control our sibling containers, NOT do docker-in-docker (dind)
+    user: ${CONTAINER_EXECUTION_UID}:${DOCKER_GID} # user that starts this container by definition has access rights to docker
+    environment:
+      - TANGO_HOST=${TANGO_HOST}
+    entrypoint:
+      - /usr/local/bin/wait-for-it.sh
+      - ${TANGO_HOST}
+      - --timeout=30
+      - --strict
+      - --
+      # configure CORBA to _listen_ on 0:port, but tell others we're _reachable_ through ${HOSTNAME}:port, since CORBA
+      # can't know about our Docker port forwarding
+      - python3 -u ${TANGO_LOFAR_CONTAINER_DIR}/devices/devices/docker_device.py LTS -v -ORBendPoint giop:tcp:0:5705 -ORBendPointPublish giop:tcp:${HOSTNAME}:5705
+    restart: on-failure
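+# Like the other device servers, this service can be started through the
+# Makefile, e.g. 'make start device-docker'.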
diff --git a/docker-compose/device-pcc.yml b/docker-compose/device-recv.yml
similarity index 83%
rename from docker-compose/device-pcc.yml
rename to docker-compose/device-recv.yml
index ebf71352df76969e879a5d73f022705a202ab925..f553ba61476557fdeef1cdd3757d96184e8a5c76 100644
--- a/docker-compose/device-pcc.yml
+++ b/docker-compose/device-recv.yml
@@ -13,19 +13,19 @@
 version: '2'
 
 services:
-  device-pcc:
-    image: device-pcc
+  device-recv:
+    image: device-recv
     # build explicitly, as docker-compose does not understand a local image
     # being shared among services.
     build:
         context: lofar-device-base
         args:
             SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
-    container_name: ${CONTAINER_NAME_PREFIX}device-pcc
+    container_name: ${CONTAINER_NAME_PREFIX}device-recv
     networks:
       - control
     ports:
-      - "5700:5700" # unique port for this DS
+      - "5707:5707" # unique port for this DS
     volumes:
         - ${TANGO_LOFAR_CONTAINER_MOUNT}
     environment:
@@ -38,5 +38,5 @@ services:
       - --
       # configure CORBA to _listen_ on 0:port, but tell others we're _reachable_ through ${HOSTNAME}:port, since CORBA
       # can't know about our Docker port forwarding
-      - python3 -u ${TANGO_LOFAR_CONTAINER_DIR}/devices/devices/pcc.py LTS -v -ORBendPoint giop:tcp:0:5700 -ORBendPointPublish giop:tcp:${HOSTNAME}:5700
+      - python3 -u ${TANGO_LOFAR_CONTAINER_DIR}/devices/devices/recv.py LTS -v -ORBendPoint giop:tcp:0:5707 -ORBendPointPublish giop:tcp:${HOSTNAME}:5707
     restart: on-failure
diff --git a/docker-compose/device-sst.yml b/docker-compose/device-sst.yml
index c620ba206b6091b1544582e62128575fc231b03c..a7f2e867bc4075d002d764189ef3906ff81fb12a 100644
--- a/docker-compose/device-sst.yml
+++ b/docker-compose/device-sst.yml
@@ -27,6 +27,7 @@ services:
         - data
     ports:
         - "5001:5001/udp" # port to receive SST UDP packets on
+        - "5101:5101/tcp" # port to emit SST TCP packets on
         - "5702:5702" # unique port for this DS
     volumes:
         - ${TANGO_LOFAR_CONTAINER_MOUNT}
diff --git a/docker-compose/device-xst.yml b/docker-compose/device-xst.yml
new file mode 100644
index 0000000000000000000000000000000000000000..8b0ba2d77cffe33caf4130f29ee83e1db1911f2b
--- /dev/null
+++ b/docker-compose/device-xst.yml
@@ -0,0 +1,45 @@
+#
+# Docker compose file that launches the XST device server.
+#
+# Defines:
+#   - device-xst: the XST device server
+#
+# Requires:
+#   - lofar-device-base.yml
+#
+version: '2'
+
+services:
+  device-xst:
+    image: device-xst
+    # build explicitly, as docker-compose does not understand a local image
+    # being shared among services.
+    build:
+        context: lofar-device-base
+        args:
+            SOURCE_IMAGE: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}-tango-itango:${TANGO_ITANGO_VERSION}
+    container_name: ${CONTAINER_NAME_PREFIX}device-xst
+    networks:
+        - control
+        - data
+    ports:
+        - "5002:5002/udp" # port to receive XST UDP packets on
+        - "5102:5102/tcp" # port to emit XST TCP packets on
+        - "5706:5706" # unique port for this DS
+    volumes:
+        - ${TANGO_LOFAR_CONTAINER_MOUNT}
+    environment:
+      - TANGO_HOST=${TANGO_HOST}
+    entrypoint:
+      - /usr/local/bin/wait-for-it.sh
+      - ${TANGO_HOST}
+      - --timeout=30
+      - --strict
+      - --
+      # configure CORBA to _listen_ on 0:port, but tell others we're _reachable_ through ${HOSTNAME}:port, since CORBA
+      # can't know about our Docker port forwarding
+      - python3 -u ${TANGO_LOFAR_CONTAINER_DIR}/devices/devices/sdp/xst.py LTS -v -ORBendPoint giop:tcp:0:5706 -ORBendPointPublish giop:tcp:${HOSTNAME}:5706
+    restart: on-failure
diff --git a/docker-compose/grafana.yml b/docker-compose/grafana.yml
index 1a9b3ee77aa53fcef367e1159c1b8623971ad5d7..b9060c70a53ecfb4d4027ebe1e78d9fe658050f6 100644
--- a/docker-compose/grafana.yml
+++ b/docker-compose/grafana.yml
@@ -6,9 +6,9 @@
 #
 version: '2'
 
-volumes:
-  grafana-data: {}
-  grafana-configs: {}
+#volumes:
+#  grafana-data: {}
+#  grafana-configs: {}
 
 services:
   grafana:
@@ -18,9 +18,9 @@ services:
     container_name: ${CONTAINER_NAME_PREFIX}grafana
     networks:
       - control
-    volumes:
-      - grafana-data:/var/lib/grafana
-      - grafana-configs:/etc/grafana
+    #volumes:
+    #  - grafana-data:/var/lib/grafana
+    #  - grafana-configs:/etc/grafana
     ports:
       - "3000:3000"
     restart: unless-stopped
diff --git a/docker-compose/grafana/Dockerfile b/docker-compose/grafana/Dockerfile
index d8d13e48da742b9519e11ee7e32a38fc173f21cc..bc766bcd3b0d71f346fd70e34fa27dd91fc27b04 100644
--- a/docker-compose/grafana/Dockerfile
+++ b/docker-compose/grafana/Dockerfile
@@ -1,25 +1,8 @@
 FROM grafana/grafana
 
-# To populate the Grafana configuration:
-#
-# Datasources (thanks to https://rmoff.net/2017/08/08/simple-export/import-of-data-sources-in-grafana/):
-#
-# Import: 
-#
-# for i in data_sources/*; do \
-#     curl -X "POST" "http://localhost:3000/api/datasources" \
-#     -H "Content-Type: application/json" \
-#      --user admin:admin \
-#      --data-binary @$i
-# done
-#
-# Export:
-#
-# mkdir -p data_sources && curl -s "http://localhost:3000/api/datasources"  -u admin:admin|jq -c -M '.[]'|split -l 1 - data_sources/
-#
-# Dashboards:
-#
-# Import: http://localhost:3000/dashboard/import
-# Export: "share" icon next to dashboard title -> "Export"
-# 
+COPY grafana.ini /etc/grafana/
 
+# Add default configuration through provisioning (see https://grafana.com/docs/grafana/latest/administration/provisioning)
+COPY datasources /etc/grafana/provisioning/datasources/
+COPY dashboards /var/lib/grafana/dashboards/
+COPY stationcontrol-dashboards.yaml /etc/grafana/provisioning/dashboards/
diff --git a/docker-compose/grafana/dashboards/docker.json b/docker-compose/grafana/dashboards/docker.json
new file mode 100644
index 0000000000000000000000000000000000000000..0d486f65e35a9a1c4a5ca024d6a89b0a46991ff2
--- /dev/null
+++ b/docker-compose/grafana/dashboards/docker.json
@@ -0,0 +1,108 @@
+{
+  "annotations": {
+    "list": [
+      {
+        "builtIn": 1,
+        "datasource": "-- Grafana --",
+        "enable": true,
+        "hide": true,
+        "iconColor": "rgba(0, 211, 255, 1)",
+        "name": "Annotations & Alerts",
+        "target": {
+          "limit": 100,
+          "matchAny": false,
+          "tags": [],
+          "type": "dashboard"
+        },
+        "type": "dashboard"
+      }
+    ]
+  },
+  "description": "",
+  "editable": true,
+  "gnetId": null,
+  "graphTooltip": 0,
+  "id": 2,
+  "links": [],
+  "panels": [
+    {
+      "datasource": "Prometheus",
+      "description": "Which Docker containers are running on the station.",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "custom": {
+            "fillOpacity": 90,
+            "lineWidth": 0
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "red",
+                "value": null
+              },
+              {
+                "color": "green",
+                "value": 1
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 14,
+        "w": 19,
+        "x": 0,
+        "y": 0
+      },
+      "id": 2,
+      "options": {
+        "alignValue": "center",
+        "legend": {
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "mergeValues": true,
+        "rowHeight": 0.9,
+        "showValue": "never",
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/docker/1\",name=~\".*_R\",name!=\"version_R\"}",
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{name}}",
+          "refId": "A"
+        }
+      ],
+      "title": "Docker Containers",
+      "transformations": [],
+      "type": "state-timeline"
+    }
+  ],
+  "schemaVersion": 30,
+  "style": "dark",
+  "tags": [],
+  "templating": {
+    "list": []
+  },
+  "time": {
+    "from": "now-30m",
+    "to": "now"
+  },
+  "timepicker": {},
+  "timezone": "",
+  "title": "Docker",
+  "uid": "buKx9ZHnk",
+  "version": 1
+}
diff --git a/docker-compose/grafana/dashboards/home.json b/docker-compose/grafana/dashboards/home.json
new file mode 100644
index 0000000000000000000000000000000000000000..51ed27cc87098fa85f7563d813c6807eb18a7b3d
--- /dev/null
+++ b/docker-compose/grafana/dashboards/home.json
@@ -0,0 +1,2581 @@
+{
+  "annotations": {
+    "list": [
+      {
+        "builtIn": 1,
+        "datasource": "-- Grafana --",
+        "enable": true,
+        "hide": true,
+        "iconColor": "rgba(0, 211, 255, 1)",
+        "name": "Annotations & Alerts",
+        "target": {
+          "limit": 100,
+          "matchAny": false,
+          "tags": [],
+          "type": "dashboard"
+        },
+        "type": "dashboard"
+      }
+    ]
+  },
+  "editable": true,
+  "gnetId": null,
+  "graphTooltip": 0,
+  "id": 3,
+  "links": [],
+  "panels": [
+    {
+      "collapsed": false,
+      "datasource": null,
+      "gridPos": {
+        "h": 1,
+        "w": 24,
+        "x": 0,
+        "y": 0
+      },
+      "id": 15,
+      "panels": [],
+      "title": "Devices",
+      "type": "row"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "Progress of station initialisation",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "yellow",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 1
+              },
+              {
+                "color": "green",
+                "value": 100
+              }
+            ]
+          },
+          "unit": "percent"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 6,
+        "w": 4,
+        "x": 0,
+        "y": 1
+      },
+      "id": 43,
+      "options": {
+        "reduceOptions": {
+          "calcs": [
+            "lastNotNull"
+          ],
+          "fields": "",
+          "values": false
+        },
+        "showThresholdLabels": false,
+        "showThresholdMarkers": false,
+        "text": {}
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/boot/1\",name=\"initialisation_progress_R\"}",
+          "interval": "",
+          "legendFormat": "",
+          "refId": "A"
+        }
+      ],
+      "title": "Station Initialisation",
+      "type": "gauge"
+    },
+    {
+      "datasource": "Prometheus",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "mappings": [
+            {
+              "options": {
+                "0": {
+                  "color": "green",
+                  "index": 1,
+                  "text": "ON"
+                },
+                "1": {
+                  "color": "red",
+                  "index": 3,
+                  "text": "OFF"
+                },
+                "7": {
+                  "color": "yellow",
+                  "index": 2,
+                  "text": "STANDBY"
+                },
+                "8": {
+                  "color": "red",
+                  "index": 0,
+                  "text": "FAULT"
+                },
+                "11": {
+                  "color": "red",
+                  "index": 4,
+                  "text": "ALARM"
+                }
+              },
+              "type": "value"
+            }
+          ],
+          "noValue": "???",
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 80
+              }
+            ]
+          },
+          "unit": "string"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 9,
+        "w": 6,
+        "x": 4,
+        "y": 1
+      },
+      "id": 4,
+      "options": {
+        "colorMode": "background",
+        "graphMode": "none",
+        "justifyMode": "auto",
+        "orientation": "horizontal",
+        "reduceOptions": {
+          "calcs": [
+            "lastNotNull"
+          ],
+          "fields": "",
+          "values": false
+        },
+        "text": {},
+        "textMode": "value_and_name"
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "device_attribute{label=\"State\",device=~\"lts/.*/1\"}",
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{device}}",
+          "refId": "A"
+        }
+      ],
+      "title": "Device States",
+      "type": "stat"
+    },
+    {
+      "datasource": "ELK logs",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "auto",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "off"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 1
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 9,
+        "w": 12,
+        "x": 10,
+        "y": 1
+      },
+      "id": 32,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "list",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "targets": [
+        {
+          "alias": "",
+          "bucketAggs": [
+            {
+              "field": "extra.tango_device.keyword",
+              "id": "2",
+              "settings": {
+                "min_doc_count": "0",
+                "order": "desc",
+                "orderBy": "_term",
+                "size": "10"
+              },
+              "type": "terms"
+            },
+            {
+              "field": "@timestamp",
+              "id": "3",
+              "settings": {
+                "interval": "auto",
+                "min_doc_count": "0",
+                "trimEdges": "0"
+              },
+              "type": "date_histogram"
+            }
+          ],
+          "metrics": [
+            {
+              "id": "1",
+              "type": "count"
+            }
+          ],
+          "query": "level:(ERROR or FATAL)",
+          "refId": "A",
+          "timeField": "@timestamp"
+        }
+      ],
+      "title": "Errors",
+      "type": "timeseries"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "custom": {
+            "align": "auto",
+            "displayMode": "auto"
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "red",
+                "value": null
+              },
+              {
+                "color": "green",
+                "value": 100
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 3,
+        "w": 4,
+        "x": 0,
+        "y": 7
+      },
+      "id": 44,
+      "options": {
+        "showHeader": false
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/boot/1\",name=\"initialisation_status_R\"}",
+          "instant": true,
+          "interval": "",
+          "legendFormat": "",
+          "refId": "A"
+        }
+      ],
+      "title": "Initialisation status",
+      "transformations": [
+        {
+          "id": "labelsToFields",
+          "options": {}
+        },
+        {
+          "id": "organize",
+          "options": {
+            "excludeByName": {
+              "Time": true,
+              "Value": true,
+              "device": true,
+              "device_attribute{device=\"lts/boot/1\", dim_x=\"1\", dim_y=\"0\", instance=\"tango-prometheus-exporter:8000\", job=\"tango\", label=\"initialisation_status_R\", name=\"initialisation_status_R\", str_value=\"Initialisation completed\", type=\"string\", x=\"0\", y=\"0\"}": true,
+              "dim_x": true,
+              "dim_y": true,
+              "instance": true,
+              "job": true,
+              "label": true,
+              "name": true,
+              "type": true,
+              "x": true,
+              "y": true
+            },
+            "indexByName": {
+              "Time": 0,
+              "Value": 5,
+              "device": 1,
+              "dim_x": 2,
+              "dim_y": 3,
+              "instance": 4,
+              "job": 6,
+              "label": 7,
+              "name": 8,
+              "str_value": 9,
+              "type": 10,
+              "x": 11,
+              "y": 12
+            },
+            "renameByName": {
+              "name": "",
+              "str_value": "status"
+            }
+          }
+        }
+      ],
+      "type": "table"
+    },
+    {
+      "collapsed": false,
+      "datasource": null,
+      "gridPos": {
+        "h": 1,
+        "w": 24,
+        "x": 0,
+        "y": 10
+      },
+      "id": 17,
+      "panels": [],
+      "title": "RECV",
+      "type": "row"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "auto",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "off"
+            }
+          },
+          "mappings": [],
+          "min": 0,
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 80
+              }
+            ]
+          },
+          "unit": "celsius"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 0,
+        "y": 11
+      },
+      "id": 22,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/recv/1\",name=\"RCU_temperature_R\"} - 273.15",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{x}}",
+          "refId": "A"
+        }
+      ],
+      "title": "RCU temperatures",
+      "transformations": [],
+      "type": "timeseries"
+    },
+    {
+      "datasource": "Prometheus",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "transparent",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 0
+              },
+              {
+                "color": "green",
+                "value": 3
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 6,
+        "x": 5,
+        "y": 11
+      },
+      "id": 21,
+      "options": {
+        "colorMode": "background",
+        "graphMode": "area",
+        "justifyMode": "auto",
+        "orientation": "auto",
+        "reduceOptions": {
+          "calcs": [
+            "lastNotNull"
+          ],
+          "fields": "",
+          "values": false
+        },
+        "text": {},
+        "textMode": "name"
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "sum by (x)(1 + (device_attribute{device=\"lts/recv/1\",name=\"RCU_ADC_lock_R\"} == bool 129)) * on(x) device_attribute{device=\"lts/recv/1\",name=\"RCU_mask_RW\"} - 3",
+          "interval": "",
+          "legendFormat": "{{y}}",
+          "refId": "A"
+        }
+      ],
+      "title": "RCU ADC lock",
+      "type": "stat"
+    },
+    {
+      "datasource": "Prometheus",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "transparent",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 1
+              },
+              {
+                "color": "green",
+                "value": 2
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 6,
+        "x": 11,
+        "y": 11
+      },
+      "id": 25,
+      "options": {
+        "colorMode": "background",
+        "graphMode": "area",
+        "justifyMode": "auto",
+        "orientation": "auto",
+        "reduceOptions": {
+          "calcs": [
+            "lastNotNull"
+          ],
+          "fields": "",
+          "values": false
+        },
+        "text": {},
+        "textMode": "name"
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "(2 - device_attribute{device=\"lts/recv/1\",name=\"RCU_I2C_STATUS_R\"}) * on(x) device_attribute{device=\"lts/recv/1\",name=\"RCU_mask_RW\"}",
+          "interval": "",
+          "legendFormat": "{{y}}",
+          "refId": "A"
+        }
+      ],
+      "title": "RCU I2C status",
+      "type": "stat"
+    },
+    {
+      "datasource": "Prometheus",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 1
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 5,
+        "w": 3,
+        "x": 17,
+        "y": 11
+      },
+      "id": 24,
+      "options": {
+        "colorMode": "background",
+        "graphMode": "none",
+        "justifyMode": "auto",
+        "orientation": "auto",
+        "reduceOptions": {
+          "calcs": [
+            "lastNotNull"
+          ],
+          "fields": "",
+          "values": false
+        },
+        "text": {},
+        "textMode": "name"
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "1-device_attribute{device=\"lts/recv/1\",name=\"CLK_Enable_PWR_R\"}",
+          "interval": "",
+          "legendFormat": "Power",
+          "refId": "A"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/recv/1\",name=\"CLK_I2C_STATUS_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "I2C",
+          "refId": "B"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/recv/1\",name=\"CLK_PLL_error_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "PLL",
+          "refId": "C"
+        },
+        {
+          "exemplar": true,
+          "expr": "1-device_attribute{device=\"lts/recv/1\",name=\"CLK_PLL_locked_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "PLL Lock",
+          "refId": "D"
+        }
+      ],
+      "title": "Clock",
+      "type": "stat"
+    },
+    {
+      "collapsed": false,
+      "datasource": null,
+      "gridPos": {
+        "h": 1,
+        "w": 24,
+        "x": 0,
+        "y": 19
+      },
+      "id": 46,
+      "panels": [],
+      "title": "Uniboard 2",
+      "type": "row"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "Temperature sensors of each node on each board",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic",
+            "seriesBy": "max"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "never",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "line"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 85
+              }
+            ]
+          },
+          "unit": "celsius"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 0,
+        "y": 20
+      },
+      "id": 48,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_FPGA_POL_CORE_TEMP_R\"}",
+          "interval": "",
+          "legendFormat": "Core board {{x}} node {{y}}",
+          "refId": "A"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_FPGA_POL_ERAM_TEMP_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "ERAM board {{x}} node {{y}}",
+          "refId": "B"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_FPGA_POL_RXGXB_TEMP_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "TrRx board {{x}} node {{y}}",
+          "refId": "C"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_FPGA_POL_HXGB_TEMP_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "TrHx board {{x}} node {{y}}",
+          "refId": "D"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_FPGA_POL_PGM_TEMP_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "IO board {{x}} node {{y}}",
+          "refId": "E"
+        },
+        {
+          "hide": false,
+          "refId": "F"
+        }
+      ],
+      "title": "Uniboard2 Node Temperatures",
+      "type": "timeseries"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "Temperature sensors of the power supply on each board",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic",
+            "seriesBy": "max"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "never",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "line"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 85
+              }
+            ]
+          },
+          "unit": "celsius"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 5,
+        "y": 20
+      },
+      "id": 50,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_POL_QSFP_N01_TEMP_R\"}",
+          "interval": "",
+          "legendFormat": "QSFP N01 board {{x}} ",
+          "refId": "A"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_POL_QSFP_N23_TEMP_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "QSFP N23 board {{x}}",
+          "refId": "B"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_POL_SWITCH_1V2_TEMP_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "Switch 1v2 board {{x}}",
+          "refId": "C"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_POL_SWITCH_PHY_TEMP_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "Switch PHY board {{x}}",
+          "refId": "D"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_POL_CLOCK_TEMP_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "Clock PWR board {{x}}",
+          "refId": "E"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_DC_DC_48V_12V_TEMP_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "DC-DC board {{x}}",
+          "refId": "F"
+        }
+      ],
+      "title": "Uniboard2 Power Supply Temperatures",
+      "type": "timeseries"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "Voltage sensors of each node on each board",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "axisSoftMax": 2,
+            "axisSoftMin": 0,
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "never",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "line"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 85
+              }
+            ]
+          },
+          "unit": "volt"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 10,
+        "y": 20
+      },
+      "id": 49,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_FPGA_POL_CORE_VOUT_R\"}",
+          "interval": "",
+          "legendFormat": "Core board {{x}} node {{y}}",
+          "refId": "A"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_FPGA_POL_ERAM_VOUT_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "ERAM board {{x}} node {{y}}",
+          "refId": "B"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_FPGA_POL_RXGXB_VOUT_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "TrRx board {{x}} node {{y}}",
+          "refId": "C"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_FPGA_POL_HXGB_VOUT_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "TrHx board {{x}} node {{y}}",
+          "refId": "D"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_FPGA_POL_PGM_VOUT_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "IO board {{x}} node {{y}}",
+          "refId": "E"
+        }
+      ],
+      "title": "Uniboard2 Voltages",
+      "type": "timeseries"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "Voltage sensors of the power supply on each board",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic",
+            "seriesBy": "max"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "axisSoftMin": 0,
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "never",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "line"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 85
+              }
+            ]
+          },
+          "unit": "volt"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 15,
+        "y": 20
+      },
+      "id": 51,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_POL_QSFP_N01_VOUT_R\"}",
+          "interval": "",
+          "legendFormat": "QSFP N01 board {{x}} ",
+          "refId": "A"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_POL_QSFP_N23_VOUT_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "QSFP N23 board {{x}}",
+          "refId": "B"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_POL_SWITCH_1V2_VOUT_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "Switch 1v2 board {{x}}",
+          "refId": "C"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_POL_SWITCH_PHY_VOUT_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "Switch PHY board {{x}}",
+          "refId": "D"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_POL_CLOCK_VOUT_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "Clock PWR board {{x}}",
+          "refId": "E"
+        },
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/unb2/1\",name=\"UNB2_DC_DC_48V_12V_VOUT_R\"}",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "DC-DC board {{x}}",
+          "refId": "F"
+        }
+      ],
+      "title": "Uniboard2 Power Supply Voltages",
+      "type": "timeseries"
+    },
+    {
+      "collapsed": false,
+      "datasource": null,
+      "gridPos": {
+        "h": 1,
+        "w": 24,
+        "x": 0,
+        "y": 28
+      },
+      "id": 19,
+      "panels": [],
+      "title": "SDP",
+      "type": "row"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "auto",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "off"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 80
+              }
+            ]
+          },
+          "unit": "celsius"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 0,
+        "y": 29
+      },
+      "id": 5,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/sdp/1\",name=\"FPGA_temp_R\"}",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{x}}",
+          "refId": "A"
+        }
+      ],
+      "title": "FPGA temperatures",
+      "transformations": [],
+      "type": "timeseries"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "transparent",
+                "value": null
+              },
+              {
+                "color": "green",
+                "value": 50
+              },
+              {
+                "color": "red",
+                "value": 100
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 5,
+        "y": 29
+      },
+      "id": 11,
+      "options": {
+        "colorMode": "background",
+        "graphMode": "area",
+        "justifyMode": "auto",
+        "orientation": "auto",
+        "reduceOptions": {
+          "calcs": [
+            "lastNotNull"
+          ],
+          "fields": "",
+          "values": false
+        },
+        "text": {},
+        "textMode": "name"
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "(50+50*device_attribute{device=\"lts/sdp/1\",name=\"TR_fpga_communication_error_R\"}) * on(x) device_attribute{device=\"lts/sdp/1\",name=\"TR_fpga_mask_R\"}",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{x}}",
+          "refId": "A"
+        }
+      ],
+      "title": "FPGA communication",
+      "transformations": [],
+      "type": "stat"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "transparent",
+                "value": null
+              },
+              {
+                "color": "green",
+                "value": 50
+              },
+              {
+                "color": "red",
+                "value": 100
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 10,
+        "y": 29
+      },
+      "id": 9,
+      "options": {
+        "colorMode": "background",
+        "graphMode": "area",
+        "justifyMode": "auto",
+        "orientation": "auto",
+        "reduceOptions": {
+          "calcs": [
+            "lastNotNull"
+          ],
+          "fields": "",
+          "values": false
+        },
+        "text": {},
+        "textMode": "name"
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "(100-50*device_attribute{device=\"lts/sdp/1\",name=\"FPGA_processing_enable_R\"}) * on(x) device_attribute{device=\"lts/sdp/1\",name=\"TR_fpga_mask_R\"}",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{x}}",
+          "refId": "A"
+        }
+      ],
+      "title": "FPGA processing enabled",
+      "transformations": [],
+      "type": "stat"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "Measured difference between PTP and PPS",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "auto",
+            "spanNulls": 60000,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "off"
+            }
+          },
+          "decimals": 2,
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "red",
+                "value": null
+              },
+              {
+                "color": "green",
+                "value": 0.001
+              },
+              {
+                "color": "red",
+                "value": 0.1
+              }
+            ]
+          },
+          "unit": "s"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 15,
+        "y": 29
+      },
+      "id": 13,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/sdp/1\",name=\"TR_tod_pps_delta_R\"}",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{x}}",
+          "refId": "A"
+        }
+      ],
+      "title": "FPGA Clock offset",
+      "transformations": [],
+      "type": "timeseries"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "Number of inputs that are fed from the SDP wave-form generator",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "mappings": [
+            {
+              "options": {
+                "0": {
+                  "index": 0,
+                  "text": "OFF"
+                }
+              },
+              "type": "value"
+            }
+          ],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "green",
+                "value": null
+              },
+              {
+                "color": "red",
+                "value": 1
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 4,
+        "w": 3,
+        "x": 20,
+        "y": 29
+      },
+      "id": 12,
+      "options": {
+        "colorMode": "background",
+        "graphMode": "area",
+        "justifyMode": "auto",
+        "orientation": "auto",
+        "reduceOptions": {
+          "calcs": [
+            "lastNotNull"
+          ],
+          "fields": "",
+          "values": false
+        },
+        "text": {},
+        "textMode": "value"
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "sum(sum by(x) (device_attribute{device=\"lts/sdp/1\",name=\"FPGA_wg_enable_RW\"}) * on(x) device_attribute{device=\"lts/sdp/1\",name=\"TR_fpga_mask_R\"})",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{x}}",
+          "refId": "A"
+        }
+      ],
+      "title": "Waveform generator",
+      "transformations": [],
+      "type": "stat"
+    },
+    {
+      "collapsed": false,
+      "datasource": null,
+      "gridPos": {
+        "h": 1,
+        "w": 24,
+        "x": 0,
+        "y": 37
+      },
+      "id": 27,
+      "panels": [],
+      "title": "SST",
+      "type": "row"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "transparent",
+                "value": null
+              },
+              {
+                "color": "green",
+                "value": 50
+              },
+              {
+                "color": "red",
+                "value": 100
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 0,
+        "y": 38
+      },
+      "id": 28,
+      "options": {
+        "colorMode": "background",
+        "graphMode": "area",
+        "justifyMode": "auto",
+        "orientation": "auto",
+        "reduceOptions": {
+          "calcs": [
+            "lastNotNull"
+          ],
+          "fields": "",
+          "values": false
+        },
+        "text": {},
+        "textMode": "name"
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "(100-50*device_attribute{device=\"lts/sst/1\",name=\"FPGA_sst_offload_enable_R\"}) * on(x) device_attribute{device=\"lts/sdp/1\",name=\"TR_fpga_mask_R\"}",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{x}}",
+          "refId": "A"
+        }
+      ],
+      "title": "SST offloading enabled",
+      "transformations": [],
+      "type": "stat"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "axisSoftMin": 0,
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "auto",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "off"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "transparent",
+                "value": null
+              },
+              {
+                "color": "green",
+                "value": 50
+              },
+              {
+                "color": "red",
+                "value": 100
+              }
+            ]
+          },
+          "unit": "pps"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 5,
+        "y": 38
+      },
+      "id": 29,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "rate(device_attribute{device=\"lts/sst/1\",name=\"nof_invalid_packets_R\"}[1m])",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "invalid",
+          "refId": "A"
+        },
+        {
+          "exemplar": true,
+          "expr": "rate(device_attribute{device=\"lts/sst/1\",name=\"nof_packets_dropped_R\"}[1m])",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "dropped",
+          "refId": "B"
+        },
+        {
+          "exemplar": true,
+          "expr": "rate(device_attribute{device=\"lts/sst/1\",name=\"nof_payload_errors_R\"}[1m])",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "payload errors {{x}}",
+          "refId": "C"
+        }
+      ],
+      "title": "SST packet errors",
+      "transformations": [],
+      "type": "timeseries"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "axisSoftMin": 0,
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "auto",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "off"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "transparent",
+                "value": null
+              },
+              {
+                "color": "green",
+                "value": 50
+              },
+              {
+                "color": "red",
+                "value": 100
+              }
+            ]
+          },
+          "unit": "binBps"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 10,
+        "y": 38
+      },
+      "id": 30,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "sum(rate(device_attribute{device=\"lts/sst/1\",name=\"nof_bytes_received_R\"}[1m]))",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{x}}",
+          "refId": "A"
+        }
+      ],
+      "title": "SST bytes received",
+      "transformations": [],
+      "type": "timeseries"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "Rate of SSTs replicated to connected clients.",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "axisSoftMin": 0,
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "auto",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "off"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "transparent",
+                "value": null
+              },
+              {
+                "color": "green",
+                "value": 50
+              },
+              {
+                "color": "red",
+                "value": 100
+              }
+            ]
+          },
+          "unit": "binBps"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 15,
+        "y": 38
+      },
+      "id": 33,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "rate(device_attribute{device=\"lts/sst/1\",name=\"replicator_nof_bytes_sent_R\"}[1m])",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{x}}",
+          "refId": "A"
+        }
+      ],
+      "title": "SST bytes sent",
+      "transformations": [],
+      "type": "timeseries"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "Load of TCPReplicator class, which sends statistics packets to connected clients.",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "axisSoftMax": 5,
+            "axisSoftMin": 0,
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "auto",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "off"
+            }
+          },
+          "mappings": [],
+          "min": 0,
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "transparent",
+                "value": null
+              },
+              {
+                "color": "green",
+                "value": 50
+              },
+              {
+                "color": "red",
+                "value": 100
+              }
+            ]
+          },
+          "unit": "none"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 3,
+        "x": 20,
+        "y": 38
+      },
+      "id": 34,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/sst/1\",name=\"replicator_nof_tasks_pending_R\"}",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{x}}",
+          "refId": "A"
+        }
+      ],
+      "title": "SST Replicator load",
+      "transformations": [],
+      "type": "timeseries"
+    },
+    {
+      "collapsed": false,
+      "datasource": null,
+      "gridPos": {
+        "h": 1,
+        "w": 24,
+        "x": 0,
+        "y": 46
+      },
+      "id": 36,
+      "panels": [],
+      "title": "XST",
+      "type": "row"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "thresholds"
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "transparent",
+                "value": null
+              },
+              {
+                "color": "green",
+                "value": 50
+              },
+              {
+                "color": "red",
+                "value": 100
+              }
+            ]
+          }
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 0,
+        "y": 47
+      },
+      "id": 37,
+      "options": {
+        "colorMode": "background",
+        "graphMode": "area",
+        "justifyMode": "auto",
+        "orientation": "auto",
+        "reduceOptions": {
+          "calcs": [
+            "lastNotNull"
+          ],
+          "fields": "",
+          "values": false
+        },
+        "text": {},
+        "textMode": "name"
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "(100-50*device_attribute{device=\"lts/xst/1\",name=\"FPGA_xst_offload_enable_R\"}) * on(x) device_attribute{device=\"lts/sdp/1\",name=\"TR_fpga_mask_R\"}",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{x}}",
+          "refId": "A"
+        }
+      ],
+      "title": "XST offloading enabled",
+      "transformations": [],
+      "type": "stat"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "axisSoftMin": 0,
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "auto",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "off"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "transparent",
+                "value": null
+              },
+              {
+                "color": "green",
+                "value": 50
+              },
+              {
+                "color": "red",
+                "value": 100
+              }
+            ]
+          },
+          "unit": "pps"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 5,
+        "y": 47
+      },
+      "id": 38,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "rate(device_attribute{device=\"lts/xst/1\",name=\"nof_invalid_packets_R\"}[1m])",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "invalid",
+          "refId": "A"
+        },
+        {
+          "exemplar": true,
+          "expr": "rate(device_attribute{device=\"lts/xst/1\",name=\"nof_packets_dropped_R\"}[1m])",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "dropped",
+          "refId": "B"
+        },
+        {
+          "exemplar": true,
+          "expr": "rate(device_attribute{device=\"lts/xst/1\",name=\"nof_payload_errors_R\"}[1m])",
+          "hide": false,
+          "interval": "",
+          "legendFormat": "payload errors {{x}}",
+          "refId": "C"
+        }
+      ],
+      "title": "XST packet errors",
+      "transformations": [],
+      "type": "timeseries"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "axisSoftMin": 0,
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "auto",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "off"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "transparent",
+                "value": null
+              },
+              {
+                "color": "green",
+                "value": 50
+              },
+              {
+                "color": "red",
+                "value": 100
+              }
+            ]
+          },
+          "unit": "binBps"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 10,
+        "y": 47
+      },
+      "id": 39,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "sum(rate(device_attribute{device=\"lts/xst/1\",name=\"nof_bytes_received_R\"}[1m]))",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{x}}",
+          "refId": "A"
+        }
+      ],
+      "title": "XST bytes received",
+      "transformations": [],
+      "type": "timeseries"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "Rate of XSTs replicated to connected clients.",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "axisSoftMin": 0,
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "auto",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "off"
+            }
+          },
+          "mappings": [],
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "transparent",
+                "value": null
+              },
+              {
+                "color": "green",
+                "value": 50
+              },
+              {
+                "color": "red",
+                "value": 100
+              }
+            ]
+          },
+          "unit": "binBps"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 5,
+        "x": 15,
+        "y": 47
+      },
+      "id": 40,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "rate(device_attribute{device=\"lts/xst/1\",name=\"replicator_nof_bytes_sent_R\"}[1m])",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{x}}",
+          "refId": "A"
+        }
+      ],
+      "title": "XST bytes sent",
+      "transformations": [],
+      "type": "timeseries"
+    },
+    {
+      "datasource": "Prometheus",
+      "description": "Load of TCPReplicator class, which sends statistics packets to connected clients.",
+      "fieldConfig": {
+        "defaults": {
+          "color": {
+            "mode": "palette-classic"
+          },
+          "custom": {
+            "axisLabel": "",
+            "axisPlacement": "auto",
+            "axisSoftMax": 5,
+            "axisSoftMin": 0,
+            "barAlignment": 0,
+            "drawStyle": "line",
+            "fillOpacity": 0,
+            "gradientMode": "none",
+            "hideFrom": {
+              "legend": false,
+              "tooltip": false,
+              "viz": false
+            },
+            "lineInterpolation": "linear",
+            "lineWidth": 1,
+            "pointSize": 5,
+            "scaleDistribution": {
+              "type": "linear"
+            },
+            "showPoints": "auto",
+            "spanNulls": false,
+            "stacking": {
+              "group": "A",
+              "mode": "none"
+            },
+            "thresholdsStyle": {
+              "mode": "off"
+            }
+          },
+          "mappings": [],
+          "min": 0,
+          "thresholds": {
+            "mode": "absolute",
+            "steps": [
+              {
+                "color": "transparent",
+                "value": null
+              },
+              {
+                "color": "green",
+                "value": 50
+              },
+              {
+                "color": "red",
+                "value": 100
+              }
+            ]
+          },
+          "unit": "none"
+        },
+        "overrides": []
+      },
+      "gridPos": {
+        "h": 8,
+        "w": 3,
+        "x": 20,
+        "y": 47
+      },
+      "id": 41,
+      "options": {
+        "legend": {
+          "calcs": [],
+          "displayMode": "hidden",
+          "placement": "bottom"
+        },
+        "tooltip": {
+          "mode": "single"
+        }
+      },
+      "pluginVersion": "8.1.2",
+      "targets": [
+        {
+          "exemplar": true,
+          "expr": "device_attribute{device=\"lts/xst/1\",name=\"replicator_nof_tasks_pending_R\"}",
+          "format": "time_series",
+          "hide": false,
+          "instant": false,
+          "interval": "",
+          "legendFormat": "{{x}}",
+          "refId": "A"
+        }
+      ],
+      "title": "XST Replicator load",
+      "transformations": [],
+      "type": "timeseries"
+    }
+  ],
+  "refresh": false,
+  "schemaVersion": 30,
+  "style": "dark",
+  "tags": [],
+  "templating": {
+    "list": []
+  },
+  "time": {
+    "from": "now-30m",
+    "to": "now"
+  },
+  "timepicker": {},
+  "timezone": "",
+  "title": "LOFAR2.0 Station",
+  "uid": "6f7Pv8Vnz",
+  "version": 1
+}
diff --git a/docker-compose/grafana/dashboards/lofar2.0-station.json b/docker-compose/grafana/dashboards/lofar2.0-station.json
deleted file mode 100644
index 9ef5eacfff3581b52ef7491d1551f8f050376936..0000000000000000000000000000000000000000
--- a/docker-compose/grafana/dashboards/lofar2.0-station.json
+++ /dev/null
@@ -1,1150 +0,0 @@
-{
-  "annotations": {
-    "list": [
-      {
-        "builtIn": 1,
-        "datasource": "-- Grafana --",
-        "enable": true,
-        "hide": true,
-        "iconColor": "rgba(0, 211, 255, 1)",
-        "name": "Annotations & Alerts",
-        "target": {
-          "limit": 100,
-          "matchAny": false,
-          "tags": [],
-          "type": "dashboard"
-        },
-        "type": "dashboard"
-      }
-    ]
-  },
-  "editable": true,
-  "gnetId": null,
-  "graphTooltip": 0,
-  "id": 1,
-  "links": [],
-  "panels": [
-    {
-      "collapsed": false,
-      "datasource": null,
-      "gridPos": {
-        "h": 1,
-        "w": 24,
-        "x": 0,
-        "y": 0
-      },
-      "id": 15,
-      "panels": [],
-      "title": "Devices",
-      "type": "row"
-    },
-    {
-      "datasource": "Prometheus",
-      "fieldConfig": {
-        "defaults": {
-          "color": {
-            "mode": "thresholds"
-          },
-          "mappings": [
-            {
-              "options": {
-                "0": {
-                  "color": "green",
-                  "index": 1,
-                  "text": "ON"
-                },
-                "1": {
-                  "color": "red",
-                  "index": 3,
-                  "text": "OFF"
-                },
-                "7": {
-                  "color": "yellow",
-                  "index": 2,
-                  "text": "STANDBY"
-                },
-                "8": {
-                  "color": "red",
-                  "index": 0,
-                  "text": "FAULT"
-                },
-                "11": {
-                  "color": "red",
-                  "index": 4,
-                  "text": "ALARM"
-                }
-              },
-              "type": "value"
-            }
-          ],
-          "noValue": "???",
-          "thresholds": {
-            "mode": "absolute",
-            "steps": [
-              {
-                "color": "green",
-                "value": null
-              },
-              {
-                "color": "red",
-                "value": 80
-              }
-            ]
-          },
-          "unit": "string"
-        },
-        "overrides": []
-      },
-      "gridPos": {
-        "h": 4,
-        "w": 12,
-        "x": 0,
-        "y": 1
-      },
-      "id": 4,
-      "options": {
-        "colorMode": "background",
-        "graphMode": "none",
-        "justifyMode": "auto",
-        "orientation": "auto",
-        "reduceOptions": {
-          "calcs": [
-            "lastNotNull"
-          ],
-          "fields": "",
-          "values": false
-        },
-        "text": {},
-        "textMode": "value_and_name"
-      },
-      "pluginVersion": "8.1.2",
-      "targets": [
-        {
-          "exemplar": true,
-          "expr": "device_attribute{label=\"State\"}",
-          "instant": false,
-          "interval": "",
-          "legendFormat": "{{device}}",
-          "refId": "A"
-        }
-      ],
-      "title": "Device States",
-      "type": "stat"
-    },
-    {
-      "collapsed": false,
-      "datasource": null,
-      "gridPos": {
-        "h": 1,
-        "w": 24,
-        "x": 0,
-        "y": 5
-      },
-      "id": 17,
-      "panels": [],
-      "title": "PCC",
-      "type": "row"
-    },
-    {
-      "datasource": "Prometheus",
-      "description": "",
-      "fieldConfig": {
-        "defaults": {
-          "color": {
-            "mode": "palette-classic"
-          },
-          "custom": {
-            "axisLabel": "",
-            "axisPlacement": "auto",
-            "barAlignment": 0,
-            "drawStyle": "line",
-            "fillOpacity": 0,
-            "gradientMode": "none",
-            "hideFrom": {
-              "legend": false,
-              "tooltip": false,
-              "viz": false
-            },
-            "lineInterpolation": "linear",
-            "lineWidth": 1,
-            "pointSize": 5,
-            "scaleDistribution": {
-              "type": "linear"
-            },
-            "showPoints": "auto",
-            "spanNulls": false,
-            "stacking": {
-              "group": "A",
-              "mode": "none"
-            },
-            "thresholdsStyle": {
-              "mode": "off"
-            }
-          },
-          "mappings": [],
-          "min": 0,
-          "thresholds": {
-            "mode": "absolute",
-            "steps": [
-              {
-                "color": "green",
-                "value": null
-              },
-              {
-                "color": "red",
-                "value": 80
-              }
-            ]
-          },
-          "unit": "celsius"
-        },
-        "overrides": []
-      },
-      "gridPos": {
-        "h": 8,
-        "w": 5,
-        "x": 0,
-        "y": 6
-      },
-      "id": 22,
-      "options": {
-        "legend": {
-          "calcs": [],
-          "displayMode": "hidden",
-          "placement": "bottom"
-        },
-        "tooltip": {
-          "mode": "single"
-        }
-      },
-      "pluginVersion": "8.1.2",
-      "targets": [
-        {
-          "exemplar": true,
-          "expr": "device_attribute{device=\"lts/pcc/1\",name=\"RCU_temperature_R\"} - 273.15",
-          "format": "time_series",
-          "hide": false,
-          "instant": false,
-          "interval": "",
-          "legendFormat": "{{x}}",
-          "refId": "A"
-        }
-      ],
-      "title": "RCU temperatures",
-      "transformations": [],
-      "type": "timeseries"
-    },
-    {
-      "datasource": "Prometheus",
-      "fieldConfig": {
-        "defaults": {
-          "color": {
-            "mode": "thresholds"
-          },
-          "mappings": [],
-          "thresholds": {
-            "mode": "absolute",
-            "steps": [
-              {
-                "color": "transparent",
-                "value": null
-              },
-              {
-                "color": "red",
-                "value": 0
-              },
-              {
-                "color": "green",
-                "value": 3
-              }
-            ]
-          }
-        },
-        "overrides": []
-      },
-      "gridPos": {
-        "h": 8,
-        "w": 6,
-        "x": 5,
-        "y": 6
-      },
-      "id": 21,
-      "options": {
-        "colorMode": "background",
-        "graphMode": "area",
-        "justifyMode": "auto",
-        "orientation": "auto",
-        "reduceOptions": {
-          "calcs": [
-            "lastNotNull"
-          ],
-          "fields": "",
-          "values": false
-        },
-        "text": {},
-        "textMode": "name"
-      },
-      "pluginVersion": "8.1.2",
-      "targets": [
-        {
-          "exemplar": true,
-          "expr": "sum by (x)(1 + (device_attribute{device=\"lts/pcc/1\",name=\"RCU_ADC_lock_R\"} == bool 129)) * on(x) device_attribute{device=\"lts/pcc/1\",name=\"RCU_mask_RW\"} - 3",
-          "interval": "",
-          "legendFormat": "{{y}}",
-          "refId": "A"
-        }
-      ],
-      "title": "RCU ADC lock",
-      "type": "stat"
-    },
-    {
-      "datasource": "Prometheus",
-      "fieldConfig": {
-        "defaults": {
-          "color": {
-            "mode": "thresholds"
-          },
-          "mappings": [],
-          "thresholds": {
-            "mode": "absolute",
-            "steps": [
-              {
-                "color": "transparent",
-                "value": null
-              },
-              {
-                "color": "red",
-                "value": 1
-              },
-              {
-                "color": "green",
-                "value": 2
-              }
-            ]
-          }
-        },
-        "overrides": []
-      },
-      "gridPos": {
-        "h": 8,
-        "w": 6,
-        "x": 11,
-        "y": 6
-      },
-      "id": 25,
-      "options": {
-        "colorMode": "background",
-        "graphMode": "area",
-        "justifyMode": "auto",
-        "orientation": "auto",
-        "reduceOptions": {
-          "calcs": [
-            "lastNotNull"
-          ],
-          "fields": "",
-          "values": false
-        },
-        "text": {},
-        "textMode": "name"
-      },
-      "pluginVersion": "8.1.2",
-      "targets": [
-        {
-          "exemplar": true,
-          "expr": "(2 - device_attribute{device=\"lts/pcc/1\",name=\"RCU_I2C_STATUS_R\"}) * on(x) device_attribute{device=\"lts/pcc/1\",name=\"RCU_mask_RW\"}",
-          "interval": "",
-          "legendFormat": "{{y}}",
-          "refId": "A"
-        }
-      ],
-      "title": "RCU I2C status",
-      "type": "stat"
-    },
-    {
-      "datasource": "Prometheus",
-      "fieldConfig": {
-        "defaults": {
-          "color": {
-            "mode": "thresholds"
-          },
-          "mappings": [],
-          "thresholds": {
-            "mode": "absolute",
-            "steps": [
-              {
-                "color": "green",
-                "value": null
-              },
-              {
-                "color": "red",
-                "value": 1
-              }
-            ]
-          }
-        },
-        "overrides": []
-      },
-      "gridPos": {
-        "h": 5,
-        "w": 3,
-        "x": 17,
-        "y": 6
-      },
-      "id": 24,
-      "options": {
-        "colorMode": "background",
-        "graphMode": "none",
-        "justifyMode": "auto",
-        "orientation": "auto",
-        "reduceOptions": {
-          "calcs": [
-            "lastNotNull"
-          ],
-          "fields": "",
-          "values": false
-        },
-        "text": {},
-        "textMode": "name"
-      },
-      "pluginVersion": "8.1.2",
-      "targets": [
-        {
-          "exemplar": true,
-          "expr": "1-device_attribute{device=\"lts/pcc/1\",name=\"CLK_Enable_PWR_R\"}",
-          "interval": "",
-          "legendFormat": "Power",
-          "refId": "A"
-        },
-        {
-          "exemplar": true,
-          "expr": "device_attribute{device=\"lts/pcc/1\",name=\"CLK_I2C_STATUS_R\"}",
-          "hide": false,
-          "interval": "",
-          "legendFormat": "I2C",
-          "refId": "B"
-        },
-        {
-          "exemplar": true,
-          "expr": "device_attribute{device=\"lts/pcc/1\",name=\"CLK_PLL_error_R\"}",
-          "hide": false,
-          "interval": "",
-          "legendFormat": "PLL",
-          "refId": "C"
-        },
-        {
-          "exemplar": true,
-          "expr": "1-device_attribute{device=\"lts/pcc/1\",name=\"CLK_PLL_locked_R\"}",
-          "hide": false,
-          "interval": "",
-          "legendFormat": "PLL Lock",
-          "refId": "D"
-        }
-      ],
-      "title": "Clock",
-      "type": "stat"
-    },
-    {
-      "collapsed": false,
-      "datasource": null,
-      "gridPos": {
-        "h": 1,
-        "w": 24,
-        "x": 0,
-        "y": 14
-      },
-      "id": 19,
-      "panels": [],
-      "title": "SDP",
-      "type": "row"
-    },
-    {
-      "datasource": "Prometheus",
-      "description": "",
-      "fieldConfig": {
-        "defaults": {
-          "color": {
-            "mode": "palette-classic"
-          },
-          "custom": {
-            "axisLabel": "",
-            "axisPlacement": "auto",
-            "barAlignment": 0,
-            "drawStyle": "line",
-            "fillOpacity": 0,
-            "gradientMode": "none",
-            "hideFrom": {
-              "legend": false,
-              "tooltip": false,
-              "viz": false
-            },
-            "lineInterpolation": "linear",
-            "lineWidth": 1,
-            "pointSize": 5,
-            "scaleDistribution": {
-              "type": "linear"
-            },
-            "showPoints": "auto",
-            "spanNulls": false,
-            "stacking": {
-              "group": "A",
-              "mode": "none"
-            },
-            "thresholdsStyle": {
-              "mode": "off"
-            }
-          },
-          "mappings": [],
-          "thresholds": {
-            "mode": "absolute",
-            "steps": [
-              {
-                "color": "green",
-                "value": null
-              },
-              {
-                "color": "red",
-                "value": 80
-              }
-            ]
-          },
-          "unit": "celsius"
-        },
-        "overrides": []
-      },
-      "gridPos": {
-        "h": 8,
-        "w": 5,
-        "x": 0,
-        "y": 15
-      },
-      "id": 5,
-      "options": {
-        "legend": {
-          "calcs": [],
-          "displayMode": "hidden",
-          "placement": "bottom"
-        },
-        "tooltip": {
-          "mode": "single"
-        }
-      },
-      "pluginVersion": "8.1.2",
-      "targets": [
-        {
-          "exemplar": true,
-          "expr": "device_attribute{device=\"lts/sdp/1\",name=\"FPGA_temp_R\"}",
-          "format": "time_series",
-          "hide": false,
-          "instant": false,
-          "interval": "",
-          "legendFormat": "{{x}}",
-          "refId": "A"
-        }
-      ],
-      "title": "FPGA temperatures",
-      "transformations": [],
-      "type": "timeseries"
-    },
-    {
-      "datasource": "Prometheus",
-      "description": "",
-      "fieldConfig": {
-        "defaults": {
-          "color": {
-            "mode": "thresholds"
-          },
-          "mappings": [],
-          "thresholds": {
-            "mode": "absolute",
-            "steps": [
-              {
-                "color": "transparent",
-                "value": null
-              },
-              {
-                "color": "green",
-                "value": 50
-              },
-              {
-                "color": "red",
-                "value": 100
-              }
-            ]
-          }
-        },
-        "overrides": []
-      },
-      "gridPos": {
-        "h": 8,
-        "w": 5,
-        "x": 5,
-        "y": 15
-      },
-      "id": 11,
-      "options": {
-        "colorMode": "background",
-        "graphMode": "area",
-        "justifyMode": "auto",
-        "orientation": "auto",
-        "reduceOptions": {
-          "calcs": [
-            "lastNotNull"
-          ],
-          "fields": "",
-          "values": false
-        },
-        "text": {},
-        "textMode": "name"
-      },
-      "pluginVersion": "8.1.2",
-      "targets": [
-        {
-          "exemplar": true,
-          "expr": "(50+50*device_attribute{device=\"lts/sdp/1\",name=\"TR_fpga_communication_error_R\"}) * on(x) device_attribute{device=\"lts/sdp/1\",name=\"TR_fpga_mask_R\"}",
-          "format": "time_series",
-          "hide": false,
-          "instant": false,
-          "interval": "",
-          "legendFormat": "{{x}}",
-          "refId": "A"
-        }
-      ],
-      "title": "FPGA communication",
-      "transformations": [],
-      "type": "stat"
-    },
-    {
-      "datasource": "Prometheus",
-      "description": "",
-      "fieldConfig": {
-        "defaults": {
-          "color": {
-            "mode": "thresholds"
-          },
-          "mappings": [],
-          "thresholds": {
-            "mode": "absolute",
-            "steps": [
-              {
-                "color": "transparent",
-                "value": null
-              },
-              {
-                "color": "green",
-                "value": 50
-              },
-              {
-                "color": "red",
-                "value": 100
-              }
-            ]
-          }
-        },
-        "overrides": []
-      },
-      "gridPos": {
-        "h": 8,
-        "w": 5,
-        "x": 10,
-        "y": 15
-      },
-      "id": 9,
-      "options": {
-        "colorMode": "background",
-        "graphMode": "area",
-        "justifyMode": "auto",
-        "orientation": "auto",
-        "reduceOptions": {
-          "calcs": [
-            "lastNotNull"
-          ],
-          "fields": "",
-          "values": false
-        },
-        "text": {},
-        "textMode": "name"
-      },
-      "pluginVersion": "8.1.2",
-      "targets": [
-        {
-          "exemplar": true,
-          "expr": "(100-50*device_attribute{device=\"lts/sdp/1\",name=\"FPGA_processing_enable_R\"}) * on(x) device_attribute{device=\"lts/sdp/1\",name=\"TR_fpga_mask_R\"}",
-          "format": "time_series",
-          "hide": false,
-          "instant": false,
-          "interval": "",
-          "legendFormat": "{{x}}",
-          "refId": "A"
-        }
-      ],
-      "title": "FPGA processing enabled",
-      "transformations": [],
-      "type": "stat"
-    },
-    {
-      "datasource": "Prometheus",
-      "description": "Measured difference between PTP and PPS",
-      "fieldConfig": {
-        "defaults": {
-          "color": {
-            "mode": "thresholds"
-          },
-          "custom": {
-            "axisLabel": "",
-            "axisPlacement": "auto",
-            "barAlignment": 0,
-            "drawStyle": "line",
-            "fillOpacity": 0,
-            "gradientMode": "none",
-            "hideFrom": {
-              "legend": false,
-              "tooltip": false,
-              "viz": false
-            },
-            "lineInterpolation": "linear",
-            "lineWidth": 1,
-            "pointSize": 5,
-            "scaleDistribution": {
-              "type": "linear"
-            },
-            "showPoints": "auto",
-            "spanNulls": 60000,
-            "stacking": {
-              "group": "A",
-              "mode": "none"
-            },
-            "thresholdsStyle": {
-              "mode": "off"
-            }
-          },
-          "decimals": 2,
-          "mappings": [],
-          "thresholds": {
-            "mode": "absolute",
-            "steps": [
-              {
-                "color": "red",
-                "value": null
-              },
-              {
-                "color": "green",
-                "value": 0.001
-              },
-              {
-                "color": "red",
-                "value": 0.1
-              }
-            ]
-          },
-          "unit": "s"
-        },
-        "overrides": []
-      },
-      "gridPos": {
-        "h": 8,
-        "w": 5,
-        "x": 15,
-        "y": 15
-      },
-      "id": 13,
-      "options": {
-        "legend": {
-          "calcs": [],
-          "displayMode": "hidden",
-          "placement": "bottom"
-        },
-        "tooltip": {
-          "mode": "single"
-        }
-      },
-      "pluginVersion": "8.1.2",
-      "targets": [
-        {
-          "exemplar": true,
-          "expr": "device_attribute{device=\"lts/sdp/1\",name=\"TR_tod_pps_delta_R\"}",
-          "format": "time_series",
-          "hide": false,
-          "instant": false,
-          "interval": "",
-          "legendFormat": "{{x}}",
-          "refId": "A"
-        }
-      ],
-      "title": "FPGA Clock offset",
-      "transformations": [],
-      "type": "timeseries"
-    },
-    {
-      "datasource": "Prometheus",
-      "description": "Number of inputs that are fed from the SDP wave-form generator",
-      "fieldConfig": {
-        "defaults": {
-          "color": {
-            "mode": "thresholds"
-          },
-          "mappings": [
-            {
-              "options": {
-                "0": {
-                  "index": 0,
-                  "text": "OFF"
-                }
-              },
-              "type": "value"
-            }
-          ],
-          "thresholds": {
-            "mode": "absolute",
-            "steps": [
-              {
-                "color": "green",
-                "value": null
-              },
-              {
-                "color": "red",
-                "value": 1
-              }
-            ]
-          }
-        },
-        "overrides": []
-      },
-      "gridPos": {
-        "h": 4,
-        "w": 3,
-        "x": 20,
-        "y": 15
-      },
-      "id": 12,
-      "options": {
-        "colorMode": "background",
-        "graphMode": "area",
-        "justifyMode": "auto",
-        "orientation": "auto",
-        "reduceOptions": {
-          "calcs": [
-            "lastNotNull"
-          ],
-          "fields": "",
-          "values": false
-        },
-        "text": {},
-        "textMode": "value"
-      },
-      "pluginVersion": "8.1.2",
-      "targets": [
-        {
-          "exemplar": true,
-          "expr": "sum(sum by(x) (device_attribute{device=\"lts/sdp/1\",name=\"FPGA_wg_enable_RW\"}) * on(x) device_attribute{device=\"lts/sdp/1\",name=\"TR_fpga_mask_R\"})",
-          "format": "time_series",
-          "hide": false,
-          "instant": false,
-          "interval": "",
-          "legendFormat": "{{x}}",
-          "refId": "A"
-        }
-      ],
-      "title": "Waveform generator",
-      "transformations": [],
-      "type": "stat"
-    },
-    {
-      "collapsed": true,
-      "datasource": null,
-      "gridPos": {
-        "h": 1,
-        "w": 24,
-        "x": 0,
-        "y": 23
-      },
-      "id": 27,
-      "panels": [],
-      "title": "SST",
-      "type": "row"
-    },
-    {
-      "datasource": "Prometheus",
-      "description": "",
-      "fieldConfig": {
-        "defaults": {
-          "color": {
-            "mode": "thresholds"
-          },
-          "mappings": [],
-          "thresholds": {
-            "mode": "absolute",
-            "steps": [
-              {
-                "color": "transparent",
-                "value": null
-              },
-              {
-                "color": "green",
-                "value": 50
-              },
-              {
-                "color": "red",
-                "value": 100
-              }
-            ]
-          }
-        },
-        "overrides": []
-      },
-      "gridPos": {
-        "h": 8,
-        "w": 5,
-        "x": 0,
-        "y": 24
-      },
-      "id": 28,
-      "options": {
-        "colorMode": "background",
-        "graphMode": "area",
-        "justifyMode": "auto",
-        "orientation": "auto",
-        "reduceOptions": {
-          "calcs": [
-            "lastNotNull"
-          ],
-          "fields": "",
-          "values": false
-        },
-        "text": {},
-        "textMode": "name"
-      },
-      "pluginVersion": "8.1.2",
-      "targets": [
-        {
-          "exemplar": true,
-          "expr": "(100-50*device_attribute{device=\"lts/sst/1\",name=\"FPGA_sst_offload_enable_R\"}) * on(x) device_attribute{device=\"lts/sdp/1\",name=\"TR_fpga_mask_R\"}",
-          "format": "time_series",
-          "hide": false,
-          "instant": false,
-          "interval": "",
-          "legendFormat": "{{x}}",
-          "refId": "A"
-        }
-      ],
-      "title": "SST offloading enabled",
-      "transformations": [],
-      "type": "stat"
-    },
-    {
-      "datasource": "Prometheus",
-      "description": "",
-      "fieldConfig": {
-        "defaults": {
-          "color": {
-            "mode": "palette-classic"
-          },
-          "custom": {
-            "axisLabel": "",
-            "axisPlacement": "auto",
-            "barAlignment": 0,
-            "drawStyle": "line",
-            "fillOpacity": 0,
-            "gradientMode": "none",
-            "hideFrom": {
-              "legend": false,
-              "tooltip": false,
-              "viz": false
-            },
-            "lineInterpolation": "linear",
-            "lineWidth": 1,
-            "pointSize": 5,
-            "scaleDistribution": {
-              "type": "linear"
-            },
-            "showPoints": "auto",
-            "spanNulls": false,
-            "stacking": {
-              "group": "A",
-              "mode": "none"
-            },
-            "thresholdsStyle": {
-              "mode": "off"
-            }
-          },
-          "mappings": [],
-          "thresholds": {
-            "mode": "absolute",
-            "steps": [
-              {
-                "color": "transparent",
-                "value": null
-              },
-              {
-                "color": "green",
-                "value": 50
-              },
-              {
-                "color": "red",
-                "value": 100
-              }
-            ]
-          },
-          "unit": "pps"
-        },
-        "overrides": []
-      },
-      "gridPos": {
-        "h": 8,
-        "w": 5,
-        "x": 5,
-        "y": 24
-      },
-      "id": 29,
-      "options": {
-        "legend": {
-          "calcs": [],
-          "displayMode": "hidden",
-          "placement": "bottom"
-        },
-        "tooltip": {
-          "mode": "single"
-        }
-      },
-      "pluginVersion": "8.1.2",
-      "targets": [
-        {
-          "exemplar": true,
-          "expr": "rate(device_attribute{device=\"lts/sst/1\",name=\"nof_invalid_packets_R\"}[1m])",
-          "format": "time_series",
-          "hide": false,
-          "instant": false,
-          "interval": "",
-          "legendFormat": "invalid",
-          "refId": "A"
-        },
-        {
-          "exemplar": true,
-          "expr": "rate(device_attribute{device=\"lts/sst/1\",name=\"nof_packets_dropped_R\"}[1m])",
-          "hide": false,
-          "interval": "",
-          "legendFormat": "dropped",
-          "refId": "B"
-        },
-        {
-          "exemplar": true,
-          "expr": "rate(device_attribute{device=\"lts/sst/1\",name=\"nof_payload_errors_R\"}[1m])",
-          "hide": false,
-          "interval": "",
-          "legendFormat": "payload errors {{x}}",
-          "refId": "C"
-        }
-      ],
-      "title": "SST packet errors",
-      "transformations": [],
-      "type": "timeseries"
-    },
-    {
-      "datasource": "Prometheus",
-      "description": "",
-      "fieldConfig": {
-        "defaults": {
-          "color": {
-            "mode": "palette-classic"
-          },
-          "custom": {
-            "axisLabel": "",
-            "axisPlacement": "auto",
-            "barAlignment": 0,
-            "drawStyle": "line",
-            "fillOpacity": 0,
-            "gradientMode": "none",
-            "hideFrom": {
-              "legend": false,
-              "tooltip": false,
-              "viz": false
-            },
-            "lineInterpolation": "linear",
-            "lineWidth": 1,
-            "pointSize": 5,
-            "scaleDistribution": {
-              "type": "linear"
-            },
-            "showPoints": "auto",
-            "spanNulls": false,
-            "stacking": {
-              "group": "A",
-              "mode": "none"
-            },
-            "thresholdsStyle": {
-              "mode": "off"
-            }
-          },
-          "mappings": [],
-          "thresholds": {
-            "mode": "absolute",
-            "steps": [
-              {
-                "color": "transparent",
-                "value": null
-              },
-              {
-                "color": "green",
-                "value": 50
-              },
-              {
-                "color": "red",
-                "value": 100
-              }
-            ]
-          },
-          "unit": "pps"
-        },
-        "overrides": []
-      },
-      "gridPos": {
-        "h": 8,
-        "w": 5,
-        "x": 10,
-        "y": 24
-      },
-      "id": 30,
-      "options": {
-        "legend": {
-          "calcs": [],
-          "displayMode": "hidden",
-          "placement": "bottom"
-        },
-        "tooltip": {
-          "mode": "single"
-        }
-      },
-      "pluginVersion": "8.1.2",
-      "targets": [
-        {
-          "exemplar": true,
-          "expr": "rate(device_attribute{device=\"lts/sst/1\",name=\"nof_valid_payloads_R\"}[1m])",
-          "format": "time_series",
-          "hide": false,
-          "instant": false,
-          "interval": "",
-          "legendFormat": "{{x}}",
-          "refId": "A"
-        }
-      ],
-      "title": "SST packets",
-      "transformations": [],
-      "type": "timeseries"
-    }
-  ],
-  "refresh": false,
-  "schemaVersion": 30,
-  "style": "dark",
-  "tags": [],
-  "templating": {
-    "list": []
-  },
-  "time": {
-    "from": "now-30m",
-    "to": "now"
-  },
-  "timepicker": {},
-  "timezone": "",
-  "title": "LOFAR2.0 Station",
-  "uid": "6f7Pv8Vnz",
-  "version": 8
-}
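Several of the deleted stat panels rely on the same PromQL idiom: multiplying (100 - 50 * enable) with TR_fpga_mask_R via one-to-one vector matching on the x label yields 50 (green) for enabled FPGAs, 100 (red) for disabled ones, and 0 (transparent) for FPGAs that are masked out. A minimal sketch of evaluating the same expression against the Prometheus HTTP API, assuming the server is published on localhost:9090 (inside the compose network it is prometheus:9090):

    import requests

    # Expression from the deleted "FPGA processing enabled" panel:
    # 50 -> enabled (green), 100 -> disabled (red), 0 -> masked out (transparent).
    EXPR = ('(100-50*device_attribute{device="lts/sdp/1",name="FPGA_processing_enable_R"})'
            ' * on(x) device_attribute{device="lts/sdp/1",name="TR_fpga_mask_R"}')

    resp = requests.get("http://localhost:9090/api/v1/query", params={"query": EXPR})
    resp.raise_for_status()
    for series in resp.json()["data"]["result"]:
        print(series["metric"].get("x"), series["value"][1])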
diff --git a/docker-compose/grafana/dashboards/version-information.json b/docker-compose/grafana/dashboards/version-information.json
index a8e95f2a3af3d9eb5feb2e0eeb29ec0f8059ce31..e82135a1ad0867223a061481c79bb8a0dd8f0d9f 100644
--- a/docker-compose/grafana/dashboards/version-information.json
+++ b/docker-compose/grafana/dashboards/version-information.json
@@ -529,7 +529,7 @@
       },
       "id": 11,
       "panels": [],
-      "title": "PCC",
+      "title": "RECV",
       "type": "row"
     },
     {
@@ -601,7 +601,7 @@
       "targets": [
         {
           "exemplar": true,
-          "expr": "device_attribute{device=\"lts/pcc/1\",name=\"RCU_version_R\"}",
+          "expr": "device_attribute{device=\"lts/recv/1\",name=\"RCU_version_R\"}",
           "instant": true,
           "interval": "",
           "legendFormat": "",
diff --git a/docker-compose/grafana/data_sources/aa b/docker-compose/grafana/data_sources/aa
deleted file mode 100644
index 992fe7d0d562461563a0294b7d300aff1f047296..0000000000000000000000000000000000000000
--- a/docker-compose/grafana/data_sources/aa
+++ /dev/null
@@ -1 +0,0 @@
-{"id":4,"uid":"6W2nM-Vnz","orgId":1,"name":"Prometheus","type":"prometheus","typeName":"Prometheus","typeLogoUrl":"public/app/plugins/datasource/prometheus/img/prometheus_logo.svg","access":"proxy","url":"prometheus:9090","password":"","user":"","database":"","basicAuth":false,"isDefault":false,"jsonData":{"httpMethod":"POST"},"readOnly":false}
diff --git a/docker-compose/grafana/data_sources/ab b/docker-compose/grafana/data_sources/ab
deleted file mode 100644
index 765af60ea28bc8c1a283274ffc013a2838bb0e3d..0000000000000000000000000000000000000000
--- a/docker-compose/grafana/data_sources/ab
+++ /dev/null
@@ -1 +0,0 @@
-{"id":2,"uid":"d5_heb47k","orgId":1,"name":"TangoDB","type":"mysql","typeName":"MySQL","typeLogoUrl":"public/app/plugins/datasource/mysql/img/mysql_logo.svg","access":"proxy","url":"tangodb","password":"","user":"tango","database":"tango","basicAuth":false,"isDefault":true,"jsonData":{"timezone":""},"readOnly":false}
diff --git a/docker-compose/grafana/datasources/archiver-maria-db.yaml b/docker-compose/grafana/datasources/archiver-maria-db.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..c809d294269683f12ca82a9f28d6019c85f96723
--- /dev/null
+++ b/docker-compose/grafana/datasources/archiver-maria-db.yaml
@@ -0,0 +1,40 @@
+apiVersion: 1
+
+datasources:
+  # <string, required> name of the datasource. Required
+  - name: Archiver
+    # <string, required> datasource type. Required
+    type: mysql
+    # <string, required> access mode. proxy or direct (Server or Browser in the UI). Required
+    access: proxy
+    # <int> org id. will default to orgId 1 if not specified
+    orgId: 1
+    # <string> custom UID which can be used to reference this datasource in other parts of the configuration, if not specified will be generated automatically
+    uid: ZqAMHGN7z
+    # <string> url
+    url: archiver-maria-db
+    # <string> Deprecated, use secureJsonData.password
+    password:
+    # <string> database user, if used
+    user: tango
+    # <string> database name, if used
+    database: hdbpp
+    # <bool> enable/disable basic auth
+    basicAuth: false
+    # <string> basic auth username
+    basicAuthUser:
+    # <string> Deprecated, use secureJsonData.basicAuthPassword
+    basicAuthPassword:
+    # <bool> enable/disable with credentials headers
+    withCredentials:
+    # <bool> mark as default datasource. Max one per org
+    isDefault: true
+    # <map> fields that will be converted to json and stored in jsonData
+    jsonData:
+    # <string> json object of data that will be encrypted.
+    secureJsonData:
+      # <string> database password, if used
+      password: tango
+    version: 1
+    # <bool> allow users to edit datasources from the UI.
+    editable: false
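The credentials above can be verified by connecting to the archiver database directly. A minimal sketch, assuming the standard HDB++ MySQL schema (its att_conf table lists archived attributes) and that the archiver-maria-db hostname resolves, i.e. the script runs inside the compose network:

    import pymysql

    # Host, credentials and database name taken from the provisioning file above.
    conn = pymysql.connect(host="archiver-maria-db", user="tango",
                           password="tango", database="hdbpp")
    with conn.cursor() as cur:
        # att_conf is part of the standard HDB++ schema.
        cur.execute("SELECT att_name FROM att_conf LIMIT 10")
        for (att_name,) in cur.fetchall():
            print(att_name)
    conn.close()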
diff --git a/docker-compose/grafana/datasources/elk.yaml b/docker-compose/grafana/datasources/elk.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..7dc0535bf5bfcfd9446836d8425dd74a320918e6
--- /dev/null
+++ b/docker-compose/grafana/datasources/elk.yaml
@@ -0,0 +1,44 @@
+apiVersion: 1
+
+datasources:
+  # <string, required> name of the datasource. Required
+  - name: ELK logs
+    # <string, required> datasource type. Required
+    type: elasticsearch
+    # <string, required> access mode. proxy or direct (Server or Browser in the UI). Required
+    access: proxy
+    # <int> org id. will default to orgId 1 if not specified
+    orgId: 1
+    # <string> custom UID which can be used to reference this datasource in other parts of the configuration, if not specified will be generated automatically
+    uid: RuQjz8V7z
+    # <string> url
+    url: elk:9200
+    # <string> Deprecated, use secureJsonData.password
+    password:
+    # <string> database user, if used
+    user:
+    # <string> database name, if used
+    database: logstash-*
+    # <bool> enable/disable basic auth
+    basicAuth: false
+    # <string> basic auth username
+    basicAuthUser:
+    # <string> Deprecated, use secureJsonData.basicAuthPassword
+    basicAuthPassword:
+    # <bool> enable/disable with credentials headers
+    withCredentials:
+    # <bool> mark as default datasource. Max one per org
+    isDefault: false
+    # <map> fields that will be converted to json and stored in jsonData
+    jsonData:
+      esVersion: 7.10.0
+      includeFrozen: false
+      logLevelField:
+      logMessageField:
+      maxConcurrentShardRequests: 5
+      timeField: "@timestamp"
+    # <string> json object of data that will be encrypted.
+    secureJsonData:
+    version: 1
+    # <bool> allow users to edit datasources from the UI.
+    editable: false
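The same index pattern and time field can be exercised directly against Elasticsearch. A minimal sketch, assuming the elk hostname resolves (run inside the compose network) and that logstash-* indices already exist:

    import requests

    # Fetch the five most recent log documents, newest first.
    query = {"size": 5, "sort": [{"@timestamp": "desc"}]}
    resp = requests.get("http://elk:9200/logstash-*/_search", json=query)
    resp.raise_for_status()
    for hit in resp.json()["hits"]["hits"]:
        print(hit["_source"].get("@timestamp"), hit["_source"].get("message"))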
diff --git a/docker-compose/grafana/datasources/prometheus.yaml b/docker-compose/grafana/datasources/prometheus.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..e271f4a9c609a4e11b36bb688bed6f01faae0d74
--- /dev/null
+++ b/docker-compose/grafana/datasources/prometheus.yaml
@@ -0,0 +1,39 @@
+apiVersion: 1
+
+datasources:
+  # <string, required> name of the datasource. Required
+  - name: Prometheus
+    # <string, required> datasource type. Required
+    type: prometheus
+    # <string, required> access mode. proxy or direct (Server or Browser in the UI). Required
+    access: proxy
+    # <int> org id. will default to orgId 1 if not specified
+    orgId: 1
+    # <string> custom UID which can be used to reference this datasource in other parts of the configuration, if not specified will be generated automatically
+    uid: 6W2nM-Vnz
+    # <string> url
+    url: prometheus:9090
+    # <string> Deprecated, use secureJsonData.password
+    password:
+    # <string> database user, if used
+    user:
+    # <string> database name, if used
+    database:
+    # <bool> enable/disable basic auth
+    basicAuth: false
+    # <string> basic auth username
+    basicAuthUser:
+    # <string> Deprecated, use secureJsonData.basicAuthPassword
+    basicAuthPassword:
+    # <bool> enable/disable with credentials headers
+    withCredentials:
+    # <bool> mark as default datasource. Max one per org
+    isDefault: false
+    # <map> fields that will be converted to json and stored in jsonData
+    jsonData:
+      httpMethod: POST
+    # <string> json object of data that will be encrypted.
+    secureJsonData:
+    version: 1
+    # <bool> allow users to edit datasources from the UI.
+    editable: false
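The fixed uid keeps existing dashboard references to this datasource valid across re-provisioning. A minimal liveness probe of the underlying server, assuming it is reachable under its compose name:

    import requests

    # Prometheus exposes a plain-text health endpoint.
    resp = requests.get("http://prometheus:9090/-/healthy")
    print(resp.status_code, resp.text.strip())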
diff --git a/docker-compose/grafana/datasources/tangodb.yaml b/docker-compose/grafana/datasources/tangodb.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..9a962a2417f0c963249b53fde925d8c11fcdc996
--- /dev/null
+++ b/docker-compose/grafana/datasources/tangodb.yaml
@@ -0,0 +1,40 @@
+apiVersion: 1
+
+datasources:
+  # <string, required> name of the datasource. Required
+  - name: TangoDB
+    # <string, required> datasource type. Required
+    type: mysql
+    # <string, required> access mode. proxy or direct (Server or Browser in the UI). Required
+    access: proxy
+    # <int> org id. will default to orgId 1 if not specified
+    orgId: 1
+    # <string> custom UID which can be used to reference this datasource in other parts of the configuration, if not specified will be generated automatically
+    uid: d5_heb47k
+    # <string> url
+    url: tangodb
+    # <string> Deprecated, use secureJsonData.password
+    password:
+    # <string> database user, if used
+    user: tango
+    # <string> database name, if used
+    database: tango
+    # <bool> enable/disable basic auth
+    basicAuth: false
+    # <string> basic auth username
+    basicAuthUser:
+    # <string> Deprecated, use secureJsonData.basicAuthPassword
+    basicAuthPassword:
+    # <bool> enable/disable with credentials headers
+    withCredentials:
+    # <bool> mark as default datasource. Max one per org
+    isDefault: false
+    # <map> fields that will be converted to json and stored in jsonData
+    jsonData:
+    # <string> json object of data that will be encrypted.
+    secureJsonData:
+      # <string> database password, if used
+      password: tango
+    version: 1
+    # <bool> allow users to edit datasources from the UI.
+    editable: false
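This datasource can be checked the same way as the archiver. A minimal sketch, assuming the standard Tango MySQL schema (its device table lists registered devices) and the host and credentials from the provisioning file above:

    import pymysql

    # TangoDB stores the device catalogue in MySQL.
    conn = pymysql.connect(host="tangodb", user="tango",
                           password="tango", database="tango")
    with conn.cursor() as cur:
        cur.execute("SELECT name FROM device WHERE name LIKE 'lts/%'")
        for (name,) in cur.fetchall():
            print(name)
    conn.close()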
diff --git a/docker-compose/grafana/grafana.ini b/docker-compose/grafana/grafana.ini
new file mode 100644
index 0000000000000000000000000000000000000000..82f1f4bb004e5ba3c1078226e96decf09cdca4f5
--- /dev/null
+++ b/docker-compose/grafana/grafana.ini
@@ -0,0 +1,1006 @@
+##################### Grafana Configuration Example #####################
+#
+# Everything has defaults so you only need to uncomment things you want to
+# change
+
+# possible values : production, development
+;app_mode = production
+
+# instance name, defaults to HOSTNAME environment variable value or hostname if HOSTNAME var is empty
+;instance_name = ${HOSTNAME}
+
+#################################### Paths ####################################
+[paths]
+# Path to where grafana can store temp files, sessions, and the sqlite3 db (if that is used)
+;data = /var/lib/grafana
+
+# Temporary files in `data` directory older than given duration will be removed
+;temp_data_lifetime = 24h
+
+# Directory where grafana can store logs
+;logs = /var/log/grafana
+
+# Directory where grafana will automatically scan and look for plugins
+;plugins = /var/lib/grafana/plugins
+
+# folder that contains provisioning config files that grafana will apply on startup and while running.
+;provisioning = conf/provisioning
+
+#################################### Server ####################################
+[server]
+# Protocol (http, https, h2, socket)
+;protocol = http
+
+# The ip address to bind to, empty will bind to all interfaces
+;http_addr =
+
+# The http port to use
+;http_port = 3000
+
+# The public facing domain name used to access grafana from a browser
+;domain = localhost
+
+# Redirect to correct domain if host header does not match domain
+# Prevents DNS rebinding attacks
+;enforce_domain = false
+
+# The full public facing url you use in browser, used for redirects and emails
+# If you use reverse proxy and sub path specify full url (with sub path)
+;root_url = %(protocol)s://%(domain)s:%(http_port)s/
+
+# Serve Grafana from subpath specified in `root_url` setting. By default it is set to `false` for compatibility reasons.
+;serve_from_sub_path = false
+
+# Log web requests
+;router_logging = false
+
+# the path relative working path
+;static_root_path = public
+
+# enable gzip
+;enable_gzip = false
+
+# https certs & key file
+;cert_file =
+;cert_key =
+
+# Unix socket path
+;socket =
+
+# CDN Url
+;cdn_url =
+
+# Sets the maximum time using a duration format (5s/5m/5ms) before timing out the read of an incoming request and closing idle connections.
+# `0` means there is no timeout for reading the request.
+;read_timeout = 0
+
+#################################### Database ####################################
+[database]
+# You can configure the database connection by specifying type, host, name, user and password
+# as separate properties or as one string using the url property.
+
+# Either "mysql", "postgres" or "sqlite3", it's your choice
+;type = sqlite3
+;host = 127.0.0.1:3306
+;name = grafana
+;user = root
+# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;"""
+;password =
+
+# Use either URL or the previous fields to configure the database
+# Example: mysql://user:secret@host:port/database
+;url =
+
+# For "postgres" only, either "disable", "require" or "verify-full"
+;ssl_mode = disable
+
+# Database drivers may support different transaction isolation levels.
+# Currently, only "mysql" driver supports isolation levels.
+# If the value is empty - driver's default isolation level is applied.
+# For "mysql" use "READ-UNCOMMITTED", "READ-COMMITTED", "REPEATABLE-READ" or "SERIALIZABLE".
+;isolation_level =
+
+;ca_cert_path =
+;client_key_path =
+;client_cert_path =
+;server_cert_name =
+
+# For "sqlite3" only, path relative to data_path setting
+;path = grafana.db
+
+# Max idle conn setting default is 2
+;max_idle_conn = 2
+
+# Max conn setting default is 0 (mean not set)
+;max_open_conn =
+
+# Connection Max Lifetime default is 14400 (means 14400 seconds or 4 hours)
+;conn_max_lifetime = 14400
+
+# Set to true to log the sql calls and execution times.
+;log_queries =
+
+# For "sqlite3" only. cache mode setting used for connecting to the database. (private, shared)
+;cache_mode = private
+
+################################### Data sources #########################
+[datasources]
+# Upper limit of data sources that Grafana will return. This limit is a temporary configuration and will be deprecated when pagination is introduced on the list data sources API.
+;datasource_limit = 5000
+
+#################################### Cache server #############################
+[remote_cache]
+# Either "redis", "memcached" or "database" default is "database"
+;type = database
+
+# cache connectionstring options
+# database: will use Grafana primary database.
+# redis: config like redis server e.g. `addr=127.0.0.1:6379,pool_size=100,db=0,ssl=false`. Only addr is required. ssl may be 'true', 'false', or 'insecure'.
+# memcache: 127.0.0.1:11211
+;connstr =
+
+#################################### Data proxy ###########################
+[dataproxy]
+
+# This enables data proxy logging, default is false
+;logging = false
+
+# How long the data proxy waits to read the headers of the response before timing out, default is 30 seconds.
+# This setting also applies to core backend HTTP data sources where query requests use an HTTP client with timeout set.
+;timeout = 30
+
+# How long the data proxy waits to establish a TCP connection before timing out, default is 10 seconds.
+;dialTimeout = 10
+
+# How many seconds the data proxy waits before sending a keepalive probe request.
+;keep_alive_seconds = 30
+
+# How many seconds the data proxy waits for a successful TLS Handshake before timing out.
+;tls_handshake_timeout_seconds = 10
+
+# How many seconds the data proxy will wait for a server's first response headers after
+# fully writing the request headers if the request has an "Expect: 100-continue"
+# header. A value of 0 will result in the body being sent immediately, without
+# waiting for the server to approve.
+;expect_continue_timeout_seconds = 1
+
+# Optionally limits the total number of connections per host, including connections in the dialing,
+# active, and idle states. On limit violation, dials will block.
+# A value of zero (0) means no limit.
+;max_conns_per_host = 0
+
+# The maximum number of idle connections that Grafana will keep alive.
+;max_idle_connections = 100
+
+# How many seconds the data proxy keeps an idle connection open before timing out.
+;idle_conn_timeout_seconds = 90
+
+# If enabled and user is not anonymous, data proxy will add X-Grafana-User header with username into the request, default is false.
+;send_user_header = false
+
+#################################### Analytics ####################################
+[analytics]
+# Server reporting, sends usage counters to stats.grafana.org every 24 hours.
+# No ip addresses are being tracked, only simple counters to track
+# running instances, dashboard and error counts. It is very helpful to us.
+# Change this option to false to disable reporting.
+;reporting_enabled = true
+
+# The name of the distributor of the Grafana instance. Ex hosted-grafana, grafana-labs
+;reporting_distributor = grafana-labs
+
+# Set to false to disable all checks to https://grafana.net
+# for new versions (grafana itself and plugins), check is used
+# in some UI views to notify that grafana or plugin update exists
+# This option does not cause any auto updates, nor send any information
+# only a GET request to http://grafana.com to get latest versions
+;check_for_updates = true
+
+# Google Analytics universal tracking code, only enabled if you specify an id here
+;google_analytics_ua_id =
+
+# Google Tag Manager ID, only enabled if you specify an id here
+;google_tag_manager_id =
+
+#################################### Security ####################################
+[security]
+# disable creation of admin user on first start of grafana
+;disable_initial_admin_creation = false
+
+# default admin user, created on startup
+;admin_user = admin
+
+# default admin password, can be changed before first start of grafana, or in profile settings
+;admin_password = admin
+
+# used for signing
+;secret_key = SW2YcwTIb9zpOOhoPsMm
+
+# disable gravatar profile images
+;disable_gravatar = false
+
+# data source proxy whitelist (ip_or_domain:port separated by spaces)
+;data_source_proxy_whitelist =
+
+# disable protection against brute force login attempts
+;disable_brute_force_login_protection = false
+
+# set to true if you host Grafana behind HTTPS. default is false.
+;cookie_secure = false
+
+# set cookie SameSite attribute. defaults to `lax`. can be set to "lax", "strict", "none" and "disabled"
+;cookie_samesite = lax
+
+# set to true if you want to allow browsers to render Grafana in a <frame>, <iframe>, <embed> or <object>. default is false.
+;allow_embedding = false
+
+# Set to true if you want to enable http strict transport security (HSTS) response header.
+# This is only sent when HTTPS is enabled in this configuration.
+# HSTS tells browsers that the site should only be accessed using HTTPS.
+;strict_transport_security = false
+
+# Sets how long a browser should cache HSTS. Only applied if strict_transport_security is enabled.
+;strict_transport_security_max_age_seconds = 86400
+
+# Set to true to enable the HSTS preloading option. Only applied if strict_transport_security is enabled.
+;strict_transport_security_preload = false
+
+# Set to true to enable the HSTS includeSubDomains option. Only applied if strict_transport_security is enabled.
+;strict_transport_security_subdomains = false
+
+# Set to true to enable the X-Content-Type-Options response header.
+# The X-Content-Type-Options response HTTP header is a marker used by the server to indicate that the MIME types advertised
+# in the Content-Type headers should not be changed and should be followed.
+;x_content_type_options = true
+
+# Set to true to enable the X-XSS-Protection header, which tells browsers to stop pages from loading
+# when they detect reflected cross-site scripting (XSS) attacks.
+;x_xss_protection = true
+
+# Enable adding the Content-Security-Policy header to your requests.
+# CSP allows you to control which resources the user agent is allowed to load and helps prevent XSS attacks.
+;content_security_policy = false
+
+# Set Content Security Policy template used when adding the Content-Security-Policy header to your requests.
+# $NONCE in the template includes a random nonce.
+# $ROOT_PATH is server.root_url without the protocol.
+;content_security_policy_template = """script-src 'self' 'unsafe-eval' 'unsafe-inline' 'strict-dynamic' $NONCE;object-src 'none';font-src 'self';style-src 'self' 'unsafe-inline' blob:;img-src * data:;base-uri 'self';connect-src 'self' grafana.com ws://$ROOT_PATH wss://$ROOT_PATH;manifest-src 'self';media-src 'none';form-action 'self';"""
+
+#################################### Snapshots ###########################
+[snapshots]
+# snapshot sharing options
+;external_enabled = true
+;external_snapshot_url = https://snapshots-origin.raintank.io
+;external_snapshot_name = Publish to snapshot.raintank.io
+
+# Set to true to enable this Grafana instance to act as an external snapshot server and allow unauthenticated requests for
+# creating and deleting snapshots.
+;public_mode = false
+
+# remove expired snapshot
+;snapshot_remove_expired = true
+
+#################################### Dashboards History ##################
+[dashboards]
+# Number of dashboard versions to keep (per dashboard). Default: 20, Minimum: 1
+;versions_to_keep = 20
+
+# Minimum dashboard refresh interval. When set, this restricts users from setting the refresh interval of a dashboard lower than the given interval. By default this is 5 seconds.
+# The interval string is a possibly signed sequence of decimal numbers, followed by a unit suffix (ms, s, m, h, d), e.g. 30s or 1m.
+;min_refresh_interval = 5s
+
+# Path to the default home dashboard. If this value is empty, then Grafana uses StaticRootPath + "dashboards/home.json"
+default_home_dashboard_path = /var/lib/grafana/dashboards/home.json
+
+#################################### Users ###############################
+[users]
+# disable user signup / registration
+;allow_sign_up = true
+
+# Allow non admin users to create organizations
+;allow_org_create = true
+
+# Set to true to automatically assign new users to the default organization (id 1)
+;auto_assign_org = true
+
+# Set this value to automatically add new users to the provided organization (if auto_assign_org above is set to true)
+;auto_assign_org_id = 1
+
+# Default role new users will be automatically assigned (if disabled above is set to true)
+;auto_assign_org_role = Viewer
+
+# Require email validation before sign up completes
+;verify_email_enabled = false
+
+# Background text for the user field on the login page
+;login_hint = email or username
+;password_hint = password
+
+# Default UI theme ("dark" or "light")
+;default_theme = dark
+
+# Path to a custom home page. Users are only redirected to this if the default home dashboard is used. It should match a frontend route and contain a leading slash.
+; home_page =
+
+# External user management, these options affect the organization users view
+;external_manage_link_url =
+;external_manage_link_name =
+;external_manage_info =
+
+# Viewers can edit/inspect dashboard settings in the browser, but not save the dashboard.
+;viewers_can_edit = false
+
+# Editors can administer dashboards, folders and teams they create
+;editors_can_admin = false
+
+# The duration in time a user invitation remains valid before expiring. This setting should be expressed as a duration. Examples: 6h (hours), 2d (days), 1w (week). Default is 24h (24 hours). The minimum supported duration is 15m (15 minutes).
+;user_invite_max_lifetime_duration = 24h
+
+# Enter a comma-separated list of user logins to hide them in the Grafana UI. These users are shown to Grafana admins and themselves.
+; hidden_users =
+
+[auth]
+# Login cookie name
+;login_cookie_name = grafana_session
+
+# The maximum lifetime (duration) an authenticated user can be inactive before being required to login at next visit. Default is 7 days (7d). This setting should be expressed as a duration, e.g. 5m (minutes), 6h (hours), 10d (days), 2w (weeks), 1M (month). The lifetime resets at each successful token rotation.
+;login_maximum_inactive_lifetime_duration =
+
+# The maximum lifetime (duration) an authenticated user can be logged in since login time before being required to login. Default is 30 days (30d). This setting should be expressed as a duration, e.g. 5m (minutes), 6h (hours), 10d (days), 2w (weeks), 1M (month).
+;login_maximum_lifetime_duration =
+
+# How often auth tokens should be rotated for active authenticated users. The default is every 10 minutes.
+;token_rotation_interval_minutes = 10
+
+# Set to true to disable (hide) the login form, useful if you use OAuth, defaults to false
+;disable_login_form = false
+
+# Set to true to disable the sign out link in the side menu. Useful if you use auth.proxy or auth.jwt, defaults to false
+;disable_signout_menu = false
+
+# URL to redirect the user to after sign out
+;signout_redirect_url =
+
+# Set to true to attempt login with OAuth automatically, skipping the login screen.
+# This setting is ignored if multiple OAuth providers are configured.
+;oauth_auto_login = false
+
+# OAuth state max age cookie duration in seconds. Defaults to 600 seconds.
+;oauth_state_cookie_max_age = 600
+
+# limit of api_key seconds to live before expiration
+;api_key_max_seconds_to_live = -1
+
+# Set to true to enable SigV4 authentication option for HTTP-based datasources.
+;sigv4_auth_enabled = false
+
+#################################### Anonymous Auth ######################
+[auth.anonymous]
+# enable anonymous access
+enabled = true
+
+# specify organization name that should be used for unauthenticated users
+;org_name = Main Org.
+
+# specify role for unauthenticated users
+;org_role = Viewer
+
+# mask the Grafana version number for unauthenticated users
+;hide_version = false
+
+#################################### GitHub Auth ##########################
+[auth.github]
+;enabled = false
+;allow_sign_up = true
+;client_id = some_id
+;client_secret = some_secret
+;scopes = user:email,read:org
+;auth_url = https://github.com/login/oauth/authorize
+;token_url = https://github.com/login/oauth/access_token
+;api_url = https://api.github.com/user
+;allowed_domains =
+;team_ids =
+;allowed_organizations =
+
+#################################### GitLab Auth #########################
+[auth.gitlab]
+;enabled = false
+;allow_sign_up = true
+;client_id = some_id
+;client_secret = some_secret
+;scopes = api
+;auth_url = https://gitlab.com/oauth/authorize
+;token_url = https://gitlab.com/oauth/token
+;api_url = https://gitlab.com/api/v4
+;allowed_domains =
+;allowed_groups =
+
+#################################### Google Auth ##########################
+[auth.google]
+;enabled = false
+;allow_sign_up = true
+;client_id = some_client_id
+;client_secret = some_client_secret
+;scopes = https://www.googleapis.com/auth/userinfo.profile https://www.googleapis.com/auth/userinfo.email
+;auth_url = https://accounts.google.com/o/oauth2/auth
+;token_url = https://accounts.google.com/o/oauth2/token
+;api_url = https://www.googleapis.com/oauth2/v1/userinfo
+;allowed_domains =
+;hosted_domain =
+
+#################################### Grafana.com Auth ####################
+[auth.grafana_com]
+;enabled = false
+;allow_sign_up = true
+;client_id = some_id
+;client_secret = some_secret
+;scopes = user:email
+;allowed_organizations =
+
+#################################### Azure AD OAuth #######################
+[auth.azuread]
+;name = Azure AD
+;enabled = false
+;allow_sign_up = true
+;client_id = some_client_id
+;client_secret = some_client_secret
+;scopes = openid email profile
+;auth_url = https://login.microsoftonline.com/<tenant-id>/oauth2/v2.0/authorize
+;token_url = https://login.microsoftonline.com/<tenant-id>/oauth2/v2.0/token
+;allowed_domains =
+;allowed_groups =
+
+#################################### Okta OAuth #######################
+[auth.okta]
+;name = Okta
+;enabled = false
+;allow_sign_up = true
+;client_id = some_id
+;client_secret = some_secret
+;scopes = openid profile email groups
+;auth_url = https://<tenant-id>.okta.com/oauth2/v1/authorize
+;token_url = https://<tenant-id>.okta.com/oauth2/v1/token
+;api_url = https://<tenant-id>.okta.com/oauth2/v1/userinfo
+;allowed_domains =
+;allowed_groups =
+;role_attribute_path =
+;role_attribute_strict = false
+
+#################################### Generic OAuth ##########################
+[auth.generic_oauth]
+;enabled = false
+;name = OAuth
+;allow_sign_up = true
+;client_id = some_id
+;client_secret = some_secret
+;scopes = user:email,read:org
+;empty_scopes = false
+;email_attribute_name = email:primary
+;email_attribute_path =
+;login_attribute_path =
+;name_attribute_path =
+;id_token_attribute_name =
+;auth_url = https://foo.bar/login/oauth/authorize
+;token_url = https://foo.bar/login/oauth/access_token
+;api_url = https://foo.bar/user
+;allowed_domains =
+;team_ids =
+;allowed_organizations =
+;role_attribute_path =
+;role_attribute_strict = false
+;groups_attribute_path =
+;tls_skip_verify_insecure = false
+;tls_client_cert =
+;tls_client_key =
+;tls_client_ca =
+
+#################################### Basic Auth ##########################
+[auth.basic]
+;enabled = true
+
+#################################### Auth Proxy ##########################
+[auth.proxy]
+;enabled = false
+;header_name = X-WEBAUTH-USER
+;header_property = username
+;auto_sign_up = true
+;sync_ttl = 60
+;whitelist = 192.168.1.1, 192.168.2.1
+;headers = Email:X-User-Email, Name:X-User-Name
+# Read the auth proxy docs for details on what the setting below enables
+;enable_login_token = false
+
+#################################### Auth JWT ##########################
+[auth.jwt]
+;enabled = true
+;header_name = X-JWT-Assertion
+;email_claim = sub
+;username_claim = sub
+;jwk_set_url = https://foo.bar/.well-known/jwks.json
+;jwk_set_file = /path/to/jwks.json
+;cache_ttl = 60m
+;expected_claims = {"aud": ["foo", "bar"]}
+;key_file = /path/to/key/file
+
+#################################### Auth LDAP ##########################
+[auth.ldap]
+;enabled = false
+;config_file = /etc/grafana/ldap.toml
+;allow_sign_up = true
+
+# LDAP background sync (Enterprise only)
+# At 1 am every day
+;sync_cron = "0 0 1 * * *"
+;active_sync_enabled = true
+
+#################################### AWS ###########################
+[aws]
+# Enter a comma-separated list of allowed AWS authentication providers.
+# Options are: default (AWS SDK Default), keys (Access && secret key), credentials (Credentials field), ec2_iam_role (EC2 IAM Role)
+; allowed_auth_providers = default,keys,credentials
+
+# Allow AWS users to assume a role using temporary security credentials.
+# If true, assume role will be enabled for all AWS authentication providers that are specified in aws_auth_providers
+; assume_role_enabled = true
+
+#################################### Azure ###############################
+[azure]
+# Azure cloud environment where Grafana is hosted
+# Possible values are AzureCloud, AzureChinaCloud, AzureUSGovernment and AzureGermanCloud
+# Default value is AzureCloud (i.e. public cloud)
+;cloud = AzureCloud
+
+# Specifies whether Grafana is hosted in an Azure service with Managed Identity configured (e.g. an Azure Virtual Machines instance)
+# If enabled, the managed identity can be used for authentication of Grafana in Azure services
+# Disabled by default, needs to be explicitly enabled
+;managed_identity_enabled = false
+
+# Client ID to use for user-assigned managed identity
+# Should be set for user-assigned identity and should be empty for system-assigned identity
+;managed_identity_client_id =
+
+#################################### SMTP / Emailing ##########################
+[smtp]
+;enabled = false
+;host = localhost:25
+;user =
+# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;"""
+;password =
+;cert_file =
+;key_file =
+;skip_verify = false
+;from_address = admin@grafana.localhost
+;from_name = Grafana
+# EHLO identity in SMTP dialog (defaults to instance_name)
+;ehlo_identity = dashboard.example.com
+# SMTP startTLS policy (defaults to 'OpportunisticStartTLS')
+;startTLS_policy = NoStartTLS
+
+[emails]
+;welcome_email_on_sign_up = false
+;templates_pattern = emails/*.html, emails/*.txt
+;content_types = text/html
+
+#################################### Logging ##########################
+[log]
+# Either "console", "file", "syslog". Default is console and  file
+# Use space to separate multiple modes, e.g. "console file"
+;mode = console file
+
+# Either "debug", "info", "warn", "error", "critical", default is "info"
+;level = info
+
+# optional settings to set different levels for specific loggers. Ex filters = sqlstore:debug
+;filters =
+
+# For "console" mode only
+[log.console]
+;level =
+
+# log line format, valid options are text, console and json
+;format = console
+
+# For "file" mode only
+[log.file]
+;level =
+
+# log line format, valid options are text, console and json
+;format = text
+
+# This enables automated log rotation (switch for the following options), default is true
+;log_rotate = true
+
+# Max line number of single file, default is 1000000
+;max_lines = 1000000
+
+# Max size shift of single file, default is 28 means 1 << 28, 256MB
+;max_size_shift = 28
+
+# Segment log daily, default is true
+;daily_rotate = true
+
+# Number of days to keep a log file (delete after max days), default is 7
+;max_days = 7
+
+[log.syslog]
+;level =
+
+# log line format, valid options are text, console and json
+;format = text
+
+# Syslog network type and address. This can be udp, tcp, or unix. If left blank, the default unix endpoints will be used.
+;network =
+;address =
+
+# Syslog facility. user, daemon and local0 through local7 are valid.
+;facility =
+
+# Syslog tag. By default, the process' argv[0] is used.
+;tag =
+
+[log.frontend]
+# Should Sentry javascript agent be initialized
+;enabled = false
+
+# Sentry DSN if you want to send events to Sentry.
+;sentry_dsn =
+
+# Custom HTTP endpoint to send events captured by the Sentry agent to. Default will log the events to stdout.
+;custom_endpoint = /log
+
+# Rate of events to be reported between 0 (none) and 1 (all), float
+;sample_rate = 1.0
+
+# Requests per second limit, enforced over an extended period, for the Grafana backend log ingestion endpoint (/log).
+;log_endpoint_requests_per_second_limit = 3
+
+# Max requests accepted per short interval of time for Grafana backend log ingestion endpoint (/log).
+;log_endpoint_burst_limit = 15
+
+#################################### Usage Quotas ########################
+[quota]
+; enabled = false
+
+#### set quotas to -1 to make unlimited. ####
+# limit number of users per Org.
+; org_user = 10
+
+# limit number of dashboards per Org.
+; org_dashboard = 100
+
+# limit number of data_sources per Org.
+; org_data_source = 10
+
+# limit number of api_keys per Org.
+; org_api_key = 10
+
+# limit number of alerts per Org.
+;org_alert_rule = 100
+
+# limit number of orgs a user can create.
+; user_org = 10
+
+# Global limit of users.
+; global_user = -1
+
+# global limit of orgs.
+; global_org = -1
+
+# global limit of dashboards
+; global_dashboard = -1
+
+# global limit of api_keys
+; global_api_key = -1
+
+# global limit on number of logged in users.
+; global_session = -1
+
+# global limit of alerts
+;global_alert_rule = -1
+
+#################################### Alerting ############################
+[alerting]
+# Disable alerting engine & UI features
+;enabled = true
+# Makes it possible to turn off alert rule execution but alerting UI is visible
+;execute_alerts = true
+
+# Default setting for new alert rules. Defaults to categorize error and timeouts as alerting. (alerting, keep_state)
+;error_or_timeout = alerting
+
+# Default setting for how Grafana handles nodata or null values in alerting. (alerting, no_data, keep_state, ok)
+;nodata_or_nullvalues = no_data
+
+# Alert notifications can include images, but rendering many images at the same time can overload the server
+# This limit will protect the server from render overloading and make sure notifications are sent out quickly
+;concurrent_render_limit = 5
+
+
+# Default setting for alert calculation timeout. Default value is 30
+;evaluation_timeout_seconds = 30
+
+# Default setting for alert notification timeout. Default value is 30
+;notification_timeout_seconds = 30
+
+# Default setting for max attempts to sending alert notifications. Default value is 3
+;max_attempts = 3
+
+# Makes it possible to enforce a minimal interval between evaluations, to reduce load on the backend
+;min_interval_seconds = 1
+
+# Configures for how long alert annotations are stored. Default is 0, which keeps them forever.
+# This setting should be expressed as a duration. Examples: 6h (hours), 10d (days), 2w (weeks), 1M (month).
+;max_annotation_age =
+
+# Configures max number of alert annotations that Grafana stores. Default value is 0, which keeps all alert annotations.
+;max_annotations_to_keep =
+
+#################################### Annotations #########################
+[annotations]
+# Configures the batch size for the annotation clean-up job. This setting is used for dashboard, API, and alert annotations.
+;cleanupjob_batchsize = 100
+
+[annotations.dashboard]
+# Dashboard annotations means that annotations are associated with the dashboard they are created on.
+
+# Configures how long dashboard annotations are stored. Default is 0, which keeps them forever.
+# This setting should be expressed as a duration. Examples: 6h (hours), 10d (days), 2w (weeks), 1M (month).
+;max_age =
+
+# Configures max number of dashboard annotations that Grafana stores. Default value is 0, which keeps all dashboard annotations.
+;max_annotations_to_keep =
+
+[annotations.api]
+# API annotations means that the annotations have been created using the API without any
+# association with a dashboard.
+
+# Configures how long Grafana stores API annotations. Default is 0, which keeps them forever.
+# This setting should be expressed as a duration. Examples: 6h (hours), 10d (days), 2w (weeks), 1M (month).
+;max_age =
+
+# Configures max number of API annotations that Grafana keeps. Default value is 0, which keeps all API annotations.
+;max_annotations_to_keep =
+
+#################################### Explore #############################
+[explore]
+# Enable the Explore section
+;enabled = true
+
+#################################### Internal Grafana Metrics ##########################
+# Metrics available at HTTP API Url /metrics
+[metrics]
+# Disable / Enable internal metrics
+;enabled           = true
+# Graphite Publish interval
+;interval_seconds  = 10
+# Disable total stats (stat_totals_*) metrics to be generated
+;disable_total_stats = false
+
+#If both are set, basic auth will be required for the metrics endpoint.
+; basic_auth_username =
+; basic_auth_password =
+
+# Metrics environment info adds dimensions to the `grafana_environment_info` metric, which
+# can expose more information about the Grafana instance.
+[metrics.environment_info]
+#exampleLabel1 = exampleValue1
+#exampleLabel2 = exampleValue2
+
+# Send internal metrics to Graphite
+[metrics.graphite]
+# Enable by setting the address setting (ex localhost:2003)
+;address =
+;prefix = prod.grafana.%(instance_name)s.
+
+#################################### Grafana.com integration  ##########################
+# Url used to import dashboards directly from Grafana.com
+[grafana_com]
+;url = https://grafana.com
+
+#################################### Distributed tracing ############
+[tracing.jaeger]
+# Enable by setting the address for sending traces to jaeger (e.g. localhost:6831)
+;address = localhost:6831
+# Tag that will always be included when creating new spans, e.g. (tag1:value1,tag2:value2)
+;always_included_tag = tag1:value1
+# Type specifies the type of the sampler: const, probabilistic, rateLimiting, or remote
+;sampler_type = const
+# jaeger samplerconfig param
+# for "const" sampler, 0 or 1 for always false/true respectively
+# for "probabilistic" sampler, a probability between 0 and 1
+# for "rateLimiting" sampler, the number of spans per second
+# for "remote" sampler, param is the same as for "probabilistic"
+# and indicates the initial sampling rate before the actual one
+# is received from the mothership
+;sampler_param = 1
+# sampling_server_url is the URL of a sampling manager providing a sampling strategy.
+;sampling_server_url =
+# Whether or not to use Zipkin propagation (x-b3- HTTP headers).
+;zipkin_propagation = false
+# Setting this to true disables shared RPC spans.
+# Not disabling is the most common setting when using Zipkin elsewhere in your infrastructure.
+;disable_shared_zipkin_spans = false
+
+#################################### External image storage ##########################
+[external_image_storage]
+# Used for uploading images to public servers so they can be included in slack/email messages.
+# you can choose between (s3, webdav, gcs, azure_blob, local)
+;provider =
+
+[external_image_storage.s3]
+;endpoint =
+;path_style_access =
+;bucket =
+;region =
+;path =
+;access_key =
+;secret_key =
+
+[external_image_storage.webdav]
+;url =
+;public_url =
+;username =
+;password =
+
+[external_image_storage.gcs]
+;key_file =
+;bucket =
+;path =
+
+[external_image_storage.azure_blob]
+;account_name =
+;account_key =
+;container_name =
+
+[external_image_storage.local]
+# does not require any configuration
+
+[rendering]
+# Options to configure a remote HTTP image rendering service, e.g. using https://github.com/grafana/grafana-image-renderer.
+# URL to a remote HTTP image renderer service, e.g. http://localhost:8081/render, will enable Grafana to render panels and dashboards to PNG-images using HTTP requests to an external service.
+;server_url =
+# If the remote HTTP image renderer service runs on a different server than the Grafana server you may have to configure this to a URL where Grafana is reachable, e.g. http://grafana.domain/.
+;callback_url =
+# Concurrent render request limit affects when the /render HTTP endpoint is used. Rendering many images at the same time can overload the server,
+# which this setting can help protect against by only allowing a certain amount of concurrent requests.
+;concurrent_render_request_limit = 30
+
+[panels]
+# If set to true Grafana will allow script tags in text panels. Not recommended as it enables XSS vulnerabilities.
+;disable_sanitize_html = false
+
+[plugins]
+;enable_alpha = false
+;app_tls_skip_verify_insecure = false
+# Enter a comma-separated list of plugin identifiers to identify plugins to load even if they are unsigned. Plugins with modified signatures are never loaded.
+;allow_loading_unsigned_plugins =
+# Enable or disable installing plugins directly from within Grafana.
+;plugin_admin_enabled = false
+;plugin_admin_external_manage_enabled = false
+;plugin_catalog_url = https://grafana.com/grafana/plugins/
+
+#################################### Grafana Live ##########################################
+[live]
+# max_connections to Grafana Live WebSocket endpoint per Grafana server instance. See Grafana Live docs
+# if you are planning to make it higher than the default of 100, since this can require some OS and infrastructure
+# tuning. 0 disables Live, -1 means unlimited connections.
+;max_connections = 100
+
+# allowed_origins is a comma-separated list of origins that can establish connection with Grafana Live.
+# If not set then origin will be matched over root_url. Supports wildcard symbol "*".
+;allowed_origins =
+
+# engine defines an HA (high availability) engine to use for Grafana Live. By default no engine is used - in
+# this case Live features work only on a single Grafana server. Available options: "redis".
+# Setting ha_engine is an EXPERIMENTAL feature.
+;ha_engine =
+
+# ha_engine_address sets a connection address for Live HA engine. Depending on engine type address format can differ.
+# For now we only support Redis connection address in "host:port" format.
+# This option is EXPERIMENTAL.
+;ha_engine_address = "127.0.0.1:6379"
+
+#################################### Grafana Image Renderer Plugin ##########################
+[plugin.grafana-image-renderer]
+# Instruct headless browser instance to use a default timezone when not provided by Grafana, e.g. when rendering panel image of alert.
+# See ICU’s metaZones.txt (https://cs.chromium.org/chromium/src/third_party/icu/source/data/misc/metaZones.txt) for a list of supported
+# timezone IDs. Falls back to the TZ environment variable if not set.
+;rendering_timezone =
+
+# Instruct headless browser instance to use a default language when not provided by Grafana, e.g. when rendering panel image of alert.
+# Please refer to the HTTP header Accept-Language to understand how to format this value, e.g. 'fr-CH, fr;q=0.9, en;q=0.8, de;q=0.7, *;q=0.5'.
+;rendering_language =
+
+# Instruct headless browser instance to use a default device scale factor when not provided by Grafana, e.g. when rendering panel image of alert.
+# Default is 1. Using a higher value will produce more detailed images (higher DPI), but will require more disk space to store an image.
+;rendering_viewport_device_scale_factor =
+
+# Instruct headless browser instance whether to ignore HTTPS errors during navigation. Per default HTTPS errors are not ignored. Due to
+# the security risk it's not recommended to ignore HTTPS errors.
+;rendering_ignore_https_errors =
+
+# Instruct headless browser instance whether to capture and log verbose information when rendering an image. Default is false and will
+# only capture and log error messages. When enabled, debug messages are captured and logged as well.
+# For the verbose information to be included in the Grafana server log you have to adjust the rendering log level to debug, configure
+# [log].filter = rendering:debug.
+;rendering_verbose_logging =
+
+# Instruct headless browser instance whether to output its debug and error messages into running process of remote rendering service.
+# Default is false. This can be useful to enable (true) when troubleshooting.
+;rendering_dumpio =
+
+# Additional arguments to pass to the headless browser instance. Default is --no-sandbox. The list of Chromium flags can be found
+# here (https://peter.sh/experiments/chromium-command-line-switches/). Multiple arguments are separated with a comma.
+;rendering_args =
+
+# You can configure the plugin to use a different browser binary instead of the pre-packaged version of Chromium.
+# Please note that this is not recommended, since you may encounter problems if the installed version of Chrome/Chromium is not
+# compatible with the plugin.
+;rendering_chrome_bin =
+
+# Instruct how headless browser instances are created. Default is 'default' and will create a new browser instance on each request.
+# Mode 'clustered' will make sure that only a maximum number of browsers/incognito pages execute concurrently.
+# Mode 'reusable' will have one browser instance and will create a new incognito page on each request.
+;rendering_mode =
+
+# When rendering_mode = clustered you can choose whether to cluster browser instances or incognito pages. Default is 'browser'
+# and will cluster using browser instances.
+# Mode 'context' will cluster using incognito pages.
+;rendering_clustering_mode =
+
+# When rendering_mode = clustered you can define the maximum number of browser instances/incognito pages that can execute concurrently.
+;rendering_clustering_max_concurrency =
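+
+# Example (values are illustrative): cluster on incognito pages, with at most five executing
+# concurrently, by combining the three options above:
+;rendering_mode = clustered
+;rendering_clustering_mode = context
+;rendering_clustering_max_concurrency = 5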
+
+# Limit the maximum viewport width, height and device scale factor that can be requested.
+;rendering_viewport_max_width =
+;rendering_viewport_max_height =
+;rendering_viewport_max_device_scale_factor =
+
+# Change the listening host and port of the gRPC server. Default host is 127.0.0.1 and default port is 0, which automatically assigns
+# a port that is not in use.
+;grpc_host =
+;grpc_port =
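+
+# Example (values are illustrative): pin the renderer's gRPC server to a fixed port instead of an
+# automatically assigned one:
+;grpc_host = 127.0.0.1
+;grpc_port = 50059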
+
+[enterprise]
+# Path to a valid Grafana Enterprise license.jwt file
+;license_path =
+
+[feature_toggles]
+# enable features, separated by spaces
+enable = ngalert
+
+[date_formats]
+# For information on which formatting patterns are supported, see https://momentjs.com/docs/#/displaying/
+
+# Default system date format used in time range picker and other places where full time is displayed
+;full_date = YYYY-MM-DD HH:mm:ss
+
+# Used by graph and other places where we only show small intervals
+;interval_second = HH:mm:ss
+;interval_minute = HH:mm
+;interval_hour = MM/DD HH:mm
+;interval_day = MM/DD
+;interval_month = YYYY-MM
+;interval_year = YYYY
+
+# Experimental feature
+;use_browser_locale = false
+
+# Default timezone for user preferences. Options are 'browser' for the browser local timezone or a timezone name from IANA Time Zone database, e.g. 'UTC' or 'Europe/Amsterdam' etc.
+;default_timezone = browser
+
+[expressions]
+# Enable or disable the expressions functionality.
+;enabled = true
+
+[geomap]
+# Set the JSON configuration for the default basemap
+;default_baselayer_config = `{
+;  "type": "xyz",
+;  "config": {
+;    "attribution": "Open street map",
+;    "url": "https://tile.openstreetmap.org/{z}/{x}/{y}.png"
+;  }
+;}`
+
+# Enable or disable loading other base map layers
+;enable_custom_baselayers = true
diff --git a/docker-compose/grafana/stationcontrol-dashboards.yaml b/docker-compose/grafana/stationcontrol-dashboards.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..50d300483241f1c5c4b1c992d834bfa4d71014f6
--- /dev/null
+++ b/docker-compose/grafana/stationcontrol-dashboards.yaml
@@ -0,0 +1,24 @@
+apiVersion: 1
+
+providers:
+  # <string> a unique provider name. Required
+  - name: 'StationControl'
+    # <int> Org id. Defaults to 1
+    orgId: 1
+    # <string> name of the dashboard folder.
+    folder: ''
+    # <string> folder UID. Will be automatically generated if not specified
+    folderUid: ''
+    # <string> provider type. Defaults to 'file'
+    type: file
+    # <bool> disable dashboard deletion
+    disableDeletion: true
+    # <int> how often Grafana will scan for changed dashboards
+    updateIntervalSeconds: 60
+    # <bool> allow updating provisioned dashboards from the UI
+    allowUiUpdates: false
+    options:
+      # <string, required> path to dashboard files on disk. Required when using the 'file' type
+      path: /var/lib/grafana/dashboards
+      # <bool> use folder names from filesystem to create folders in Grafana
+      foldersFromFilesStructure: true
diff --git a/docker-compose/hdbpp_viewer.yml b/docker-compose/hdbpp_viewer.yml
index 481879729621d5c5828abddbbd047182d9b16278..9a1f9da06a3db1398e3f4060fd32de9850e6532e 100644
--- a/docker-compose/hdbpp_viewer.yml
+++ b/docker-compose/hdbpp_viewer.yml
@@ -12,8 +12,7 @@ services:
   hdbpp-viewer:
     image: ${LOCAL_DOCKER_REGISTRY_HOST}/${LOCAL_DOCKER_REGISTRY_USER}/hdbpp_viewer:${TANGO_HDBPP_VIEWER_VERSION}
     container_name: ${CONTAINER_NAME_PREFIX}hdbpp-viewer
-    networks:
-      - control
+    network_mode: host
     depends_on:
       - databaseds
       - dsconfig
@@ -25,17 +24,17 @@ services:
     environment:
       - XAUTHORITY=${XAUTHORITY}
       - DISPLAY=${DISPLAY}
-      - TANGO_HOST=${TANGO_HOST}
+      - TANGO_HOST=localhost:10000
       - HDB_TYPE=mysql
-      - HDB_MYSQL_HOST=archiver-maria-db
-      - HDB_MYSQL_PORT=3306
+      - HDB_MYSQL_HOST=localhost
+      - HDB_MYSQL_PORT=3307
       - HDB_USER=tango
       - HDB_PASSWORD=tango
       - HDB_NAME=hdbpp
       - CLASSPATH=JTango.jar:ATKCore.jar:ATKWidget.jar:jhdbviewer.jar:HDBPP.jar:jython.jar:jcalendar.jar
     entrypoint:
       - wait-for-it.sh
-      - ${TANGO_HOST}
+      - localhost:10000
       - --strict
       - --
       - ./hdbpp_viewer/hdbpp_viewer_script
diff --git a/docker-compose/itango/lofar-requirements.txt b/docker-compose/itango/lofar-requirements.txt
index 0e869add1a8113a1f63f84e9348321dad5a5c4f2..29942e272353180f3622f4ad6d36fb7c31307eb1 100644
--- a/docker-compose/itango/lofar-requirements.txt
+++ b/docker-compose/itango/lofar-requirements.txt
@@ -6,3 +6,4 @@ python-logstash-async
 gitpython
 PyMySQL[rsa]
 sqlalchemy
+timeout-decorator
diff --git a/docker-compose/jupyter/Dockerfile b/docker-compose/jupyter/Dockerfile
index 29f736cdca2fc843750612c6780ea7ad2dfa516e..b69ddfa7e5b6d6eaeab11b25f99258d0f0743daa 100644
--- a/docker-compose/jupyter/Dockerfile
+++ b/docker-compose/jupyter/Dockerfile
@@ -9,7 +9,6 @@ ARG CONTAINER_EXECUTION_UID=1000
 ENV HOME=/home/user
 RUN sudo mkdir -p ${HOME}
 RUN sudo chown ${CONTAINER_EXECUTION_UID} -R ${HOME}
-USER ${CONTAINER_EXECUTION_UID}
 
 RUN sudo pip3 install jupyter
 RUN sudo pip3 install ipykernel
@@ -20,8 +19,6 @@ RUN sudo pip3 install matplotlib jupyterplot
 # Allow Download as -> PDF via html
 RUN sudo pip3 install nbconvert
 RUN sudo pip3 install notebook-as-pdf
-# pyppeteer-install installs in the homedir, so run it as the user that will execute the notebook
-RUN pyppeteer-install
 
 # see https://github.com/jupyter/nbconvert/issues/1434
 RUN sudo bash -c "echo DEFAULT_ARGS += [\\\"--no-sandbox\\\"] >> /usr/local/lib/python3.7/dist-packages/pyppeteer/launcher.py"
@@ -48,9 +45,15 @@ COPY jupyter-notebook /usr/local/bin/jupyter-notebook
 #Install further python modules
 RUN sudo pip3 install PyMySQL[rsa] sqlalchemy
 
+# Packages to interface with testing hardware directly
+RUN sudo pip3 install pyvisa pyvisa-py
+
 # Add Tini. Tini operates as a process subreaper for jupyter. This prevents kernel crashes.
 ENV TINI_VERSION v0.6.0
 ENV JUPYTER_RUNTIME_DIR=/tmp
 ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /usr/bin/tini
 RUN sudo chmod +x /usr/bin/tini
 
+USER ${CONTAINER_EXECUTION_UID}
+# pyppeteer-install installs in the homedir, so run it as the user that will execute the notebook
+RUN pyppeteer-install
diff --git a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
index 22be4e90bb9a5ec927b9a7b3ac9b542e1bb9166f..df75d5962a1327041995aa04c41d6d1e1c2ae914 100644
--- a/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
+++ b/docker-compose/jupyter/ipython-profiles/stationcontrol-jupyter/startup/01-devices.py
@@ -1,8 +1,9 @@
 # Create shortcuts for our devices
-pcc = DeviceProxy("LTS/PCC/1")
+recv = DeviceProxy("LTS/RECV/1")
 sdp = DeviceProxy("LTS/SDP/1")
 sst = DeviceProxy("LTS/SST/1")
+xst = DeviceProxy("LTS/XST/1")
 unb2 = DeviceProxy("LTS/UNB2/1")
 
 # Put them in a list in case one wants to iterate
-devices = [pcc, sdp, sst, unb2]
+devices = [recv, sdp, sst, xst, unb2]
diff --git a/docker-compose/lofar-device-base/lofar-requirements.txt b/docker-compose/lofar-device-base/lofar-requirements.txt
index 69d52984a264c3a53bbcfece15be810ccaa32e7b..2214412a4365f4b804d6b20b0576c390482b1481 100644
--- a/docker-compose/lofar-device-base/lofar-requirements.txt
+++ b/docker-compose/lofar-device-base/lofar-requirements.txt
@@ -2,3 +2,6 @@ opcua >= 0.98.9
 astropy
 python-logstash-async
 gitpython
+PyMySQL[rsa]
+sqlalchemy
+docker
diff --git a/docker-compose/pypcc-sim.yml b/docker-compose/pypcc-sim.yml
deleted file mode 100644
index 15739d3f4dcfada169a4bb6f9ee568da612d2259..0000000000000000000000000000000000000000
--- a/docker-compose/pypcc-sim.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-# Docker compose file that launches a PyPCC simulator
-#
-# Defines:
-#   - pypcc-sim
-#
-version: '2'
-
-services:
-  pypcc-sim:
-    build:
-        context: pypcc-sim
-    container_name: ${CONTAINER_NAME_PREFIX}pypcc-sim
-    networks:
-      - control
-    volumes:
-        - ${HOME}:/hosthome
-    ports:
-        - "4842:4842"
-    restart: on-failure
diff --git a/docker-compose/recv-sim.yml b/docker-compose/recv-sim.yml
new file mode 100644
index 0000000000000000000000000000000000000000..7b1f704fa8854f12d411c7088b7caf0a74f328f0
--- /dev/null
+++ b/docker-compose/recv-sim.yml
@@ -0,0 +1,20 @@
+#
+# Docker compose file that launches a RECV simulator
+#
+# Defines:
+#   - recv-sim
+#
+version: '2'
+
+services:
+  recv-sim:
+    build:
+        context: recv-sim
+    container_name: ${CONTAINER_NAME_PREFIX}recv-sim
+    networks:
+      - control
+    volumes:
+        - ${HOME}:/hosthome
+    ports:
+        - "4843:4843"
+    restart: on-failure
diff --git a/docker-compose/pypcc-sim/Dockerfile b/docker-compose/recv-sim/Dockerfile
similarity index 82%
rename from docker-compose/pypcc-sim/Dockerfile
rename to docker-compose/recv-sim/Dockerfile
index bf3e34d6a5a7c4660aebb4e0006e8fc73ec5665a..c65c5b6f836e889f9b3c364ceace5f7b9b821628 100644
--- a/docker-compose/pypcc-sim/Dockerfile
+++ b/docker-compose/recv-sim/Dockerfile
@@ -7,4 +7,4 @@ RUN apt-get update && apt-get install -y python3 python3-pip python3-yaml git &&
     git clone --depth 1 --branch master https://git.astron.nl/lofar2.0/pypcc
 
 WORKDIR /pypcc
-CMD ["python3","pypcc2.py","--simulator"]
+CMD ["python3","pypcc2.py","--simulator","--port","4843"]
diff --git a/docker-compose/pypcc-sim/requirements.txt b/docker-compose/recv-sim/requirements.txt
similarity index 100%
rename from docker-compose/pypcc-sim/requirements.txt
rename to docker-compose/recv-sim/requirements.txt
diff --git a/docker-compose/tango-prometheus-exporter/Dockerfile b/docker-compose/tango-prometheus-exporter/Dockerfile
index 4f548bbc1a7ff9eebb906fe912cd7a74992bd558..1df83afa690c008f83868c1bc9c8d6c1a09323ef 100644
--- a/docker-compose/tango-prometheus-exporter/Dockerfile
+++ b/docker-compose/tango-prometheus-exporter/Dockerfile
@@ -12,4 +12,4 @@ RUN pip install -r /code/pip-requirements.txt
 WORKDIR /code
 ENV PYTHONPATH '/code/'
 
-CMD ["python" , "/code/collector.py"]
+CMD ["python", "-u", "/code/collector.py"]
diff --git a/docker-compose/tango-prometheus-exporter/get_metrics.sh b/docker-compose/tango-prometheus-exporter/get_metrics.sh
index b2801728979d1a7b788c44a13e882a4750a83c30..0401a2564fbaf5e71c4b8c8ff971ea2f08fe62d2 100755
--- a/docker-compose/tango-prometheus-exporter/get_metrics.sh
+++ b/docker-compose/tango-prometheus-exporter/get_metrics.sh
@@ -1 +1 @@
-curl $(kubectl get svc -n tango-grafana -o jsonpath='{.items[?(@.metadata.name=="tango-exporter-service-0")].spec.clusterIP}')/metrics
\ No newline at end of file
+curl $(kubectl get svc -n tango-grafana -o jsonpath='{.items[?(@.metadata.name=="tango-exporter-service-0")].spec.clusterIP}')/metrics
diff --git a/docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter b/docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter
index c7cadb8f48dc8b36860f23ce421c72e90216a548..774d39a40ca19c9d979ad22565e57b4af3e9a831 160000
--- a/docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter
+++ b/docker-compose/tango-prometheus-exporter/ska-tango-grafana-exporter
@@ -1 +1 @@
-Subproject commit c7cadb8f48dc8b36860f23ce421c72e90216a548
+Subproject commit 774d39a40ca19c9d979ad22565e57b4af3e9a831
diff --git a/docker-compose/unb2-sim.yml b/docker-compose/unb2-sim.yml
new file mode 100644
index 0000000000000000000000000000000000000000..e031e20f54ad6addec1fdbabf972661d6f4c8f9a
--- /dev/null
+++ b/docker-compose/unb2-sim.yml
@@ -0,0 +1,20 @@
+#
+# Docker compose file that launches a UNB2 simulator
+#
+# Defines:
+#   - unb2-sim
+#
+version: '2'
+
+services:
+  unb2-sim:
+    build:
+        context: unb2-sim
+    container_name: ${CONTAINER_NAME_PREFIX}unb2-sim
+    networks:
+      - control
+    volumes:
+        - ${HOME}:/hosthome
+    ports:
+        - "4844:4844"
+    restart: on-failure
diff --git a/docker-compose/unb2-sim/Dockerfile b/docker-compose/unb2-sim/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..cbb4ce80185ee1cf0a84aabc457a7983e57b5651
--- /dev/null
+++ b/docker-compose/unb2-sim/Dockerfile
@@ -0,0 +1,10 @@
+FROM ubuntu:20.04
+
+COPY requirements.txt /requirements.txt
+
+RUN apt-get update && apt-get install -y python3 python3-pip python3-yaml git && \
+    pip3 install -r requirements.txt && \
+    git clone --depth 1 --branch master https://git.astron.nl/lofar2.0/pypcc
+
+WORKDIR /pypcc
+CMD ["python3","pypcc2.py","--simulator", "--port=4844", "--config=UNB2"]
diff --git a/docker-compose/unb2-sim/requirements.txt b/docker-compose/unb2-sim/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..5badf0e57069366adedc43ca2c2b59dc7dc4b35d
--- /dev/null
+++ b/docker-compose/unb2-sim/requirements.txt
@@ -0,0 +1,4 @@
+# TODO: check whether this file is necessary
+opcua
+numpy
+recordclass>=0.16,<0.16.1
\ No newline at end of file
diff --git a/jupyter-notebooks/Docker_notebook.ipynb b/jupyter-notebooks/Docker_notebook.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..facc20c3e1e1d561504ef09b08e2e78296d0fb39
--- /dev/null
+++ b/jupyter-notebooks/Docker_notebook.ipynb
@@ -0,0 +1,139 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "waiting-chance",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import time"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "moving-alexandria",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "d=DeviceProxy(\"LTS/Docker/1\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "ranking-aluminum",
+   "metadata": {
+    "scrolled": false
+   },
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Device is now in on state\n"
+     ]
+    }
+   ],
+   "source": [
+    "state = str(d.state())\n",
+    "\n",
+    "\n",
+    "if state == \"OFF\" or state == \"FAULT\":\n",
+    "    d.initialise()\n",
+    "    time.sleep(1)\n",
+    "state = str(d.state())\n",
+    "if state == \"STANDBY\":\n",
+    "    d.on()\n",
+    "state = str(d.state())\n",
+    "if state == \"ON\":\n",
+    "    print(\"Device is now in on state\")\n",
+    "else:\n",
+    "    print(\"warning, expected device to be in on state, is: \", state)\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "id": "0caa8146",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "version_R *L2SS-379-docker-device [0c12e90b1c311df82edf9ebd0b0e9a3a530f0a81]\n",
+      "archiver_maria_db_R True\n",
+      "archiver_maria_db_RW False\n",
+      "databaseds_R True\n",
+      "databaseds_RW False\n",
+      "device_recv_R False\n",
+      "device_recv_RW False\n",
+      "device_sdp_R True\n",
+      "device_sdp_RW False\n",
+      "device_sst_R True\n",
+      "device_sst_RW False\n",
+      "device_xst_R False\n",
+      "device_xst_RW False\n",
+      "device_unb2_R True\n",
+      "device_unb2_RW False\n",
+      "device_docker_R True\n",
+      "dsconfig_R True\n",
+      "dsconfig_RW False\n",
+      "elk_R True\n",
+      "elk_RW False\n",
+      "grafana_R True\n",
+      "grafana_RW False\n",
+      "hdbpp_cm_R True\n",
+      "hdbpp_cm_RW False\n",
+      "hdbpp_es_R True\n",
+      "hdbpp_es_RW False\n",
+      "itango_R True\n",
+      "itango_RW False\n",
+      "jupyter_R True\n",
+      "jupyter_RW False\n",
+      "prometheus_R True\n",
+      "prometheus_RW False\n",
+      "tangodb_R True\n",
+      "tangodb_RW False\n",
+      "tango_prometheus_exporter_R False\n",
+      "tango_prometheus_exporter_RW False\n",
+      "tango_rest_R True\n",
+      "tango_rest_RW False\n",
+      "State <function __get_command_func.<locals>.f at 0x7f84a8b4fb70>\n",
+      "Status <function __get_command_func.<locals>.f at 0x7f84a8b4fb70>\n"
+     ]
+    }
+   ],
+   "source": [
+    "attr_names = d.get_attribute_list()\n",
+    "\n",
+    "\n",
+    "for i in attr_names:\n",
+    "    exec(\"value = print(i, d.{})\".format(i))"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "StationControl",
+   "language": "python",
+   "name": "stationcontrol"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.7.3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/jupyter-notebooks/PCC_notebook.ipynb b/jupyter-notebooks/PCC_notebook.ipynb
deleted file mode 100644
index 29b0744a5f3f7c692ef7cd6b148c1e5192e2e026..0000000000000000000000000000000000000000
--- a/jupyter-notebooks/PCC_notebook.ipynb
+++ /dev/null
@@ -1,177 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "id": "funded-deputy",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import time"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "id": "bridal-mumbai",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "d=DeviceProxy(\"LTS/PCC/1\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "id": "subjective-conference",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Device is now in on state\n"
-     ]
-    }
-   ],
-   "source": [
-    "state = str(d.state())\n",
-    "\n",
-    "if state == \"OFF\":\n",
-    "    d.initialise()\n",
-    "    time.sleep(1)\n",
-    "state = str(d.state())\n",
-    "if state == \"STANDBY\":\n",
-    "    d.on()\n",
-    "state = str(d.state())\n",
-    "if state == \"ON\":\n",
-    "    print(\"Device is now in on state\")\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "id": "liable-thesaurus",
-   "metadata": {},
-   "outputs": [
-    {
-     "ename": "AttributeError",
-     "evalue": "RCU_ADC_SYNC_R",
-     "output_type": "error",
-     "traceback": [
-      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
-      "\u001b[0;31mAttributeError\u001b[0m                            Traceback (most recent call last)",
-      "\u001b[0;32m<ipython-input-4-aafae2adcd98>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m     10\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mRCU_LED0_RW\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"RCU_LED0_RW\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     11\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mRCU_ADC_lock_R\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"RCU_ADC_lock_R\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 12\u001b[0;31m \u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mRCU_ADC_SYNC_R\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"RCU_ADC_SYNC_R\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     13\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mRCU_ADC_JESD_R\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"RCU_ADC_JESD_R\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     14\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mRCU_ADC_CML_R\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\"RCU_ADC_CML_R\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/device_proxy.py\u001b[0m in \u001b[0;36m__DeviceProxy__getattr\u001b[0;34m(self, name)\u001b[0m\n\u001b[1;32m    353\u001b[0m         \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread_pipe\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    354\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 355\u001b[0;31m     \u001b[0msix\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mraise_from\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mAttributeError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcause\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    356\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    357\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
-      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/six.py\u001b[0m in \u001b[0;36mraise_from\u001b[0;34m(value, from_value)\u001b[0m\n",
-      "\u001b[0;31mAttributeError\u001b[0m: RCU_ADC_SYNC_R"
-     ]
-    }
-   ],
-   "source": [
-    "\n",
-    "values = [[d.RCU_mask_RW, \"RCU_mask_RW\"],\n",
-    "[d.Ant_mask_RW,\"Ant_mask_RW\"],\n",
-    "[d.RCU_attenuator_R,\"RCU_attenuator_R\"],\n",
-    "[d.RCU_attenuator_RW,\"RCU_attenuator_RW\"],\n",
-    "[d.RCU_band_R,\"RCU_band_R\"],\n",
-    "[d.RCU_band_RW,\"RCU_band_RW\"],\n",
-    "[d.RCU_temperature_R,\"RCU_temperature_R\"],\n",
-    "[d.RCU_Pwr_dig_R,\"RCU_Pwr_dig_R\"],\n",
-    "[d.RCU_LED0_R,\"RCU_LED0_R\"],\n",
-    "[d.RCU_LED0_RW,\"RCU_LED0_RW\"],\n",
-    "[d.RCU_ADC_lock_R,\"RCU_ADC_lock_R\"],\n",
-    "[d.RCU_ADC_SYNC_R,\"RCU_ADC_SYNC_R\"],\n",
-    "[d.RCU_ADC_JESD_R,\"RCU_ADC_JESD_R\"],\n",
-    "[d.RCU_ADC_CML_R,\"RCU_ADC_CML_R\"],\n",
-    "[d.RCU_OUT1_R,\"RCU_OUT1_R\"],\n",
-    "[d.RCU_OUT2_R,\"RCU_OUT2_R\"],\n",
-    "[d.RCU_ID_R,\"RCU_ID_R\"],\n",
-    "[d.RCU_version_R,\"RCU_version_R\"],\n",
-    "[d.HBA_element_beamformer_delays_R,\"HBA_element_beamformer_delays_R\"],\n",
-    "[d.HBA_element_beamformer_delays_RW,\"HBA_element_beamformer_delays_RW\"],\n",
-    "[d.HBA_element_pwr_R,\"HBA_element_pwr_R\"],\n",
-    "[d.HBA_element_pwr_RW,\"HBA_element_pwr_RW\"],\n",
-    "[d.RCU_monitor_rate_RW,\"RCU_monitor_rate_RW\"]]\n",
-    "\n",
-    "\n",
-    "for i in values:\n",
-    "    print(\"🟦🟦🟦\", i[1], \": \", i[0])\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 7,
-   "id": "charitable-subject",
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "[False False False False False False False False False False False False\n",
-      " False False False False False False False False False False False False\n",
-      " False False False False False False False False]\n",
-      "current monitoring rate: 0.0, setting to 1.0\n",
-      "new monitoring rate is: 1.0\n"
-     ]
-    }
-   ],
-   "source": [
-    "d.RCU_mask_RW = [False, False, False, False, False, False, False, False, False, False, False, False,\n",
-    " False, False, False, False, False, False, False, False, False, False, False, False,\n",
-    " False, False, False, False, False, False, False, False,]\n",
-    "time.sleep(1)\n",
-    "print(d.RCU_mask_RW)\n",
-    "\n",
-    "monitor_rate = d.RCU_monitor_rate_RW\n",
-    "print(\"current monitoring rate: {}, setting to {}\".format(monitor_rate, monitor_rate + 1))\n",
-    "d.RCU_monitor_rate_RW = monitor_rate + 1\n",
-    "time.sleep(2)\n",
-    "print(\"new monitoring rate is: {}\".format(d.RCU_monitor_rate_RW))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "impressive-request",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "attr_names = d.get_attribute_list()\n",
-    "\n",
-    "for i in attr_names:\n",
-    "    exec(\"value = print(i, d.{})\".format(i))\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "conditional-scale",
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "StationControl",
-   "language": "python",
-   "name": "stationcontrol"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 3
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython3",
-   "version": "3.7.3"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 5
-}
diff --git a/jupyter-notebooks/RECV_archive_all_attributes.ipynb b/jupyter-notebooks/RECV_archive_all_attributes.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..d3c4ae74950875eb4bffa84c4fcc279d740c2692
--- /dev/null
+++ b/jupyter-notebooks/RECV_archive_all_attributes.ipynb
@@ -0,0 +1,329 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "3191bdf1",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import sys, time\n",
+    "import numpy as np\n",
+    "sys.path.append('/hosthome/tango/devices')\n",
+    "from toolkit.archiver import Archiver,Retriever\n",
+    "from toolkit.archiver_base import *\n",
+    "from matplotlib import pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "e2d12232",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "True\n"
+     ]
+    }
+   ],
+   "source": [
+    "from common.lofar_environment import isProduction\n",
+    "print(isProduction())"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "81e08b9f",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "[]"
+      ]
+     },
+     "execution_count": 3,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "archiver = Archiver()\n",
+    "archiver.get_subscriber_attributes()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "id": "884ff1ff",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "OFF\n"
+     ]
+    }
+   ],
+   "source": [
+    "device_name = 'LTS/RECV/1'\n",
+    "d=DeviceProxy(device_name) \n",
+    "state = str(d.state())\n",
+    "print(state)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "id": "0f6e65b0",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Attribute lts/recv/1/ant_mask_rw added to archiving list!\n",
+      "Attribute lts/recv/1/clk_enable_pwr_r added to archiving list!\n",
+      "Attribute lts/recv/1/clk_i2c_status_r added to archiving list!\n",
+      "Attribute lts/recv/1/clk_pll_error_r added to archiving list!\n",
+      "Attribute lts/recv/1/clk_pll_locked_r added to archiving list!\n",
+      "Attribute lts/recv/1/clk_monitor_rate_rw added to archiving list!\n",
+      "Attribute lts/recv/1/clk_translator_busy_r added to archiving list!\n",
+      "Attribute lts/recv/1/hba_element_beamformer_delays_r added to archiving list!\n",
+      "Attribute lts/recv/1/hba_element_beamformer_delays_rw added to archiving list!\n",
+      "Attribute lts/recv/1/hba_element_led_r added to archiving list!\n",
+      "Attribute lts/recv/1/hba_element_led_rw added to archiving list!\n",
+      "Attribute lts/recv/1/hba_element_lna_pwr_r added to archiving list!\n",
+      "Attribute lts/recv/1/hba_element_lna_pwr_rw added to archiving list!\n",
+      "Attribute lts/recv/1/hba_element_pwr_r added to archiving list!\n",
+      "Attribute lts/recv/1/hba_element_pwr_rw added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_adc_lock_r added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_attenuator_r added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_attenuator_rw added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_band_r added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_band_rw added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_i2c_status_r added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_id_r added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_led0_r added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_led0_rw added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_led1_r added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_led1_rw added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_mask_rw added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_monitor_rate_rw added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_pwr_dig_r added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_temperature_r added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_translator_busy_r added to archiving list!\n",
+      "Attribute lts/recv/1/rcu_version_r added to archiving list!\n",
+      "Device is now in ON state\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Start the device\n",
+    "if state == \"OFF\":\n",
+    "    if isProduction():\n",
+    "        archiver.add_attributes_to_archiver(device_name,global_archive_period=1000)\n",
+    "    else:\n",
+    "        archiver.remove_attributes_by_device(device_name)\n",
+    "    time.sleep(1)\n",
+    "    d.initialise()\n",
+    "    time.sleep(1)\n",
+    "state = str(d.state())\n",
+    "if state == \"STANDBY\":\n",
+    "    d.on()\n",
+    "state = str(d.state())\n",
+    "if state == \"ON\":\n",
+    "    print(\"Device is now in ON state\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "id": "8efd3dc1",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "('tango://databaseds:10000/lts/recv/1/ant_mask_rw',\n",
+       " 'tango://databaseds:10000/lts/recv/1/clk_enable_pwr_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/clk_i2c_status_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/clk_pll_error_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/clk_pll_locked_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/clk_monitor_rate_rw',\n",
+       " 'tango://databaseds:10000/lts/recv/1/clk_translator_busy_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/hba_element_beamformer_delays_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/hba_element_beamformer_delays_rw',\n",
+       " 'tango://databaseds:10000/lts/recv/1/hba_element_led_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/hba_element_led_rw',\n",
+       " 'tango://databaseds:10000/lts/recv/1/hba_element_lna_pwr_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/hba_element_lna_pwr_rw',\n",
+       " 'tango://databaseds:10000/lts/recv/1/hba_element_pwr_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/hba_element_pwr_rw',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_adc_lock_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_attenuator_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_attenuator_rw',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_band_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_band_rw',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_i2c_status_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_id_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_led0_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_led0_rw',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_led1_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_led1_rw',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_mask_rw',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_monitor_rate_rw',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_pwr_dig_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_temperature_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_translator_busy_r',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_version_r')"
+      ]
+     },
+     "execution_count": 6,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "archiver.get_subscriber_attributes()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "id": "a1222d19",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "{'tango://databaseds:10000/lts/recv/1/clk_enable_pwr_r': 'Read value for attribute CLK_Enable_PWR_R has not been updated',\n",
+       " 'tango://databaseds:10000/lts/recv/1/clk_i2c_status_r': 'Read value for attribute CLK_I2C_STATUS_R has not been updated',\n",
+       " 'tango://databaseds:10000/lts/recv/1/clk_pll_error_r': 'Read value for attribute CLK_PLL_error_R has not been updated',\n",
+       " 'tango://databaseds:10000/lts/recv/1/clk_pll_locked_r': 'Read value for attribute CLK_PLL_locked_R has not been updated',\n",
+       " 'tango://databaseds:10000/lts/recv/1/clk_translator_busy_r': 'Read value for attribute CLK_translator_busy_R has not been updated',\n",
+       " 'tango://databaseds:10000/lts/recv/1/rcu_version_r': 'Storing Error: mysql_stmt_bind_param() failed, err=Buffer type is not supported'}"
+      ]
+     },
+     "execution_count": 7,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "# Archiver managing methods\n",
+    "archiver.get_subscriber_errors()\n",
+    "\n",
+    "#e = archiver.get_attribute_errors('lts/recv/1/rcu_temperature_r')\n",
+    "#print(e)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "id": "174bbcdb",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "1586.0 events/period  -> Number of archiving events per minute\n"
+     ]
+    }
+   ],
+   "source": [
+    "l = archiver.get_subscriber_load()\n",
+    "print(l,\" -> Number of archiving events per minute\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f060b0b6",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "#archiver.update_archiving_attribute('lts/recv/1/rcu_pwr_dig_r')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f626d029",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Turn off the device\n",
+    "d.off()\n",
+    "\n",
+    "# Leave commented by default\n",
+    "archiver.remove_attributes_by_device(device_name)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "13c3b97d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Initialise the retriever object and print the archived attributes in the database\n",
+    "retriever = Retriever()\n",
+    "#retriever.get_all_archived_attributes()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f176c20e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Retrieve records in the last n hours (works even with decimals)\n",
+    "\n",
+    "# Use alternatively one of the following two methods to retrieve data (last n hours or interval)\n",
+    "records= retriever.get_attribute_value_by_hours(attr_fq_name,hours=0.1)\n",
+    "#records = retriever.get_attribute_value_by_interval(attr_fq_name,'2021-09-01 16:00:00', '2021-09-01 16:03:00')\n",
+    "\n",
+    "if not records:\n",
+    "    print('Empty result!')\n",
+    "else:\n",
+    "    # Convert DB Array records into Python lists\n",
+    "    data = build_array_from_record(records,records[0].dim_x_r)\n",
+    "    # Extract only the value from the array \n",
+    "    array_values = get_values_from_record(data)\n",
+    "\n",
+    "#records\n",
+    "#data\n",
+    "#array_values"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "StationControl",
+   "language": "python",
+   "name": "stationcontrol"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.7.3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/jupyter-notebooks/RECV_archive_attribute.ipynb b/jupyter-notebooks/RECV_archive_attribute.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..632cdda6e2310d91a9424796812128a68685c466
--- /dev/null
+++ b/jupyter-notebooks/RECV_archive_attribute.ipynb
@@ -0,0 +1,273 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "42e7f25a",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import sys, time\n",
+    "import numpy as np\n",
+    "sys.path.append('/hosthome/tango/devices')\n",
+    "from toolkit.archiver import Archiver,Retriever\n",
+    "from toolkit.archiver_base import *\n",
+    "from matplotlib import pyplot as plt"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "1f025912",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from common.lofar_environment import isProduction\n",
+    "print(isProduction())"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e0656e2d",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Define an attribute for archiving\n",
+    "device_name = 'LTS/RECV/1'\n",
+    "d=DeviceProxy(device_name) \n",
+    "state = str(d.state())\n",
+    "print(device_name,'is',state)\n",
+    "\n",
+    "archiver = Archiver()\n",
+    "\n",
+    "# Attribute chosen to be archived\n",
+    "attr_name = 'rcu_temperature_r'\n",
+    "attr_fq_name = str(device_name+'/'+attr_name).lower()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "153d9420",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Print the list of the attributes in the event subscriber\n",
+    "# If any attribute is present, its archiving will begin when device will reach ON state,\n",
+    "# Otherwise, attribute will be added to the list at the device initializing phase only in PRODUCTION mode\n",
+    "archiver.get_subscriber_attributes()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "2ebb00f8",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Start the device\n",
+    "if state == \"OFF\":\n",
+    "    if isProduction():\n",
+    "        archiver.check_and_add_attribute_in_archiving_list(attr_fq_name)\n",
+    "    else:\n",
+    "        archiver.remove_attribute_from_archiver(attr_fq_name)\n",
+    "    time.sleep(1)\n",
+    "    d.initialise()\n",
+    "    time.sleep(1)\n",
+    "state = str(d.state())\n",
+    "if state == \"STANDBY\":\n",
+    "    d.on()\n",
+    "state = str(d.state())\n",
+    "if state == \"ON\":\n",
+    "    print(\"Device is now in ON state\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "75163627",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Modify attribute archiving features\n",
+    "archiver.update_archiving_attribute(attr_fq_name,polling_period=1000,event_period=5000,strategy='RUN')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7814715e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Add attribute to the archiving list (starts the archiving if device is running)\n",
+    "\n",
+    "# Archiving strategies are ['ALWAYS','RUN','SHUTDOWN','SERVICE']\n",
+    "#Read [0]\tALWAYS:always stored\n",
+    "#Read [1]\tRUN:stored during run\n",
+    "#Read [2]\tSHUTDOWN:stored during shutdown\n",
+    "#Read [3]\tSERVICE:stored during maintenance activities\n",
+    "\n",
+    "archiver.add_attribute_to_archiver(attr_fq_name, polling_period=1000, event_period=1000, strategy='RUN')"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "52a27abb",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Stop the attribute archiving but do not remove it from the list\n",
+    "# This means that archiving is stopped for the current session, but if the device is restarted, \n",
+    "# the attribute archiving will be restarted as well\n",
+    "# In order to definitely stop the archiving, the attribute must be removed from the attribute list (go to last cell)\n",
+    "archiver.stop_archiving_attribute(attr_fq_name)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c064e337",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Starts the attribute archiving if it was stopped\n",
+    "archiver.start_archiving_attribute(attr_fq_name)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "d199916c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Initialise the retriever object and print the archived attributes in the database\n",
+    "retriever = Retriever()\n",
+    "retriever.get_all_archived_attributes()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "80e2a560",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Retrieve records in the last n hours (works even with decimals)\n",
+    "\n",
+    "# Use alternatively one of the following two methods to retrieve data (last n hours or interval)\n",
+    "records= retriever.get_attribute_value_by_hours(attr_fq_name,hours=0.1)\n",
+    "#records = retriever.get_attribute_value_by_interval(attr_fq_name,'2021-09-01 16:00:00', '2021-09-01 16:03:00')\n",
+    "\n",
+    "if not records:\n",
+    "    print('Empty result!')\n",
+    "else:\n",
+    "    # Convert DB Array records into Python lists\n",
+    "    data = build_array_from_record(records,records[0].dim_x_r)\n",
+    "    # Extract only the value from the array \n",
+    "    array_values = get_values_from_record(data)\n",
+    "\n",
+    "#records\n",
+    "#data\n",
+    "#array_values"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "64c8e060",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Extract and process timestamps for plotting purposes\n",
+    "def get_timestamps(data,strformat):\n",
+    "    timestamps = []\n",
+    "    for i in range(len(data)):\n",
+    "        timestamps.append(data[i][0].recv_time.strftime(strformat))\n",
+    "    return timestamps\n",
+    "timestamps = get_timestamps(data,\"%Y-%m-%d %X\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "59a0c05c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Plot of array values\n",
+    "\n",
+    "heatmap = np.array(array_values,dtype=np.float)\n",
+    "fig = plt.figure()\n",
+    "plt.rcParams['figure.figsize'] = [128, 64]\n",
+    "#plt.rcParams['figure.dpi'] = 128\n",
+    "ax = fig.add_subplot(111)\n",
+    "im = ax.imshow(heatmap, interpolation='nearest',cmap='coolwarm')\n",
+    "ax.set_xlabel('Array index')\n",
+    "ax.set_ylabel('Timestamp')\n",
+    "ax.set_xlim([0,(records[0].dim_x_r)-1])\n",
+    "ax.set_xticks(np.arange(0,records[0].dim_x_r))\n",
+    "\n",
+    "ax.set_yticks(range(0,len(timestamps)))\n",
+    "ax.set_yticklabels(timestamps,fontsize=4)\n",
+    "\n",
+    "# Comment the previous two lines and uncomment the following line if there are too many timestamp labels\n",
+    "#ax.set_yticks(range(0,len(timestamps),10))\n",
+    "\n",
+    "ax.set_title('Archived data for '+ attr_fq_name)\n",
+    "ax.grid()\n",
+    "cbar = fig.colorbar(ax=ax, mappable=im, orientation='horizontal')\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "1c753ed9",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Count number of archive events per minute\n",
+    "archiver.get_subscriber_load()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a0e8dcab",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Turn off the device\n",
+    "d.off()\n",
+    "# Remove attribute from archiving list\n",
+    "#archiver.remove_attribute_from_archiver(attr_fq_name)\n",
+    "#archiver.remove_attributes_by_device(device_name)"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "StationControl",
+   "language": "python",
+   "name": "stationcontrol"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.7.3"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/jupyter-notebooks/SDP_notebook.ipynb b/jupyter-notebooks/RECV_notebook.ipynb
similarity index 57%
rename from jupyter-notebooks/SDP_notebook.ipynb
rename to jupyter-notebooks/RECV_notebook.ipynb
index 49114ce9d7a72f13b1c70d0b75f1a590e6e6ac04..0f246c2bdd6f89e4bae6f06d46caef643091045c 100644
--- a/jupyter-notebooks/SDP_notebook.ipynb
+++ b/jupyter-notebooks/RECV_notebook.ipynb
@@ -17,29 +17,22 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "d=DeviceProxy(\"LTS/SDP/1\")"
+    "d=DeviceProxy(\"LTS/RECV/1\")"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": 4,
    "id": "ranking-aluminum",
    "metadata": {
     "scrolled": false
    },
    "outputs": [
     {
-     "ename": "ConnectionFailed",
-     "evalue": "DevFailed[\nDevError[\n    desc = TRANSIENT CORBA system exception: TRANSIENT_NoUsableProfile\n  origin = Connection::connect\n  reason = API_CorbaException\nseverity = ERR]\n\nDevError[\n    desc = Failed to connect to device lts/sdp/1\n  origin = Connection::connect\n  reason = API_CantConnectToDevice\nseverity = ERR]\n]",
-     "output_type": "error",
-     "traceback": [
-      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
-      "\u001b[0;31mConnectionFailed\u001b[0m                          Traceback (most recent call last)",
-      "\u001b[0;32m/tmp/ipykernel_21/3603531217.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mstate\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mstr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      2\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      3\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      4\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mstate\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m\"OFF\"\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mstate\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m\"FAULT\"\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      5\u001b[0m     \u001b[0md\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0minitialise\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/green.py\u001b[0m in \u001b[0;36mgreener\u001b[0;34m(obj, *args, **kwargs)\u001b[0m\n\u001b[1;32m    193\u001b[0m             \u001b[0mgreen_mode\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0maccess\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'green_mode'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    194\u001b[0m             \u001b[0mexecutor\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mget_object_executor\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgreen_mode\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 195\u001b[0;31m             \u001b[0;32mreturn\u001b[0m \u001b[0mexecutor\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mwait\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mwait\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtimeout\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtimeout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    196\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    197\u001b[0m         \u001b[0;32mreturn\u001b[0m \u001b[0mgreener\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/green.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fn, args, kwargs, wait, timeout)\u001b[0m\n\u001b[1;32m    107\u001b[0m         \u001b[0;31m# Sychronous (no delegation)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    108\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0masynchronous\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0min_executor_context\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 109\u001b[0;31m             \u001b[0;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    110\u001b[0m         \u001b[0;31m# Asynchronous delegation\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    111\u001b[0m         \u001b[0maccessor\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdelegate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/device_proxy.py\u001b[0m in \u001b[0;36m__DeviceProxy__state\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m   1558\u001b[0m                 \u001b[0;32mif\u001b[0m \u001b[0mdev_st\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0mDevState\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mON\u001b[0m \u001b[0;34m:\u001b[0m \u001b[0;34m...\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1559\u001b[0m     \"\"\"\n\u001b[0;32m-> 1560\u001b[0;31m     \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_state\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1561\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1562\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
-      "\u001b[0;31mConnectionFailed\u001b[0m: DevFailed[\nDevError[\n    desc = TRANSIENT CORBA system exception: TRANSIENT_NoUsableProfile\n  origin = Connection::connect\n  reason = API_CorbaException\nseverity = ERR]\n\nDevError[\n    desc = Failed to connect to device lts/sdp/1\n  origin = Connection::connect\n  reason = API_CantConnectToDevice\nseverity = ERR]\n]"
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Device is now in on state\n"
      ]
     }
    ],
@@ -106,201 +99,74 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 9,
+   "execution_count": 5,
    "id": "7accae6a",
    "metadata": {},
-   "outputs": [],
-   "source": [
-    "attr_names = d.get_attribute_list()\n",
-    "\n",
-    "\n",
-    "for i in attr_names:\n",
-    "    exec(\"value = print(i, d.{})\".format(i))\n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 19,
-   "id": "b88868c5",
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "array([[1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],\n",
-       "       [1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],\n",
-       "       [1., 1., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],\n",
-       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],\n",
-       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],\n",
-       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],\n",
-       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],\n",
-       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],\n",
-       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],\n",
-       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],\n",
-       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],\n",
-       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.]],\n",
-       "      dtype=float32)"
-      ]
-     },
-     "execution_count": 19,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "wgswitches = d.FPGA_wg_enable_R\n",
-    "print(\"Old values:\\n\",  wgswitches)\n",
-    "wgswitches[9][0] = True\n",
-    "wgswitches[10][0] = True\n",
-    "print(\"Values to be set:\\n\", wgswitches)\n",
-    "d.FPGA_wg_enable_RW =wgswitches\n",
-    "time.sleep(7)\n",
-    "print(\"Values read back after setting:\\n\",d.FPGA_wg_enable_R)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 18,
-   "id": "8f3db8c7",
-   "metadata": {},
    "outputs": [
     {
-     "data": {
-      "text/plain": [
-       "array([[119.99817, 119.99817, 119.99817, 119.99817, 119.99817, 119.99817,\n",
-       "        119.99817, 119.99817, 119.99817, 119.99817, 119.99817, 119.99817,\n",
-       "        119.99817, 119.99817, 119.99817, 119.99817],\n",
-       "       [119.99817, 119.99817, 119.99817, 119.99817, 119.99817, 119.99817,\n",
-       "        119.99817, 119.99817, 119.99817, 119.99817, 119.99817, 119.99817,\n",
-       "        119.99817, 119.99817, 119.99817, 119.99817],\n",
-       "       [119.99817, 119.99817, 119.99817, 119.99817,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ],\n",
-       "       [  0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ],\n",
-       "       [  0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ],\n",
-       "       [  0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ],\n",
-       "       [  0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ],\n",
-       "       [  0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ],\n",
-       "       [  0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ],\n",
-       "       [  0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ],\n",
-       "       [  0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ],\n",
-       "       [  0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ,   0.     ,   0.     ,\n",
-       "          0.     ,   0.     ,   0.     ,   0.     ]], dtype=float32)"
-      ]
-     },
-     "execution_count": 18,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "phases = d.FPGA_wg_phase_R\n",
-    "print(\"Old values:\\n\",  phases)\n",
-    "phases[9][0] = 1.0334\n",
-    "phases[9][1] = 20.15\n",
-    "phases[10][0] = 130\n",
-    "print(\"Values to be set:\\n\", phases)\n",
-    "d.FPGA_wg_phase_RW = phases\n",
-    "time.sleep(7)\n",
-    "print(\"Values read back after setting:\\n\", d.FPGA_wg_phase_R)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 13,
-   "id": "e45b4874",
-   "metadata": {},
-   "outputs": [
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "version_R *L2SS-357-Rename_PCC_to_RECV [c4d52d7125ece480acb1492a5fc0ba7fc60f9ea1]\n",
+      "Ant_mask_RW [[False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]\n",
+      " [False False False]]\n"
+     ]
+    },
     {
-     "data": {
-      "text/plain": [
-       "array([[29921878., 29921878., 29921878., 29921878., 29921878., 29921878.,\n",
-       "        29921878., 29921878., 29921878., 29921878., 29921878., 29921878.,\n",
-       "        29921878., 29921878., 29921878., 29921878.],\n",
-       "       [29921878., 29921878., 29921878., 29921878., 29921878., 29921878.,\n",
-       "        29921878., 29921878., 29921878., 29921878., 29921878., 29921878.,\n",
-       "        29921878., 29921878., 29921878., 29921878.],\n",
-       "       [29921878., 29921878., 29921878., 29921878.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.],\n",
-       "       [       0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.],\n",
-       "       [       0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.],\n",
-       "       [       0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.],\n",
-       "       [       0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.],\n",
-       "       [       0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.],\n",
-       "       [       0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.],\n",
-       "       [       0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.],\n",
-       "       [       0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.],\n",
-       "       [       0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.,        0.,        0.,\n",
-       "               0.,        0.,        0.,        0.]], dtype=float32)"
-      ]
-     },
-     "execution_count": 13,
-     "metadata": {},
-     "output_type": "execute_result"
+     "ename": "DevFailed",
+     "evalue": "DevFailed[\nDevError[\n    desc = Read value for attribute CLK_Enable_PWR_R has not been updated\n  origin = Device_3Impl::read_attributes_no_except\n  reason = API_AttrValueNotSet\nseverity = ERR]\n\nDevError[\n    desc = Failed to read_attribute on device lts/recv/1, attribute CLK_Enable_PWR_R\n  origin = DeviceProxy::read_attribute()\n  reason = API_AttributeFailed\nseverity = ERR]\n]",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[0;31mDevFailed\u001b[0m                                 Traceback (most recent call last)",
+      "\u001b[0;32m/tmp/ipykernel_26/3093379163.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m      3\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      4\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mattr_names\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 5\u001b[0;31m     \u001b[0mexec\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"value = print(i, d.{})\"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mformat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
+      "\u001b[0;32m<string>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/device_proxy.py\u001b[0m in \u001b[0;36m__DeviceProxy__getattr\u001b[0;34m(self, name)\u001b[0m\n\u001b[1;32m    319\u001b[0m     \u001b[0mattr_info\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__get_attr_cache\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mname_l\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    320\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mattr_info\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 321\u001b[0;31m         \u001b[0;32mreturn\u001b[0m \u001b[0m__get_attribute_value\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_info\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    322\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    323\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mname_l\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__get_pipe_cache\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/device_proxy.py\u001b[0m in \u001b[0;36m__get_attribute_value\u001b[0;34m(self, attr_info, name)\u001b[0m\n\u001b[1;32m    281\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__get_attribute_value\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_info\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    282\u001b[0m     \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0menum_class\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mattr_info\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 283\u001b[0;31m     \u001b[0mattr_value\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread_attribute\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    284\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0menum_class\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    285\u001b[0m         \u001b[0;32mreturn\u001b[0m \u001b[0menum_class\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mattr_value\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/green.py\u001b[0m in \u001b[0;36mgreener\u001b[0;34m(obj, *args, **kwargs)\u001b[0m\n\u001b[1;32m    193\u001b[0m             \u001b[0mgreen_mode\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0maccess\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'green_mode'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    194\u001b[0m             \u001b[0mexecutor\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mget_object_executor\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgreen_mode\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 195\u001b[0;31m             \u001b[0;32mreturn\u001b[0m \u001b[0mexecutor\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mwait\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mwait\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtimeout\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtimeout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    196\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    197\u001b[0m         \u001b[0;32mreturn\u001b[0m \u001b[0mgreener\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/green.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fn, args, kwargs, wait, timeout)\u001b[0m\n\u001b[1;32m    107\u001b[0m         \u001b[0;31m# Sychronous (no delegation)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    108\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0masynchronous\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0min_executor_context\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 109\u001b[0;31m             \u001b[0;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    110\u001b[0m         \u001b[0;31m# Asynchronous delegation\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    111\u001b[0m         \u001b[0maccessor\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdelegate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/device_proxy.py\u001b[0m in \u001b[0;36m__DeviceProxy__read_attribute\u001b[0;34m(self, value, extract_as)\u001b[0m\n\u001b[1;32m    439\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    440\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__DeviceProxy__read_attribute\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mextract_as\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mExtractAs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mNumpy\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 441\u001b[0;31m     \u001b[0;32mreturn\u001b[0m \u001b[0m__check_read_attribute\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_read_attribute\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mextract_as\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    442\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    443\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/device_proxy.py\u001b[0m in \u001b[0;36m__check_read_attribute\u001b[0;34m(dev_attr)\u001b[0m\n\u001b[1;32m    155\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__check_read_attribute\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdev_attr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    156\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mdev_attr\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mhas_failed\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 157\u001b[0;31m         \u001b[0;32mraise\u001b[0m \u001b[0mDevFailed\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mdev_attr\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_err_stack\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    158\u001b[0m     \u001b[0;32mreturn\u001b[0m \u001b[0mdev_attr\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    159\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;31mDevFailed\u001b[0m: DevFailed[\nDevError[\n    desc = Read value for attribute CLK_Enable_PWR_R has not been updated\n  origin = Device_3Impl::read_attributes_no_except\n  reason = API_AttrValueNotSet\nseverity = ERR]\n\nDevError[\n    desc = Failed to read_attribute on device lts/recv/1, attribute CLK_Enable_PWR_R\n  origin = DeviceProxy::read_attribute()\n  reason = API_AttributeFailed\nseverity = ERR]\n]"
+     ]
     }
    ],
    "source": [
-    "amplitudes = d.FPGA_wg_amplitude_R\n",
-    "print(\"Old values:\\n\",  amplitudes)\n",
-    "amplitudes[9][0] = 1.0\n",
-    "amplitudes[9][1] = 1.99\n",
-    "amplitudes[10][0] = 0.5\n",
-    "print(\"Values to be set:\\n\", amplitudes)\n",
-    "d.FPGA_wg_amplitude_RW = amplitudes\n",
-    "time.sleep(7)\n",
-    "print(\"Values read back after setting:\\n\", d.FPGA_wg_amplitude_R)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "9b1bbd3e",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "frequencies = d.FPGA_wg_frequency_R\n",
-    "print(\"Old values:\\n\",  frequencies)\n",
-    "frequencies[9][0] = 19000000\n",
-    "frequencies[9][1] = 20000000\n",
-    "frequencies[10][0] = 22000000\n",
-    "print(\"Values to be set:\\n\", frequencies)\n",
-    "d.FPGA_wg_frequency_RW = frequencies\n",
-    "print(\"Values read back after setting:\\n\", d.FPGA_wg_frequency_R)"
+    "attr_names = d.get_attribute_list()\n",
+    "\n",
+    "\n",
+    "for i in attr_names:\n",
+    "    exec(\"value = print(i, d.{})\".format(i))\n"
    ]
   }
  ],
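The replacement cell above dumps every attribute through exec(); a getattr()-based loop does the same without building code from strings, and can keep going when a single read fails the way CLK_Enable_PWR_R does in the traceback. A minimal sketch, assuming the lts/recv/1 device used in this notebook:

    # Sketch only (not part of the notebook): dump all attributes without exec,
    # continuing past attributes whose read raises DevFailed.
    from tango import DeviceProxy, DevFailed

    d = DeviceProxy("LTS/RECV/1")
    for name in d.get_attribute_list():
        try:
            print(name, getattr(d, name))
        except DevFailed as e:
            print(name, "read failed:", e.args[0].desc.strip())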
diff --git a/jupyter-notebooks/Start All Devices.ipynb b/jupyter-notebooks/Start All Devices.ipynb
index beb52a381c89a4cda30b08374d36c337def29eae..3c5da68df6ce970a837e83903379f88435cc1483 100644
--- a/jupyter-notebooks/Start All Devices.ipynb	
+++ b/jupyter-notebooks/Start All Devices.ipynb	
@@ -30,7 +30,7 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Device PCC(lts/pcc/1) is now in state FAULT\n",
+      "Device RECV(lts/recv/1) is now in state FAULT\n",
       "Device SDP(lts/sdp/1) is now in state ON\n"
      ]
     }
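The rename leaves RECV in FAULT here, and the notebook only sees that as printed text; the state can also be queried directly. A minimal sketch, assuming both devices are exported:

    # Sketch: query the current state of the devices this notebook starts.
    from tango import DeviceProxy

    for name in ("lts/recv/1", "lts/sdp/1"):
        proxy = DeviceProxy(name)
        print(f"Device {proxy.info().dev_class}({name}) is now in state {proxy.state()}")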
diff --git a/jupyter-notebooks/UNB2_notebook.ipynb b/jupyter-notebooks/UNB2_notebook.ipynb
index 3e87179f3a0f0ef951da6a078ba5df3610a6696d..e140631ffc4ea0cee80e2374ec6b5f1289dbba24 100644
--- a/jupyter-notebooks/UNB2_notebook.ipynb
+++ b/jupyter-notebooks/UNB2_notebook.ipynb
@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": 25,
    "id": "waiting-chance",
    "metadata": {},
    "outputs": [],
@@ -12,7 +12,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": 26,
    "id": "moving-alexandria",
    "metadata": {},
    "outputs": [],
@@ -22,7 +22,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": 28,
    "id": "ranking-aluminum",
    "metadata": {
     "scrolled": false
@@ -55,7 +55,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 8,
+   "execution_count": 29,
    "id": "0caa8146",
    "metadata": {},
    "outputs": [
@@ -63,68 +63,68 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "version_R *L2SS-268-LR1_2_Read_hardware_status_of_UB2c_from_SDPHW [1007a5c5462b1aa3e8f81268f890f6c058413218]\n",
+      "version_R *L2SS-268-LR1_2_Read_hardware_status_of_UB2c_from_SDPHW [b2db449162be8e52013dbbd1a44d6d90a12491b5]\n",
       "UNB2_Power_ON_OFF_RW [False False]\n",
       "UNB2_Front_Panel_LED_RW [0 0]\n",
       "UNB2_Front_Panel_LED_R [0 0]\n",
       "UNB2_mask_RW [False False]\n",
       "UNB2_I2C_bus_STATUS_R [False False]\n",
-      "UNB2_EEPROM_Unique_ID_R [5947666 5947666]\n",
-      "UNB2_DC_DC_48V_12V_VIN_R [29.18505859 29.18505859]\n",
-      "UNB2_DC_DC_48V_12V_VOUT_R [12.00146484 11.98486328]\n",
-      "UNB2_DC_DC_48V_12V_IOUT_R [3.625 3.625]\n",
-      "UNB2_DC_DC_48V_12V_TEMP_R [37. 37.]\n",
-      "UNB2_POL_QSFP_N01_VOUT_R [3.28686523 3.28686523]\n",
-      "UNB2_POL_QSFP_N01_IOUT_R [1.55078125 1.55078125]\n",
-      "UNB2_POL_QSFP_N01_TEMP_R [33.75 33.75]\n",
-      "UNB2_POL_QSFP_N23_VOUT_R [3.28710938 3.28710938]\n",
-      "UNB2_POL_QSFP_N23_IOUT_R [1.25195312 1.25195312]\n",
-      "UNB2_POL_QSFP_N23_TEMP_R [40.625 40.625]\n",
-      "UNB2_POL_SWITCH_1V2_VOUT_R [1.19970703 1.19970703]\n",
-      "UNB2_POL_SWITCH_1V2_IOUT_R [1.73632812 1.73632812]\n",
-      "UNB2_POL_SWITCH_1V2_TEMP_R [45.125 45.125]\n",
-      "UNB2_POL_SWITCH_PHY_VOUT_R [1.00024414 1.00024414]\n",
-      "UNB2_POL_SWITCH_PHY_IOUT_R [0.52050781 0.52050781]\n",
-      "UNB2_POL_SWITCH_PHY_TEMP_R [46.1875 46.1875]\n",
-      "UNB2_POL_CLOCK_VOUT_R [2.49951172 2.49951172]\n",
-      "UNB2_POL_CLOCK_IOUT_R [0.94042969 0.94042969]\n",
-      "UNB2_POL_CLOCK_TEMP_R [42.875 42.875]\n",
-      "UNB2_FPGA_DDR4_SLOT_TEMP_R [[27.5  27.5  29.25 27.75 28.75 29.25 28.5  28.5 ]\n",
-      " [27.5  27.5  29.25 27.75 28.75 29.25 28.5  28.5 ]]\n",
-      "UNB2_FPGA_POL_CORE_IOUT_R [[5.921875   4.109375   3.76171875 3.55859375]\n",
-      " [5.921875   4.1015625  3.76171875 3.55859375]]\n",
-      "UNB2_FPGA_POL_CORE_TEMP_R [[30.84375 31.46875 32.4375  34.75   ]\n",
-      " [30.84375 31.5     32.375   34.6875 ]]\n",
-      "UNB2_FPGA_POL_ERAM_VOUT_R [[0.8996582  0.90014648 0.90014648 0.8996582 ]\n",
-      " [0.8996582  0.8996582  0.90014648 0.8996582 ]]\n",
-      "UNB2_FPGA_POL_ERAM_IOUT_R [[0.08764648 0.0880127  0.18725586 0.08703613]\n",
-      " [0.02593994 0.0880127  0.18725586 0.08703613]]\n",
-      "UNB2_FPGA_POL_ERAM_TEMP_R [[38.75   39.25   41.     41.4375]\n",
-      " [38.75   39.25   41.     41.4375]]\n",
-      "UNB2_FPGA_POL_RXGXB_VOUT_R [[0.90014648 0.89990234 0.90014648 0.90014648]\n",
-      " [0.90014648 0.89990234 0.90014648 0.90014648]]\n",
-      "UNB2_FPGA_POL_RXGXB_IOUT_R [[0.49755859 0.41113281 0.40234375 0.48876953]\n",
-      " [0.49755859 0.41113281 0.40234375 0.48876953]]\n",
-      "UNB2_FPGA_POL_RXGXB_TEMP_R [[34.75   38.0625 36.5    38.1875]\n",
-      " [34.75   38.0625 36.5    38.1875]]\n",
-      "UNB2_FPGA_POL_TXGXB_VOUT_R [[0.89990234 0.90014648 0.90014648 0.89990234]\n",
-      " [0.89990234 0.90014648 0.90014648 0.89990234]]\n",
-      "UNB2_FPGA_POL_TXGXB_IOUT_R [[0.17480469 0.12219238 0.06433105 0.13110352]\n",
-      " [0.17480469 0.12219238 0.06433105 0.13110352]]\n",
-      "UNB2_FPGA_POL_HGXB_VOUT_R [[1.80004883 1.79956055 1.79980469 1.79980469]\n",
-      " [1.80004883 1.79956055 1.79980469 1.79980469]]\n",
-      "UNB2_FPGA_POL_HGXB_IOUT_R [[0.67089844 0.76269531 0.80664062 0.7265625 ]\n",
-      " [0.67089844 0.76269531 0.80664062 0.7265625 ]]\n",
-      "UNB2_FPGA_POL_HGXB_TEMP_R [[40.375  41.8125 44.3125 40.625 ]\n",
-      " [40.375  41.8125 44.3125 40.625 ]]\n",
-      "UNB2_FPGA_POL_PGM_VOUT_R [[1.80029297 1.80004883 1.79931641 1.80029297]\n",
-      " [1.80029297 1.80004883 1.79931641 1.80029297]]\n",
-      "UNB2_FPGA_POL_PGM_IOUT_R [[0.13818359 0.17089844 0.31542969 0.10656738]\n",
-      " [0.1550293  0.17089844 0.31542969 0.10656738]]\n",
-      "UNB2_FPGA_POL_PGM_TEMP_R [[40.0625 42.1875 44.3125 40.3125]\n",
-      " [40.0625 42.1875 44.3125 40.3125]]\n",
-      "State <function __get_command_func.<locals>.f at 0x7f636d295510>\n",
-      "Status <function __get_command_func.<locals>.f at 0x7f636d295510>\n"
+      "UNB2_EEPROM_Unique_ID_R [0 0]\n",
+      "UNB2_DC_DC_48V_12V_VIN_R [0. 0.]\n",
+      "UNB2_DC_DC_48V_12V_VOUT_R [0. 0.]\n",
+      "UNB2_DC_DC_48V_12V_IOUT_R [0. 0.]\n",
+      "UNB2_DC_DC_48V_12V_TEMP_R [0. 0.]\n",
+      "UNB2_POL_QSFP_N01_VOUT_R [0. 0.]\n",
+      "UNB2_POL_QSFP_N01_IOUT_R [0. 0.]\n",
+      "UNB2_POL_QSFP_N01_TEMP_R [0. 0.]\n",
+      "UNB2_POL_QSFP_N23_VOUT_R [0. 0.]\n",
+      "UNB2_POL_QSFP_N23_IOUT_R [0. 0.]\n",
+      "UNB2_POL_QSFP_N23_TEMP_R [0. 0.]\n",
+      "UNB2_POL_SWITCH_1V2_VOUT_R [0. 0.]\n",
+      "UNB2_POL_SWITCH_1V2_IOUT_R [0. 0.]\n",
+      "UNB2_POL_SWITCH_1V2_TEMP_R [0. 0.]\n",
+      "UNB2_POL_SWITCH_PHY_VOUT_R [0. 0.]\n",
+      "UNB2_POL_SWITCH_PHY_IOUT_R [0. 0.]\n",
+      "UNB2_POL_SWITCH_PHY_TEMP_R [0. 0.]\n",
+      "UNB2_POL_CLOCK_VOUT_R [0. 0.]\n",
+      "UNB2_POL_CLOCK_IOUT_R [0. 0.]\n",
+      "UNB2_POL_CLOCK_TEMP_R [0. 0.]\n",
+      "UNB2_FPGA_DDR4_SLOT_TEMP_R [[0. 0. 0. 0. 0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0. 0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_CORE_IOUT_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_CORE_TEMP_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_ERAM_VOUT_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_ERAM_IOUT_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_ERAM_TEMP_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_RXGXB_VOUT_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_RXGXB_IOUT_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_RXGXB_TEMP_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_TXGXB_VOUT_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_TXGXB_IOUT_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_HGXB_VOUT_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_HGXB_IOUT_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_HGXB_TEMP_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_PGM_VOUT_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_PGM_IOUT_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "UNB2_FPGA_POL_PGM_TEMP_R [[0. 0. 0. 0.]\n",
+      " [0. 0. 0. 0.]]\n",
+      "State <function __get_command_func.<locals>.f at 0x7f4c210e1ea0>\n",
+      "Status <function __get_command_func.<locals>.f at 0x7f4c210e1ea0>\n"
      ]
     }
    ],
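In the new output every UNB2 monitoring point reads zero while UNB2_I2C_bus_STATUS_R is [False False], so the values are most likely translator defaults rather than live telemetry. A hedged sketch that checks the bus status before trusting a reading (attribute names taken from the output above, d being the UNB2 proxy from this notebook):

    # Sketch: only report sensor values for boards whose I2C bus is up.
    import numpy

    i2c_ok = numpy.asarray(d.UNB2_I2C_bus_STATUS_R)
    vin = numpy.asarray(d.UNB2_DC_DC_48V_12V_VIN_R)
    for board, ok in enumerate(i2c_ok):
        if ok:
            print(f"board {board}: 48V->12V VIN = {vin[board]:.2f} V")
        else:
            print(f"board {board}: I2C bus down, reading is a default value")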
@@ -138,7 +138,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 78,
+   "execution_count": 30,
    "id": "929965c2",
    "metadata": {},
    "outputs": [
@@ -171,7 +171,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 82,
+   "execution_count": 22,
    "id": "6813164e",
    "metadata": {},
    "outputs": [
@@ -201,7 +201,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 74,
+   "execution_count": 23,
    "id": "e9b32ec7",
    "metadata": {},
    "outputs": [
@@ -210,7 +210,7 @@
      "output_type": "stream",
      "text": [
       "Old values:\n",
-      " [ True  True]\n",
+      " [False False]\n",
       "Values to be set:\n",
       " [False False]\n",
       "Values read back after setting:\n",
@@ -231,25 +231,21 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 7,
+   "execution_count": 24,
    "id": "transsexual-battle",
    "metadata": {},
    "outputs": [
     {
-     "ename": "DevFailed",
-     "evalue": "DevFailed[\nDevError[\n    desc = Read value for attribute FPGA_mask_RW has not been updated\n  origin = Device_3Impl::read_attributes_no_except\n  reason = API_AttrValueNotSet\nseverity = ERR]\n\nDevError[\n    desc = Failed to read_attribute on device lts/sdp/1, attribute FPGA_mask_RW\n  origin = DeviceProxy::read_attribute()\n  reason = API_AttributeFailed\nseverity = ERR]\n]",
+     "ename": "AttributeError",
+     "evalue": "FPGA_mask_RW",
      "output_type": "error",
      "traceback": [
       "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
-      "\u001b[0;31mDevFailed\u001b[0m                                 Traceback (most recent call last)",
+      "\u001b[0;31mAttributeError\u001b[0m                            Traceback (most recent call last)",
       "\u001b[0;32m/tmp/ipykernel_22/2885399456.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m      1\u001b[0m values = [\n\u001b[0;32m----> 2\u001b[0;31m     \u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mFPGA_mask_RW\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"FPGA_mask_RW\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      3\u001b[0m     \u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mFPGA_scrap_R\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"FPGA_scrap_R\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      4\u001b[0m     \u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mFPGA_scrap_RW\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"FPGA_scrap_RW\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      5\u001b[0m     \u001b[0;34m[\u001b[0m\u001b[0md\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mFPGA_status_R\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m\"FPGA_status_R\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/device_proxy.py\u001b[0m in \u001b[0;36m__DeviceProxy__getattr\u001b[0;34m(self, name)\u001b[0m\n\u001b[1;32m    319\u001b[0m     \u001b[0mattr_info\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__get_attr_cache\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mname_l\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    320\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mattr_info\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 321\u001b[0;31m         \u001b[0;32mreturn\u001b[0m \u001b[0m__get_attribute_value\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_info\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    322\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    323\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mname_l\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__get_pipe_cache\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/device_proxy.py\u001b[0m in \u001b[0;36m__get_attribute_value\u001b[0;34m(self, attr_info, name)\u001b[0m\n\u001b[1;32m    281\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__get_attribute_value\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mattr_info\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    282\u001b[0m     \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0menum_class\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mattr_info\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 283\u001b[0;31m     \u001b[0mattr_value\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread_attribute\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    284\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0menum_class\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    285\u001b[0m         \u001b[0;32mreturn\u001b[0m \u001b[0menum_class\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mattr_value\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/green.py\u001b[0m in \u001b[0;36mgreener\u001b[0;34m(obj, *args, **kwargs)\u001b[0m\n\u001b[1;32m    193\u001b[0m             \u001b[0mgreen_mode\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0maccess\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'green_mode'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    194\u001b[0m             \u001b[0mexecutor\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mget_object_executor\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgreen_mode\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 195\u001b[0;31m             \u001b[0;32mreturn\u001b[0m \u001b[0mexecutor\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwargs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mwait\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mwait\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtimeout\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtimeout\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    196\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    197\u001b[0m         \u001b[0;32mreturn\u001b[0m \u001b[0mgreener\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/green.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fn, args, kwargs, wait, timeout)\u001b[0m\n\u001b[1;32m    107\u001b[0m         \u001b[0;31m# Sychronous (no delegation)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    108\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0masynchronous\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0min_executor_context\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 109\u001b[0;31m             \u001b[0;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    110\u001b[0m         \u001b[0;31m# Asynchronous delegation\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    111\u001b[0m         \u001b[0maccessor\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdelegate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwargs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
-      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/device_proxy.py\u001b[0m in \u001b[0;36m__DeviceProxy__read_attribute\u001b[0;34m(self, value, extract_as)\u001b[0m\n\u001b[1;32m    439\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    440\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__DeviceProxy__read_attribute\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mextract_as\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mExtractAs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mNumpy\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 441\u001b[0;31m     \u001b[0;32mreturn\u001b[0m \u001b[0m__check_read_attribute\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_read_attribute\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mextract_as\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    442\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    443\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
-      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/device_proxy.py\u001b[0m in \u001b[0;36m__check_read_attribute\u001b[0;34m(dev_attr)\u001b[0m\n\u001b[1;32m    155\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__check_read_attribute\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdev_attr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    156\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mdev_attr\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mhas_failed\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 157\u001b[0;31m         \u001b[0;32mraise\u001b[0m \u001b[0mDevFailed\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mdev_attr\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mget_err_stack\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    158\u001b[0m     \u001b[0;32mreturn\u001b[0m \u001b[0mdev_attr\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    159\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
-      "\u001b[0;31mDevFailed\u001b[0m: DevFailed[\nDevError[\n    desc = Read value for attribute FPGA_mask_RW has not been updated\n  origin = Device_3Impl::read_attributes_no_except\n  reason = API_AttrValueNotSet\nseverity = ERR]\n\nDevError[\n    desc = Failed to read_attribute on device lts/sdp/1, attribute FPGA_mask_RW\n  origin = DeviceProxy::read_attribute()\n  reason = API_AttributeFailed\nseverity = ERR]\n]"
+      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tango/device_proxy.py\u001b[0m in \u001b[0;36m__DeviceProxy__getattr\u001b[0;34m(self, name)\u001b[0m\n\u001b[1;32m    353\u001b[0m         \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread_pipe\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    354\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 355\u001b[0;31m     \u001b[0msix\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mraise_from\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mAttributeError\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mname\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcause\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    356\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    357\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
+      "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/six.py\u001b[0m in \u001b[0;36mraise_from\u001b[0;34m(value, from_value)\u001b[0m\n",
+      "\u001b[0;31mAttributeError\u001b[0m: FPGA_mask_RW"
      ]
     }
    ],
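The failure mode changed with the rename: PyTango raises AttributeError when the name is not in the device's attribute list at all, and DevFailed with reason API_AttrValueNotSet when the attribute exists but has no valid value yet. A minimal sketch telling the two apart:

    # Sketch: distinguish a missing attribute from one without a value.
    from tango import DeviceProxy, DevFailed

    d = DeviceProxy("LTS/SDP/1")
    try:
        print(d.FPGA_mask_RW)
    except AttributeError:
        print("FPGA_mask_RW is not defined on this device")
    except DevFailed as e:
        if e.args[0].reason == "API_AttrValueNotSet":
            print("FPGA_mask_RW exists but has not been given a value yet")
        else:
            raise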
diff --git a/jupyter-notebooks/archiving_demo.ipynb b/jupyter-notebooks/archiving_demo.ipynb
index 28eeb7d3196ea347f817c3d20ee8683d096ad2bd..6ae2c3bc281d9e1269b9d0a5cab606bc11ef0553 100644
--- a/jupyter-notebooks/archiving_demo.ipynb
+++ b/jupyter-notebooks/archiving_demo.ipynb
@@ -1243,7 +1243,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "d=DeviceProxy(\"LTS/PCC/1\")"
+    "d=DeviceProxy(\"LTS/RECV/1\")"
    ]
   },
   {
@@ -1307,7 +1307,7 @@
       "tango://databaseds:10000/lts/randomdata/1/rnd21\n",
       "tango://databaseds:10000/lts/random_data/1/rnd1\n",
       "tango://databaseds:10000/lts/random_data/1/rnd21\n",
-      "tango://databaseds:10000/lts/pcc/1/rcu_temperature_r\n",
+      "tango://databaseds:10000/lts/recv/1/rcu_temperature_r\n",
       "tango://databaseds:10000/lts/random_data/1/rnd3\n",
       "tango://databaseds:10000/lts/random_data/1/rnd2\n",
       "tango://databaseds:10000/lts/random_data/1/rnd4\n"
@@ -1328,7 +1328,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "main_att = 'lts/pcc/1/RCU_temperature_R'\n",
+    "main_att = 'lts/recv/1/RCU_temperature_R'\n",
     "archiver.add_attribute_to_archiver(main_att,polling_period=1000,event_period=1000)"
    ]
   },
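The archiver call above takes the plain domain/family/member/attribute name; the tango://databaseds:10000/... form in the listing is the fully-qualified name the archiver stores after resolving it. A small sketch building the name for the renamed device, assuming the archiver object set up earlier in this notebook:

    # Sketch: archive an attribute of the renamed RECV device.
    device, attribute = "lts/recv/1", "RCU_temperature_R"
    main_att = f"{device}/{attribute}"
    archiver.add_attribute_to_archiver(main_att, polling_period=1000, event_period=1000)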
diff --git a/sbin/run_integration_test.sh b/sbin/run_integration_test.sh
index d54163625541c13816bf0309c09a2713ce35add9..93b13300eb92afe2c95c7cb5c3292869019d9d96 100755
--- a/sbin/run_integration_test.sh
+++ b/sbin/run_integration_test.sh
@@ -6,17 +6,32 @@ if [ -z "$LOFAR20_DIR" ]; then
   exit 1
 fi
 
-# Start all required containers
+# Stop any running devices and simulators, then start the base services
 cd "$LOFAR20_DIR/docker-compose" || exit 1
-make start databaseds dsconfig device-sdp device-pcc jupyter elk sdptr-sim pypcc-sim
+make stop device-sdp device-recv device-sst device-unb2 sdptr-sim recv-sim unb2-sim
+make start databaseds dsconfig jupyter elk
+
+# Give dsconfig and databaseds time to start
+sleep 15
 
 # Update the dsconfig
 cd "$TANGO_LOFAR_LOCAL_DIR" || exit 1
 sbin/update_ConfigDb.sh CDB/integration_ConfigDb.json
 
+cd "$LOFAR20_DIR/docker-compose" || exit 1
+make start sdptr-sim recv-sim
+
+# Give the simulators time to start
+sleep 5
+
+make start device-sdp device-recv device-sst device-unb2
+
+# Give the devices time to start
+sleep 5
+
 # Start the integration test
 cd "$LOFAR20_DIR/docker-compose" || exit 1
 make start integration-test
 
 # Run the integration test with the output displayed on stdout
-docker start -a integration-test
\ No newline at end of file
+docker start -a integration-test
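The fixed sleep 15/sleep 5 pauses make the start sequence timing-dependent; a more robust variant polls until each device actually answers. The script itself is shell, so this is only the idea, sketched in Python/PyTango; the lts/sst/1 and lts/unb2/1 names are assumptions inferred from the make targets above:

    # Sketch: replace fixed sleeps with a ping loop per device.
    import time
    from tango import DeviceProxy, DevFailed

    def wait_for_device(name, timeout=60.0):
        """Poll until the device answers a ping or the timeout expires."""
        deadline = time.time() + timeout
        while time.time() < deadline:
            try:
                DeviceProxy(name).ping()  # raises DevFailed until the device is up
                return
            except DevFailed:
                time.sleep(1)
        raise TimeoutError(f"{name} did not come up within {timeout:.0f}s")

    for device in ("lts/sdp/1", "lts/recv/1", "lts/sst/1", "lts/unb2/1"):
        wait_for_device(device)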