diff --git a/docker-compose/itango/lofar-requirements.txt b/docker-compose/itango/lofar-requirements.txt
index c3b73750dd4c9bb86e1174afa2a2987aed46c0fd..66cdfeefe6264fece139a955bdaa481bf2476bdb 100644
--- a/docker-compose/itango/lofar-requirements.txt
+++ b/docker-compose/itango/lofar-requirements.txt
@@ -1,3 +1,4 @@
+# Do not put tangostationcontrol dependencies here
 parso == 0.7.1
 jedi == 0.17.2
 astropy
diff --git a/docker-compose/lofar-device-base/lofar-requirements.txt b/docker-compose/lofar-device-base/lofar-requirements.txt
index 7cadcb9cc831345b7abd878743c048b778079b6b..6900aeb05149a31cb124b218da3f96a6664e9e13 100644
--- a/docker-compose/lofar-device-base/lofar-requirements.txt
+++ b/docker-compose/lofar-device-base/lofar-requirements.txt
@@ -1,3 +1,4 @@
+# Do not put tangostationcontrol dependencies here
 astropy
 python-logstash-async
 docker
diff --git a/tangostationcontrol/tangostationcontrol/common/lofar_logging.py b/tangostationcontrol/tangostationcontrol/common/lofar_logging.py
index be71e4149469fbe772918131e4f0dd28eb28552e..766b8cc0eb31256928331e1f20bea191ad571b03 100644
--- a/tangostationcontrol/tangostationcontrol/common/lofar_logging.py
+++ b/tangostationcontrol/tangostationcontrol/common/lofar_logging.py
@@ -99,7 +99,7 @@ class LogAnnotator(logging.Formatter):
         # we just annotate, we don't filter
         return True
 
-def configure_logger(logger: logging.Logger=None, log_extra=None):
+def configure_logger(logger: logging.Logger=None, log_extra=None, debug=False):
     """
        Configure the given logger (or root if None) to:
          - send logs to the ELK stack
@@ -122,6 +122,26 @@ def configure_logger(logger: logging.Logger=None, log_extra=None):
     # don't spam errors for git, as we use it in our log handler, which would result in an infinite loop
     logging.getLogger("git").setLevel(logging.ERROR)
 
+    # for now, also log to stderr
+    # Set up logging in a way that it can be understood by a human reader, be
+    # easily grep'ed, be parsed with a couple of shell commands and
+    # easily fed into a Kibana/Elasticsearch system.
+    handler = logging.StreamHandler()
+
+    # Always also log the hostname because it makes the origin of the log clear.
+    hostname = socket.gethostname()
+
+    formatter = logging.Formatter(fmt = '%(asctime)s.%(msecs)d %(levelname)s - HOST="{}" DEVICE="%(tango_device)s" PID="%(process)d" TNAME="%(threadName)s" FILE="%(pathname)s" LINE="%(lineno)d" FUNC="%(funcName)s" MSG="%(message)s"'.format(hostname), datefmt = '%Y-%m-%dT%H:%M:%S')
+    handler.setFormatter(formatter)
+    handler.addFilter(LogSuppressErrorSpam())
+    handler.addFilter(LogAnnotator())
+
+    logger.addHandler(handler)
+
+    # If configuring for debug, exit early and skip the ELK/Tango log handlers
+    if debug:
+        return logger
+
     # Log to ELK stack
     try:
         from logstash_async.handler import AsynchronousLogstashHandler, LogstashFormatter
@@ -151,23 +171,6 @@ def configure_logger(logger: logging.Logger=None, log_extra=None):
     except Exception:
         logger.exception("Cannot forward logs to Tango.")
 
-
-    # for now, also log to stderr
-    # Set up logging in a way that it can be understood by a human reader, be
-    # easily grep'ed, be parsed with a couple of shell commands and
-    # easily fed into an Kibana/Elastic search system.
-    handler = logging.StreamHandler()
-
-    # Always also log the hostname because it makes the origin of the log clear.
-    hostname = socket.gethostname()
-
-    formatter = logging.Formatter(fmt = '%(asctime)s.%(msecs)d %(levelname)s - HOST="{}" DEVICE="%(tango_device)s" PID="%(process)d" TNAME="%(threadName)s" FILE="%(pathname)s" LINE="%(lineno)d" FUNC="%(funcName)s" MSG="%(message)s"'.format(hostname), datefmt = '%Y-%m-%dT%H:%M:%S')
-    handler.setFormatter(formatter)
-    handler.addFilter(LogSuppressErrorSpam())
-    handler.addFilter(LogAnnotator())
-
-    logger.addHandler(handler)
-
     return logger
 
 def device_logging_to_python():
diff --git a/tangostationcontrol/tangostationcontrol/devices/docker_device.py b/tangostationcontrol/tangostationcontrol/devices/docker_device.py
index 7d4337b13e0eac5e6277ebdcdacb19a110067a5d..5945a152b9c81479f6c5317fa552be023d2ad10a 100644
--- a/tangostationcontrol/tangostationcontrol/devices/docker_device.py
+++ b/tangostationcontrol/tangostationcontrol/devices/docker_device.py
@@ -45,7 +45,6 @@ class Docker(hardware_device):
     # ----------
     # Attributes
     # ----------
-    version_R = attribute(dtype=str, access=AttrWriteType.READ, fget=lambda self: get_version())
     archiver_maria_db_R = attribute_wrapper(comms_annotation={"container": "archiver-maria-db"}, datatype=numpy.bool_)
     archiver_maria_db_RW = attribute_wrapper(comms_annotation={"container": "archiver-maria-db"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE)
     databaseds_R = attribute_wrapper(comms_annotation={"container": "databaseds"}, datatype=numpy.bool_)
@@ -95,7 +94,7 @@ class Docker(hardware_device):
         try:
             self.docker_client.sync_stop()
         except Exception as e:
-            self.warn_stream("Exception while stopping OPC ua connection in configure_for_off function: {}. Exception ignored".format(e))
+            self.warn_stream("Exception while stopping docker client in configure_for_off function: {}. Exception ignored".format(e))
 
     @log_exceptions()
     def configure_for_initialise(self):
diff --git a/tangostationcontrol/tangostationcontrol/devices/hardware_device.py b/tangostationcontrol/tangostationcontrol/devices/hardware_device.py
index 7669cc15111619d3bfc05360406172a5bb4c796e..bb4625826ca416b87ae6a604825fbf822354da03 100644
--- a/tangostationcontrol/tangostationcontrol/devices/hardware_device.py
+++ b/tangostationcontrol/tangostationcontrol/devices/hardware_device.py
@@ -77,6 +77,7 @@ class hardware_device(Device, metaclass=AbstractDeviceMetas):
 
         self.value_dict = {i: i.initial_value() for i in self.attr_list()}
 
+    @log_exceptions()
     def init_device(self):
         """ Instantiates the device in the OFF state. """
 
@@ -85,6 +86,19 @@ class hardware_device(Device, metaclass=AbstractDeviceMetas):
 
         self.set_state(DevState.OFF)
 
+    @log_exceptions()
+    def delete_device(self):
+        """Hook to delete resources allocated in init_device.
+
+        This method allows for any memory or other resources allocated in the
+        init_device method to be released.  This method is called by the device
+        destructor and by the device Init command (a Tango built-in).
+        """
+        logger.info("Shutting down...")
+
+        self.Off()
+        logger.info("Shut down.  Good bye.")
+
     # --------
     # Commands
     # --------
@@ -191,18 +205,6 @@ class hardware_device(Device, metaclass=AbstractDeviceMetas):
         """Method always executed before any TANGO command is executed."""
         pass
 
-    def delete_device(self):
-        """Hook to delete resources allocated in init_device.
-
-        This method allows for any memory or other resources allocated in the
-        init_device method to be released.  This method is called by the device
-        destructor and by the device Init command (a Tango built-in).
-        """
-        self.debug_stream("Shutting down...")
-
-        self.Off()
-        self.debug_stream("Shut down.  Good bye.")
-
     @command()
     @only_in_states([DevState.STANDBY, DevState.ON])
     @DebugIt()
@@ -212,6 +214,11 @@ class hardware_device(Device, metaclass=AbstractDeviceMetas):
 
             A hardware point XXX is set to the value of the object member named XXX_default, if it exists.
             XXX_default can be f.e. a constant, or a device_property.
+
+            The points are set in the following order:
+                1) The python class member 'first_default_settings' is read, as an array of strings denoting attribute names. Each attribute
+                   is set in that order.
+                2) Any remaining default settings are applied.
         """
 
         # we cannot write directly to our attribute, as that would not
diff --git a/tangostationcontrol/tangostationcontrol/devices/recv.py b/tangostationcontrol/tangostationcontrol/devices/recv.py
index 78479ac0e09872ec7aae90f3a4d4a41d990bcea1..ca0d3938bedaf55e057d57355e6cc772889a8b3f 100644
--- a/tangostationcontrol/tangostationcontrol/devices/recv.py
+++ b/tangostationcontrol/tangostationcontrol/devices/recv.py
@@ -19,57 +19,40 @@ from tango import AttrWriteType
 import numpy
 
 # Additional import
-from tangostationcontrol.clients.opcua_client import OPCUAConnection
 from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper
 from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
-from tangostationcontrol.common.lofar_version import get_version
 from tangostationcontrol.devices.device_decorators import *
-from tangostationcontrol.devices.hardware_device import hardware_device
+from tangostationcontrol.devices.opcua_device import opcua_device
 
 __all__ = ["RECV", "main"]
 
 @device_logging_to_python()
-class RECV(hardware_device):
-    """
-
-    **Properties:**
-
-    - Device Property
-        OPC_Server_Name
-            - Type:'DevString'
-        OPC_Server_Port
-            - Type:'DevULong'
-        OPC_Time_Out
-            - Type:'DevDouble'
-    """
-
+class RECV(opcua_device):
     # -----------------
     # Device Properties
     # -----------------
 
-    OPC_Server_Name = device_property(
-        dtype='DevString',
-        mandatory=True
+    Ant_mask_RW_default = device_property(
+        dtype='DevVarBooleanArray',
+        mandatory=False,
+        default_value=[[True] * 3] * 32
     )
 
-    OPC_Server_Port = device_property(
-        dtype='DevULong',
-        mandatory=True
+    RCU_mask_RW_default = device_property(
+        dtype='DevVarBooleanArray',
+        mandatory=False,
+        default_value=[True] * 32
     )
 
-    OPC_Time_Out = device_property(
-        dtype='DevDouble',
-        mandatory=True
-    )
-    OPC_namespace = device_property(
-        dtype='DevString',
-        mandatory=False
-    )
+    first_default_settings = [
+        # set the masks first, as those filter any subsequent settings
+        'Ant_mask_RW',
+        'RCU_mask_RW'
+    ]
 
     # ----------
     # Attributes
     # ----------
-    version_R = attribute(dtype=str, access=AttrWriteType.READ, fget=lambda self: get_version())
     Ant_mask_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:Ant_mask_RW"], datatype=numpy.bool_, dims=(3, 32), access=AttrWriteType.READ_WRITE)
     Ant_status_R = attribute(dtype=str, max_dim_x=3, max_dim_y=32)
     CLK_Enable_PWR_R = attribute_wrapper(comms_annotation=["2:PCC", "2:CLK_Enable_PWR_R"], datatype=numpy.bool_)
@@ -105,55 +88,9 @@ class RECV(hardware_device):
     RCU_translator_busy_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_translator_busy_R"], datatype=numpy.bool_)
     RCU_version_R = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_version_R"], datatype=numpy.str, dims=(32,))
 
-    @log_exceptions()
-    def delete_device(self):
-        """Hook to delete resources allocated in init_device.
-
-        This method allows for any memory or other resources allocated in the
-        init_device method to be released.  This method is called by the device
-        destructor and by the device Init command (a Tango built-in).
-        """
-        self.debug_stream("Shutting down...")
-
-        self.Off()
-        self.debug_stream("Shut down.  Good bye.")
-
     # --------
     # overloaded functions
     # --------
-    @log_exceptions()
-    def configure_for_off(self):
-        """ user code here. is called when the state is set to OFF """
-        # Stop keep-alive
-        try:
-            self.opcua_connection.stop()
-        except Exception as e:
-            self.warn_stream("Exception while stopping OPC ua connection in configure_for_off function: {}. Exception ignored".format(e))
-
-    @log_exceptions()
-    def configure_for_initialise(self):
-        """ user code here. is called when the state is set to INIT """
-
-        # Init the dict that contains function to OPC-UA function mappings.
-        self.function_mapping = {}
-        self.function_mapping["RCU_on"] = {}
-        self.function_mapping["RCU_off"] = {}
-        self.function_mapping["CLK_on"] = {}
-        self.function_mapping["CLK_off"] = {}
-
-        # set up the OPC ua client
-        self.OPCua_client = OPCUAConnection("opc.tcp://{}:{}/".format(self.OPC_Server_Name, self.OPC_Server_Port), "http://lofar.eu", self.OPC_Time_Out, self.Fault, self)
-
-        # map an access helper class
-        for i in self.attr_list():
-            try:
-                i.set_comm_client(self.OPCua_client)
-            except Exception as e:
-                # use the pass function instead of setting read/write fails
-                i.set_pass_func()
-                self.warn_stream("error while setting the RECV attribute {} read/write function. {}".format(i, e))
-
-        self.OPCua_client.start()
 
     # --------
     # Commands
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py b/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py
index 71611166b049639b2f058a12aa20c129cfe01e72..661498183d0e45dcb814fb21864a1616a5c2b97b 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/sdp.py
@@ -17,49 +17,24 @@ from tango.server import device_property, attribute
 from tango import AttrWriteType
 
 # Additional import
-from tangostationcontrol.clients.opcua_client import OPCUAConnection
 from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper
-from tangostationcontrol.devices.hardware_device import hardware_device
-
+from tangostationcontrol.devices.opcua_device import opcua_device
 from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
-from tangostationcontrol.common.lofar_version import get_version
 
 import numpy
 
 __all__ = ["SDP", "main"]
 
 @device_logging_to_python()
-class SDP(hardware_device):
-    """
-
-    **Properties:**
-
-    - Device Property
-        OPC_Server_Name
-            - Type:'DevString'
-        OPC_Server_Port
-            - Type:'DevULong'
-        OPC_Time_Out
-            - Type:'DevDouble'
-    """
-
+class SDP(opcua_device):
     # -----------------
     # Device Properties
     # -----------------
 
-    OPC_Server_Name = device_property(
-        dtype='DevString',
-        mandatory=True
-    )
-
-    OPC_Server_Port = device_property(
-        dtype='DevULong',
-        mandatory=True
-    )
-
-    OPC_Time_Out = device_property(
-        dtype='DevDouble',
-        mandatory=True
+    TR_fpga_mask_RW_default = device_property(
+        dtype='DevVarBooleanArray',
+        mandatory=False,
+        default_value=[True] * 16
     )
 
     FPGA_processing_enable_RW_default = device_property(
@@ -73,6 +48,27 @@ class SDP(hardware_device):
         mandatory=False,
         default_value=[[False] * 12] * 16
     )
+
+    # If we enable the waveform generator, we want some sane defaults.
+
+    FPGA_wg_amplitude_RW_default = device_property(
+        dtype='DevVarDoubleArray',
+        mandatory=False,
+        default_value=[[0.1] * 12] * 16
+    )
+
+    FPGA_wg_frequency_RW_default = device_property(
+        dtype='DevVarDoubleArray',
+        mandatory=False,
+        # Emit a signal on subband 102
+        default_value=[[102 * 200e6/1024] * 12] * 16
+    )
+
+    FPGA_wg_phase_RW_default = device_property(
+        dtype='DevVarDoubleArray',
+        mandatory=False,
+        default_value=[[0.0] * 12] * 16
+    )
     
     FPGA_sdp_info_station_id_RW_default = device_property(
         dtype='DevVarULongArray',
@@ -85,12 +81,15 @@ class SDP(hardware_device):
         default_value=[[8192] * 12 * 512] * 16
     )
 
+    first_default_settings = [
+        # set the masks first, as those filter any subsequent settings
+        'TR_fpga_mask_RW'
+    ]
+
     # ----------
     # Attributes
     # ----------
 
-    version_R = attribute(dtype=str, access=AttrWriteType.READ, fget=lambda self: get_version())
-
     FPGA_beamlet_output_enable_R = attribute_wrapper(comms_annotation=["2:FPGA_beamlet_output_enable_R"], datatype=numpy.bool_, dims=(16,))
     FPGA_beamlet_output_enable_RW = attribute_wrapper(comms_annotation=["2:FPGA_beamlet_output_enable_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
     FPGA_beamlet_output_hdr_eth_destination_mac_R = attribute_wrapper(comms_annotation=["2:FPGA_beamlet_output_hdr_eth_destination_mac_R"], datatype=numpy.str, dims=(16,))
@@ -164,35 +163,6 @@ class SDP(hardware_device):
     # --------
     # overloaded functions
     # --------
-    @log_exceptions()
-    def configure_for_off(self):
-        """ user code here. is called when the state is set to OFF """
-
-        # Stop keep-alive
-        try:
-            self.OPCua_client.stop()
-        except Exception as e:
-            self.warn_stream("Exception while stopping OPC ua connection in configure_for_off function: {}. Exception ignored".format(e))
-
-    @log_exceptions()
-    def configure_for_initialise(self):
-        """ user code here. is called when the sate is set to INIT """
-        """Initialises the attributes and properties of the SDP."""
-
-        # set up the OPC ua client
-        self.OPCua_client = OPCUAConnection("opc.tcp://{}:{}/".format(self.OPC_Server_Name, self.OPC_Server_Port), "http://lofar.eu", self.OPC_Time_Out, self.Fault, self)
-
-        # map an access helper class
-        for i in self.attr_list():
-            try:
-                i.set_comm_client(self.OPCua_client)
-            except Exception as e:
-                # use the pass function instead of setting read/write fails
-                i.set_pass_func()
-                self.warn_stream("error while setting the SDP attribute {} read/write function. {}".format(i, e))
-                pass
-
-        self.OPCua_client.start()
 
     # --------
     # Commands
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py b/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py
index 1c12330391bd05c28971a3f9ed67cca7e8e66ae9..1cca268d2d9fd28d24d411050186d71b924dc6a7 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/sst.py
@@ -50,12 +50,29 @@ class SST(Statistics):
         mandatory=True
     )
 
+    FPGA_sst_offload_enable_RW_default = device_property(
+        dtype='DevVarBooleanArray',
+        mandatory=False,
+        default_value=[True] * 16
+    )
+
     FPGA_sst_offload_weighted_subbands_RW_default = device_property(
         dtype='DevVarBooleanArray',
         mandatory=False,
         default_value=[True] * 16
     )
 
+    first_default_settings = [
+        'FPGA_sst_offload_hdr_eth_destination_mac_RW',
+        'FPGA_sst_offload_hdr_ip_destination_address_RW',
+        'FPGA_sst_offload_hdr_udp_destination_port_RW',
+
+        'FPGA_sst_offload_weighted_subbands_RW',
+
+        # enable only after the offloading is configured correctly
+        'FPGA_sst_offload_enable_RW'
+    ]
+
     # ----------
     # Attributes
     # ----------
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/statistics.py b/tangostationcontrol/tangostationcontrol/devices/sdp/statistics.py
index 6da1263ff5ca9438e2d856747e38245c9f8d570d..aa56e71f047ebd8412deb77fa315dd6e2a7d8c16 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/statistics.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/statistics.py
@@ -21,10 +21,8 @@ from tango import AttrWriteType
 import asyncio
 
 from tangostationcontrol.clients.statistics_client import StatisticsClient
-from tangostationcontrol.clients.opcua_client import OPCUAConnection
 from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper
-from tangostationcontrol.devices.hardware_device import hardware_device
-from tangostationcontrol.common.lofar_version import get_version
+from tangostationcontrol.devices.opcua_device import opcua_device
 from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
 
 import logging
@@ -35,7 +33,7 @@ import numpy
 
 __all__ = ["Statistics"]
 
-class Statistics(hardware_device, metaclass=ABCMeta):
+class Statistics(opcua_device, metaclass=ABCMeta):
 
     # In derived classes, set this to a subclass of StatisticsCollector
     @property
@@ -47,21 +45,6 @@ class Statistics(hardware_device, metaclass=ABCMeta):
     # Device Properties
     # -----------------
 
-    OPC_Server_Name = device_property(
-        dtype='DevString',
-        mandatory=True
-    )
-
-    OPC_Server_Port = device_property(
-        dtype='DevULong',
-        mandatory=True
-    )
-
-    OPC_Time_Out = device_property(
-        dtype='DevDouble',
-        mandatory=True
-    )
-
     Statistics_Client_UDP_Port = device_property(
         dtype='DevUShort',
         mandatory=True
@@ -76,8 +59,6 @@ class Statistics(hardware_device, metaclass=ABCMeta):
     # Attributes
     # ----------
 
-    version_R = attribute(dtype = str, access = AttrWriteType.READ, fget = lambda self: get_version())
-
     # number of UDP packets and bytes that were received
     nof_packets_received_R  = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "udp", "parameter": "nof_packets_received"}, datatype=numpy.uint64)
     nof_bytes_received_R  = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "udp", "parameter": "nof_bytes_received"}, datatype=numpy.uint64)
@@ -117,16 +98,15 @@ class Statistics(hardware_device, metaclass=ABCMeta):
         except Exception as e:
             logger.exception("Exception while stopping statistics_client in configure_for_off. Exception ignored")
 
-        try:
-            self.OPCUA_client.stop()
-        except Exception as e:
-            logger.exception("Exception while stopping OPC UA connection in configure_for_off. Exception ignored")
+        super().configure_for_off()
 
     @log_exceptions()
     def configure_for_initialise(self):
         """ user code here. is called when the sate is set to INIT """
         """Initialises the attributes and properties of the statistics device."""
 
+        super().configure_for_initialise()
+
         # Options for UDPReceiver
         udp_options = {
             "udp_port": self.Statistics_Client_UDP_Port,
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/statistics_collector.py b/tangostationcontrol/tangostationcontrol/devices/sdp/statistics_collector.py
index a073fbd945ce96701d2a19f673141b45a839f449..feee750f9e0514671d18525d6ebd4612177b44c0 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/statistics_collector.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/statistics_collector.py
@@ -131,6 +131,8 @@ class XSTCollector(StatisticsCollector):
 
             # Last value array we've constructed out of the packets
             "xst_blocks":            numpy.zeros((self.MAX_BLOCKS, self.BLOCK_LENGTH * self.BLOCK_LENGTH * self.VALUES_PER_COMPLEX), dtype=numpy.int64),
+            # Whether the values are actually conjugated and transposed
+            "xst_conjugated": numpy.zeros((self.MAX_BLOCKS,), dtype=numpy.bool_),
             "xst_timestamps":        numpy.zeros((self.MAX_BLOCKS,), dtype=numpy.float64),
             "xst_subbands":          numpy.zeros((self.MAX_BLOCKS,), dtype=numpy.uint16),
             "integration_intervals": numpy.zeros((self.MAX_BLOCKS,), dtype=numpy.float32),
@@ -162,20 +164,30 @@ class XSTCollector(StatisticsCollector):
             if fields.first_baseline[antenna] % self.BLOCK_LENGTH != 0:
                 raise ValueError("Packet describes baselines starting at %s, but we require a multiple of BLOCK_LENGTH=%d" % (fields.first_baseline, self.MAX_INPUTS))
 
+        # Make sure we always have a baseline (a,b) with a>=b. If not, we swap the indices and mark that the data must be conjugated and transposed when processed.
+        first_baseline = fields.first_baseline
+        if first_baseline[0] < first_baseline[1]:
+            conjugated = True
+            first_baseline = (first_baseline[1], first_baseline[0])
+        else:
+            conjugated = False
+
         # the payload contains complex values for the block of baselines of size BLOCK_LENGTH x BLOCK_LENGTH
         # starting at baseline first_baseline.
         #
         # we honour this format, as we want to keep the metadata together with these blocks. we do need to put the blocks in a linear
         # and tight order, however, so we calculate a block index.
-        block_index = baseline_index(fields.first_baseline[0] // self.BLOCK_LENGTH, fields.first_baseline[1] // self.BLOCK_LENGTH)
+        block_index = baseline_index(first_baseline[0] // self.BLOCK_LENGTH, first_baseline[1] // self.BLOCK_LENGTH)
+
+        # We did enough checks on first_baseline for this to be a logic error in our code
+        assert 0 <= block_index < self.MAX_BLOCKS, f"Received block {block_index}, but have only room for {self.MAX_BLOCKS}. Block starts at baseline {first_baseline}."
 
         # process the packet
         self.parameters["nof_valid_payloads"][fields.gn_index] += numpy.uint64(1)
 
-        block_index = baseline_index(fields.first_baseline[0], fields.first_baseline[1])
-
         self.parameters["xst_blocks"][block_index][:fields.nof_statistics_per_packet] = fields.payload
         self.parameters["xst_timestamps"][block_index]        = numpy.float64(fields.timestamp().timestamp())
+        self.parameters["xst_conjugated"][block_index]        = conjugated
         self.parameters["xst_subbands"][block_index]          = numpy.uint16(fields.subband_index)
         self.parameters["integration_intervals"][block_index] = fields.integration_interval()
 
@@ -184,11 +196,16 @@ class XSTCollector(StatisticsCollector):
 
         matrix = numpy.zeros((self.MAX_INPUTS, self.MAX_INPUTS), dtype=numpy.complex64)
         xst_blocks = self.parameters["xst_blocks"]
+        xst_conjugated = self.parameters["xst_conjugated"]
 
         for block_index in range(self.MAX_BLOCKS):
             # convert real/imag int to complex float values. this works as real/imag come in pairs
             block = xst_blocks[block_index].astype(numpy.float32).view(numpy.complex64)
 
+            if xst_conjugated[block_index]:
+                # block is conjugated and transposed. process.
+                block = block.conjugate().transpose()
+
             # reshape into [a][b]
             block = block.reshape(self.BLOCK_LENGTH, self.BLOCK_LENGTH)
 
diff --git a/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py b/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py
index 55cd535fe7fdcb9a360f1974c9f5c24cb59066c4..98d0725d29b26cab04e494c49efb4036c7bc84e1 100644
--- a/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py
+++ b/tangostationcontrol/tangostationcontrol/devices/sdp/xst.py
@@ -63,13 +63,37 @@ class XST(Statistics):
         default_value=[[0,102,0,0,0,0,0,0]] * 16
     )
 
+    FPGA_xst_integration_interval_RW_default = device_property(
+        dtype='DevVarDoubleArray',
+        mandatory=False,
+        default_value=[1.0] * 16
+    )
+
+    FPGA_xst_offload_enable_RW_default = device_property(
+        dtype='DevVarBooleanArray',
+        mandatory=False,
+        default_value=[True] * 16
+    )
+
+    first_default_settings = [
+        'FPGA_xst_offload_hdr_eth_destination_mac_RW',
+        'FPGA_xst_offload_hdr_ip_destination_address_RW',
+        'FPGA_xst_offload_hdr_udp_destination_port_RW',
+
+        'FPGA_xst_subband_select_RW',
+        'FPGA_xst_integration_interval_RW',
+
+        # enable only after the offloading is configured correctly
+        'FPGA_xst_offload_enable_RW'
+    ]
+
     # ----------
     # Attributes
     # ----------
 
     # FPGA control points for XSTs
-    FPGA_xst_integration_interval_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_integration_interval_RW"], datatype=numpy.double, dims=(8,16), access=AttrWriteType.READ_WRITE)
-    FPGA_xst_integration_interval_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_integration_interval_R"], datatype=numpy.double, dims=(8,16))
+    FPGA_xst_integration_interval_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_integration_interval_RW"], datatype=numpy.double, dims=(16,), access=AttrWriteType.READ_WRITE)
+    FPGA_xst_integration_interval_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_integration_interval_R"], datatype=numpy.double, dims=(16,))
     FPGA_xst_offload_enable_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_offload_enable_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE)
     FPGA_xst_offload_enable_R = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_offload_enable_R"], datatype=numpy.bool_, dims=(16,))
     FPGA_xst_offload_hdr_eth_destination_mac_RW = attribute_wrapper(comms_id=OPCUAConnection, comms_annotation=["2:FPGA_xst_offload_hdr_eth_destination_mac_RW"], datatype=numpy.str, dims=(16,), access=AttrWriteType.READ_WRITE)
@@ -89,6 +113,8 @@ class XST(Statistics):
     nof_payload_errors_R    = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "nof_payload_errors"}, dims=(XSTCollector.MAX_FPGAS,), datatype=numpy.uint64)
     # latest XSTs
     xst_blocks_R            = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "xst_blocks"}, dims=(XSTCollector.BLOCK_LENGTH * XSTCollector.BLOCK_LENGTH * XSTCollector.VALUES_PER_COMPLEX, XSTCollector.MAX_BLOCKS), datatype=numpy.int64)
+    # whether the values in the block are conjugated and transposed
+    xst_conjugated_R        = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "xst_conjugated"}, dims=(XSTCollector.MAX_BLOCKS,), datatype=numpy.bool_)
     # reported timestamp for each row in the latest XSTs
     xst_timestamp_R         = attribute_wrapper(comms_id=StatisticsClient, comms_annotation={"type": "statistics", "parameter": "xst_timestamps"}, dims=(XSTCollector.MAX_BLOCKS,), datatype=numpy.uint64)
     # which subband the XSTs describe
diff --git a/tangostationcontrol/tangostationcontrol/devices/unb2.py b/tangostationcontrol/tangostationcontrol/devices/unb2.py
index 6824d7b26c8deef76e56ad1ee19fc20e99f34ce1..495c80da63f38261d152e6ab45b09c3a7025765b 100644
--- a/tangostationcontrol/tangostationcontrol/devices/unb2.py
+++ b/tangostationcontrol/tangostationcontrol/devices/unb2.py
@@ -17,32 +17,16 @@ from tango.server import device_property, attribute
 from tango import AttrWriteType
 # Additional import
 
-from tangostationcontrol.clients.opcua_client import OPCUAConnection
 from tangostationcontrol.clients.attribute_wrapper import attribute_wrapper
-from tangostationcontrol.devices.hardware_device import hardware_device
-
+from tangostationcontrol.devices.opcua_device import opcua_device
 from tangostationcontrol.common.lofar_logging import device_logging_to_python, log_exceptions
-from tangostationcontrol.common.lofar_version import get_version
 
 import numpy
 
 __all__ = ["UNB2", "main"]
 
 @device_logging_to_python()
-class UNB2(hardware_device):
-    """
-
-    **Properties:**
-
-    - Device Property
-        OPC_Server_Name
-            - Type:'DevString'
-        OPC_Server_Port
-            - Type:'DevULong'
-        OPC_Time_Out
-            - Type:'DevDouble'
-    """
-
+class UNB2(opcua_device):
     # -----------------
     # Device Properties
     # -----------------
@@ -57,8 +41,6 @@ class UNB2(hardware_device):
     # Attributes
     # ----------
 
-    version_R = attribute(dtype=str, access=AttrWriteType.READ, fget=lambda self: get_version())
-
     N_unb = 2
     N_fpga = 4
     N_ddr = 2
@@ -150,49 +132,10 @@ class UNB2(hardware_device):
 
     # QualifiedName(2: UNB2_on)
     # QualifiedName(2: UNB2_off)
-    @log_exceptions()
-    def delete_device(self):
-        """Hook to delete resources allocated in init_device.
-
-        This method allows for any memory or other resources allocated in the
-        init_device method to be released.  This method is called by the device
-        destructor and by the device Init command (a Tango built-in).
-        """
-        self.debug_stream("Shutting down...")
-
-        self.Off()
-        self.debug_stream("Shut down.  Good bye.")
 
     # --------
     # overloaded functions
     # --------
-    @log_exceptions()
-    def configure_for_off(self):
-        """ user code here. is called when the state is set to OFF """
-        # Stop keep-alive
-        try:
-            self.opcua_connection.stop()
-        except Exception as e:
-            self.warn_stream("Exception while stopping OPC ua connection in configure_for_off function: {}. Exception ignored".format(e))
-
-    @log_exceptions()
-    def configure_for_initialise(self):
-        """ user code here. is called when the sate is set to INIT """
-        """Initialises the attributes and properties of theRECV."""
-
-        # set up the OPC ua client
-        self.OPCua_client = OPCUAConnection("opc.tcp://{}:{}/".format(self.OPC_Server_Name, self.OPC_Server_Port), "http://lofar.eu", self.OPC_Time_Out, self.Fault, self)
-
-        # map an access helper class
-        for i in self.attr_list():
-            try:
-                i.set_comm_client(self.OPCua_client)
-            except Exception as e:
-                # use the pass function instead of setting read/write fails
-                i.set_pass_func()
-                self.warn_stream("error while setting the UNB2 attribute {} read/write function. {}".format(i, e))
-
-        self.OPCua_client.start()
 
     # --------
     # Commands
diff --git a/tangostationcontrol/tangostationcontrol/integration_test/base.py b/tangostationcontrol/tangostationcontrol/integration_test/base.py
index 3583d1901a3ae7cecfefee1ac6ad698f0c098456..ed1eb2239af74251e22b42adf8ea5e2596688076 100644
--- a/tangostationcontrol/tangostationcontrol/integration_test/base.py
+++ b/tangostationcontrol/tangostationcontrol/integration_test/base.py
@@ -7,10 +7,15 @@
 # Distributed under the terms of the APACHE license.
 # See LICENSE.txt for more info.
 
+from tangostationcontrol.common.lofar_logging import configure_logger
+
 import unittest
 import asynctest
 import testscenarios
 
+# Set up logging for integration tests
+configure_logger(debug=True)
+
 
 class BaseIntegrationTestCase(testscenarios.WithScenarios, unittest.TestCase):
     """Integration test base class."""
diff --git a/tangostationcontrol/tangostationcontrol/statistics_writer/hdf5_writer.py b/tangostationcontrol/tangostationcontrol/statistics_writer/hdf5_writer.py
index 87fb3d28d7ad991d9ff8e6442656835e6a636fb9..eb7fa643bd9c8d74b1e946f468532c4852ecaba5 100644
--- a/tangostationcontrol/tangostationcontrol/statistics_writer/hdf5_writer.py
+++ b/tangostationcontrol/tangostationcontrol/statistics_writer/hdf5_writer.py
@@ -133,7 +133,7 @@ class hdf5_writer:
         """
 
         # create the new hdf5 group based on the timestamp of packets
-        current_group = self.file.create_group("{}_{}".format(self.mode, self.current_timestamp.strftime("%Y-%m-%d-%H-%M-%S-%f")[:-3]))
+        current_group = self.file.create_group("{}_{}".format(self.mode, self.current_timestamp.isoformat(timespec="milliseconds")))
 
         # store the statistics values for the current group
         self.store_function(current_group)
@@ -158,11 +158,11 @@ class hdf5_writer:
 
     def write_sst_matrix(self, current_group):
         # store the SST values
-        current_group.create_dataset(name="sst_values", data=self.current_matrix.parameters["sst_values"].astype(numpy.float32), compression="gzip")
+        current_group.create_dataset(name="values", data=self.current_matrix.parameters["sst_values"].astype(numpy.float32), compression="gzip")
 
     def write_xst_matrix(self, current_group):
         # requires a function call to transform the xst_blocks in to the right structure
-        current_group.create_dataset(name="xst_values", data=self.current_matrix.xst_values().astype(numpy.cfloat), compression="gzip")
+        current_group.create_dataset(name="values", data=self.current_matrix.xst_values().astype(numpy.cfloat), compression="gzip")
 
     def write_bst_matrix(self, current_group):
         raise NotImplementedError("BST values not implemented")
diff --git a/tangostationcontrol/tangostationcontrol/statistics_writer/statistics_writer.py b/tangostationcontrol/tangostationcontrol/statistics_writer/statistics_writer.py
index e2d4666fd581b01cdb99e9ad717fbccd32cfa33c..594e261c6d1e00e0ea7882c595449813c305c8ce 100644
--- a/tangostationcontrol/tangostationcontrol/statistics_writer/statistics_writer.py
+++ b/tangostationcontrol/tangostationcontrol/statistics_writer/statistics_writer.py
@@ -70,5 +70,3 @@ if __name__ == "__main__":
         logger.info("End of input.")
     finally:
         writer.close_writer()
-
-
diff --git a/tangostationcontrol/tangostationcontrol/statistics_writer/test/hdf5_explorer.py b/tangostationcontrol/tangostationcontrol/statistics_writer/test/hdf5_explorer.py
deleted file mode 100644
index 102c36b79f7beeb6a34ffba9b95a495a85a76f6e..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/statistics_writer/test/hdf5_explorer.py
+++ /dev/null
@@ -1,95 +0,0 @@
-import h5py
-import numpy
-
-import argparse
-
-parser = argparse.ArgumentParser(description='Select a file to explore')
-parser.add_argument('--file', type=str, help='the name and path of the file')
-
-import logging
-logging.basicConfig(level=logging.INFO)
-logger = logging.getLogger("hdf5_explorer")
-logger.setLevel(logging.DEBUG)
-
-
-class statistics_data:
-    """
-    Example class not used by anything
-    This class takes the file and the statistics name as its __init__ arguments and then stores the
-    the datasets in them.
-    """
-
-class explorer:
-    """
-    This class serves both as a tool to test and verify the content of HDF5 files as well as provide an example
-    of how you can go through HDF5 files.
-    """
-
-
-    def __init__(self, filename):
-        self.file = h5py.File(filename, 'r')
-
-    def print_all_statistics_full(self):
-        """
-        Explores the file with knowledge of the file structure. assumes all top level groups are statistics
-        and that all statistics groups are made up of datasets.
-        Prints the groups, the datasets and the content of the datasets.
-
-        Can easily be modified to instead of just logging all the data, store it in whatever structure is needed.
-        """
-
-        for group_key in self.file.keys():
-            dataset = list(self.file[group_key])
-
-            #print group name
-            logger.debug(f" \n\ngroup: {group_key}")
-
-            # Go through all the datasets
-            for i in dataset:
-                data = self.file.get(f"{group_key}/{i}")
-                logger.debug(f" dataset: {i}")
-                logger.debug(f" Data: {numpy.array(data)}")
-
-            # go through all the attributes in the group (This is the header info)
-            attr_keys = self.file[group_key].attrs.keys()
-            for i in attr_keys:
-                attr = self.file[group_key].attrs[i]
-
-                logger.debug(f" {i}: {attr}")
-
-    def print_all_statistics_top_level(self):
-        """
-        Explores the file with knowledge of the file structure. assumes all top level groups are statistics
-        and that all statistics groups are made up of datasets.
-        This function prints only the top level groups, AKA all the statistics collected. Useful when dealing with
-        potentially hundreds of statistics.
-        """
-        # List all groups
-        logger.debug("Listing all statistics stored in this file:")
-
-        for group_key in self.file.keys():
-            logger.debug(group_key)
-
-
-# create a data dumper that creates a new file every 10s (for testing)
-if __name__ == "__main__":
-    args = parser.parse_args()
-    Explorer = explorer(args.file)
-
-    """
-    Print the entire files content
-    """
-    Explorer.print_all_statistics_full()
-
-    """
-    Print only the names of all the statistics in this file
-    """
-    logger.debug("--------------Top level groups--------------")
-    Explorer.print_all_statistics_top_level()
-
-
-
-
-
-
-
diff --git a/tangostationcontrol/tangostationcontrol/test/base.py b/tangostationcontrol/tangostationcontrol/test/base.py
index 81a76c46e843dd7af91f19e1c142f612916157c7..174a478c725bda18d549e95247459411dd6b89c5 100644
--- a/tangostationcontrol/tangostationcontrol/test/base.py
+++ b/tangostationcontrol/tangostationcontrol/test/base.py
@@ -7,10 +7,15 @@
 # Distributed under the terms of the APACHE license.
 # See LICENSE.txt for more info.
 
+from tangostationcontrol.common.lofar_logging import configure_logger
+
 import unittest
 import testscenarios
 import asynctest
 
+# Set up logging for unit tests
+configure_logger(debug=True)
+
 
 class BaseTestCase(testscenarios.WithScenarios, unittest.TestCase):
     """Test base class."""
diff --git a/tangostationcontrol/tangostationcontrol/test/devices/test_statistics_collector.py b/tangostationcontrol/tangostationcontrol/test/devices/test_statistics_collector.py
index 3266dbd148bb4872ec1f2eec8f9c555414fde16c..4b58141c06d9b09d68dba295b007b41081ca3618 100644
--- a/tangostationcontrol/tangostationcontrol/test/devices/test_statistics_collector.py
+++ b/tangostationcontrol/tangostationcontrol/test/devices/test_statistics_collector.py
@@ -7,13 +7,16 @@ class TestXSTCollector(base.TestCase):
     def test_valid_packet(self):
         collector = XSTCollector()
 
-        # a valid packet as obtained from SDP, with 64-bit BE 1+1j as payload
-        packet =  b'X\x05\x00\x00\x00\x00\x00\x00\x10\x08\x00\x02\xfa\xef\x00f\x00\x00\x0c\x08\x01 \x14\x00\x00\x01!\xd9&z\x1b\xb3' + 288 * b'\x00\x00\x00\x00\x00\x00\x00\x01'
+        # a valid packet as obtained from SDP, with 64-bit BE 1+1j as payload at (12,0)
+        packet =  b'X\x05\x00\x00\x00\x00\x00\x00\x10\x08\x00\x02\xfa\xef\x00f\x0c\x00\x0c\x08\x01 \x14\x00\x00\x01!\xd9&z\x1b\xb3' + 288 * b'\x00\x00\x00\x00\x00\x00\x00\x01'
 
         # parse it ourselves to extract info nicely
         fields = XSTPacket(packet)
         fpga_index = fields.gn_index
 
+        # baseline indeed should be (12,0)
+        self.assertEqual((12,0), fields.first_baseline)
+
         # this should not throw
         collector.process_packet(packet)
 
@@ -41,10 +44,51 @@ class TestXSTCollector(base.TestCase):
                 else:
                     self.assertEqual(0+0j, xst_values[baseline_a][baseline_b], msg=f'element [{baseline_a}][{baseline_b}] was not in packet, but was written to the XST matrix.')
 
+    def test_conjugated_packet(self):
+        """ Test whether a packet with a baseline (a,b) with a<b will get its payload conjugated. """
+
+        collector = XSTCollector()
+
+        # a valid packet as obtained from SDP, with 64-bit BE 1+1j as payload, at baseline (0,12)
+        #                                                                       VV  VV
+        packet =  b'X\x05\x00\x00\x00\x00\x00\x00\x10\x08\x00\x02\xfa\xef\x00f\x00\x0c\x0c\x08\x01 \x14\x00\x00\x01!\xd9&z\x1b\xb3' + 288 * b'\x00\x00\x00\x00\x00\x00\x00\x01'
+
+        # parse it ourselves to extract info nicely
+        fields = XSTPacket(packet)
+
+        # baseline indeed should be (0,12)
+        self.assertEqual((0,12), fields.first_baseline)
+
+        # this should not throw
+        collector.process_packet(packet)
+
+        # counters should now be updated
+        self.assertEqual(1, collector.parameters["nof_packets"])
+        self.assertEqual(0, collector.parameters["nof_invalid_packets"])
+
+        # check whether the data ended up in the right block, and the rest is still zero
+        xst_values = collector.xst_values()
+
+        for baseline_a in range(collector.MAX_INPUTS):
+            for baseline_b in range(collector.MAX_INPUTS):
+                if baseline_b > baseline_a:
+                    # only scan the lower-left triangle (baseline_b <= baseline_a)
+                    continue
+
+                # use swapped indices!
+                baseline_a_was_in_packet = (fields.first_baseline[1] <= baseline_a < fields.first_baseline[1] + fields.nof_signal_inputs)
+                baseline_b_was_in_packet = (fields.first_baseline[0] <= baseline_b < fields.first_baseline[0] + fields.nof_signal_inputs)
+
+                if baseline_a_was_in_packet and baseline_b_was_in_packet:
+                    self.assertEqual(1-1j, xst_values[baseline_a][baseline_b], msg=f'element [{baseline_a}][{baseline_b}] did not end up conjugated in XST matrix.')
+                else:
+                    self.assertEqual(0+0j, xst_values[baseline_a][baseline_b], msg=f'element [{baseline_a}][{baseline_b}] was not in packet, but was written to the XST matrix.')
+
     def test_invalid_packet(self):
         collector = XSTCollector()
 
         # an invalid packet
+        #           V
         packet =  b'S\x05\x00\x00\x00\x00\x00\x00\x10\x08\x00\x02\xfa\xef\x00f\x00\x00\x0c\x08\x01 \x14\x00\x00\x01!\xd9&z\x1b\xb3' + 288 * b'\x00\x00\x00\x00\x00\x00\x00\x01'
 
         # this should throw
@@ -62,6 +106,7 @@ class TestXSTCollector(base.TestCase):
         collector = XSTCollector()
 
         # an valid packet with a payload error
+        #                                           V
         packet =  b'X\x05\x00\x00\x00\x00\x00\x00\x14\x08\x00\x02\xfa\xef\x00f\x00\x00\x0c\x08\x01 \x14\x00\x00\x01!\xd9&z\x1b\xb3' + 288 * b'\x00\x00\x00\x00\x00\x00\x00\x01'
 
         # parse it ourselves to extract info nicely
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/lts_cold_start.py b/tangostationcontrol/tangostationcontrol/toolkit/lts_cold_start.py
deleted file mode 100644
index b97e6d6fe9ef5a75aa37ac398b5dda328002d6e8..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/toolkit/lts_cold_start.py
+++ /dev/null
@@ -1,222 +0,0 @@
-#! /usr/bin/env python3
-import logging
-from time import sleep
-
-from tangostationcontrol.toolkit.startup import startup
-from tangostationcontrol.toolkit.lofar2_config import configure_logging
-
-
-def start_device(device: str):
-    '''
-    Start a Tango device with the help of the startup function.
-    The device will not be forced to got through
-    OFF/INIT/STANDBY/ON but it is assumed that the device is in OFF
-    state.  If the device is not in OFF state, then an exception
-    will be raised.
-    '''
-    dev = startup(device = device, force_restart = False)
-    state = device.state()
-    if state is not tango._tango.DevState.ON:
-        raise Exception("Device \"{}\" is unexpectedly in \"{}\" state but it is expected to be in \"{}\" state.  Please check the reason for the unexpected device state.  Aborting the start-up procedure.".format(device, state, tango._tango.DevState.ON))
-    return device
-
-
-def lts_cold_start():
-    '''
-    What is this?
-    This is the LTS (LOFAR Test - and I forgot what S stands for) cold start
-    procedure cast into source code.  The procedure can be found there:
-    https://support.astron.nl/confluence/display/L2M/LTS+startup+procedure
-
-    Paulus wrote already a script that - illegally ;) - makes direct use of the
-    OPC-UA servers to accomplish the same thing that we are doing here.
-    Paulus' script can be found there:
-    https://git.astron.nl/lofar2.0/pypcc/-/blob/master/scripts/Startup.py
-    Thanks, Paulus!  You made it very easy for me to cobble together this
-    script.
-
-    For obvious reasons is our script much better though.  :)
-    First, it is bigger.  And bigger is always better.
-    Then it is better documented but that does not count in the HW world.
-    But it also raises exceptions with error messages that make an attempt to
-    help the user reading them and shuts down the respective Tango device(s) if
-    something goes south.
-    And that is where we try to do it really right:  there is no reason to be
-    excessively verbatim when things work like they are expected to work.  But
-    tell the user when something goes wrong, give an indication of what could
-    have gone wrong and where to look for the problem.
-
-    Again, Paulus' script contains already very good indications where problems
-    might lie and made my job very easy.
-
-    No parameters, parameters are for wimps.  :)
-    '''
-    # Define the LOFAR2.0 specific log format
-    configure_logging()
-
-    # Get a reference to the RECV device, do not
-    # force a restart of the already running Tango
-    # device.
-    recv = startup("LTS/RECV/1")
-
-    # Getting CLK, RCU & RCU ADCs into proper shape for use by real people.
-    #
-    # The start-up needs to happen in this sequence due to HW dependencies
-    # that can introduce issues which are then becoming very complicated to
-    # handle in SW.  Therefore to keep it as simple as possible, let's stick
-    # to the rule recommended by Paulus:
-    # 1 CLK
-    # 2 RCU
-    # 3 RCU ADCs
-    #
-    #
-    # First take the CLK board through the motions.
-    # 1.1 Switch off CLK
-    # 1.2 Wait for CLK_translator_busy_R == True, throw an exception in timeout
-    # 1.3 Switch on CLK
-    # 1.4 Wait for CLK_translator_busy_R == True, throw an exception in timeout
-    # 1.5 Check if CLK_PLL_locked_R == True
-    # 1.6 Done
-    #
-    #
-    # Steps 1.1 & 1.2
-    recv.CLK_off()
-    # 2021-04-30, Thomas
-    # This should be refactored into a function.
-    timeout = 10.0
-    while recv.CLK_translator_busy_R is True:
-        logging.debug("Waiting on \"CLK_translator_busy_R\" to become \"True\"...")
-        timeout = timeout - 1.0
-        if timeout < 1.0:
-            # Switching the RECV clock off should never take longer than
-            # 10 seconds.  Here we ran into a timeout.
-            # Clean up and raise an exception.
-            recv.off()
-            raise Exception("After calling \"CLK_off\" a timeout occured while waiting for \"CLK_translator_busy_R\" to become \"True\".  Please investigate the reason why the RECV translator never set \"CLK_translator_busy_R\" to \"True\".  Aborting start-up procedure.")
-        sleep(1.0)
-
-    # Steps 1.3 & 1.4
-    recv.CLK_on()
-    # Per Paulus this should never take longer than 2 seconds.
-    # 2021-04-30, Thomas
-    # This should be refactored into a function.
-    timeout = 2.0
-    while recv.CLK_translator_busy_R is True:
-        logging.debug("After calling \"CLK_on()\"  Waiting on \"CLK_translator_busy_R\" to become \"True\"...")
-        timeout = timeout - 1.0
-        if timeout < 1.0:
-            # Switching theRECV clock on should never take longer than
-            # a couple of seconds.  Here we ran into a timeout.
-            # Clean up and raise an exception.
-            recv.off()
-            raise Exception("After calling \"CLK_on\" a timeout occured while waiting for \"CLK_translator_busy_R\" to become \"True\".  Please investigate the reason why the RECV translator never set \"CLK_translator_busy_R\" to \"True\".  Aborting start-up procedure.")
-        sleep(1.0)
-
-    # 1.5 Check if CLK_PLL_locked_R == True
-    # 2021-04-30, Thomas
-    # This should be refactored into a function.
-    clk_locked = recv.CLK_PLL_locked_R
-    if clk_locked is True:
-       logging.info("CLK signal is locked.")
-    else:
-        # CLK signal is not locked
-        clk_i2c_status = recv.CLK_I2C_STATUS_R
-        exception_text = "CLK I2C is not working.  Please investigate!  Maybe power cycle subrack to restart CLK board and translator.  Aborting start-up procedure."
-        if i2c_status <= 0:
-            exception_text = "CLK signal is not locked.  Please investigate!  The subrack probably do not receive clock input or the CLK PCB is broken.  Aborting start-up procedure."
-        recv.off()
-        raise Exception(exception_text)
-    # Step 1.6
-    # Done.
-
-    # 2 RCUs
-    # If we reach this point in the start-up procedure, then the CLK board setup
-    # is done.  We can proceed with the RCUs.
-    #
-    # Now take the RCUs through the motions.
-    # 2.1 Set RCU mask to all available RCUs
-    # 2.2 Switch off all RCUs
-    # 2.3 Wait for RCU_translator_busy_R = True, throw an exception in timeout
-    # 2.4 Switch on RCUs
-    # 2.5 Wait for RCU_translator_busy_R = True, throw an exception in timeout
-    # 2.6 Done
-    #
-    #
-    # Step 2.1
-    # We have only 8 RCUs in LTS.
-    recv.RCU_mask_RW = [True, ] * 8
-    # Steps 2.2 & 2.3
-    recv.RCU_off()
-    # 2021-04-30, Thomas
-    # This should be refactored into a function.
-    timeout = 10.0
-    while recv.RCU_translator_busy_R is True:
-        logging.debug("Waiting on \"RCU_translator_busy_R\" to become \"True\"...")
-        timeout = timeout - 1.0
-        if timeout < 1.0:
-            # Switching the RCUs off should never take longer than
-            # 10 seconds.  Here we ran into a timeout.
-            # Clean up and raise an exception.
-            recv.off()
-            raise Exception("After calling \"RCU_off\" a timeout occured while waiting for \"RCU_translator_busy_R\" to become \"True\".  Please investigate the reason why the RECV translator never set \"RCU_translator_busy_R\" to \"True\".  Aborting start-up procedure.")
-        sleep(1.0)
-
-    # Steps 2.4 & 2.5
-    # We leave the RCU mask as it is because it got already set for the
-    # RCU_off() call.
-    recv.RCU_on()
-    # Per Paulus this should never take longer than 5 seconds.
-    # 2021-04-30, Thomas
-    # This should be refactored into a function.
-    timeout = 5.0
-    while recv.RCU_translator_busy_R is True:
-        logging.debug("After calling \"RCU_on()\"  Waiting on \"RCU_translator_busy_R\" to become \"True\"...")
-        timeout = timeout - 1.0
-        if timeout < 1.0:
-            # Switching the RCUs on should never take longer than
-            # a couple of seconds.  Here we ran into a timeout.
-            # Clean up and raise an exception.
-            recv.off()
-            raise Exception("After calling \"RCU_on\" a timeout occured while waiting for \"RCU_translator_busy_R\" to become \"True\".  Please investigate the reason why the RECV translator never set \"RCU_translator_busy_R\" to \"True\".  Aborting start-up procedure.")
-        sleep(1.0)
-    # Step 2.6
-    # Done.
-
-    # 3 ADCs
-    # If we get here, we only got to check if the ADCs are locked, too.
-    # 3.1 Check RCUs' I2C status
-    # 3.2 Check RCU_ADC_lock_R == [True, ] for RCUs that have a good I2C status
-    # 3.3 Done
-    #
-    #
-    # Steps 3.1 & 3.2
-    rcu_mask = recv.RCU_mask_RW
-    adc_locked = numpy.array(recv.RCU_ADC_lock_R)
-    for rcu, i2c_status in enumerate(recv.RCU_I2C_STATUS_R):
-        if i2c_status == 0:
-            rcu_mask[rcu] = True
-            logging.info("RCU #{} is available.".format(rcu))
-            for adc, adc_is_locked in enumerate(adc_locked[rcu]):
-                if adc_is_locked < 1:
-                    logging.warning("RCU#{}, ADC#{} is unlocked.  Please investigate!  Will continue with normal operation.".format(rcu, adc))
-        else:
-            # The RCU's I2C bus is not working.
-            rcu_mask[rcu] = False
-            logging.error("RCU #{}'s I2C is not working.  Please investigate!  Disabling RCU #{} to avoid damage.".format(rcu, rcu))
-    recv.RCU_mask_RW = rcu_mask
-    # Step 3.3
-    # Done
-
-    # Start-up APSCTL, i.e. Uniboard2s.
-    aps = startup("APSCTL/SDP/1")
-    logging.warning("Cannot start-up APSCTL because it requires manual actions.")
-
-    # Start up SDP, i.e. configure the firmware in the Unibards
-    sdp = startup("LTS/SDP/1")
-    logging.warning("Cannot start-up SDP because it requires manual actions.")
-
-    logging.info("LTS has been successfully started and configured.")
-
-def main(args=None, **kwargs):
-
-    return lts_cold_start()
diff --git a/tangostationcontrol/tangostationcontrol/toolkit/startup.py b/tangostationcontrol/tangostationcontrol/toolkit/startup.py
deleted file mode 100644
index 66a8d2c496fc7e86d0d13086336e900fc1a1bfaf..0000000000000000000000000000000000000000
--- a/tangostationcontrol/tangostationcontrol/toolkit/startup.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#! /usr/bin/env python3
-import tango
-import logging
-
-logger = logging.getLogger()
-
-def startup(device: str, force_restart: bool) -> tango.DeviceProxy:
-    '''
-    Start a LOFAR Tango device:
-    recv = startup(device = 'LTS/RECV/1', force_restart = False)
-    '''
-    proxy = tango.DeviceProxy(device)
-    state = proxy.state()
-
-    # go to OFF, but only if force_restart is True
-    if force_restart is True:
-        logger.warning(f"Forcing device {device} restart.")
-        proxy.off()
-        state = proxy.state()
-        if state is not tango._tango.DevState.OFF:
-            logger.error(f"Device {device} cannot perform off although restart has been enforced, state = {state}.  Please investigate.")
-            return proxy
-
-    if state is not tango._tango.DevState.OFF:
-        logger.error(f"Device {device} is not in OFF state, cannot start it.  state = {state}")
-        return proxy
-
-    # Initialise device
-    logger.info(f"Device {device} is in OFF, performing initialisation.")
-    proxy.initialise()
-    state = proxy.state()
-    if state is not tango._tango.DevState.STANDBY:
-        logger.error(f"Device {device} cannot perform initialise, state = {state}.  Please investigate.")
-        return proxy
-
-    # Set default values
-    logger.info(f"Device {device} is in STANDBY, setting default values.")
-    proxy.set_defaults()
-
-    # Turn on device
-    logger.info(f"Device {device} is in STANDBY, performing on.")
-    proxy.on()
-    state = proxy.state()
-    if state is not tango._tango.DevState.ON:
-        logger.error(f"Device {device} cannot perform on, state = {state}.  Please investigate.")
-    else:
-        logger.info(f"Device {device} has successfully reached ON state.")
-
-    return proxy