diff --git a/README.md b/README.md
index bc579103e6e432f0230cb42997b2f8f2a300a648..c22e6a5de82df037cc1260ac29179c4d70777cdf 100644
--- a/README.md
+++ b/README.md
@@ -105,6 +105,7 @@ tox -e debug tests.requests.test_prometheus
 ```
 
 ## Releasenotes
+- 0.14  - Added new attributes to statistics HDF file as well as documentation
 - 0.13  - Added lazy connection behavior to `devices.LofarDeviceProxy` class
 - 0.12.
     * Added `HDF5Writer` class for writing HDF5 statistics file
diff --git a/VERSION b/VERSION
index f3040840fd7058ec0e224314c609184fd4ec53f2..a803cc227fe6ff1fbb6dcfc2dde3e4ccc450257e 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-0.13
+0.14.0
diff --git a/docs/source/source_documentation/lofar_station_client.rst b/docs/source/source_documentation/lofar_station_client.rst
deleted file mode 100644
index aa18120d5728a2511280c0bdb0b4e6a1f4224820..0000000000000000000000000000000000000000
--- a/docs/source/source_documentation/lofar_station_client.rst
+++ /dev/null
@@ -1,23 +0,0 @@
-lofar\_station\_client package
-==============================
-
-Subpackages
------------
-
-.. toctree::
-   :maxdepth: 4
-
-   lofar_station_client.dts
-   lofar_station_client.math
-   lofar_station_client.parsing
-   lofar_station_client.requests
-   lofar_station_client.statistics
-
-Module contents
----------------
-
-.. automodule:: lofar_station_client
-   :members:
-   :undoc-members:
-   :show-inheritance:
-   :private-members:
diff --git a/lofar_station_client/file_access/_attribute_def.py b/lofar_station_client/file_access/_attribute_def.py
index 6c986fdff97227366d6984351f1eec68857731a3..a08181081ff5a1c40a926195109069e6911c7195 100644
--- a/lofar_station_client/file_access/_attribute_def.py
+++ b/lofar_station_client/file_access/_attribute_def.py
@@ -4,9 +4,10 @@
 """
 Contains HDF5 specific classes and methods to define class members as an HDF attribute
 """
-from typing import Any
+from typing import Any, Type
 
 from ._readers import DataReader
+from ._utils import _extract_type
 from ._writers import DataWriter
 
 
@@ -25,14 +26,18 @@ class AttributeDef:
 
     def __init__(self, name: str, optional: bool, from_member: str = None):
         self.name = name
+        self.property_name: str
         self.from_member = from_member
         self.optional = optional
         self.owner: Any
+        self.type: Type
 
     def __set_name__(self, owner, name):
         if self.name is None:
             self.name = name
+        self.property_name = name
         self.owner = owner
+        self.type = _extract_type(owner, name)
 
     def __set__(self, instance, value):
         setattr(instance, self.attr_name, value)
diff --git a/lofar_station_client/file_access/_member_def.py b/lofar_station_client/file_access/_member_def.py
index 05d079cfdf7229c0ac1e9641b09c4fdbd74333a2..060cbd0ad7416491f8ed2e9b6a867676281b73a5 100644
--- a/lofar_station_client/file_access/_member_def.py
+++ b/lofar_station_client/file_access/_member_def.py
@@ -13,11 +13,11 @@ from ._utils import _extract_type
 from ._writers import DataWriter
 
 
-def member(name: str = None, optional: bool = False):
+def member(name: str = None, optional: bool = False, compression: str = None):
     """
     Define a class member as a member of a HDF5 file
     """
-    return MemberDef(name, optional)
+    return MemberDef(name, optional, compression)
 
 
 #  pylint: disable=too-few-public-methods
@@ -27,14 +27,17 @@ class MemberDef:
     and datasets to pythonic objects
     """
 
-    def __init__(self, name: str, optional: bool):
+    def __init__(self, name: str, optional: bool, compression: str):
         self.name = name
+        self.property_name: str
         self.optional = optional
+        self.compression = compression
         self.type: Type
 
     def __set_name__(self, owner, name):
         if self.name is None:
             self.name = name
+        self.property_name = name
         self.type = _extract_type(owner, name)
 
     def __get__(self, instance, obj_type=None):
diff --git a/lofar_station_client/file_access/_monitoring.py b/lofar_station_client/file_access/_monitoring.py
index 757a44900254cb732c177ff05d3fb3d44573bafc..947de3ab482a63006a0c3f821d8ea4bdc6f0cda9 100644
--- a/lofar_station_client/file_access/_monitoring.py
+++ b/lofar_station_client/file_access/_monitoring.py
@@ -5,6 +5,7 @@
 Class wrappers for lists and dictionaries monitoring changes of itself and notifying
 the registered event handler about these changes.
 """
+from typing import Any
 
 
 class MonitoredWrapper:
@@ -24,6 +25,13 @@ class MonitoredWrapper:
     def __getitem__(self, item):
         return self._instance.__getitem__(item)
 
+    def __setattr__(self, name: str, value: Any) -> None:
+        if name in ["_instance", "_event"]:
+            object.__setattr__(self, name, value)
+        else:
+            self._instance.__setattr__(name, value)
+            self._event(self._instance)
+
     def __getattribute__(self, name):
         if name in ["_instance", "_event"]:
             return object.__getattribute__(self, name)
diff --git a/lofar_station_client/file_access/_readers.py b/lofar_station_client/file_access/_readers.py
index 8eaf3925427f2b3fb90921b0861569583a70db54..6e3df42f86f6c090d0327f19530c5f1e61382934 100644
--- a/lofar_station_client/file_access/_readers.py
+++ b/lofar_station_client/file_access/_readers.py
@@ -28,6 +28,13 @@ class FileReader(Generic[T], ABC):
         Close the underlying file
         """
 
+    def load(self, instance: T):
+        """
+        Load all the data from the underlying HDF file
+        to preserve it in the objects after closing the
+        file.
+        """
+
     def __enter__(self):
         return self.read()
 
diff --git a/lofar_station_client/file_access/hdf/__init__.py b/lofar_station_client/file_access/hdf/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..c92b615444d854a6e87370b16cf733a5859a07e7 100644
--- a/lofar_station_client/file_access/hdf/__init__.py
+++ b/lofar_station_client/file_access/hdf/__init__.py
@@ -0,0 +1,2 @@
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
+#  SPDX-License-Identifier: Apache-2.0
diff --git a/lofar_station_client/file_access/hdf/_hdf5_utils.py b/lofar_station_client/file_access/hdf/_hdf5_utils.py
index 15775e4aa132fa2e2da1ef8f1779dedbf203976d..7dbbfc553b7df0755d0583796a0f4da3b9a76b29 100644
--- a/lofar_station_client/file_access/hdf/_hdf5_utils.py
+++ b/lofar_station_client/file_access/hdf/_hdf5_utils.py
@@ -31,15 +31,6 @@ def _assert_is_group(value):
         )
 
 
-def _write_ndarray(data, key, value):
-    _assert_is_group(data)
-    if key in data:
-        _assert_is_dataset(data[key])
-        del data[key]
-
-    data.create_dataset(key, data=value)
-
-
 def _is_attachable(target_type: Type[T]):
     origin_type = get_origin(target_type)
     if origin_type is dict:
@@ -56,4 +47,4 @@ def _attach_object(target_type: Type[T], instance):
     for annotation in annotations:
         attr = inspect.getattr_static(target_type, annotation)
         if hasattr(instance, attr.attr_name):
-            setattr(instance, attr.name, getattr(instance, attr.attr_name))
+            setattr(instance, attr.property_name, getattr(instance, attr.attr_name))
diff --git a/lofar_station_client/file_access/hdf/_hdf_readers.py b/lofar_station_client/file_access/hdf/_hdf_readers.py
index 8ee6de5078b8280107fc08be13cc2784cc1cd01b..1ffb48034b6a666ab328a1020c85a4dd6d3bb365 100644
--- a/lofar_station_client/file_access/hdf/_hdf_readers.py
+++ b/lofar_station_client/file_access/hdf/_hdf_readers.py
@@ -1,11 +1,13 @@
-#  Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
 #  SPDX-License-Identifier: Apache-2.0
 
 """
 Contains classes to handle file reading
 """
+import inspect
+import weakref
 from inspect import getattr_static
-from typing import TypeVar, Type, Dict
+from typing import TypeVar, Type, Dict, List
 
 import h5py
 from numpy import ndarray, zeros
@@ -14,7 +16,9 @@ from ._hdf5_utils import (
     _assert_is_group,
     _assert_is_dataset,
 )
+from .._attribute_def import AttributeDef
 from .._compat_utils import get_origin
+from .._member_def import MemberDef
 from .._readers import FileReader, DataReader
 from .._utils import _extract_base_type
 
@@ -27,9 +31,11 @@ class HdfFileReader(FileReader[T]):
     """
 
     def __init__(self, name, target_type):
+        self.file_name = name
         self._is_closed = None
         self._target_type = target_type
         self._open_file(name)
+        self._references: List[weakref] = []
 
     def _open_file(self, name):
         self._hdf5_file = h5py.File(name, "r")
@@ -50,10 +56,49 @@ class HdfFileReader(FileReader[T]):
         """
         Close the underlying HDF file
         """
+        for ref in self._references:
+            obj = ref()
+            if obj is not None:
+                self._detach_object(obj)
+        self._references = []
+
         if not self._is_closed:
             self._is_closed = True
             del self._hdf5_file
 
+    def load(self, instance: T):
+        """
+        Load all the data from the underlying HDF file
+        to preserve it in the objects after closing the
+        file.
+        """
+        self._references.append(weakref.ref(instance))
+        target_type = type(instance)
+        for annotation in [
+            m[0] for m in inspect.getmembers(instance) if not m[0].startswith("_")
+        ]:
+            attr = inspect.getattr_static(target_type, annotation)
+            if isinstance(attr, (MemberDef, AttributeDef)):
+                setattr(instance, attr.attr_name, getattr(instance, attr.property_name))
+
+    def _detach_object(self, instance):
+        if not hasattr(instance, "_data_reader"):
+            return
+        delattr(instance, "_data_reader")
+        for attr in [
+            m[0]
+            for m in inspect.getmembers(instance)
+            if not m[0].startswith("_") and m[0] != "T"
+        ]:
+            item = getattr(instance, attr)
+            item_type = type(item)
+            if (
+                item is not None
+                and item is object
+                and not (item_type is ndarray or item_type is str)
+            ):
+                self._detach_object(item)
+
 
 class HdfDataReader(DataReader):
     """
@@ -104,13 +149,17 @@ class HdfDataReader(DataReader):
         _assert_is_dataset(value)
         return list(value[:])
 
-    @staticmethod
-    def _read_ndarray(value):
+    @classmethod
+    def _read_ndarray(cls, target_type: Type[T], value, file_reader: "HdfDataReader"):
         _assert_is_dataset(value)
         nd_value = zeros(value.shape, value.dtype)
         # convert the data set to a numpy array
         value.read_direct(nd_value)
-        return nd_value
+        if target_type is ndarray:
+            return nd_value
+        obj = nd_value.view(target_type)
+        setattr(obj, "_data_reader", cls(file_reader.file_reader, value))
+        return obj
 
     @classmethod
     def _read_dict(
@@ -136,8 +185,8 @@ class HdfDataReader(DataReader):
             )
         if get_origin(target_type) is list:
             return cls._read_list
-        if target_type is ndarray:
-            return cls._read_ndarray
+        if issubclass(target_type, ndarray):
+            return lambda value: cls._read_ndarray(target_type, value, data_reader)
         if issubclass(target_type, dict):
             return lambda value: cls._read_dict(
                 _extract_base_type(target_type), value, target_type, data_reader
diff --git a/lofar_station_client/file_access/hdf/_hdf_writers.py b/lofar_station_client/file_access/hdf/_hdf_writers.py
index 583e2bf0c40af179ddc1f4217d1944d43015700a..4c793aaab68902a01620e283da0734f7736e38bb 100644
--- a/lofar_station_client/file_access/hdf/_hdf_writers.py
+++ b/lofar_station_client/file_access/hdf/_hdf_writers.py
@@ -13,8 +13,8 @@ from numpy import ndarray
 from ._hdf5_utils import (
     _is_attachable,
     _attach_object,
-    _write_ndarray,
     _assert_is_group,
+    _assert_is_dataset,
 )
 from ._hdf_readers import HdfFileReader, HdfDataReader
 from .._writers import FileWriter, DataWriter
@@ -65,13 +65,16 @@ class HdfFileWriter(HdfFileReader[T], FileWriter[T]):
                 self._target_type,
                 obj,
                 lambda value: HdfDataWriter.write_dict(
-                    _extract_base_type(self._target_type),
+                    self._target_type,
                     self._hdf5_file,
                     value,
                     data_writer,
                 ),
             )
-        setattr(obj, "_data_writer", data_writer)
+        try:
+            setattr(obj, "_data_writer", data_writer)
+        except AttributeError:
+            pass
         return obj
 
 
@@ -152,20 +155,40 @@ class HdfDataWriter(HdfDataReader, DataWriter):
         origin_type = get_origin(target_type)
         if origin_type is dict:
             return lambda data, key, value: cls._write_dict_group(
-                _extract_base_type(target_type), data, key, value, data_writer
+                target_type, data, key, value, data_writer
             )
         if get_origin(target_type) is list:
-            return _write_ndarray
-        if target_type is ndarray:
-            return _write_ndarray
+            return lambda data, key, value: cls._write_ndarray(
+                list, data, key, value, data_writer
+            )
+        if target_type is ndarray or issubclass(target_type, ndarray):
+            return lambda data, key, value: cls._write_ndarray(
+                target_type, data, key, value, data_writer
+            )
         if issubclass(target_type, dict):
             return lambda data, key, value: cls._write_dict_group(
-                _extract_base_type(target_type), data, key, value, data_writer
+                target_type, data, key, value, data_writer
             )
         return lambda data, key, value: cls._write_object(
             target_type, data, key, value, data_writer
         )
 
+    @classmethod
+    def _write_ndarray(
+        cls, target_type: Type[T], data, key, value, data_writer: "HdfDataWriter"
+    ):
+        _assert_is_group(data)
+        if key in data:
+            _assert_is_dataset(data[key])
+            del data[key]
+
+        data.create_dataset(key, data=value)
+        if target_type is not ndarray and issubclass(target_type, ndarray):
+            data_writer = cls(data_writer.file_writer, data[key])
+            setattr(value, "_data_writer", data_writer)
+            setattr(value, "_data_reader", data_writer)
+            _attach_object(target_type, value)
+
     @classmethod
     # pylint: disable=too-many-arguments
     def _write_dict_group(
@@ -174,6 +197,15 @@ class HdfDataWriter(HdfDataReader, DataWriter):
         _assert_is_group(data)
         if key not in data:
             data.create_group(key)
+
+        try:
+            data_writer = cls(data_writer.file_writer, data[key])
+            setattr(value, "_data_writer", data_writer)
+            setattr(value, "_data_reader", data_writer)
+            _attach_object(target_type, value)
+        except AttributeError:
+            pass
+
         cls.write_dict(
             target_type, data[key], value, cls(data_writer.file_writer, data[key])
         )
@@ -189,7 +221,9 @@ class HdfDataWriter(HdfDataReader, DataWriter):
         for k in data.keys():
             if k not in value:
                 del data[k]
-        writer = HdfDataWriter.detect_writer(target_type, data_writer)
+        writer = HdfDataWriter.detect_writer(
+            _extract_base_type(target_type), data_writer
+        )
         for k in value.keys():
             writer(data, k, value[k])
 
diff --git a/lofar_station_client/statistics/__init__.py b/lofar_station_client/statistics/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..c92b615444d854a6e87370b16cf733a5859a07e7 100644
--- a/lofar_station_client/statistics/__init__.py
+++ b/lofar_station_client/statistics/__init__.py
@@ -0,0 +1,2 @@
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
+#  SPDX-License-Identifier: Apache-2.0
diff --git a/lofar_station_client/statistics/collector.py b/lofar_station_client/statistics/collector.py
index 37855936a1d02f1eda633df40364280581906183..4e1eae76e85a9e7144faaf250be4de785d0b3f66 100644
--- a/lofar_station_client/statistics/collector.py
+++ b/lofar_station_client/statistics/collector.py
@@ -1,16 +1,5 @@
-# -*- coding: utf-8 -*-
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
+#  SPDX-License-Identifier: Apache-2.0
 
 """Collectors for all types of statistics
 
@@ -25,18 +14,18 @@ This includes:
 
 import abc
 import logging
-import numpy
 
+import numpy
 from tango import DeviceProxy, DevState, DevFailed
 
-from lofar_station_client.statistics.packet import SSTPacket
-from lofar_station_client.statistics.packet import XSTPacket
-from lofar_station_client.statistics.packet import BSTPacket
+from lofar_station_client.math.baseline import baseline_from_index
+from lofar_station_client.math.baseline import baseline_index
 
 # TODO(Corne): Discuss moving to lofar_common_python library?
 from lofar_station_client.math.baseline import nr_baselines
-from lofar_station_client.math.baseline import baseline_index
-from lofar_station_client.math.baseline import baseline_from_index
+from lofar_station_client.statistics.packet import BSTPacket
+from lofar_station_client.statistics.packet import SSTPacket
+from lofar_station_client.statistics.packet import XSTPacket
 
 logger = logging.getLogger()
 
diff --git a/lofar_station_client/statistics/packet.py b/lofar_station_client/statistics/packet.py
index 544c3efc6855abb973b9519ef152daf8a193fc7b..62ab9eb5aa599c2ce42a65af734bc83afccc09a1 100644
--- a/lofar_station_client/statistics/packet.py
+++ b/lofar_station_client/statistics/packet.py
@@ -1,16 +1,5 @@
-# -*- coding: utf-8 -*-
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
+#  SPDX-License-Identifier: Apache-2.0
 
 """All types of packets as parsed by different collectors"""
 
@@ -19,9 +8,9 @@
 # too-many-instance-attributes, too-many-function-args
 # pylint: disable=R0902,E1121
 
+import struct
 from datetime import datetime
 from datetime import timezone
-import struct
 
 import numpy
 
@@ -281,6 +270,11 @@ class SDPPacket:
             "timestamp": self.timestamp(),
         }
 
+        if self.t_adc == 0:
+            header["f_adc"] = 160
+        elif self.t_adc == 1:
+            header["f_adc"] = 200
+
         return header
 
     def payload(self, signed=False) -> numpy.array:
diff --git a/lofar_station_client/statistics/reader.py b/lofar_station_client/statistics/reader.py
index bceffa1776a16e03fd0ad88497e4d3fa538204bb..d14ec47de00cb3ee70a79e96ba8646a740f07afa 100644
--- a/lofar_station_client/statistics/reader.py
+++ b/lofar_station_client/statistics/reader.py
@@ -1,5 +1,5 @@
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
+#  SPDX-License-Identifier: Apache-2.0
 
 
 """Reader and parser of statistics files"""
@@ -10,14 +10,20 @@
 
 import argparse
 import datetime
+import logging
 import os
 import sys  # noqa: F401
-import logging
-import h5py
+
 import numpy
 import psutil
 import pytz
 
+from lofar_station_client.file_access import read_hdf5
+from lofar_station_client.statistics.statistics_data import (
+    StatisticsDataFile,
+    StatisticsFileHeader,
+)
+
 process = psutil.Process(os.getpid())
 
 logging.basicConfig(level=logging.INFO)
@@ -65,6 +71,9 @@ class StatisticsParser:
         # dict of all statistics, allows for easier access.
         self.statistics_dict = {}
 
+        # header of the file(s)
+        self.file_header: StatisticsFileHeader = None
+
         # for setting the range of times to parse.
         # Initialise with the build in minimum and maximum values
         self.start_time = datetime.datetime.min.replace(tzinfo=pytz.UTC)
@@ -94,45 +103,54 @@ class StatisticsParser:
             files = [files]
 
         for file in files:
-            hdf5_file = h5py.File(file, "r")
+            hdf5_file = read_hdf5(file, StatisticsDataFile)
 
             # go through all the groups
             logger.debug("Parsing hdf5 statistics file")
 
-            for group_key in hdf5_file.keys():
-                try:
-                    # first get the statistic
-                    statistic = StatisticsData(hdf5_file, group_key)
-
-                    # extract the timestamp and convert to datetime
-                    statistic_time = statistic.timestamp
-
-                    # check if the timestamp is before the start time
-                    if statistic_time < self.start_time:
-                        continue
-
-                    # check if the timestamp is after the end times
-                    if statistic_time > self.end_time:
-                        # Exit, we're done
-                        logger.debug("Parsed %s statistics", len(self.statistics))
-                        return
-
-                    # append to the statistics list
-                    self.statistics.append(statistic)
-                    self.statistics_dict[
-                        statistic.timestamp.isoformat(timespec="milliseconds")
-                    ] = statistic
-
-                except Exception:
-                    # B001 Do not use bare `except:`, it also catches unexpected
-                    # events like memory errors, interrupts, system exit
-                    logger.exception(
-                        "Encountered an error while parsing statistic. \
-                        Skipped: %s",
-                        group_key,
+            with hdf5_file as statistic_data:
+                if self.file_header is None:
+                    self.file_header = statistic_data
+                elif not StatisticsFileHeader.__eq__(self.file_header, statistic_data):
+                    raise ValueError(
+                        "Cannot read statistic files with different headers at the "
+                        "same time"
                     )
 
-            logger.debug("Parsed %s statistics", len(self.statistics))
+                for group_key, statistic in statistic_data.items():
+                    try:
+                        # extract the timestamp and convert to datetime
+                        statistic_time = datetime.datetime.fromisoformat(
+                            statistic.timestamp
+                        )
+
+                        # check if the timestamp is before the start time
+                        if statistic_time < self.start_time:
+                            continue
+
+                        # check if the timestamp is after the end times
+                        if statistic_time > self.end_time:
+                            # Exit, we're done
+                            logger.debug("Parsed %s statistics", len(self.statistics))
+                            return
+
+                        # append to the statistics list
+                        hdf5_file.load(statistic)
+                        self.statistics.append(statistic)
+                        self.statistics_dict[
+                            statistic_time.isoformat(timespec="milliseconds")
+                        ] = statistic
+
+                    except Exception:
+                        # B001 Do not use bare `except:`, it also catches unexpected
+                        # events like memory errors, interrupts, system exit
+                        logger.exception(
+                            "Encountered an error while parsing statistic. \
+                            Skipped: %s",
+                            group_key,
+                        )
+
+                logger.debug("Parsed %s statistics", len(self.statistics))
 
     @timeit
     def collect_values(self):
@@ -141,7 +159,7 @@ class StatisticsParser:
         Uses a lot more memory (Basically double since
         the values make up the bulk of memory)
         """
-        lst = [i.values for i in self.statistics]
+        lst = [i.sst_values for i in self.statistics]
         value_array = numpy.stack(lst)
         return value_array
 
@@ -157,7 +175,7 @@ class StatisticsParser:
         Returns a statistic object based on the timestamp given.
         """
         for i in self.statistics:
-            if i.timestamp == datetime.datetime.fromisoformat(timestamp):
+            if i.timestamp == timestamp:
                 return i
 
         raise ValueError(
@@ -178,144 +196,6 @@ class StatisticsParser:
         return len(self.statistics)
 
 
-class StatisticsData:
-    """
-    This class takes the file and the statistics name
-    as its __init__ arguments and then stores the
-    the datasets in them.
-    """
-
-    HEADER_FIELDS = (
-        "station_version",
-        "writer_version",
-        "mode",
-        "antennafield_device",
-        "antenna_names",
-        "rcu_attenuator_dB",
-        "rcu_band_select",
-        "rcu_dth_on",
-        "rcu_dth_freq",
-        "antenna_usage_mask",
-        "antenna_reference_itrf",
-        "frequency_band",
-    )
-
-    # we will be creating potentially tens of thousands of these object.
-    # Using __slots__ makes them faster and uses less memory. At the cost of
-    # having to list all self attributes here.
-    __slots__ = (
-        "version_id",
-        "timestamp",
-        "station_id",
-        "source_info_t_adc",
-        "source_info_subband_calibrated_flag",
-        "source_info_payload_error",
-        "source_info_payload_error",
-        "source_info_payload_error",
-        "source_info_nyquist_zone_index",
-        "source_info_gn_index",
-        "source_info_fsub_type",
-        "source_info_beam_repositioning_flag",
-        "source_info_antenna_band_index",
-        "source_info__raw",
-        "observation_id",
-        "nof_statistics_per_packet",
-        "nof_signal_inputs",
-        "nof_bytes_per_statistic",
-        "marker",
-        "integration_interval_raw",
-        "integration_interval",
-        "data_id__raw",
-        "block_serial_number",
-        "block_period_raw",
-        "block_period",
-        "data_id_signal_input_index",
-        "data_id_subband_index",
-        "data_id_first_baseline",
-        "data_id_beamlet_index",
-        "nof_valid_payloads",
-        "nof_payload_errors",
-        "values",
-    ) + HEADER_FIELDS
-
-    def __init__(self, file, group_key):
-        # get all the general header info
-        for attr in self.HEADER_FIELDS:
-            if attr in file.attrs:
-                if file.attrs[attr] is None:
-                    setattr(self, attr, None)
-                else:
-                    setattr(self, attr, numpy.array(file.attrs[attr]))
-
-        # convert string timestamp to datetime object
-        self.timestamp = datetime.datetime.fromisoformat(
-            file[group_key].attrs["timestamp"]
-        )
-
-        self.source_info_t_adc = file[group_key].attrs["source_info_t_adc"]
-        self.source_info_subband_calibrated_flag = file[group_key].attrs[
-            "source_info_subband_calibrated_flag"
-        ]
-        self.source_info_payload_error = file[group_key].attrs[
-            "source_info_payload_error"
-        ]
-        self.source_info_nyquist_zone_index = file[group_key].attrs[
-            "source_info_nyquist_zone_index"
-        ]
-        self.source_info_gn_index = file[group_key].attrs["source_info_gn_index"]
-        self.source_info_fsub_type = file[group_key].attrs["source_info_fsub_type"]
-        self.source_info_beam_repositioning_flag = file[group_key].attrs[
-            "source_info_beam_repositioning_flag"
-        ]
-        self.source_info_antenna_band_index = file[group_key].attrs[
-            "source_info_antenna_band_index"
-        ]
-        self.source_info__raw = file[group_key].attrs["source_info__raw"]
-
-        self.observation_id = file[group_key].attrs["observation_id"]
-        self.nof_statistics_per_packet = file[group_key].attrs[
-            "nof_statistics_per_packet"
-        ]
-        self.nof_signal_inputs = file[group_key].attrs["nof_signal_inputs"]
-        self.nof_bytes_per_statistic = file[group_key].attrs["nof_bytes_per_statistic"]
-        self.marker = file[group_key].attrs["marker"]
-        self.integration_interval_raw = file[group_key].attrs[
-            "integration_interval_raw"
-        ]
-        self.integration_interval = file[group_key].attrs["integration_interval"]
-        self.data_id__raw = file[group_key].attrs["data_id__raw"]
-
-        self.block_serial_number = file[group_key].attrs["block_serial_number"]
-        self.block_period_raw = file[group_key].attrs["block_period_raw"]
-        self.block_period = file[group_key].attrs["block_period"]
-
-        # get SST specific stuff
-        if self.marker == "S":
-            self.data_id_signal_input_index = file[group_key].attrs[
-                "data_id_signal_input_index"
-            ]
-
-        # get XST specific stuff
-        if self.marker == "X":
-            self.data_id_subband_index = file[group_key].attrs["data_id_subband_index"]
-            self.data_id_first_baseline = file[group_key].attrs[
-                "data_id_first_baseline"
-            ]
-
-        # get BST specific stuff
-        if self.marker == "B":
-            self.data_id_beamlet_index = file[group_key].attrs["data_id_beamlet_index"]
-
-        # get the datasets
-        self.nof_valid_payloads = numpy.array(
-            file.get(f"{group_key}/nof_valid_payloads")
-        )
-        self.nof_payload_errors = numpy.array(
-            file.get(f"{group_key}/nof_payload_errors")
-        )
-        self.values = numpy.array(file.get(f"{group_key}"))
-
-
 def parse_arguments():
     """
     This function parses the input arguments.
diff --git a/lofar_station_client/statistics/receiver.py b/lofar_station_client/statistics/receiver.py
index cd1dcc1c35c9aa65fa8609503a13ab7f535825f5..09f08322fc75ab253b7504cdbf123b461e7a86f2 100644
--- a/lofar_station_client/statistics/receiver.py
+++ b/lofar_station_client/statistics/receiver.py
@@ -1,11 +1,5 @@
-# -*- coding: utf-8 -*-
-#
-# This file is part of the LOFAR 2.0 Station Software
-#
-#
-#
-# Distributed under the terms of the APACHE license.
-# See LICENSE.txt for more info.
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
+#  SPDX-License-Identifier: Apache-2.0
 
 """Filedata and TCP Receivers"""
 
diff --git a/lofar_station_client/statistics/statistics_data.py b/lofar_station_client/statistics/statistics_data.py
new file mode 100644
index 0000000000000000000000000000000000000000..177f66af0a31d8e401b616424e3507f612327cdd
--- /dev/null
+++ b/lofar_station_client/statistics/statistics_data.py
@@ -0,0 +1,225 @@
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
+#  SPDX-License-Identifier: Apache-2.0
+
+# too-few-public-methods
+# pylint: disable=R0903
+
+"""
+Models the structure of an HDF statistics file.
+"""
+import inspect
+from typing import Dict
+
+from numpy import ndarray
+
+from lofar_station_client.file_access import attribute
+
+
+class StatisticsData(ndarray):
+    """
+    Representation of a periodic data matrix written to the HDF statistics file.
+    """
+
+    version_id: int = attribute()
+    """ Version of the packet format. """
+
+    timestamp: str = attribute()
+    """ timestamp of the data """
+
+    station_id: int = attribute()
+    """ Station identifier """
+
+    source_info_t_adc: int = attribute()
+    """ Sampling clock. 0 = 160 MHz, 1 = 200 MHz. """
+
+    source_info_subband_calibrated_flag: bool = attribute()
+    """ subband_calibrated_flag 1 = subband data had subband calibration values applied,
+                                0 = not.
+    """
+
+    source_info_payload_error: bool = attribute()
+    """ 0 = data is ok, 1 = data is corrupted (a fault was encountered). """
+
+    source_info_nyquist_zone_index: int = attribute()
+    """  nyquist zone of filter
+
+            - 0 =           0 -- 1/2 * t_adc Hz (low band),
+            - 1 = 1/2 * t_adc -- t_adc Hz       (high band),
+            - 2 =       t_adc -- 3/2 * t_adc Hz (high band).
+    """
+
+    source_info_gn_index: int = attribute()
+    """ Global index of FPGA that emitted this packet. """
+
+    source_info_fsub_type: int = attribute()
+    """ Sampling method. 0 = critically sampled, 1 = oversampled. """
+
+    source_info_beam_repositioning_flag: bool = attribute()
+
+    source_info_antenna_band_index: int = attribute()
+    """ Antenna type. 0 = low band, 1 = high band. """
+
+    source_info_raw: int = attribute(name="source_info__raw", optional=True)
+    """ Bit field with input information, encoding several other properties. """
+
+    observation_id: int = attribute()
+    """ Observation identifier """
+
+    nof_statistics_per_packet: int = attribute()
+    """ Number of statistic data points in the payload. """
+
+    nof_signal_inputs: int = attribute()
+    """ Number of inputs that contributed to data in this packet. """
+
+    nof_bytes_per_statistic: int = attribute()
+    """ Word size of each statistic """
+
+    marker: str = attribute()
+    """ The type of statistic:
+
+        - 'S' = SST
+        - 'B' = BST
+        - 'X' = XST
+        - 'b' = beamlet
+    """
+
+    integration_interval: float = attribute()
+    """ Integration interval, in seconds. """
+
+    integration_interval_raw: int = attribute()
+    """ Integration interval, in block periods. """
+
+    data_id_raw: int = attribute(name="data_id__raw", optional=True)
+    """ Bit field with payload information, encoding several other properties. """
+
+    block_serial_number: int = attribute()
+    """ Timestamp of the data, in block periods since 1970. """
+
+    block_period_raw: int = attribute()
+    """ Block period, in nanoseconds. """
+
+    block_period: float = attribute()
+    """ Block period, in seconds. """
+
+    data_id_signal_input_index: int = attribute()
+    """
+    SST input (antenna polarisation) index for which this packet contains statistics
+    """
+
+    data_id_subband_index: int = attribute(optional=True)
+    """
+    XST subband number for which this packet contains statistics.
+    """
+
+    data_id_first_baseline: int = attribute(optional=True)
+    """
+    XST first antenna pair for which this packet contains statistics.
+    """
+
+    data_id_beamlet_index: int = attribute(optional=True)
+    """
+    BST the number of the beamlet for which this packet holds statistics.
+    """
+
+    nof_valid_payloads: int = attribute()
+    """ Number of packets received so far that we could parse correctly and do not have
+        a payload error """
+
+    nof_payload_errors: int = attribute()
+    """ Number of packets that reported a payload error """
+
+    tile_beam_pointing_direction: str = attribute(optional=True)
+    """ Direction of the tile beam """
+
+    hbat_pwr_on: str = attribute(optional=True)
+    """ Elements per hba tile """
+
+    clock: int = attribute(optional=True)
+    """ clock in hz """
+
+    frequency_band: ndarray = attribute(optional=True)
+    """ filter selection """
+
+    subband_frequencies: ndarray = attribute(optional=True)
+    """ subband frequencies """
+
+    nyquist_zone: ndarray = attribute(optional=True)
+    """ nyquist zone """
+
+    fpga_spectral_inversion: ndarray = attribute(optional=True)
+    """ spectral inversion """
+
+
+class StatisticsFileHeader:
+    """
+    Pythonic representation of the HDF statistics file header written by the statistics
+    writer.
+    """
+
+    station_name: str = attribute(optional=True)
+    """ Name of the station the statistics were recorded from """
+
+    station_version: str = attribute()
+    """ Lofar Station Control version """
+
+    writer_version: str = attribute()
+    """ Statistics Writer software version """
+
+    mode: str = attribute()
+    """ Mode of the statistics writer (SST,XST,BST) """
+
+    antennafield_device: str = attribute(optional=True)
+    """ Name of the antennafield device """
+
+    antenna_names: str = attribute(optional=True)
+    """ Antenna names """
+
+    antenna_type: str = attribute(optional=True)
+    """ The type of antenna in this field (LBA or HBA). """
+
+    antenna_quality: str = attribute(optional=True)
+    """ The quality of each antenna, as a string. """
+
+    antenna_usage_mask: str = attribute(optional=True)
+    """ Whether each antenna would have been used. """
+
+    antenna_reference_itrf: str = attribute(optional=True)
+    """ Absolute reference position of each tile, in ITRF (XYZ) """
+
+    rcu_attenuator_db: [float] = attribute(name="rcu_attenuator_dB", optional=True)
+    """ Amount of dB with which each antenna signal must be adjusted to line up. """
+
+    rcu_band_select: float = attribute(optional=True)
+    rcu_dth_on: float = attribute(optional=True)
+    rcu_dth_freq: float = attribute(optional=True)
+
+    frequency_band: str = attribute(optional=True)
+    """ filter selection """
+
+    subbands: int = attribute(optional=True)
+
+    fpga_firmware_version: str = attribute(optional=True)
+    fpga_hardware_version: str = attribute(optional=True)
+
+    rcu_pcb_id: str = attribute(optional=True)
+    rcu_pcb_version: str = attribute(optional=True)
+
+    def __eq__(self, other):
+        for attr in [
+            a[0]
+            for a in inspect.getmembers(__class__, lambda a: not inspect.isroutine(a))
+            if not a[0].startswith("_")
+        ]:
+            if not hasattr(self, attr) or not hasattr(other, attr):
+                return False
+            if getattr(self, attr) != getattr(other, attr):
+                return False
+
+        return True
+
+
+class StatisticsDataFile(Dict[str, StatisticsData], StatisticsFileHeader):
+    """
+    Pythonic representation of the HDF statistics file written by the statistics
+    writer.
+    """
diff --git a/lofar_station_client/statistics/writer/__init__.py b/lofar_station_client/statistics/writer/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..c92b615444d854a6e87370b16cf733a5859a07e7 100644
--- a/lofar_station_client/statistics/writer/__init__.py
+++ b/lofar_station_client/statistics/writer/__init__.py
@@ -0,0 +1,2 @@
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
+#  SPDX-License-Identifier: Apache-2.0
diff --git a/lofar_station_client/statistics/writer/entry.py b/lofar_station_client/statistics/writer/entry.py
index 042f295a43dbb762e3df1fd5a289f473251a11e1..0f600b16967f1ba02f81edfe55a406107b55de0a 100644
--- a/lofar_station_client/statistics/writer/entry.py
+++ b/lofar_station_client/statistics/writer/entry.py
@@ -1,11 +1,10 @@
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
-
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
+#  SPDX-License-Identifier: Apache-2.0
 
 """Statistics writer parser and executor"""
 
-# too-many-locals, broad-except, raise-missing-from
-# pylint: disable=R0914, W0703, W0707
+# too-many-locals, broad-except, raise-missing-from, too-many-arguments
+# pylint: disable=R0914, W0703, W0707, R0913
 
 import argparse
 import logging
@@ -13,6 +12,7 @@ import sys
 import time
 
 from tango import DeviceProxy
+
 from lofar_station_client.statistics.receiver import FileReceiver
 from lofar_station_client.statistics.receiver import TCPReceiver
 from lofar_station_client.statistics.writer.hdf5 import BstHdf5Writer
@@ -121,7 +121,13 @@ def _create_receiver(filename, host, port):
 
 
 def _create_writer(
-    mode, interval, output_dir, decimation, antennafield_device: DeviceProxy = None
+    mode,
+    interval,
+    output_dir,
+    decimation,
+    antennafield_device: DeviceProxy = None,
+    sdp_device: DeviceProxy = None,
+    tilebeam_device: DeviceProxy = None,
 ):
     """Create the writer"""
     if mode == "XST":
@@ -130,6 +136,8 @@ def _create_writer(
             file_location=output_dir,
             decimation_factor=decimation,
             antennafield_device=antennafield_device,
+            sdp_device=sdp_device,
+            tilebeam_device=tilebeam_device,
         )
     if mode == "SST":
         return SstHdf5Writer(
@@ -137,6 +145,8 @@ def _create_writer(
             file_location=output_dir,
             decimation_factor=decimation,
             antennafield_device=antennafield_device,
+            sdp_device=sdp_device,
+            tilebeam_device=tilebeam_device,
         )
     if mode == "BST":
         return BstHdf5Writer(
diff --git a/lofar_station_client/statistics/writer/hdf5.py b/lofar_station_client/statistics/writer/hdf5.py
index 060297d4418e4f8282052d3469bc015cc048a618..07201750943097baedf7b4d965224fa6492256a2 100644
--- a/lofar_station_client/statistics/writer/hdf5.py
+++ b/lofar_station_client/statistics/writer/hdf5.py
@@ -1,5 +1,5 @@
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
+#  SPDX-License-Identifier: Apache-2.0
 
 """Hdf5 packets file writer"""
 
@@ -11,24 +11,32 @@ import logging
 import os
 from abc import ABC, abstractmethod
 from datetime import datetime, timedelta
+from itertools import chain
+from typing import TypeVar
 
 # python hdf5
 import h5py
 import numpy
 import pytz
-
 from tango import DeviceProxy, DevFailed
 
+from lofar_station_client.dts.constants import A_pn, N_pol
+from lofar_station_client.file_access.hdf._hdf_writers import HdfFileWriter, create_hdf5
 from lofar_station_client.statistics.collector import BSTCollector
 from lofar_station_client.statistics.collector import SSTCollector
 from lofar_station_client.statistics.collector import XSTCollector
 from lofar_station_client.statistics.packet import BSTPacket
 from lofar_station_client.statistics.packet import SSTPacket
 from lofar_station_client.statistics.packet import XSTPacket
-from lofar_station_client.dts.constants import A_pn, N_pol
+from lofar_station_client.statistics.statistics_data import (
+    StatisticsDataFile,
+    StatisticsData,
+)
 
 logger = logging.getLogger("statistics_writer")
 
+T = TypeVar("T")
+
 __all__ = [
     "HDF5Writer",
     "ParallelXstHdf5Writer",
@@ -122,9 +130,13 @@ class HDF5Writer(ABC):
         statistics_mode,
         decimation_factor,
         antennafield_device: DeviceProxy = None,
+        sdp_device: DeviceProxy = None,
+        tilebeam_device: DeviceProxy = None,
     ):
 
         # all variables that deal with the matrix that's currently being decoded
+        self.file: StatisticsDataFile = None
+        self.file_writer: HdfFileWriter[StatisticsDataFile] = None
         self.current_matrix = None
         self.current_timestamp = datetime.min.replace(tzinfo=pytz.UTC)
 
@@ -141,70 +153,85 @@ class HDF5Writer(ABC):
         self.decimation_factor = decimation_factor
         self.new_file_time_interval = timedelta(seconds=new_file_time_interval)
         self.last_file_time = datetime.min.replace(tzinfo=pytz.UTC)
-        self.file = None
 
         # parameters that are configured depending on the mode the statistics
         # writer is in (SST,XST,BST)
         self.mode = statistics_mode.upper()
 
-        # Set device if any, defaults to None
+        # Set devices if any, defaults to None
         self.antennafield_device = antennafield_device
+        self.sdp_device = sdp_device
+        self.tilebeam_device = tilebeam_device
+
         # By default, select all the values from SDP
-        self.antenna_selection = None
+        self.antenna_selection: [] = None
+        self.antenna_sdp_mapping = None
 
         if self.antennafield_device:
             try:
-                antenna_selection = []
-
-                # select the values from SDP that represent the antennas in this field
-                for (
-                    fpga_nr,
-                    input_nr,
-                ) in self.antennafield_device.Antenna_to_SDP_Mapping_R:
-                    if input_nr == -1:
-                        continue
-
-                    # select both polarisations
-                    antenna_selection.append((fpga_nr * A_pn + input_nr) * N_pol + 0)
-                    antenna_selection.append((fpga_nr * A_pn + input_nr) * N_pol + 1)
-
-                self.antenna_selection = antenna_selection
+                self.antenna_sdp_mapping = (
+                    self.antennafield_device.Antenna_to_SDP_Mapping_R
+                )
             except DevFailed:
                 logger.exception("Failed to read from %s", antennafield_device.name())
+            else:
+                # select the values from SDP that represent the antennas in this field
+                self.antenna_selection = list(
+                    chain.from_iterable(
+                        # select both polarisations
+                        (a, a + 1)
+                        for a in [
+                            (fpga_nr * A_pn + input_nr) * N_pol
+                            for (fpga_nr, input_nr) in self.antenna_sdp_mapping
+                            if input_nr != -1
+                        ]
+                    )
+                )
 
-    def hdf5_file_header(self) -> dict:
+    def set_file_header(self):
         """Returns the header fields per HDF5 file."""
 
-        fields = {
-            # store software version entries
-            "station_version": _get_station_version(self.antennafield_device),
-            "writer_version": _get_writer_version(),
-            "mode": self.mode,
-        }
+        # self.file.station_name
+        self.file.station_version = _get_station_version(self.antennafield_device)
+        self.file.writer_version = _get_writer_version()
+        self.file.mode = self.mode
 
         if self.antennafield_device:
             try:
-                fields.update(
-                    {
-                        "antennafield_device": self.antennafield_device.name(),
-                        "antenna_names": self.antennafield_device.Antenna_Names_R,
-                        "antenna_type": self.antennafield_device.Antenna_Type_R,
-                        "rcu_attenuator_dB": self.antennafield_device.RCU_attenuator_dB_R,  # noqa
-                        "rcu_band_select": self.antennafield_device.RCU_band_select_R,
-                        "rcu_dth_on": self.antennafield_device.RCU_DTH_on_R,
-                        "rcu_dth_freq": self.antennafield_device.RCU_DTH_freq_R,
-                        "antenna_quality": self.antennafield_device.Antenna_Quality_str_R,  # noqa
-                        "antenna_usage_mask": self.antennafield_device.Antenna_Usage_Mask_R,  # noqa
-                        "antenna_reference_itrf": self.antennafield_device.Antenna_Reference_ITRF_R,  # noqa
-                        "frequency_band": self.antennafield_device.Frequency_Band_RW,
-                    }
-                )
+                self.file.antennafield_device = self.antennafield_device.name()
+                self.file.antenna_names = self.antennafield_device.Antenna_Names_R
+                self.file.antenna_type = self.antennafield_device.Antenna_Type_R
+                self.file.rcu_attenuator_db = (
+                    self.antennafield_device.RCU_attenuator_dB_R
+                )  # noqa
+                self.file.rcu_band_select = self.antennafield_device.RCU_band_select_R
+                self.file.rcu_dth_on = self.antennafield_device.RCU_DTH_on_R
+                self.file.rcu_dth_freq = self.antennafield_device.RCU_DTH_freq_R
+                self.file.antenna_quality = (
+                    self.antennafield_device.Antenna_Quality_str_R
+                )  # noqa
+                self.file.antenna_usage_mask = (
+                    self.antennafield_device.Antenna_Usage_Mask_R
+                )  # noqa
+                self.file.antenna_reference_itrf = (
+                    self.antennafield_device.Antenna_Reference_ITRF_R
+                )  # noqa
+                self.file.frequency_band = self.antennafield_device.Frequency_Band_RW
             except DevFailed:
                 logger.exception(
                     "Failed to read from %s", self.antennafield_device.name()
                 )
 
-        return fields
+        if self.sdp_device:
+            try:
+                self.file.fpga_firmware_version = (
+                    self.sdp_device.FPGA_firmware_version_R
+                )
+                self.file.fpga_hardware_version = (
+                    self.sdp_device.FPGA_hardware_version_R
+                )
+            except DevFailed:
+                logger.exception("Failed to read from %s", self.sdp_device.name())
 
     def hdf5_matrix_header(self, statistics_packet_header: dict) -> dict:
         """Returns the header fields per statistics matrix."""
@@ -316,7 +343,8 @@ class HDF5Writer(ABC):
 
         # create a new and empty current_matrix
         self.current_matrix = self.new_collector()
-        self.statistics_packet_header = None
+        self.statistics_packet_header: dict = None
+        self.file_writer.flush()
 
     def write_matrix(self):
         """Writes the finished matrix to the hdf5 file"""
@@ -325,19 +353,87 @@ class HDF5Writer(ABC):
 
         # create the new hdf5 dataset based on the timestamp of packets
         tstamp = self.current_timestamp.isoformat(timespec="milliseconds")
-        current_dataset = self.file.create_dataset(
-            name=f"{self.mode}_{tstamp}",
-            data=self.get_values_matrix(),
-            compression="gzip",
-        )
+        matrix = self.get_values_matrix()
+
+        # Stores the header of the packet received for this matrix as a list of
+        # attributes
+        matrix.nof_payload_errors = self.current_matrix.parameters["nof_payload_errors"]
+        matrix.nof_valid_payloads = self.current_matrix.parameters["nof_valid_payloads"]
 
         # Stores the header of the packet received for this matrix as a list of
         # attributes
         header_attributes = self.hdf5_matrix_header(self.statistics_packet_header)
-        current_dataset.attrs.update(_dict_to_hdf5_attrs(header_attributes))
+        for k, val in _dict_to_hdf5_attrs(header_attributes).items():
+            if hasattr(matrix, k):
+                setattr(matrix, k, val)
+
+        if self.antennafield_device:
+            try:
+                matrix.hbat_pwr_on = self.antennafield_device.HBAT_PWR_on_R
+                matrix.frequency_band = self.antennafield_device.Frequency_Band_R
+            except DevFailed:
+                logger.exception(
+                    "Failed to read from %s", self.antennafield_device.name()
+                )
+
+        if self.sdp_device:
+            try:
+                nyquist_zones = self.sdp_device.nyquist_zone_RW
+                fpga_spectral_inversion = self.sdp_device.FPGA_spectral_inversion_R
+                subband_frequencies = self.sdp_device.subband_frequency_R
+            except DevFailed:
+                logger.exception(
+                    "Failed to read from %s", self.sdp_device.name()
+                )
+            else:
+                matrix.nyquist_zone = numpy.empty(
+                    (len(self.antenna_sdp_mapping), N_pol), None
+                )
+                matrix.fpga_spectral_inversion = numpy.empty(
+                    (len(self.antenna_sdp_mapping), N_pol), None
+                )
+                matrix.subband_frequencies = numpy.empty(
+                    (len(self.antenna_sdp_mapping), N_pol), None
+                )
+                for antenna_nr, (fpga_nr, input_nr) in enumerate(
+                    self.antenna_sdp_mapping
+                ):
+                    if input_nr == -1:
+                        continue
+
+                    # set for x polarisation
+                    matrix.nyquist_zone[antenna_nr, 0] = nyquist_zones[
+                        fpga_nr, input_nr * N_pol + 0
+                    ]
+                    matrix.fpga_spectral_inversion[
+                        antenna_nr, 0
+                    ] = fpga_spectral_inversion[fpga_nr, input_nr * N_pol + 0]
+                    matrix.subband_frequencies[antenna_nr, 0] = subband_frequencies[
+                        (fpga_nr * A_pn * N_pol) + input_nr * N_pol + 0
+                    ]
+                    # set for y polarisation
+                    matrix.nyquist_zone[antenna_nr, 1] = nyquist_zones[
+                        fpga_nr, input_nr * N_pol + 1
+                    ]
+                    matrix.fpga_spectral_inversion[
+                        antenna_nr, 1
+                    ] = fpga_spectral_inversion[fpga_nr, input_nr * N_pol + 1]
+                    matrix.subband_frequencies[antenna_nr, 1] = subband_frequencies[
+                        (fpga_nr * A_pn * N_pol) + input_nr * N_pol + 1
+                    ]
+
+        if self.tilebeam_device:
+            try:
+                matrix.tile_beam_pointing_direction = (
+                    self.tilebeam_device.Pointing_direction_str
+                )
+            except DevFailed:
+                logger.exception("Failed to read from %s", self.tilebeam_device.name())
+
+        self.file[f"{self.mode}_{tstamp}"] = matrix
 
     @abstractmethod
-    def get_values_matrix(self):
+    def get_values_matrix(self) -> StatisticsData:
         """Abstract method"""
 
     def next_filename(self, timestamp, suffix=".h5"):
@@ -357,14 +453,14 @@ class HDF5Writer(ABC):
 
     def start_new_hdf5(self, timestamp):
         """Creates a new hdf5 file"""
-        if self.file is not None:
+        if self.file_writer is not None:
             try:
-                self.file.close()
+                self.file_writer.close()
             except Exception:
                 logger.exception(
                     "Error while attempting to close hdf5 file to disk. file"
                     "%s likely empty, please verify integrity.",
-                    self.file,
+                    self.file_writer,
                 )
 
         filename = self.next_filename(timestamp)
@@ -372,15 +468,14 @@ class HDF5Writer(ABC):
 
         # create file, add the header
         try:
-            self.file = h5py.File(filename, "w")
-        except Exception as excep:
+            self.file_writer = create_hdf5(filename, StatisticsDataFile)
+        except Exception as ex:
             logger.exception("Error while creating new file")
-            raise excep
+            raise ex
 
-        header_attributes = self.hdf5_file_header()
-        logger.debug("HDF5 header attributes: %s", header_attributes)
+        self.file = self.file_writer.create()
 
-        self.file.attrs.update(_dict_to_hdf5_attrs(header_attributes))
+        self.set_file_header()
 
         # we've officially moved on to the next timestamp
         self.last_file_time = timestamp
@@ -389,22 +484,20 @@ class HDF5Writer(ABC):
         """Function that can be used to stop the writer without data loss."""
 
         logger.debug("closing hdf5 file")
-        if self.file is not None:
-            if self.current_matrix is not None:
-                # Write matrix if one exists
-                # only creates file if there is a matrix to actually write
-                try:
-                    self.write_matrix()
-                finally:
-                    filename = str(self.file)
-                    self.file.close()
-                    logger.debug("%s closed", filename)
-                    logger.debug(
-                        "Received a total of %d statistics while running. With "
-                        "%d written to disk",
-                        self.statistics_counter,
-                        int(self.statistics_counter / self.decimation_factor),
-                    )
+        if self.file_writer is not None and self.current_matrix is not None:
+            # Write matrix if one exists
+            # only creates file if there is a matrix to actually write
+            try:
+                self.write_matrix()
+            finally:
+                self.file_writer.close()
+                logger.debug("%s closed", self.file_writer.file_name)
+                logger.debug(
+                    "Received a total of %d statistics while running. With "
+                    "%d written to disk",
+                    self.statistics_counter,
+                    int(self.statistics_counter / self.decimation_factor),
+                )
 
 
 class SstHdf5Writer(HDF5Writer):
@@ -416,6 +509,8 @@ class SstHdf5Writer(HDF5Writer):
         file_location,
         decimation_factor,
         antennafield_device: DeviceProxy = None,
+        sdp_device: DeviceProxy = None,
+        tilebeam_device: DeviceProxy = None,
     ):
         super().__init__(
             new_file_time_interval,
@@ -423,6 +518,8 @@ class SstHdf5Writer(HDF5Writer):
             HDF5Writer.SST_MODE,
             decimation_factor,
             antennafield_device=antennafield_device,
+            sdp_device=sdp_device,
+            tilebeam_device=tilebeam_device,
         )
 
     def decoder(self, packet):
@@ -431,7 +528,12 @@ class SstHdf5Writer(HDF5Writer):
     def new_collector(self):
         return SSTCollector()
 
-    def get_values_matrix(self):
+    def hdf5_matrix_header(self, statistics_packet_header: dict) -> dict:
+        header = super().hdf5_matrix_header(statistics_packet_header)
+        header["subbands"] = numpy.array(range(512))
+        return header
+
+    def get_values_matrix(self) -> StatisticsData:
         # first obtain all values from SDP
         all_values = self.current_matrix.parameters["sst_values"].astype(numpy.float32)
         # then, select our antennas
@@ -441,7 +543,7 @@ class SstHdf5Writer(HDF5Writer):
             else all_values
         )
 
-        return our_values
+        return our_values.view(StatisticsData)
 
 
 class BstHdf5Writer(HDF5Writer):
@@ -461,8 +563,12 @@ class BstHdf5Writer(HDF5Writer):
     def new_collector(self):
         return BSTCollector()
 
-    def get_values_matrix(self):
-        return self.current_matrix.parameters["bst_values"].astype(numpy.float32)
+    def get_values_matrix(self) -> StatisticsData:
+        return (
+            self.current_matrix.parameters["bst_values"]
+            .astype(numpy.float32)
+            .view(StatisticsData)
+        )
 
 
 class XstHdf5Writer(HDF5Writer):
@@ -474,6 +580,8 @@ class XstHdf5Writer(HDF5Writer):
         file_location,
         decimation_factor,
         antennafield_device,
+        sdp_device,
+        tilebeam_device,
         subband_index,
     ):
         super().__init__(
@@ -482,6 +590,8 @@ class XstHdf5Writer(HDF5Writer):
             HDF5Writer.XST_MODE,
             decimation_factor,
             antennafield_device,
+            sdp_device,
+            tilebeam_device,
         )
         self.subband_index = subband_index
 
@@ -498,7 +608,12 @@ class XstHdf5Writer(HDF5Writer):
             f"{time_str}{suffix}"
         )
 
-    def get_values_matrix(self):
+    def hdf5_matrix_header(self, statistics_packet_header: dict) -> dict:
+        header = super().hdf5_matrix_header(statistics_packet_header)
+        header["subbands"] = numpy.array([self.subband_index])
+        return header
+
+    def get_values_matrix(self) -> StatisticsData:
         # requires a function call to transform the xst_blocks in to the right
         # structure
         #
@@ -515,7 +630,7 @@ class XstHdf5Writer(HDF5Writer):
             else all_values
         )
 
-        return our_values
+        return our_values.view(StatisticsData)
 
 
 class ParallelXstHdf5Writer:
@@ -527,6 +642,8 @@ class ParallelXstHdf5Writer:
         file_location,
         decimation_factor,
         antennafield_device,
+        sdp_device,
+        tilebeam_device,
     ):
         # maintain a dedicated HDF5Writer per subband
         self.writers = {}
@@ -539,6 +656,8 @@ class ParallelXstHdf5Writer:
                 file_location,
                 decimation_factor,
                 antennafield_device,
+                sdp_device,
+                tilebeam_device,
                 subband,
             )
 
diff --git a/tests/file_access/test_file_reader.py b/tests/file_access/test_file_reader.py
index af7f573cb7b13bbf88da5f3846832495aef470a0..ca9b6ff61cbf6e8aa6b2cef6d6919ea4da62129b 100644
--- a/tests/file_access/test_file_reader.py
+++ b/tests/file_access/test_file_reader.py
@@ -1,4 +1,4 @@
-#  Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
 #  SPDX-License-Identifier: Apache-2.0
 
 from os.path import dirname
@@ -15,26 +15,30 @@ class DataSubSet(object):
 
 
 class DataSet(object):
-    observation_station: str = attribute()
-    observation_source: str = attribute(from_member="sub_set")
     nof_payload_errors: List[int] = member()
     nof_valid_payloads: List[int] = member()
     values: List[List[float]] = member()
-    sub_set: DataSubSet = member(name="test")
     non_existent: DataSubSet = member(optional=True)
 
     def __repr__(self):
         return f"DataSet(nof_payload_errors={self.nof_payload_errors})"
 
 
-class AttrDataSet(object):
+class DataSet2(DataSet):
+    sub_set: DataSubSet = member(name="test")
+
+
+class SimpleDataSet(object):
     observation_station: str = attribute()
     observation_station_optional: str = attribute(optional=True)
-    observation_station_missing_none_optional: str = attribute(optional=False)
     test_attr: str = attribute(from_member="calibration_data", name="test_attribute")
     calibration_data: ndarray = member(name="data")
 
 
+class AttrDataSet(SimpleDataSet):
+    observation_station_missing_none_optional: str = attribute(optional=False)
+
+
 class CalData:
     x_attr: str = attribute("test_attr", from_member="x")
     y_attr: str = attribute("test_attr", from_member="y")
@@ -53,7 +57,7 @@ class CalTableDict(Dict[str, Dict[str, ndarray]]):
 class TestHdf5FileReader(base.TestCase):
     def test_file_reading(self):
         with read_hdf5(
-            dirname(__file__) + "/SST_2022-11-15-14-21-39.h5", Dict[str, DataSet]
+            dirname(__file__) + "/SST_2022-11-15-14-21-39.h5", Dict[str, DataSet2]
         ) as ds:
             self.assertEqual(21, len(ds.keys()))
             item = ds["SST_2022-11-15T14:21:59.000+00:00"]
@@ -94,6 +98,29 @@ class TestHdf5FileReader(base.TestCase):
                     # this attribute does not exist but is not marked as optional
                 )
 
+    def test_load_object(self):
+        hdf5_file = read_hdf5(dirname(__file__) + "/cal-test.h5", SimpleDataSet)
+        ds = hdf5_file.read()
+        hdf5_file.load(ds)
+        hdf5_file.close()
+        self.assertEqual("test-station", ds.observation_station)
+        self.assertIsNone(ds.observation_station_optional)
+        self.assertEqual("dset_attr", ds.test_attr)
+        d = ds.calibration_data
+        self.assertTrue(isinstance(d, ndarray))
+        self.assertEqual(512, d.shape[0])
+        self.assertEqual(96, d.shape[1])
+
+    def test_load_complex(self):
+        hdf5_file = read_hdf5(
+            dirname(__file__) + "/SST_2022-11-15-14-21-39.h5", Dict[str, DataSet]
+        )
+        with hdf5_file as ds:
+            self.assertEqual(21, len(ds.keys()))
+            for _key, data in ds.items():
+                hdf5_file.load(data)
+                self.assertIsNotNone(data.nof_payload_errors)
+
     def test_read_ndarray(self):
         with read_hdf5(dirname(__file__) + "/cal-test.h5", AttrDataSet) as ds:
             d = ds.calibration_data
diff --git a/tests/file_access/test_file_writer.py b/tests/file_access/test_file_writer.py
index c055cb8941aae58904860d2381c0e6a2b84be4ac..9f5aa02de8252eae662e888234eadfb50add06f8 100644
--- a/tests/file_access/test_file_writer.py
+++ b/tests/file_access/test_file_writer.py
@@ -1,9 +1,10 @@
 # Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
 # SPDX-License-Identifier: Apache-2.0
 
-from os.path import dirname
+from tempfile import TemporaryDirectory
 from typing import List, Dict
 
+import numpy
 from numpy import ndarray, array
 
 from lofar_station_client.file_access import (
@@ -35,71 +36,124 @@ class DataSet:
     non_existent: DataSubSet = member(optional=True)
 
 
+class SubArray(ndarray):
+    observation_station: str = attribute()
+
+
+class SubDict(Dict[str, ndarray]):
+    station_name: str = attribute()
+    station_version: str = attribute()
+
+
 class TestHdf5FileWriter(base.TestCase):
     def test_simple_writing(self):
-        with create_hdf5(dirname(__file__) + "/test_simple_writing.h5", DataSet) as ds:
-            ds.observation_station = "CS001"
-            ds.nof_payload_errors = [1, 2, 3, 4, 5, 6]
-            ds.values = [[2.0], [3.0], [4.0]]
-            ds.sub_set = DataSubSet()
-            ds.sub_set.values = [5, 4, 3, 2]
-            ds.observation_source = "CasA"
-
-        with read_hdf5(dirname(__file__) + "/test_simple_writing.h5", DataSet) as ds:
-            self.assertEqual("CS001", ds.observation_station)
-            self.assertEqual([1, 2, 3, 4, 5, 6], ds.nof_payload_errors)
-            self.assertEqual([[2.0], [3.0], [4.0]], ds.values)
-            self.assertIsNotNone(ds.sub_set)
-            self.assertEqual([5, 4, 3, 2], ds.sub_set.values)
-            self.assertEqual("CasA", ds.observation_source)
+        with TemporaryDirectory() as tmpdir:
+            file_name = tmpdir + "/test_simple_writing.h5"
+
+            with create_hdf5(file_name, DataSet) as ds:
+                ds.observation_station = "CS001"
+                ds.nof_payload_errors = [1, 2, 3, 4, 5, 6]
+                ds.values = [[2.0], [3.0], [4.0]]
+                ds.sub_set = DataSubSet()
+                ds.sub_set.values = [5, 4, 3, 2]
+                ds.observation_source = "CasA"
+
+            with read_hdf5(file_name, DataSet) as ds:
+                self.assertEqual("CS001", ds.observation_station)
+                self.assertEqual([1, 2, 3, 4, 5, 6], ds.nof_payload_errors)
+                self.assertEqual([[2.0], [3.0], [4.0]], ds.values)
+                self.assertIsNotNone(ds.sub_set)
+                self.assertEqual([5, 4, 3, 2], ds.sub_set.values)
+                self.assertEqual("CasA", ds.observation_source)
 
     def test_list_writing(self):
-        with create_hdf5(
-            dirname(__file__) + "/test_list_writing.h5", DataSubSet
-        ) as dss:
-            dss.values = [2, 3, 4, 5]
-            dss.values.append(1)
+        with TemporaryDirectory() as tmpdir:
+            file_name = tmpdir + "/test_list_writing.h5"
+
+            with create_hdf5(file_name, DataSubSet) as dss:
+                dss.values = [2, 3, 4, 5]
+                dss.values.append(1)
 
-        with read_hdf5(dirname(__file__) + "/test_list_writing.h5", DataSubSet) as dss:
-            self.assertEqual([2, 3, 4, 5, 1], dss.values)
+            with read_hdf5(file_name, DataSubSet) as dss:
+                self.assertEqual([2, 3, 4, 5, 1], dss.values)
 
     def test_dict_writing(self):
-        with create_hdf5(
-            dirname(__file__) + "/test_dict_writing.h5", Dict[str, ndarray]
-        ) as d:
-            d["test_1"] = array([1, 2, 3, 4, 5, 6])
-            d["test_2"] = array([6, 5, 4, 1])
-        with read_hdf5(
-            dirname(__file__) + "/test_dict_writing.h5", Dict[str, ndarray]
-        ) as d:
-            self.assertFalse(([1, 2, 3, 4, 5, 6] - d["test_1"]).any())
-            self.assertFalse(([6, 5, 4, 1] - d["test_2"]).any())
+        with TemporaryDirectory() as tmpdir:
+            file_name = tmpdir + "/test_dict_writing.h5"
+
+            with create_hdf5(file_name, Dict[str, ndarray]) as d:
+                d["test_1"] = array([1, 2, 3, 4, 5, 6])
+                d["test_2"] = array([6, 5, 4, 1])
+
+            with read_hdf5(file_name, Dict[str, ndarray]) as d:
+                self.assertFalse(([1, 2, 3, 4, 5, 6] - d["test_1"]).any())
+                self.assertFalse(([6, 5, 4, 1] - d["test_2"]).any())
+
+    def test_derived_dict_writing(self):
+        with TemporaryDirectory() as tmpdir:
+            file_name = tmpdir + "/test_derived_dict_writing.h5"
+
+            with create_hdf5(file_name, SubDict) as d:
+                d.station_name = "st1"
+                d.station_version = "999"
+                d["test_1"] = array([1, 2, 3, 4, 5, 6])
+                d["test_2"] = array([6, 5, 4, 1])
+
+            with read_hdf5(file_name, SubDict) as d:
+                self.assertEqual("st1", d.station_name)
+                self.assertEqual("999", d.station_version)
+                self.assertFalse(([1, 2, 3, 4, 5, 6] - d["test_1"]).any())
+                self.assertFalse(([6, 5, 4, 1] - d["test_2"]).any())
+
+    def test_derived_ndarray_writing(self):
+        with TemporaryDirectory() as tmpdir:
+            file_name = tmpdir + "/test_derived_ndarray_writing.h5"
+
+            with create_hdf5(file_name, Dict[str, SubArray]) as d:
+                sa = numpy.zeros((8,), dtype=numpy.float64).view(SubArray)
+                sa.observation_station = "test1"
+                d["test_1"] = sa
+                sa2 = numpy.zeros((10,), dtype=numpy.float64).view(SubArray)
+                sa2.observation_station = "test2"
+                d["test_2"] = sa2
+
+            with read_hdf5(file_name, Dict[str, SubArray]) as dss:
+                self.assertTrue("test_1" in dss)
+                self.assertTrue("test_2" in dss)
+                self.assertEqual("test1", dss["test_1"].observation_station)
+                self.assertEqual("test2", dss["test_2"].observation_station)
 
     def test_dict_altering(self):
-        with create_hdf5(
-            dirname(__file__) + "/test_dict_altering.h5", DataSubSet
-        ) as dss:
-            dss.dict_test_ndarray = {
-                "test_1": array([2, 4, 6]),
-                "test_2": array([1, 3, 5]),
-            }
-            dss.dict_test_ndarray["test_3"] = array([9, 8, 7])
-            dss.dict_test_ndarray.pop("test_1")
-            ss = SimpleSet()
-            ss.values = array([4, 9, 3])
-            dss.dict_test_object = {"test_99": ss}
-            dss.dict_test_object["test_99"].values[0] = 5
-            dss.dict_test_object["test_98"] = SimpleSet()
-            dss.dict_test_object["test_98"].values = array([4, 9, 3])
-        with read_hdf5(dirname(__file__) + "/test_dict_altering.h5", DataSubSet) as dss:
-            self.assertTrue("test_2" in dss.dict_test_ndarray)
-            self.assertTrue("test_3" in dss.dict_test_ndarray)
-            self.assertFalse(([1, 3, 5] - dss.dict_test_ndarray["test_2"]).any())
-            self.assertFalse(([9, 8, 7] - dss.dict_test_ndarray["test_3"]).any())
-            self.assertTrue("test_99" in dss.dict_test_object)
-            self.assertTrue("test_98" in dss.dict_test_object)
-            self.assertFalse(([5, 9, 3] - dss.dict_test_object["test_99"].values).any())
-            self.assertFalse(([4, 9, 3] - dss.dict_test_object["test_98"].values).any())
+        with TemporaryDirectory() as tmpdir:
+            file_name = tmpdir + "/test_dict_altering.h5"
+
+            with create_hdf5(file_name, DataSubSet) as dss:
+                dss.dict_test_ndarray = {
+                    "test_1": array([2, 4, 6]),
+                    "test_2": array([1, 3, 5]),
+                }
+                dss.dict_test_ndarray["test_3"] = array([9, 8, 7])
+                dss.dict_test_ndarray.pop("test_1")
+                ss = SimpleSet()
+                ss.values = array([4, 9, 3])
+                dss.dict_test_object = {"test_99": ss}
+                dss.dict_test_object["test_99"].values[0] = 5
+                dss.dict_test_object["test_98"] = SimpleSet()
+                dss.dict_test_object["test_98"].values = array([4, 9, 3])
+
+            with read_hdf5(file_name, DataSubSet) as dss:
+                self.assertTrue("test_2" in dss.dict_test_ndarray)
+                self.assertTrue("test_3" in dss.dict_test_ndarray)
+                self.assertFalse(([1, 3, 5] - dss.dict_test_ndarray["test_2"]).any())
+                self.assertFalse(([9, 8, 7] - dss.dict_test_ndarray["test_3"]).any())
+                self.assertTrue("test_99" in dss.dict_test_object)
+                self.assertTrue("test_98" in dss.dict_test_object)
+                self.assertFalse(
+                    ([5, 9, 3] - dss.dict_test_object["test_99"].values).any()
+                )
+                self.assertFalse(
+                    ([4, 9, 3] - dss.dict_test_object["test_98"].values).any()
+                )
 
     def test_object_access(self):
         ds = DataSet()
@@ -122,34 +176,41 @@ class TestHdf5FileWriter(base.TestCase):
         self.assertEqual("CasA", ds.observation_source)
 
     def test_attach_object(self):
-        with create_hdf5(dirname(__file__) + "/test_attach_object.h5", DataSet) as ds:
-            sub_set = DataSubSet()
-            sub_set.values = [7, 4, 9, 2, 9]
-            ds.sub_set = sub_set
-            ds.observation_source = "CasA"
-        with read_hdf5(dirname(__file__) + "/test_attach_object.h5", DataSet) as ds:
-            self.assertEqual([7, 4, 9, 2, 9], ds.sub_set.values)
-            self.assertEqual("CasA", ds.observation_source)
+        with TemporaryDirectory() as tmpdir:
+            file_name = tmpdir + "/test_attach_object.h5"
+
+            with create_hdf5(file_name, DataSet) as ds:
+                sub_set = DataSubSet()
+                sub_set.values = [7, 4, 9, 2, 9]
+                ds.sub_set = sub_set
+                ds.observation_source = "CasA"
+
+            with read_hdf5(file_name, DataSet) as ds:
+                self.assertEqual([7, 4, 9, 2, 9], ds.sub_set.values)
+                self.assertEqual("CasA", ds.observation_source)
 
     def test_open_write(self):
-        with create_hdf5(dirname(__file__) + "/test_open_write.h5", DataSet) as ds:
-            ds.observation_station = "CS001"
-            ds.nof_payload_errors = [1, 2, 3, 4, 5, 6]
-            ds.values = [[2.0], [3.0], [4.0]]
-            ds.sub_set = DataSubSet()
-            ds.sub_set.values = [5, 4, 3, 2]
-            ds.observation_source = "CasA"
-
-        with open_hdf5(dirname(__file__) + "/test_open_write.h5", DataSet) as ds:
-            ds.nof_payload_errors.append(7)
-            ds.values.append([5.0])
-            ds.observation_source = "ACAS"
-            ds.sub_set.values = [1, 2, 3]
-
-        with read_hdf5(dirname(__file__) + "/test_open_write.h5", DataSet) as ds:
-            self.assertEqual("CS001", ds.observation_station)
-            self.assertEqual([1, 2, 3, 4, 5, 6, 7], ds.nof_payload_errors)
-            self.assertEqual([[2.0], [3.0], [4.0], [5.0]], ds.values)
-            self.assertIsNotNone(ds.sub_set)
-            self.assertEqual([1, 2, 3], ds.sub_set.values)
-            self.assertEqual("ACAS", ds.observation_source)
+        with TemporaryDirectory() as tmpdir:
+            file_name = tmpdir + "/test_open_write.h5"
+
+            with create_hdf5(file_name, DataSet) as ds:
+                ds.observation_station = "CS001"
+                ds.nof_payload_errors = [1, 2, 3, 4, 5, 6]
+                ds.values = [[2.0], [3.0], [4.0]]
+                ds.sub_set = DataSubSet()
+                ds.sub_set.values = [5, 4, 3, 2]
+                ds.observation_source = "CasA"
+
+            with open_hdf5(file_name, DataSet) as ds:
+                ds.nof_payload_errors.append(7)
+                ds.values.append([5.0])
+                ds.observation_source = "ACAS"
+                ds.sub_set.values = [1, 2, 3]
+
+            with read_hdf5(file_name, DataSet) as ds:
+                self.assertEqual("CS001", ds.observation_station)
+                self.assertEqual([1, 2, 3, 4, 5, 6, 7], ds.nof_payload_errors)
+                self.assertEqual([[2.0], [3.0], [4.0], [5.0]], ds.values)
+                self.assertIsNotNone(ds.sub_set)
+                self.assertEqual([1, 2, 3], ds.sub_set.values)
+                self.assertEqual("ACAS", ds.observation_source)
diff --git a/tests/file_access/test_monitored_wrapper.py b/tests/file_access/test_monitored_wrapper.py
index 117aaf27db441937e653414dd5201e2d0710d383..c14d38f6135351906b959193cb5439b1438f9370 100644
--- a/tests/file_access/test_monitored_wrapper.py
+++ b/tests/file_access/test_monitored_wrapper.py
@@ -1,3 +1,6 @@
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
+#  SPDX-License-Identifier: Apache-2.0
+
 from numpy import array
 
 from lofar_station_client.file_access._monitoring import MonitoredWrapper
diff --git a/tests/statistics/__init__.py b/tests/statistics/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..c92b615444d854a6e87370b16cf733a5859a07e7 100644
--- a/tests/statistics/__init__.py
+++ b/tests/statistics/__init__.py
@@ -0,0 +1,2 @@
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
+#  SPDX-License-Identifier: Apache-2.0
diff --git a/tests/statistics/test_collector.py b/tests/statistics/test_collector.py
index bc5a147b11a0835eeff7eda2a0dd87eefa455843..de8493bee2c35db31d663ff85f5779c991ea9664 100644
--- a/tests/statistics/test_collector.py
+++ b/tests/statistics/test_collector.py
@@ -1,26 +1,14 @@
-# -*- coding: utf-8 -*-
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
+#  SPDX-License-Identifier: Apache-2.0
+
+import numpy
 
 from lofar_station_client.statistics.collector import BSTCollector
 from lofar_station_client.statistics.collector import XSTCollector
 from lofar_station_client.statistics.packet import BSTPacket
 from lofar_station_client.statistics.packet import XSTPacket
-
 from tests import base
 
-import numpy
-
 
 class TestSelectSubbandSlot(base.TestCase):
     def test_first_entry(self):
diff --git a/tests/statistics/test_writer.py b/tests/statistics/test_writer.py
index 824aea381a39d97fd72c7cfe7881671656b86a42..3a44840846034be864cf71e5ee9221621672ab8b 100644
--- a/tests/statistics/test_writer.py
+++ b/tests/statistics/test_writer.py
@@ -8,6 +8,10 @@ import sys
 import h5py
 import numpy
 
+from lofar_station_client.statistics.statistics_data import (
+    StatisticsData,
+    StatisticsFileHeader,
+)
 from lofar_station_client.statistics.writer import entry, hdf5
 from lofar_station_client.statistics import reader
 from tests.test_devices import (
@@ -20,7 +24,7 @@ from tests import base
 class TestStatisticsReaderWriterSST(base.TestCase):
     def _run_writer_reader(
         self, tmpdir: str, writer_argv: list
-    ) -> reader.StatisticsData:
+    ) -> tuple[StatisticsData, StatisticsFileHeader]:
         """Run the statistics writer with the given arguments,
         and read and return the output."""
         # default arguments for statistics writer
@@ -57,14 +61,15 @@ class TestStatisticsReaderWriterSST(base.TestCase):
         # test statistics reader
         with mock.patch.object(reader.sys, "argv", default_reader_sys_argv):
             stat_parser = reader.setup_stat_parser()
-            SSTstatistics = stat_parser.list_statistics()
-            self.assertIsNotNone(SSTstatistics)
+            sst_statistics = stat_parser.list_statistics()
+            self.assertIsNotNone(sst_statistics)
             stat = stat_parser.get_statistic(
                 "2021-09-20T12:17:40.000+00:00"
             )  # same as stat_parser.statistics[0]
+            file_header = stat_parser.file_header
             self.assertIsNotNone(stat)
 
-        return stat
+        return stat, file_header
 
     def _mock_get_tango_device(self, tango_disabled, host, device_name):
         """Return our mocked DeviceProxies"""
@@ -87,11 +92,11 @@ class TestStatisticsReaderWriterSST(base.TestCase):
     def test_header_info(self):
         """Test whether the header info are inserted and collected in the proper way"""
         with TemporaryDirectory() as tmpdir:
-            stat = self._run_writer_reader(tmpdir, [])
+            _, file_header = self._run_writer_reader(tmpdir, [])
 
-            self.assertIsNotNone(stat.station_version)
-            self.assertIsNotNone(stat.writer_version)
-            self.assertEqual("SST", stat.mode)
+            self.assertIsNotNone(file_header.station_version)
+            self.assertIsNotNone(file_header.writer_version)
+            self.assertEqual("SST", file_header.mode)
 
     def test_insert_tango_SST_statistics(self):
         with TemporaryDirectory() as tmpdir:
@@ -103,14 +108,14 @@ class TestStatisticsReaderWriterSST(base.TestCase):
             with mock.patch.object(
                 entry, "_get_tango_device", self._mock_get_tango_device
             ):
-                stat = self._run_writer_reader(tmpdir, writer_argv)
+                stat, file_header = self._run_writer_reader(tmpdir, writer_argv)
 
             self.assertEqual(121, stat.data_id_signal_input_index)
 
             # Test some AntennField attributes, whether they match our mock
-            self.assertListEqual([0, 1, 2], stat.rcu_attenuator_dB.tolist())
-            self.assertListEqual([1, 1, 1], stat.rcu_band_select.tolist())
-            self.assertListEqual([False, False, False], stat.rcu_dth_on.tolist())
+            self.assertListEqual([0, 1, 2], file_header.rcu_attenuator_db.tolist())
+            self.assertListEqual([1, 1, 1], file_header.rcu_band_select.tolist())
+            self.assertListEqual([False, False, False], file_header.rcu_dth_on.tolist())
 
     def test_no_tango_SST_statistics(self):
         with TemporaryDirectory() as tmpdir:
diff --git a/tests/test_devices.py b/tests/test_devices.py
index 9cbf97dd0480b312f373c57e2c8e0c5e53dd9c9f..10e0a33b5d44754d878477464bb781c9ac5ffc07 100644
--- a/tests/test_devices.py
+++ b/tests/test_devices.py
@@ -1,12 +1,13 @@
-import numpy
+#  Copyright (C) 2023 ASTRON (Netherlands Institute for Radio Astronomy)
+#  SPDX-License-Identifier: Apache-2.0
 
-from tests import base
+import numpy
+from tango import DevState, DevFailed
+from tango.server import Device, attribute, AttrWriteType
+from tango.test_context import MultiDeviceTestContext
 
 from lofar_station_client.devices import LofarDeviceProxy
-
-from tango.test_context import MultiDeviceTestContext
-from tango.server import Device, attribute, AttrWriteType
-from tango import DevState, DevFailed
+from tests import base
 
 
 class MyDevice(Device):
@@ -286,6 +287,8 @@ class FakeAntennaFieldDeviceProxy:
     RCU_band_select_R = [1] * 3
     RCU_DTH_on_R = [False] * 3
     RCU_DTH_freq_R = [0.0] * 3
+    HBAT_PWR_on_R = []
+    Frequency_Band_R = []
 
     def __init__(self, name):
         self._name = name