diff --git a/lofar_station_client/file_access/__init__.py b/lofar_station_client/file_access/__init__.py
index 1dc828c3b29f6574d023e0c2f167a6f5864c8ba7..661011d5f68854681f05fc8194dd1da01c8459e9 100644
--- a/lofar_station_client/file_access/__init__.py
+++ b/lofar_station_client/file_access/__init__.py
@@ -8,10 +8,6 @@ Contains classes to interact with (hdf5) files
 from ._hdf5_attribute_def import attribute
 from ._hdf5_member_def import member
-from ._hdf_file_reader import Hdf5FileReader, read_hdf5
-from ._hdf_file_writer import Hdf5FileWriter, open_hdf5, create_hdf5
+from .hdf_file_reader import Hdf5FileReader, read_hdf5

-__all__ = [
-    "Hdf5FileReader", "Hdf5FileWriter", "attribute", "member", "read_hdf5",
-    "open_hdf5", "create_hdf5"
-]
+__all__ = ["Hdf5FileReader", "attribute", "member", "read_hdf5"]
diff --git a/lofar_station_client/file_access/_hdf5_attribute_def.py b/lofar_station_client/file_access/_hdf5_attribute_def.py
index 4dcca8259af83f5696c4e80ec9a1fd2752a6d589..17bf8f6633398be0cb2cc1ff77a7a586ced7529c 100644
--- a/lofar_station_client/file_access/_hdf5_attribute_def.py
+++ b/lofar_station_client/file_access/_hdf5_attribute_def.py
@@ -27,6 +27,7 @@ class Hdf5AttributeDef:
         self.name = name
         self.from_member = from_member
         self.optional = optional
+        self.cache = None
         self.owner: Any

     def __set_name__(self, owner, name):
@@ -34,14 +35,9 @@ class Hdf5AttributeDef:
         self.name = name
         self.owner = owner

-    def __set__(self, instance, value):
-        setattr(instance, f"_{self.name}", value)
-        attrs = self._resolve_attrs(instance)
-        attrs[self.name] = value
-
     def __get__(self, obj, obj_type=None):
-        if hasattr(obj, f"_{self.name}"):
-            return getattr(obj, f"_{self.name}")
+        if self.cache is not None:
+            return self.cache

         attrs = self._resolve_attrs(obj)

@@ -50,9 +46,8 @@ class Hdf5AttributeDef:
                 return None
             raise KeyError(f"Could not find required attribute key {self.name}")

-        result = attrs[self.name]
-        setattr(obj, f"_{self.name}", result)
-        return result
+        self.cache = attrs[self.name]
+        return self.cache

     def _resolve_attrs(self, obj):
         data = getattr(obj, "_data")
diff --git a/lofar_station_client/file_access/_hdf5_member_def.py b/lofar_station_client/file_access/_hdf5_member_def.py
index 9da1f439790aa9a1bc4ac4ee7f088c8d4371ad1b..d7f67e6ff4a75b45c3788ab5e169e52f6bb1836c 100644
--- a/lofar_station_client/file_access/_hdf5_member_def.py
+++ b/lofar_station_client/file_access/_hdf5_member_def.py
@@ -8,7 +8,7 @@ of HDF5 files

 from typing import Type

-from ._hdf5_utils import _detect_reader, _default, _detect_writer
+from ._hdf5_utils import _detect_reader
 from ._utils import _extract_type


@@ -29,6 +29,7 @@ class Hdf5MemberDef:
     def __init__(self, name: str, optional: bool):
         self.name = name
         self.optional = optional
+        self.cache = None
         self.attr_name: str
         self.type: Type

@@ -39,24 +40,9 @@ class Hdf5MemberDef:
         self.type = _extract_type(owner, name)

     def __get__(self, obj, obj_type=None):
-        if hasattr(obj, f"_v_{self.name}"):
-            return getattr(obj, f"_v_{self.name}")
+        if self.cache is not None:
+            return self.cache

-        if hasattr(obj, "_data"):
-            value = self.execute_read(obj)
-        elif not self.optional:
-            value = _default(self.type)
-
-        setattr(obj, f"_v_{self.name}", value)
-        return value
-
-    def __set__(self, instance, value):
-        setattr(instance, f"_v_{self.name}", value)
-        data = getattr(instance, "_data")
-        writer = _detect_writer(self.type)
-        writer(data, self.name, value)
-
-    def execute_read(self, obj):
         data = getattr(obj, "_data")

         if self.name not in data:
@@ -65,4 +51,5 @@ class Hdf5MemberDef:
             raise KeyError(f"Could not find required key {self.name}")

         reader = _detect_reader(self.type)
-        return reader(data[self.name])
+        self.cache = reader(data[self.name])
+        return self.cache
diff --git a/lofar_station_client/file_access/_hdf5_utils.py b/lofar_station_client/file_access/_hdf5_utils.py
index 611e5e0d64e7c150a629359a1cdd3c6a7432a823..8748f2383f7ccd67b77f3938cc4ae42700519e91 100644
--- a/lofar_station_client/file_access/_hdf5_utils.py
+++ b/lofar_station_client/file_access/_hdf5_utils.py
@@ -4,7 +4,6 @@
 """
 Utils to handle transformation of HDF5 specific classes to pythonic objects
 """
-import inspect
 from collections.abc import MutableMapping
 from typing import Type, TypeVar, Dict

@@ -19,16 +18,16 @@ T = TypeVar("T")
 def _assert_is_dataset(value):
     if issubclass(type(value), MutableMapping):
         raise TypeError(
-            f"Only <Dataset> can be mappet do primitive type while "
-            f"value is of type <{type(value).__name__}>"
+            f"Only <Dataset> can be mappet do primitive type while "
+            f"value is of type <{type(value).__name__}>"
         )


 def _assert_is_group(value):
     if not issubclass(type(value), MutableMapping):
         raise TypeError(
-            "Only Group can be mapped to <object> while value"
-            f" is of type <{type(value).__name__}>"
+            "Only Group can be mapped to <object> while value"
+            f" is of type <{type(value).__name__}>"
         )
@@ -72,74 +71,6 @@ def _detect_reader(target_type):
         return _read_ndarray
     if issubclass(target_type, dict):
         return lambda value: _read_dict(
-            _extract_base_type(target_type), value, target_type
+            _extract_base_type(target_type), value, target_type
         )
     return lambda value: _read_object(target_type, value)
-
-
-def _write_object(target_type: Type[T], data, key, value: T):
-    _assert_is_group(data)
-    if key in data:
-        _assert_is_group(data[key])
-    else:
-        data.create_group(key)
-
-    setattr(value, "_data", data[key])
-
-
-def _attach_object(target_type: Type[T], instance):
-    annotations = inspect.get_annotations(target_type)
-    for a in annotations:
-        attr = inspect.getattr_static(target_type, a)
-        attr.__set__(instance, getattr(instance, f"_{attr.name}"))
-
-
-def _write_ndarray(data, key, value):
-    _assert_is_group(data)
-    if key in data:
-        _assert_is_dataset(data[key])
-        data[key] = value
-    else:
-        data.create_dataset(key, data=value)
-
-
-def _write_dict(target_type: Type[T], data, key, value):
-    writer = _detect_writer(target_type)
-    for k in value.keys():
-        writer(data[key], k, value[k])
-
-
-def _write_list(target_type: Type[T], data, key, value):
-    pass
-
-
-def _detect_writer(target_type):
-    origin_type = get_origin(target_type)
-    if origin_type is dict:
-        return lambda data, key, value: _write_dict(
-            _extract_base_type(target_type),
-            data, key, value
-        )
-    if get_origin(target_type) is list:
-        return _write_ndarray
-    if target_type is ndarray:
-        return _write_ndarray
-    if issubclass(target_type, dict):
-        return lambda data, key, value: _write_dict(
-            _extract_base_type(target_type),
-            data, key, value
-        )
-    return lambda data, key, value: _write_object(target_type, data, key, value)
-
-
-def _default(target_type):
-    origin_type = get_origin(target_type)
-    if origin_type is dict:
-        return {}
-    if get_origin(target_type) is list:
-        return []
-    if target_type is ndarray:
-        return []
-    if issubclass(target_type, dict):
-        return {}
-    return target_type()
diff --git a/lofar_station_client/file_access/_hdf_file_writer.py b/lofar_station_client/file_access/_hdf_file_writer.py
deleted file mode 100644
index 5d498a550ddb102e51d7196eeb3b73380e157818..0000000000000000000000000000000000000000
--- a/lofar_station_client/file_access/_hdf_file_writer.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
-
-"""
-Contains classes to handle file writing
-"""
-
-from typing import TypeVar, Type
-
-import h5py
-
-from . import Hdf5FileReader
-from ._hdf5_utils import _default
-
-T = TypeVar("T")
-
-
-class Hdf5FileWriter(Hdf5FileReader[T]):
-    """
-    HDF5 specific file writer
-    """
-    def __init__(self, name, target_type, create):
-        self._create = create
-        super().__init__(name, target_type)
-
-    def _open_file(self, name):
-        self._hdf5_file = h5py.File(name, "w" if self._create else "a")
-        self._is_closed = False
-
-    def create(self):
-        obj = _default(self._target_type)
-        setattr(obj, "_data", self._hdf5_file)
-        return obj
-
-    def __enter__(self):
-        return self.create()
-
-
-def open_hdf5(name: str, target_type: T) -> Hdf5FileWriter:
-    """
-    Open a HDF5 file by name/path
-    """
-    return Hdf5FileWriter[T](name, target_type, False)
-
-
-def create_hdf5(name: str, target_type: T) -> Hdf5FileWriter:
-    """
-    Open a HDF5 file by name/path
-    """
-    return Hdf5FileWriter[T](name, target_type, True)
diff --git a/lofar_station_client/file_access/_hdf_file_reader.py b/lofar_station_client/file_access/hdf_file_reader.py
similarity index 93%
rename from lofar_station_client/file_access/_hdf_file_reader.py
rename to lofar_station_client/file_access/hdf_file_reader.py
index 740f469465294aaa403d1bb100887badb1110b26..a98a4ccf1aca44591594e4552a9dbaa1dcdc57f1 100644
--- a/lofar_station_client/file_access/_hdf_file_reader.py
+++ b/lofar_station_client/file_access/hdf_file_reader.py
@@ -20,12 +20,8 @@ class Hdf5FileReader(Generic[T]):
     """

     def __init__(self, name, target_type):
-        self._is_closed = None
-        self._target_type = target_type
-        self._open_file(name)
-
-    def _open_file(self, name):
         self._hdf5_file = h5py.File(name, "r")
+        self._target_type = target_type
         self._is_closed = False

     def read(self) -> T:
diff --git a/tests/file_access/test_file_writer.py b/tests/file_access/test_file_writer.py
deleted file mode 100644
index 8a1297784732e9c5483f4ca280697159d39fc33d..0000000000000000000000000000000000000000
--- a/tests/file_access/test_file_writer.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
-# SPDX-License-Identifier: Apache-2.0
-
-from os.path import dirname
-from typing import List
-
-from lofar_station_client.file_access import member, attribute, open_hdf5, create_hdf5
-from tests import base
-
-
-class DataSubSet(object):
-    values: List[int] = member()
-
-
-class DataSet:
-    observation_station: str = attribute()
-    observation_source: str = attribute(from_member="sub_set")
-    nof_payload_errors: List[int] = member()
-    values: List[List[float]] = member()
-    sub_set: DataSubSet = member(name="test")
-    non_existent: DataSubSet = member(optional=True)
-
-
-class TestHdf5FileWriter(base.TestCase):
-    def test_file_writing(self):
-        with create_hdf5(dirname(__file__) + "/write-test.h5", DataSet) as ds:
-            ds.observation_station = "CS001"
-            ds.nof_payload_errors = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
-            ds.values = [[1.0]]
-            ds.sub_set = DataSubSet()
-            ds.sub_set.values = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
-            ds.observation_source = "CasA"
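
With the writer (Hdf5FileWriter, open_hdf5, create_hdf5) and its test removed, the package only exposes the read path: read_hdf5, Hdf5FileReader, attribute and member. Outside the diff, a minimal sketch of that surviving API follows, assuming read_hdf5(name, target_type) mirrors the signature of the removed open_hdf5 helper; the DataSet/DataSubSet declarations are copied from the removed writer test, and the file path stands in for an existing HDF5 file whose layout matches them.

# Sketch of the retained read-only API; path, class names and printed values are
# illustrative, and read_hdf5(name, target_type) is assumed to mirror the removed
# open_hdf5 helper.
from typing import List

from lofar_station_client.file_access import attribute, member, read_hdf5


class DataSubSet:
    values: List[int] = member()


class DataSet:
    observation_station: str = attribute()
    observation_source: str = attribute(from_member="sub_set")
    nof_payload_errors: List[int] = member()
    values: List[List[float]] = member()
    sub_set: DataSubSet = member(name="test")
    non_existent: DataSubSet = member(optional=True)


reader = read_hdf5("write-test.h5", DataSet)
data_set = reader.read()
# Values read from the file are now cached on the descriptors (self.cache);
# the __set__ write path no longer exists.
print(data_set.observation_station)  # e.g. "CS001"
print(data_set.sub_set.values)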