Skip to content
Snippets Groups Projects
Commit d332d1f9 authored by Hannes Feldt's avatar Hannes Feldt
Browse files

Revert "Bump"

This reverts commit f36bfe89.
parent f36bfe89
No related branches found
No related tags found
No related merge requests found
Pipeline #40793 canceled
......@@ -8,10 +8,6 @@ Contains classes to interact with (hdf5) files
from ._hdf5_attribute_def import attribute
from ._hdf5_member_def import member
from ._hdf_file_reader import Hdf5FileReader, read_hdf5
from ._hdf_file_writer import Hdf5FileWriter, open_hdf5, create_hdf5
from .hdf_file_reader import Hdf5FileReader, read_hdf5
__all__ = [
"Hdf5FileReader", "Hdf5FileWriter", "attribute", "member", "read_hdf5",
"open_hdf5", "create_hdf5"
]
__all__ = ["Hdf5FileReader", "attribute", "member", "read_hdf5"]
......@@ -27,6 +27,7 @@ class Hdf5AttributeDef:
self.name = name
self.from_member = from_member
self.optional = optional
self.cache = None
self.owner: Any
def __set_name__(self, owner, name):
......@@ -34,14 +35,9 @@ class Hdf5AttributeDef:
self.name = name
self.owner = owner
def __set__(self, instance, value):
setattr(instance, f"_{self.name}", value)
attrs = self._resolve_attrs(instance)
attrs[self.name] = value
def __get__(self, obj, obj_type=None):
if hasattr(obj, f"_{self.name}"):
return getattr(obj, f"_{self.name}")
if self.cache is not None:
return self.cache
attrs = self._resolve_attrs(obj)
......@@ -50,9 +46,8 @@ class Hdf5AttributeDef:
return None
raise KeyError(f"Could not find required attribute key {self.name}")
result = attrs[self.name]
setattr(obj, f"_{self.name}", result)
return result
self.cache = attrs[self.name]
return self.cache
def _resolve_attrs(self, obj):
data = getattr(obj, "_data")
......
......@@ -8,7 +8,7 @@ of HDF5 files
from typing import Type
from ._hdf5_utils import _detect_reader, _default, _detect_writer
from ._hdf5_utils import _detect_reader
from ._utils import _extract_type
......@@ -29,6 +29,7 @@ class Hdf5MemberDef:
def __init__(self, name: str, optional: bool):
self.name = name
self.optional = optional
self.cache = None
self.attr_name: str
self.type: Type
......@@ -39,24 +40,9 @@ class Hdf5MemberDef:
self.type = _extract_type(owner, name)
def __get__(self, obj, obj_type=None):
if hasattr(obj, f"_v_{self.name}"):
return getattr(obj, f"_v_{self.name}")
if self.cache is not None:
return self.cache
if hasattr(obj, "_data"):
value = self.execute_read(obj)
elif not self.optional:
value = _default(self.type)
setattr(obj, f"_v_{self.name}", value)
return value
def __set__(self, instance, value):
setattr(instance, f"_v_{self.name}", value)
data = getattr(instance, "_data")
writer = _detect_writer(self.type)
writer(data, self.name, value)
def execute_read(self, obj):
data = getattr(obj, "_data")
if self.name not in data:
......@@ -65,4 +51,5 @@ class Hdf5MemberDef:
raise KeyError(f"Could not find required key {self.name}")
reader = _detect_reader(self.type)
return reader(data[self.name])
self.cache = reader(data[self.name])
return self.cache
......@@ -4,7 +4,6 @@
"""
Utils to handle transformation of HDF5 specific classes to pythonic objects
"""
import inspect
from collections.abc import MutableMapping
from typing import Type, TypeVar, Dict
......@@ -75,71 +74,3 @@ def _detect_reader(target_type):
_extract_base_type(target_type), value, target_type
)
return lambda value: _read_object(target_type, value)
def _write_object(target_type: Type[T], data, key, value: T):
    """
    Bind *value* to the HDF5 group ``data[key]``.

    The sub-group is created when it does not exist yet; when it does, it
    must already be a group. The object is attached by pointing its
    ``_data`` backing store at that group.
    """
    _assert_is_group(data)
    if key not in data:
        data.create_group(key)
    else:
        _assert_is_group(data[key])
    setattr(value, "_data", data[key])
def _attach_object(target_type: Type[T], instance):
annotations = inspect.get_annotations(target_type)
for a in annotations:
attr = inspect.getattr_static(target_type, a)
attr.__set__(instance, getattr(instance, f"_{attr.name}"))
def _write_ndarray(data, key, value):
    """
    Store *value* as the dataset ``data[key]``.

    h5py does not allow assigning to an existing name — ``group[key] = v``
    raises "name already exists" — so an existing dataset is deleted and
    recreated instead. Recreating (rather than writing in place) also copes
    with a changed shape or dtype of *value*.
    """
    _assert_is_group(data)
    if key in data:
        _assert_is_dataset(data[key])
        del data[key]
    data.create_dataset(key, data=value)
def _write_dict(target_type: Type[T], data, key, value):
    """
    Write every entry of the mapping *value* into the sub-group
    ``data[key]``.

    The sub-group is created when absent (mirroring ``_write_object``);
    previously a missing group made ``data[key]`` raise ``KeyError``.
    """
    if key not in data:
        data.create_group(key)
    writer = _detect_writer(target_type)
    target_group = data[key]
    for item_key, item_value in value.items():
        writer(target_group, item_key, item_value)
def _write_list(target_type: Type[T], data, key, value):
    # Intentionally a no-op. NOTE(review): _detect_writer currently routes
    # list types to _write_ndarray, so this writer is never selected —
    # presumably a placeholder for list-specific handling; confirm before
    # removing.
    pass
def _detect_writer(target_type):
origin_type = get_origin(target_type)
if origin_type is dict:
return lambda data, key, value: _write_dict(
_extract_base_type(target_type),
data, key, value
)
if get_origin(target_type) is list:
return _write_ndarray
if target_type is ndarray:
return _write_ndarray
if issubclass(target_type, dict):
return lambda data, key, value: _write_dict(
_extract_base_type(target_type),
data, key, value
)
return lambda data, key, value: _write_object(target_type, data, key, value)
def _default(target_type):
origin_type = get_origin(target_type)
if origin_type is dict:
return {}
if get_origin(target_type) is list:
return []
if target_type is ndarray:
return []
if issubclass(target_type, dict):
return {}
return target_type()
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
"""
Contains classes to handle file writing
"""
from typing import TypeVar, Type
import h5py
from . import Hdf5FileReader
from ._hdf5_utils import _default
T = TypeVar("T")
class Hdf5FileWriter(Hdf5FileReader[T]):
    """
    HDF5 specific file writer

    Opens the file in ``"w"`` (create/truncate) mode when *create* is
    true, otherwise in ``"a"`` (append) mode.
    """

    def __init__(self, name, target_type, create):
        # _create must be assigned before super().__init__, because the
        # base initializer calls _open_file, which reads self._create.
        self._create = create
        super().__init__(name, target_type)

    def _open_file(self, name):
        # Overrides the base reader's read-only open with a writable mode.
        self._hdf5_file = h5py.File(name, "w" if self._create else "a")
        self._is_closed = False

    def create(self):
        # Build an empty instance of the target type and bind its _data
        # backing store to the root of this HDF5 file.
        obj = _default(self._target_type)
        setattr(obj, "_data", self._hdf5_file)
        return obj

    def __enter__(self):
        # Entering the context yields a fresh writable object bound to the
        # file root, not the writer itself.
        return self.create()
def open_hdf5(name: str, target_type: Type[T]) -> Hdf5FileWriter:
    """
    Open a HDF5 file by name/path for appending (file is not truncated).

    :param name: name/path of the HDF5 file
    :param target_type: type the file content is mapped to
    :return: a Hdf5FileWriter wrapping the opened file
    """
    return Hdf5FileWriter[T](name, target_type, False)
def create_hdf5(name: str, target_type: Type[T]) -> Hdf5FileWriter:
    """
    Create a new HDF5 file by name/path (an existing file is truncated).

    :param name: name/path of the HDF5 file
    :param target_type: type the file content is mapped to
    :return: a Hdf5FileWriter wrapping the newly created file
    """
    return Hdf5FileWriter[T](name, target_type, True)
......@@ -20,12 +20,8 @@ class Hdf5FileReader(Generic[T]):
"""
def __init__(self, name, target_type):
self._is_closed = None
self._target_type = target_type
self._open_file(name)
def _open_file(self, name):
self._hdf5_file = h5py.File(name, "r")
self._target_type = target_type
self._is_closed = False
def read(self) -> T:
......
# Copyright (C) 2022 ASTRON (Netherlands Institute for Radio Astronomy)
# SPDX-License-Identifier: Apache-2.0
from os.path import dirname
from typing import List
from lofar_station_client.file_access import member, attribute, open_hdf5, create_hdf5
from tests import base
class DataSubSet(object):
    """Nested data set used as a sub-group member of DataSet."""

    # list dataset stored under the member's own attribute name
    values: List[int] = member()
class DataSet:
    """Declarative mapping of a test HDF5 file layout."""

    # HDF5 attribute read from/written to the root group
    observation_station: str = attribute()
    # attribute resolved on the "sub_set" member instead of the root
    # (see attribute(from_member=...))
    observation_source: str = attribute(from_member="sub_set")
    nof_payload_errors: List[int] = member()
    values: List[List[float]] = member()
    # stored in the file under the HDF5 name "test", not "sub_set"
    sub_set: DataSubSet = member(name="test")
    # optional: reading returns None instead of raising when absent
    non_existent: DataSubSet = member(optional=True)
class TestHdf5FileWriter(base.TestCase):
    def test_file_writing(self):
        """Write a DataSet with nested members and attributes to a fresh HDF5 file."""
        # create_hdf5 opens in truncate mode, so leftovers from earlier runs
        # are discarded
        with create_hdf5(dirname(__file__) + "/write-test.h5", DataSet) as ds:
            ds.observation_station = "CS001"
            ds.nof_payload_errors = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
            ds.values = [[1.0]]
            # NOTE(review): the nested group appears to need assigning before
            # its members/attributes so a backing _data group exists — confirm
            ds.sub_set = DataSubSet()
            ds.sub_set.values = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
            ds.observation_source = "CasA"
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment