diff --git a/QA/QA_Common/lib/hdf5_io.py b/QA/QA_Common/lib/hdf5_io.py
index ea7d8908684dea2080ea7256afa7a8dacd2c2ae0..0b0906140d667b17fe7efff3cd6be72cb9096e87 100644
--- a/QA/QA_Common/lib/hdf5_io.py
+++ b/QA/QA_Common/lib/hdf5_io.py
@@ -155,9 +155,8 @@ def write_hypercube(path, saps, parset=None, sas_id=None, wsrta_id=None, do_comp
         measurement_group.attrs['description'] = 'all data (visibilities, flagging, parset, ...) for this measurement (observation/pipeline)'
 
         if parset is not None:
-            parset_str = str(parset)
             ds = file.create_dataset('measurement/parset', (1,), h5py.special_dtype(vlen=str),
-                                     [parset_str],
+                                     [str(parset).encode('utf-8')],  # vlen-str data stored as utf-8 bytes
                                      compression="lzf")
             ds.attrs['description'] = 'the parset of this observation/pipeline with all settings how this data was created'
 
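The parset hunk above establishes the pattern used throughout this change: encode a Python 3 str to bytes before handing it to an h5py vlen-str dataset. A minimal standalone round-trip sketch (the file path and payload are illustrative, not part of hdf5_io.py):

    import h5py

    with h5py.File('/tmp/example.h5', 'w') as f:
        # write utf-8 bytes into a (1,)-shaped variable-length string dataset
        f.create_dataset('parset', (1,), h5py.special_dtype(vlen=str),
                         ['key=value'.encode('utf-8')], compression="lzf")

    with h5py.File('/tmp/example.h5', 'r') as f:
        value = f['parset'][0]
        # depending on the h5py version, vlen-str data reads back as str or bytes
        text = value.decode('utf-8') if isinstance(value, bytes) else value
        assert text == 'key=value'
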
@@ -184,11 +183,12 @@ def write_hypercube(path, saps, parset=None, sas_id=None, wsrta_id=None, do_comp
             antenna_locations = sap_dict.get('antenna_locations')
 
             sap_group = file.create_group('measurement/saps/%d' % sap_nr)
-            ds = sap_group.create_dataset('polarizations', (len(polarizations),), h5py.special_dtype(vlen=str), polarizations)
+            ds = sap_group.create_dataset('polarizations', (len(polarizations),), h5py.special_dtype(vlen=str),
+                                          [p.encode('ascii') for p in polarizations])
             ds.attrs['description'] = 'polarizations of the visibilities'
 
             ds = sap_group.create_dataset('baselines', (len(baselines),2), h5py.special_dtype(vlen=str),
-                                    [[str(bl[0]), str(bl[1])] for bl in baselines])
+                                          [[bl[0].encode('ascii'), bl[1].encode('ascii')] for bl in baselines])
             ds.attrs['description'] = 'pairs of baselines between stations'
 
             if any(isinstance(t, datetime) for t in timestamps):
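The baselines dataset extends the same encoding to an (N, 2) shape of station-name pairs; a hedged sketch with made-up station names, reusing the file from the sketch above:

    with h5py.File('/tmp/example.h5', 'a') as f:
        bls = [('CS001', 'CS002'), ('CS001', 'RS106')]  # hypothetical station pairs
        f.create_dataset('baselines', (len(bls), 2), h5py.special_dtype(vlen=str),
                         [[a.encode('ascii'), b.encode('ascii')] for a, b in bls])
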
@@ -655,13 +655,13 @@ def combine_hypercubes(input_paths, output_dir, output_filename=None, do_compres
         # convert any 1.2 to 1.3 file if needed
         for path in existing_paths:
             with SharedH5File(path, "r") as file:
-                if file['version'][0] == 1.2:
+                if file['version'][0] == '1.2':  # 'version' is stored as a string
                     convert_12_to_13(path)
 
         # convert any 1.3 to 1.4 file if needed
         for path in existing_paths:
             with SharedH5File(path, "r") as file:
-                if file['version'][0] == 1.3:
+                if file['version'][0] == '1.3':
                     convert_13_to_14(path)
 
         input_files = [SharedH5File(p, "r").open() for p in existing_paths]
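Both loops now gate on string-typed versions; a sketch of the check they rely on (SharedH5File, convert_12_to_13 and existing_paths are the module's own names; needs_conversion is hypothetical):

    def needs_conversion(path, from_version):
        # 'version' is a vlen-str dataset, so compare against '1.2'/'1.3', never floats
        with SharedH5File(path, "r") as file:
            return file['version'][0] == from_version

    for path in existing_paths:
        if needs_conversion(path, '1.2'):
            convert_12_to_13(path)
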
@@ -1364,7 +1364,7 @@ def fill_info_folder_from_parset(h5_path):
                                   ('name', 'Scheduler.taskName')]:
                     ps_key = 'ObsSW.Observation.' + key
                     ps_value = parset.getString(ps_key, '<unknown>')
-                    info_group.create_dataset(name, (1,), h5py.special_dtype(vlen=str), [ps_value])
+                    info_group.create_dataset(name, (1,), h5py.special_dtype(vlen=str), [ps_value.encode('utf-8')])
 
                 try:
                     # try to import lofar.common.datetimeutils here and not at the top of the file
@@ -1373,8 +1373,8 @@ def fill_info_folder_from_parset(h5_path):
                     start_time = parset.getString('ObsSW.Observation.startTime')
                     stop_time = parset.getString('ObsSW.Observation.stopTime')
                     duration = parseDatetime(stop_time) - parseDatetime(start_time)
-                    info_group.create_dataset('start_time', (1,), h5py.special_dtype(vlen=str), [start_time])
-                    info_group.create_dataset('stop_time', (1,), h5py.special_dtype(vlen=str), [stop_time])
+                    info_group.create_dataset('start_time', (1,), h5py.special_dtype(vlen=str), [start_time.encode('utf-8')])
+                    info_group.create_dataset('stop_time', (1,), h5py.special_dtype(vlen=str), [stop_time.encode('utf-8')])
                     ds = info_group.create_dataset('duration', data=[totalSeconds(duration)])
                     ds.attrs['description'] = 'duration in seconds'
                 except (ImportError, RuntimeError, ValueError) as e:
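For reference, the duration written here is presumably the plain timedelta total; a standalone sketch under the assumption that parset timestamps use the '%Y-%m-%d %H:%M:%S' format (parseDatetime and totalSeconds are the real helpers from lofar.common.datetimeutils):

    from datetime import datetime

    fmt = '%Y-%m-%d %H:%M:%S'  # assumed timestamp format
    start = datetime.strptime('2018-06-11 12:00:00', fmt)
    stop = datetime.strptime('2018-06-11 12:10:00', fmt)
    # stdlib equivalent of totalSeconds(parseDatetime(stop) - parseDatetime(start))
    assert (stop - start).total_seconds() == 600.0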