diff --git a/.gitattributes b/.gitattributes
index 731e41a6893bb2269065882d2624ad6cdd8f067d..71c98d3d1eead474227b3501486456902c596562 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -3933,6 +3933,7 @@ MAC/doc/package.dox -text
 QA/CMakeLists.txt -text
 QA/QA_Common/CMakeLists.txt -text
 QA/QA_Common/bin/CMakeLists.txt -text
+QA/QA_Common/bin/create_test_hypercube -text
 QA/QA_Common/bin/find_hdf5 -text
 QA/QA_Common/bin/show_hdf5_info -text
 QA/QA_Common/lib/CMakeLists.txt -text
@@ -3940,6 +3941,7 @@ QA/QA_Common/lib/__init__.py -text
 QA/QA_Common/lib/cep4_utils.py -text
 QA/QA_Common/lib/geoconversions.py -text
 QA/QA_Common/lib/hdf5_io.py -text
+QA/QA_Common/lib/utils.py -text
 QA/QA_Common/test/CMakeLists.txt -text
 QA/QA_Common/test/create_test_hypercube -text
 QA/QA_Common/test/t_cep4_utils.py -text
@@ -3953,6 +3955,7 @@ QA/QA_Service/CMakeLists.txt -text
 QA/QA_Service/bin/CMakeLists.txt -text
 QA/QA_Service/bin/qa_service -text
 QA/QA_Service/bin/qa_service.ini -text
+QA/QA_Service/bin/qa_webservice.ini -text
 QA/QA_Service/lib/CMakeLists.txt -text
 QA/QA_Service/lib/QABusListener.py -text
 QA/QA_Service/lib/__init__.py -text
diff --git a/QA/QA_Common/CMakeLists.txt b/QA/QA_Common/CMakeLists.txt
index 5a4b1543df5a355492bffc8dc6407d8d1d89efa3..3590fb7ee54afa6ea45bc9999cf68c33af785d67 100644
--- a/QA/QA_Common/CMakeLists.txt
+++ b/QA/QA_Common/CMakeLists.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
diff --git a/QA/QA_Common/bin/CMakeLists.txt b/QA/QA_Common/bin/CMakeLists.txt
index c72417466da349a4ec956415b4a2ac11ee0844ad..0f696cc0dcba6026d49ac4adbcba9e862df57fe5 100644
--- a/QA/QA_Common/bin/CMakeLists.txt
+++ b/QA/QA_Common/bin/CMakeLists.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -18,5 +18,5 @@
 # $Id$
 
 lofar_add_bin_scripts(show_hdf5_info
-                      find_hdf5)
-
+                      find_hdf5
+                      create_test_hypercube)
diff --git a/QA/QA_Common/bin/create_test_hypercube b/QA/QA_Common/bin/create_test_hypercube
new file mode 100755
index 0000000000000000000000000000000000000000..77c5982a8b108b2aaa1cf727e99164ed32eb46b7
--- /dev/null
+++ b/QA/QA_Common/bin/create_test_hypercube
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+import os
+from optparse import OptionParser
+from lofar.qa.utils import *
+from lofar.qa.hdf5_io import write_hypercube
+
+import logging
+logger = logging.getLogger(__name__)
+
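+# A usage sketch (the output path is hypothetical):
+#   create_test_hypercube -S 4 -s 64 -t 32 --saps 2 /tmp/test_cube.h5
+# writes a cube with 4 stations, 2 saps of 64 subbands each, and 32 timestamps.
+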
+def main():
+    # make sure we run in UTC timezone
+    os.environ['TZ'] = 'UTC'
+
+    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
+                        level=logging.INFO)
+
+    ## Check the invocation arguments
+    parser = OptionParser(usage='create_test_hypercube [options] <path_to_new_h5_file>',
+                          description='creates a test h5 hypercube with random data for the given number of stations, saps, subbands, timestamps.')
+    parser.add_option('-S', '--stations', dest='stations', type='int', default=3, help='number of stations to create, default: %default')
+    parser.add_option('-s', '--subbands', dest='subbands', type='int', default=244, help='number of subbands (per sap) to create, default: %default')
+    parser.add_option('-t', '--timestamps', dest='timestamps', type='int', default=128, help='number of timestamps to create, default: %default')
+    parser.add_option('--saps', dest='saps', type='int', default=1, help='number of saps to create, default: %default')
+    parser.add_option('-o', '--otdb_id', dest='otdb_id', type='int', default=None, help='optional (fake/test) otdb id, default: %default')
+    parser.add_option('--snr', dest='snr', type='float', default=0.9, help='signal to noise ratio. The signal is a test image with a full sweep through all phases and amplitudes from [0..1], and the noise is just random complex numbers, default: %default')
+    parser.add_option('-a', '--amplitude', dest='max_signal_amplitude', type='float', default=100, help='the max signal amplitude, default: %default')
+    parser.add_option('-p', '--pol-ratio', dest='parallel_to_cross_polarization_ratio', type='float', default=1, help='the ratio between parallel and cross polarization visibility amplitudes, default: %default')
+    parser.add_option('-w', '--num_phase_wraps', dest='num_phase_wraps', type='float', default=1, help='the number of times the phase wraps around 2pi along the freq/sb axis, default: %default')
+
+    (options, args) = parser.parse_args()
+
+    if len(args) != 1:
+        print 'Please provide a file name for the h5 file which you want to create...'
+        print
+        parser.print_help()
+        exit(1)
+
+    cube = create_hypercube(num_stations=options.stations,
+                            num_saps=options.saps,
+                            num_subbands_per_sap={sap:options.subbands for sap in range(options.saps)},
+                            num_timestamps=options.timestamps,
+                            snr=options.snr,
+                            max_signal_amplitude=options.max_signal_amplitude,
+                            parallel_to_cross_polarization_ratio=options.parallel_to_cross_polarization_ratio,
+                            num_phase_wraps=options.num_phase_wraps)
+
+    write_hypercube(args[0], cube, sas_id=options.otdb_id)
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file
diff --git a/QA/QA_Common/bin/find_hdf5 b/QA/QA_Common/bin/find_hdf5
index e5fe0540bda0cea0d7a245a06d3e886d36a4ac80..19ca4a0cf650c96a76637515c2b182d4c358e9d7 100755
--- a/QA/QA_Common/bin/find_hdf5
+++ b/QA/QA_Common/bin/find_hdf5
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -23,12 +23,15 @@ if __name__ == '__main__':
 
     import os
     import os.path
+    import sys
     import fnmatch
     import glob
     from optparse import OptionParser, OptionGroup
     from datetime import datetime, timedelta
 
+    from lofar.parameterset import *
     from lofar.qa.hdf5_io import *
+    from lofar.common.datetimeutils import parseDatetime
 
     # make sure we run in UTC timezone
     os.environ['TZ'] = 'UTC'
@@ -86,12 +89,10 @@ if __name__ == '__main__':
     logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
                         level=logging.DEBUG if options.verbose else logging.WARN)
 
-    # parse the options; if no specific filter for either obs or pipeline was given, then search for both.
     if not options.observation and not options.pipeline:
         options.observation = True
         options.pipeline = True
 
-    # parse the options; if no specific filter for either lba or hba was given, then search for both.
     if options.lba or options.hba:
         options.observation = True
         options.pipeline = False
@@ -107,7 +108,6 @@ if __name__ == '__main__':
 
     info_dicts = {}
 
-    # gather all info_dicts for all files...
     for file in files:
         try:
             info_dict = read_info_dict(file)
@@ -116,64 +116,52 @@ if __name__ == '__main__':
         except:
             pass
 
-    # ...and filter out the files that do not match the search filters
-
     if not (options.observation and options.pipeline):
         if options.observation:
-            # keep observations
             files = [f for f in files
                      if f in info_dicts and 'observation' in info_dicts[f].get('type', '').lower()]
 
         if options.pipeline:
-            # keep pipelines
             files = [f for f in files
                      if f in info_dicts and 'pipeline' in info_dicts[f].get('type', '').lower()]
 
 
     if not (options.lba and options.hba):
         if options.lba:
-            # keep lba
             files = [f for f in files
                      if f in info_dicts and 'lba' in info_dicts[f].get('antenna_array', '').lower()]
 
         if options.hba:
-            # keep hba
             files = [f for f in files
                      if f in info_dicts and 'hba' in info_dicts[f].get('antenna_array', '').lower()]
 
     if options.name:
-        # keep matching task names
         files = [f for f in files if f in info_dicts and
                  fnmatch.fnmatch(info_dicts[f].get('name', '').lower(), options.name.lower())]
 
     if options.project:
-        # keep matching project names
         files = [f for f in files if f in info_dicts and
                  (fnmatch.fnmatch(info_dicts[f].get('project', '').lower(), options.project.lower()) or
                   fnmatch.fnmatch(info_dicts[f].get('project_description', '').lower(), options.project.lower()))]
 
     if options.date:
-        # keep matching date
         options.date = datetime.strptime(options.date, '%Y-%m-%d').date()
         files = [f for f in files if f in info_dicts and
                  'start_time' in info_dicts[f] and info_dicts[f]['start_time'].date() == options.date]
 
     if options.min_duration:
-        # keep matching duration
         hours, sep, minutes = options.min_duration.partition(':')
         options.min_duration = timedelta(hours=int(hours), minutes=int(minutes))
         files = [f for f in files
                  if f in info_dicts and info_dicts[f].get('duration', timedelta()) >= options.min_duration]
 
     if options.max_duration:
-        # keep matching duration
         hours, sep, minutes = options.max_duration.partition(':')
         options.max_duration = timedelta(hours=int(hours), minutes=int(minutes))
         files = [f for f in files
                  if f in info_dicts and info_dicts[f].get('duration', timedelta()) <= options.max_duration]
 
     if options.clusters or options.no_clusters:
-        # keep matching have/havenot clusters
         def has_clusters(h5_path):
             with h5py.File(h5_path, "r+") as file:
                 return len(file.get('clustering',{}))
@@ -184,15 +172,10 @@ if __name__ == '__main__':
         if options.no_clusters:
             files = [f for f in files if not has_clusters(f)]
 
-    # the final files list now contains only the files matching all given filters.
-
-    # lastly, print the results...
     if options.info:
-        # print the full file info
         for file in files:
             print read_info_from_hdf5(file, read_data_info=False)
     else:
-        # just print the filtered filenames
         print '\n'.join(files)
 
 
diff --git a/QA/QA_Common/bin/show_hdf5_info b/QA/QA_Common/bin/show_hdf5_info
index b1ea563d55ea610cff6e9d5caf3fae79fcb21324..bd9daed3ac0bb98b78a14094e3ef74c50f9d9e7b 100755
--- a/QA/QA_Common/bin/show_hdf5_info
+++ b/QA/QA_Common/bin/show_hdf5_info
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -26,6 +26,7 @@ if __name__ == '__main__':
     from optparse import OptionParser
 
     from lofar.qa.hdf5_io import *
+    from lofar.parameterset import *
 
     # make sure we run in UTC timezone
     os.environ['TZ'] = 'UTC'
diff --git a/QA/QA_Common/lib/CMakeLists.txt b/QA/QA_Common/lib/CMakeLists.txt
index 01be7c1d72e33506e1a92e13b64096d1a8b54c93..7ab0697539811de6e20f4e31a5724617640cf4f2 100644
--- a/QA/QA_Common/lib/CMakeLists.txt
+++ b/QA/QA_Common/lib/CMakeLists.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -22,5 +22,6 @@ python_install(
     cep4_utils.py
     hdf5_io.py
     geoconversions.py
+    utils.py
     DESTINATION lofar/qa)
 
diff --git a/QA/QA_Common/lib/__init__.py b/QA/QA_Common/lib/__init__.py
index 6248f495c54e31977e3f61b2c86ebb9600501d21..4f54da1af6a2548fa7ac163d34990380f2139bf9 100644
--- a/QA/QA_Common/lib/__init__.py
+++ b/QA/QA_Common/lib/__init__.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2018    ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -9,8 +9,9 @@
 #
 # The LOFAR software suite is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.    See the
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License along
 # with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
diff --git a/QA/QA_Common/lib/cep4_utils.py b/QA/QA_Common/lib/cep4_utils.py
index 66c4c6d44b190f0592084b7475cda16d3d9b8990..c0df21a357667e057a387f1969d8ece819a7f45b 100644
--- a/QA/QA_Common/lib/cep4_utils.py
+++ b/QA/QA_Common/lib/cep4_utils.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2018    ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -9,7 +9,7 @@
 #
 # The LOFAR software suite is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.    See the
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License along
@@ -21,71 +21,67 @@ from random import randint
 import logging
 logger = logging.getLogger(__name__)
 
-CEP4_HEAD_NODE = 'head.cep4.control.lofar'
-LOFARSYS_AT_CEP4_HEAD_NODE = 'lofarsys@%s' % (CEP4_HEAD_NODE,)
 
 def ssh_cmd_list(host):
-    """
+    '''
     returns a subprocess compliant command list to do an ssh call to the given node
     uses ssh option -T to disable remote pseudo terminal allocation
     uses ssh option -q for ssh quiet mode (no ssh warnings/errors)
     uses ssh option -o StrictHostKeyChecking=no to prevent prompts about host keys
     :param host: the node name or ip address
     :return: a subprocess compliant command list
-    """
+    '''
     return ['ssh', '-T', '-q', '-o StrictHostKeyChecking=no', host]
 
 
 def wrap_command_in_cep4_head_node_ssh_call(cmd):
-    """wrap the command in an ssh call to head.cep4
+    '''wrap the command in an ssh call to head.cep4
     :param list cmd: a subprocess cmd list
     :return: the same subprocess cmd list, but then wrapped with cep4 ssh calls
-    """
-    return ssh_cmd_list(LOFARSYS_AT_CEP4_HEAD_NODE) + cmd
+    '''
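+    # e.g. wrap_command_in_cep4_head_node_ssh_call(['ls', '-l']) returns:
+    # ['ssh', '-T', '-q', '-o StrictHostKeyChecking=no', 'lofarsys@head.cep4.control.lofar', 'ls', '-l']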
+    return ssh_cmd_list('lofarsys@head.cep4.control.lofar') + cmd
 
 def wrap_command_in_cep4_random_cpu_node_ssh_call(cmd, via_head=True):
-    """wrap the command in an ssh call an available random cep4 cpu node (via head.cep4)
+    '''wrap the command in an ssh call to an available random cep4 cpu node (via head.cep4)
     :param list cmd: a subprocess cmd list
     :param bool via_head: when True, route the cmd first via the cep4 head node
     :return: the same subprocess cmd list, but then wrapped with cep4 ssh calls
-    """
+    '''
     # pick a random  available cpu node
     node_nrs = get_cep4_available_cpu_nodes()
     node_nr = node_nrs[randint(0, len(node_nrs)-1)]
     return wrap_command_in_cep4_cpu_node_ssh_call(cmd, node_nr, via_head=via_head)
 
 def wrap_command_in_cep4_available_cpu_node_with_lowest_load_ssh_call(cmd, via_head=True):
-    """wrap the command in an ssh call to the available random cep4 cpu node with the lowest load (via head.cep4)
+    '''wrap the command in an ssh call to the available cep4 cpu node with the lowest load (via head.cep4)
     :param list cmd: a subprocess cmd list
     :param bool via_head: when True, route the cmd first via the cep4 head node
     :return: the same subprocess cmd list, but then wrapped with cep4 ssh calls
-    """
+    '''
     lowest_load_node_nr = get_cep4_available_cpu_node_with_lowest_load()
     return wrap_command_in_cep4_cpu_node_ssh_call(cmd, lowest_load_node_nr, via_head=via_head)
 
 def wrap_command_in_cep4_cpu_node_ssh_call(cmd, cpu_node_nr, via_head=True):
-    """wrap the command in an ssh call the given cep4 cpu node (via head.cep4)
+    '''wrap the command in an ssh call to the given cep4 cpu node (via head.cep4)
     :param list cmd: a subprocess cmd list
     :param int cpu_node_nr: the number of the cpu node where to execute the command
     :param bool via_head: when True, route the cmd first via the cep4 head node
     :return: the same subprocess cmd list, but then wrapped with cep4 ssh calls
-    """
-    # hard-coded cpu-node hostname. Might change for future clusters or cluster upgrades.
-    lofarsys_at_cpu_node = 'lofarsys@cpu%02d.cep4' % (cpu_node_nr,)
-    remote_cmd = ssh_cmd_list(lofarsys_at_cpu_node) + cmd
+    '''
+    remote_cmd = ssh_cmd_list('lofarsys@cpu%02d.cep4' % cpu_node_nr) + cmd
     if via_head:
         return wrap_command_in_cep4_head_node_ssh_call(remote_cmd)
     else:
         return remote_cmd
 
 def wrap_command_for_docker(cmd, image_name, image_label=''):
-    """wrap the command to be run in a docker container for the lofarsys user and environment
+    '''wrap the command to be run in a docker container for the lofarsys user and environment
     :param list cmd: a subprocess cmd list
     :param string image_name: the name of the docker image to run
     :param string image_label: the optional label of the docker image to run
     :return: the same subprocess cmd list, but then wrapped with docker calls
-    """
+    '''
     #fetch the lofarsys user id and group id first from the cep4 head node
     id_string = '%s:%s' % (check_output(wrap_command_in_cep4_head_node_ssh_call(['id', '-u'])).strip(),
                            check_output(wrap_command_in_cep4_head_node_ssh_call(['id', '-g'])).strip())
@@ -102,10 +98,10 @@ def wrap_command_for_docker(cmd, image_name, image_label=''):
             '%s:%s' % (image_name, image_label) if image_label else image_name] + cmd
 
 def get_cep4_available_cpu_nodes():
-    """
+    '''
     get a list of cep4 cpu nodes which are currently up and running according to slurm
     :return: a list of cpu node numbers (ints) for the up and running cpu nodes
-    """
+    '''
     available_cep4_nodes = []
 
     try:
@@ -153,12 +149,12 @@ def get_cep4_available_cpu_nodes():
     return available_cep4_nodes
 
 def get_cep4_cpu_nodes_loads(node_nrs=None):
-    """
+    '''
     get the 5min load for each given cep4 cpu node nr
     :param node_nrs: optional list of node numbers to get the load for. If None, then all available nodes are queried.
     :return: dict with node_nr -> load mapping
-    """
-    if node_nrs is None:
+    '''
+    if node_nrs is None:
         node_nrs = get_cep4_available_cpu_nodes()
 
     procs = {}
@@ -186,10 +182,10 @@ def get_cep4_cpu_nodes_loads(node_nrs=None):
     return loads
 
 def get_cep4_available_cpu_nodes_sorted_ascending_by_load():
-    """
+    '''
     get the cep4 available cpu node numbers sorted ascending by load (5min).
     :return: sorted list of node numbers.
-    """
+    '''
     node_nrs = get_cep4_available_cpu_nodes()
     loads = get_cep4_cpu_nodes_loads(node_nrs)
     sorted_loads = sorted(loads.items(), key=lambda x: x[1])
@@ -198,10 +194,10 @@ def get_cep4_available_cpu_nodes_sorted_ascending_by_load():
     return sorted_node_nrs
 
 def get_cep4_available_cpu_node_with_lowest_load():
-    """
+    '''
     get the cep4 cpu node which is available and has the lowest (5min) load of them all.
     :return: the node number (int) with the lowest load.
-    """
+    '''
     node_nrs = get_cep4_available_cpu_nodes_sorted_ascending_by_load()
     if node_nrs:
         logger.debug('cep4 cpu node with lowest load: %s', node_nrs[0])
diff --git a/QA/QA_Common/lib/geoconversions.py b/QA/QA_Common/lib/geoconversions.py
index 7c93b4a08d9438e98ed59d231dda418832187222..08fdf805d6b1afa04691a028eb91b149e32b5118 100644
--- a/QA/QA_Common/lib/geoconversions.py
+++ b/QA/QA_Common/lib/geoconversions.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2018    ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -9,7 +9,7 @@
 #
 # The LOFAR software suite is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.    See the
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License along
@@ -29,11 +29,11 @@ def normalized_earth_radius(latitude_rad):
 
 
 def geographic_from_xyz(xyz_m):
-    """
+    '''
     convert xyz coordinates to wgs84 coordinates
     :param xyz_m: 1D array/list/tuple of x,y,z in meters
     :return: tuple of lat_rad, lon_rad, height_m
-    """
+    '''
     wgs84_a = 6378137.0
     wgs84_f = 1./298.257223563
     wgs84_e2 = wgs84_f*(2.0 - wgs84_f)
@@ -99,9 +99,9 @@ def pqr_from_xyz(xyz_m, xyz0_m=LOFAR_XYZ0_m, matrix=LOFAR_PQR_TO_ETRS_MATRIX):
     return transform(xyz_m, xyz0_m, matrix.T)
 
 def interpolation_function(pqr):
-    """
+    '''
     Return an interpolation function fn(x, y, z), which returns the value at x, y.
-    """
+    '''
     rbfi = Rbf(pqr[:,0], pqr[:,1], 0.0*pqr[:,2], pqr[:,2], function='linear')
     def interpolator(x_m, y_m):
         return rbfi(x_m, y_m, y_m*0.0)
@@ -124,11 +124,11 @@ def fit_plane(xyz):
 
 
 def pqr_cs002_from_xyz(xyz_m):
-    """
+    '''
     convert xyz coordinates to lofar pqr coordinates with origin in CS002
     :param xyz_m: 1D array/list/tuple of x,y,z in meters
     :return: tuple of pqr coords in meters
-    """
+    '''
     pqr = pqr_from_xyz(array([xyz_m]),
                        xyz0_m=array([ 3826577.462,   461022.624,  5064892.526]))
     return pqr[0][0], pqr[0][1], pqr[0][2]
diff --git a/QA/QA_Common/lib/hdf5_io.py b/QA/QA_Common/lib/hdf5_io.py
index c405077ac7828748462b5fcf48d96190c6c54e9b..6ad89e0c468222912861ad57e2b50f85714b2af8 100644
--- a/QA/QA_Common/lib/hdf5_io.py
+++ b/QA/QA_Common/lib/hdf5_io.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2018    ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -9,14 +9,12 @@
 #
 # The LOFAR software suite is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.    See the
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License along
 # with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
 
-# TODO: refactor large functions into collections of smaller function calls and isolate behaviour.
-
 """Module hdf5_io offers various methods to read/write/modify hdf5 files containing lofar measurement data.
 Such an h5 file is usually generated from Lofar Measurement Sets (MS/casacore format) using the ms2hdf5 conversion tool.
 
@@ -55,6 +53,7 @@ with warnings.catch_warnings():
 import logging
 logger = logging.getLogger(__name__)
 
+np.set_printoptions(precision=1)
 
 def write_hypercube(path, saps, parset=None, sas_id=None, wsrta_id=None, do_compress=True, **kwargs):
     """
@@ -94,7 +93,9 @@ def write_hypercube(path, saps, parset=None, sas_id=None, wsrta_id=None, do_comp
         os.makedirs(save_dir)
 
     with h5py.File(path, "w") as file:
-        version = '1.2' # 1.1 -> 1.2 change is not backwards compatible by design.
+        version = '1.3'
+        # 1.1 -> 1.2 change is not backwards compatible by design.
+        # 1.2 -> 1.3 change is almost backwards compatible, it just needs a dB/linear correction. see convert_12_to_13
         ds = file.create_dataset('version', (1,), h5py.special_dtype(vlen=str), version)
         ds.attrs['description'] = 'version of this hdf5 MS extract file'
 
@@ -165,13 +166,13 @@ def write_hypercube(path, saps, parset=None, sas_id=None, wsrta_id=None, do_comp
                     for antenna, location in antenna_locations[ref_frame].items():
                         location_sub_group.create_dataset(antenna, data=location)
 
-            logger.info("""flagging NaN's and zero's in visibilities for file %s""", path)
+            logger.debug('flagging NaNs and zeros in visibilities for file %s', path)
             zero_or_nan = np.absolute(visibilities) == 0.0
             zero_or_nan[np.isnan(visibilities)] = True
             flagging[zero_or_nan] = True
 
             #we'll scale the 10log10(visibilities) so the complex-float can be mapped onto 2*int8
-            logger.info('normalizing visibilities for file %s', path)
+            logger.debug('normalizing visibilities for file %s', path)
             #remove any NaN and/or 0 values in the visibilities? log(0) or log(nan) crashes,
             # so determine smallest non-zero abs value, and fill that in for the flagged visibilities
             try:
@@ -186,25 +187,33 @@ def write_hypercube(path, saps, parset=None, sas_id=None, wsrta_id=None, do_comp
             visibilities[zero_or_nan] = min_non_zero_or_nan_abs_value
             del zero_or_nan
 
-            visibilities_dB = 10.0*np.log10(visibilities)
-            abs_vis_dB = np.absolute(visibilities_dB)
-
-            #compute scale factor per subband to map the visibilities_dB per subband from complex64 to 2xint8
-            scale_factors = np.empty((len(subbands),), dtype=np.float32)
-            for sb_nr in range(len(subbands)):
-                #use 99.9 percentile instead if max to get rid of spikes
-                max_abs_vis_sb = np.percentile(abs_vis_dB[:,:,sb_nr,:], 99.9)
-                scale_factor = 127.0 / max_abs_vis_sb
-                scale_factors[sb_nr] = 1.0/scale_factor
-
-            ds = sap_group.create_dataset('visibility_scale_factors', data=scale_factors)
-            ds.attrs['description'] = 'multiply real and imag parts of visibilities with this factor per subband to un-normalize them and get the 10log10 values of the real and imag parts of the visibilities'
-            ds.attrs['units'] = '-'
+            # reduce dynamic range (so we fit more data in the available bits)
+            visibility_amplitudes = np.abs(visibilities)
+            visibility_amplitudes_dB = 10.0*np.log10(visibility_amplitudes)
+            visibility_phases = np.exp(1j*np.angle(visibilities))
+            visibilities_dB = visibility_amplitudes_dB * visibility_phases
+
+            #compute scale factors per subband, per polarization
+            scale_factors = np.empty(shape=(len(subbands),len(polarizations)), dtype=np.float32)
+
+            for pol_idx, polarization in enumerate(polarizations):
+                #compute scale factor per subband to map the visibilities_dB per subband from complex64 to 2xint8
+                for sb_nr in range(len(subbands)):
+                    #use 99.9 percentile instead of max to get rid of spikes
+                    max_abs_vis_sb = max(1.0, np.percentile(visibility_amplitudes_dB[:,:,sb_nr,pol_idx], 99.9))
+                    scale_factor = 127.0 / max_abs_vis_sb
+                    scale_factors[sb_nr, pol_idx] = 1.0/scale_factor
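+                    # e.g. a 99.9 percentile amplitude of 50dB gives scale_factor 127/50,
+                    # mapping amplitudes up to 50dB onto the full int8 range [-127, 127]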
+
+            # store the scale_factors in the file
+            scale_factor_ds = sap_group.create_dataset('visibility_scale_factors', data=scale_factors)
+            scale_factor_ds.attrs['description'] = 'multiply real and imag parts of the visibilities with this factor per subband per polarization to un-normalize them and get the 10log10 values of the real and imag parts of the visibilities'
+            scale_factor_ds.attrs['units'] = '-'
 
             #create a array with one extra dimension, so we can split the complex value into two scaled int8's for real and imag part
             #looping in python is not the most cpu efficient way
             #but is saves us extra copies of the large visibilities array, which might not fit in memory?
-            logger.info('converting visibilities from complexfloat to 2xint8 for file %s', path)
+            logger.debug('converting visibilities from complexfloat to 2xint8 for file %s', path)
             extended_shape = visibilities_dB.shape[:] + (2,)
             scaled_visibilities = np.empty(extended_shape, dtype=np.int8)
             for sb_nr in range(len(subbands)):
@@ -212,8 +221,8 @@ def write_hypercube(path, saps, parset=None, sas_id=None, wsrta_id=None, do_comp
                 scaled_visibilities[:,:,sb_nr,:,0] = scale_factor*visibilities_dB[:,:,sb_nr,:].real
                 scaled_visibilities[:,:,sb_nr,:,1] = scale_factor*visibilities_dB[:,:,sb_nr,:].imag
 
-            logger.info('reduced visibilities size from %s to %s bytes (factor %s)',
-                        visibilities.nbytes, scaled_visibilities.nbytes, visibilities.nbytes/scaled_visibilities.nbytes)
+            logger.debug('reduced visibilities size from %s to %s bytes (factor %s)',
+                         visibilities.nbytes, scaled_visibilities.nbytes, visibilities.nbytes/scaled_visibilities.nbytes)
 
             ds = sap_group.create_dataset('visibilities', data=scaled_visibilities,
                                           compression="lzf" if do_compress else None)
@@ -245,6 +254,7 @@ def write_hypercube(path, saps, parset=None, sas_id=None, wsrta_id=None, do_comp
     except ImportError:
         logger.info('finished writing hypercube to file: %s', path)
 
+
 def read_sap_numbers(path):
     """
     read the sap numbers (keys) from the hypercube data from the hdf5 hypercube file given by path.
@@ -256,11 +266,12 @@ def read_sap_numbers(path):
     with h5py.File(path, "r") as file:
         version_str = file['version'][0]
 
-        if version_str != '1.2':
+        if version_str not in ['1.2', '1.3']:
             raise ValueError('Cannot read version %s' % (version_str,))
 
         return sorted([int(sap_nr) for sap_nr in file['measurement/saps'].keys()])
 
+
 def read_hypercube(path, visibilities_in_dB=True, python_datetimes=False, read_visibilities=True, read_flagging=True, saps_to_read=None):
     """
     read the hypercube data from the hdf5 hypercube file given by path.
@@ -276,14 +287,16 @@ def read_hypercube(path, visibilities_in_dB=True, python_datetimes=False, read_v
     """
     logger.info('reading hypercube from file: %s', path)
 
-    with h5py.File(path, "r") as file:
-        result = {}
-
-        version_str = file['version'][0]
+    with h5py.File(path, "r+") as file:
+        if file['version'][0] == '1.2':
+            convert_12_to_13(path)
 
-        if version_str != '1.2':
-            raise ValueError('Cannot read version %s' % (version_str,))
+    # reopen file read-only for safety reasons.
+    with h5py.File(path, "r") as file:
+        if file['version'][0] != '1.3':
+            raise ValueError('Cannot read version %s' % (file['version'][0],))
 
+        result = {}
         if 'measurement/parset' in file:
             parset = read_hypercube_parset(path)
             if parset:
@@ -348,17 +361,19 @@ def read_hypercube(path, visibilities_in_dB=True, python_datetimes=False, read_v
                 scale_factors = sap_dict['visibility_scale_factors'][:]
                 normalized_visibilities = sap_dict['visibilities'][:]
 
-                logger.info('denormalizing and converting real/imag to complex visibilities for file sap %s in %s', sap_nr, path)
+                logger.debug('denormalizing and converting real/imag to complex visibilities for file sap %s in %s', sap_nr, path)
                 reduced_shape = normalized_visibilities.shape[:-1]
                 visibilities = np.empty(reduced_shape, dtype=np.complex64)
 
-                for sb_nr, scale_factor in enumerate(scale_factors):
-                    visibilities[:,:,sb_nr,:].real = scale_factor*normalized_visibilities[:,:,sb_nr,:,0]
-                    visibilities[:,:,sb_nr,:].imag = scale_factor*normalized_visibilities[:,:,sb_nr,:,1]
+                for pol_idx, polarization in enumerate(polarizations):
+                    pol_scale_factors = scale_factors[:,pol_idx]
+                    for sb_nr, scale_factor in enumerate(pol_scale_factors):
+                        visibilities[:,:,sb_nr,pol_idx].real = scale_factor*normalized_visibilities[:,:,sb_nr,pol_idx,0]
+                        visibilities[:,:,sb_nr,pol_idx].imag = scale_factor*normalized_visibilities[:,:,sb_nr,pol_idx,1]
 
                 if not visibilities_in_dB:
-                    logger.info('converting visibilities from dB to raw for file sap %s in %s', sap_nr, path)
-                    visibilities = np.power(10, 0.1*visibilities)
+                    logger.debug('converting visibilities from dB to raw linear for file sap %s in %s', sap_nr, path)
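+                    # the stored complex value carries the amplitude in dB plus the original phase,
+                    # so rebuild the linear visibility as 10^(|v|/10) * exp(1j*angle(v))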
+                    visibilities = np.power(10, 0.1*np.abs(visibilities)) * np.exp(1j * np.angle(visibilities))
 
                 #HACK: explicitly set non-XX-polarizations to 0 for apertif
                 if 'measurement/wsrta_id' in file:
@@ -385,6 +400,71 @@ def read_hypercube(path, visibilities_in_dB=True, python_datetimes=False, read_v
 
     return result
 
+
+def convert_12_to_13(h5_path):
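+    """
+    convert an hdf5 hypercube file in-place from version 1.2 to 1.3.
+    v1.2 applied the 10log10 directly to the complex visibilities (garbling the phases);
+    v1.3 stores the 10log10 amplitude combined with the original phase,
+    with scale factors per subband per polarization.
+    :param str h5_path: path to the h5 file to convert
+    """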
+    with h5py.File(h5_path, "r+") as file:
+        version_str = file['version'][0]
+
+        if version_str != '1.2':
+            raise ValueError('Cannot convert version %s to 1.3' % (version_str,))
+
+        for sap_nr, sap_group in file['measurement/saps'].items():
+            # read the scale_factors and visibilities in a v1.2 way,
+            # including incorrect reverse log10 to undo the incorrect storage of phases
+            scale_factors = sap_group['visibility_scale_factors'][:]
+            normalized_visibilities = sap_group['visibilities'][:]
+            subbands = sap_group['subbands']
+            polarizations = sap_group['polarizations']
+
+            # apply v1.2 reconstruction of visibilities
+            visibilities = np.empty(normalized_visibilities.shape[:-1], dtype=np.complex64)
+            for sb_nr, scale_factor in enumerate(scale_factors):
+                visibilities[:, :, sb_nr, :].real = scale_factor * normalized_visibilities[:, :, sb_nr, :, 0]
+                visibilities[:, :, sb_nr, :].imag = scale_factor * normalized_visibilities[:, :, sb_nr, :, 1]
+            visibilities = np.power(10, 0.1 * visibilities)
+
+            # now we have the original raw visibilities again (including some minor errors in amplitude and phase due to rounding/truncation).
+            # let's store them in the correct v1.3 way.
+
+            # reduce dynamic range (so we fit more data in the available bits)
+            visibility_amplitudes = np.abs(visibilities)
+            visibility_amplitudes_dB = 10.0*np.log10(visibility_amplitudes)
+            visibility_phases = np.exp(1j*np.angle(visibilities))
+            visibilities_dB = visibility_amplitudes_dB * visibility_phases
+
+            #compute scale factors per subband, per polarization
+            scale_factors = np.empty(shape=(len(subbands),len(polarizations)), dtype=np.float32)
+
+            for pol_idx, polarization in enumerate(polarizations):
+                #compute scale factor per subband to map the visibilities_dB per subband from complex64 to 2xint8
+                for sb_nr in range(len(subbands)):
+                    #use 99.9 percentile instead of max to get rid of spikes
+                    max_abs_vis_sb = max(1.0, np.percentile(visibility_amplitudes_dB[:,:,sb_nr,pol_idx], 99.9))
+                    scale_factor = 127.0 / max_abs_vis_sb
+                    scale_factors[sb_nr, pol_idx] = 1.0/scale_factor
+
+            # overwrite the scale_factors in the file
+            del sap_group['visibility_scale_factors']
+            scale_factor_ds = sap_group.create_dataset('visibility_scale_factors', data=scale_factors)
+            scale_factor_ds.attrs['description'] = 'multiply real and imag parts of the visibilities with this factor per subband per polarization to un-normalize them and get the 10log10 values of the real and imag parts of the visibilities'
+            scale_factor_ds.attrs['units'] = '-'
+
+            # scale the visibilities in the v1.3 way
+            extended_shape = visibilities_dB.shape[:] + (2,)
+            scaled_visibilities = np.empty(extended_shape, dtype=np.int8)
+            for sb_nr in range(len(subbands)):
+                scale_factor = 1.0 / scale_factors[sb_nr]
+                scaled_visibilities[:,:,sb_nr,:,0] = scale_factor*visibilities_dB[:,:,sb_nr,:].real
+                scaled_visibilities[:,:,sb_nr,:,1] = scale_factor*visibilities_dB[:,:,sb_nr,:].imag
+
+            # overwrite the visibilities in the file
+            sap_group['visibilities'][:] = scaled_visibilities
+
+        # and finally update the version number
+        file['version'][0] = '1.3'
+
+
 def add_parset_to_hypercube(h5_path, otdbrpc):
     """
     helper method which tries to get the parset for the sas_id in the h5 file from otdb via the otdbrpc, and add it to the h5 file.
@@ -410,6 +490,8 @@ def add_parset_to_hypercube(h5_path, otdbrpc):
                                              compression="lzf")
                     ds.attrs['description'] = 'the parset of this observation/pipeline with all settings how this data was created'
                     logger.info('added parset for sas_id %s to %s hdf5 file', sas_id, os.path.basename(h5_path))
+
+        fill_info_folder_from_parset(h5_path)
     except Exception as e:
         logger.error(e)
 
@@ -467,6 +549,12 @@ def combine_hypercubes(input_paths, output_dir, output_filename=None, do_compres
         if not existing_paths:
             raise ValueError('No input h5 files with valid paths given: %s' % (', '.join(input_paths),))
 
+        # convert any 1.2 to 1.3 file if needed
+        for path in existing_paths:
+            with h5py.File(path, "r") as file:
+                if file['version'][0] == '1.2':
+                    convert_12_to_13(path)
+
         input_files = [h5py.File(p, "r") for p in existing_paths]
 
         versions = set([file['version'][0] for file in input_files])
@@ -476,7 +564,7 @@ def combine_hypercubes(input_paths, output_dir, output_filename=None, do_compres
 
         version_str = list(versions)[0]
 
-        if version_str != '1.2':
+        if version_str != '1.3':
             raise ValueError('Cannot read version %s' % (version_str,))
 
         sas_ids = set([file['measurement/sas_id'][0] for file in input_files if 'measurement/sas_id' in file])
@@ -497,7 +585,7 @@ def combine_hypercubes(input_paths, output_dir, output_filename=None, do_compres
         logger.info('combine_hypercubes: combining %s h5 files into %s', len(input_paths), output_path)
 
         with h5py.File(output_path, "w") as output_file:
-            version = '1.2'
+            version = '1.3'
             ds = output_file.create_dataset('version', (1,), h5py.special_dtype(vlen=str), version)
             ds.attrs['description'] = 'version of this hdf5 MS extract file'
 
@@ -564,6 +652,8 @@ def combine_hypercubes(input_paths, output_dir, output_filename=None, do_compres
                                 value_dict[key] = data[:,:,sb_cntr,:]
                             elif any(item in key for item in ['baselines', 'polarizations', 'timestamps', 'antenna_locations']):
                                 value_dict[key] = data
+                            elif 'visibility_scale_factors' in key:
+                                value_dict[key] = data[sb_cntr,:]
                             else:
                                 value_dict[key] = data[sb_cntr]
 
@@ -589,6 +679,8 @@ def combine_hypercubes(input_paths, output_dir, output_filename=None, do_compres
                             shape = list(data.shape)
                             shape.insert(2, num_subbands)
                             shape = tuple(shape)
+                        elif 'visibility_scale_factors' in key:
+                            shape = (num_subbands,) + data.shape
                         else:
                             shape = (num_subbands,)
 
@@ -814,6 +906,7 @@ def read_clusters(h5_path, label='latest'):
                 timestamp = anno_ds.attrs.get('timestamp')
 
                 result_annotations.append({'annotation': annotation,
+                                           'index': anno_nr,
                                            'user': user,
                                            'timestamp': datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S')})
 
@@ -857,6 +950,7 @@ def read_clusters(h5_path, label='latest'):
                             sap_clusters_annotations[cluster_nr] = []
 
                         sap_clusters_annotations[cluster_nr].append({'annotation': annotation,
+                                                                     'index': anno_nr,
                                                                      'user': user,
                                                                      'timestamp': datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S')})
                     except:
@@ -908,7 +1002,12 @@ def _add_annotation_to_group(annotations__parent_group, annotation, user=None, *
         annotations_group = annotations__parent_group.create_group('annotations')
         annotations_group.attrs['description'] = 'annotations on this cluster'
 
-    seq_nr = len(annotations_group)
+    # refuse to add a duplicate annotation (for the same cluster_nr, if given)
+    for seq_nr, ds in annotations_group.items():
+        if ds[0] == annotation:
+            if 'cluster_nr' not in kwargs or ds.attrs.get('cluster_nr') == kwargs['cluster_nr']:
+                raise ValueError('annotation "%s" already exists' % (annotation,))
+
+    seq_nr = max([int(x) for x in annotations_group.keys()])+1 if annotations_group.keys() else 0
     ds = annotations_group.create_dataset(str(seq_nr), (1,), h5py.special_dtype(vlen=str), annotation)
     ds.attrs['user'] = user if user else 'anonymous'
     ds.attrs['timestamp'] = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
@@ -940,6 +1039,33 @@ def annotate_cluster(h5_path, label, sap_nr, cluster_nr, annotation, user=None):
                     _add_annotation_to_group(sap_group, annotation, user, cluster_nr=cluster_nr)
 
 
+def delete_cluster_annotation(h5_path, sap_nr, cluster_nr, annotation_nr, label='latest'):
+    """
+    remove the annotation_nr'th annotation for the cluster in the file at h5_path, given by the clustering label, sap_nr, cluster_nr.
+    :param str h5_path: h5_path to the h5 file
+    :param str label: the label of the clustering results group
+    :param int sap_nr: the sap number within the clustering results group
+    :param int cluster_nr: the cluster number within the sap within the clustering results group
+    :param int annotation_nr: the annotation number (index) to delete
+    """
+    with h5py.File(h5_path, "r+") as file:
+        if 'clustering' in file:
+            clustering_group = file['clustering']
+
+            if label in clustering_group:
+                algo_group = clustering_group[label]
+                saps_group = algo_group['saps']
+
+                if str(sap_nr) in saps_group:
+                    sap_group = saps_group[str(sap_nr)]
+                    if 'annotations' in sap_group:
+                        annotations_group = sap_group['annotations']
+                        if str(annotation_nr) in annotations_group:
+                            del annotations_group[str(annotation_nr)]
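+
+# e.g. delete_cluster_annotation('/data/my_cube.h5', sap_nr=0, cluster_nr=1, annotation_nr=0)
+# removes annotation 0 of cluster 1 in sap 0 for the 'latest' label (path and numbers are hypothetical)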
+
 def annotate_clustering_results(h5_path, label, annotation, user=None):
     """
     add an annotation at top level for the entire file at h5_path.
@@ -1084,13 +1210,14 @@ def create_info_string(data, h5_path=None, file_annotations=None, clusters=None,
 
 def fill_info_folder_from_parset(h5_path):
     try:
+        logger.info('fill_info_folder_from_parset for %s', h5_path)
         parset = read_hypercube_parset(h5_path)
 
         with h5py.File(h5_path, "r+") as file:
-            info_group = file.create_group('measurement/info')
-            info_group.attrs['description'] = 'Meta information about the measurement'
+            if parset is not None:
+                info_group = file.create_group('measurement/info')
+                info_group.attrs['description'] = 'Meta information about the measurement'
 
-            if parset:
                 for name, key in [('project', 'Campaign.name'),
                                   ('project_description', 'Campaign.title'),
                                   ('PI', 'Campaign.PI'),
@@ -1117,10 +1244,10 @@ def fill_info_folder_from_parset(h5_path):
                 except (ImportError, RuntimeError, ValueError) as e:
                     logger.warning('Could not convert start/end time and/or duration in fill_info_folder_from_parset for %s. error: %s', h5_path, e)
     except Exception as e:
-        logger.error('Error whle running fill_info_folder_from_parset: %s', e)
+        logger.error('Error while running fill_info_folder_from_parset: %s', e)
 
 def read_info_dict(h5_path):
-    """ read the info about the observation/pipeline from the h5 file given by h5_path.
+    ''' read the info about the observation/pipeline from the h5 file given by h5_path.
     :param str h5_path: h5_path to the h5 file
     :return: a dict with the info about the observation/pipeline in native python types, like:
             {'PI': 'my_PI',
@@ -1132,7 +1259,7 @@ def read_info_dict(h5_path):
              'antenna_array': 'LBA',
              'start_time': datetime.datetime(2018, 6, 11, 11, 0),
              'stop_time': datetime.datetime(2018, 6, 11, 12, 0),
-             'type': 'my_process_subtype'} """
+             'type': 'my_process_subtype'} '''
     with h5py.File(h5_path, "r+") as file:
         if not 'measurement/info' in file:
             # try to convert old style file with parsets only into new files with info.
diff --git a/QA/QA_Common/lib/utils.py b/QA/QA_Common/lib/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf88d439cf67b3152ff673dff057d21c3394029b
--- /dev/null
+++ b/QA/QA_Common/lib/utils.py
@@ -0,0 +1,124 @@
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+import numpy as np
+from datetime import datetime, timedelta
+
+from lofar.qa.geoconversions import *
+
+import logging
+logger = logging.getLogger(__name__)
+
+def create_hypercube(num_saps=3, num_stations=5, num_timestamps=11, num_subbands_per_sap=None, snr=0.9,
+                     max_signal_amplitude=1e5, parallel_to_cross_polarization_ratio=20.0,
+                     num_phase_wraps=1.0):
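+    """
+    create a synthetic hypercube dict (in the structure that write_hypercube expects),
+    filled with a test signal plus noise: the signal amplitude decreases along the time axis
+    and its phase sweeps along the subband axis; the noise is random complex numbers.
+    :param int num_saps: number of saps to create
+    :param int num_stations: number of (fake) CSxxx stations to create
+    :param int num_timestamps: number of timestamps to create
+    :param dict num_subbands_per_sap: optional dict of sap_nr -> num_subbands; defaults to 13*(sap_nr+1) per sap
+    :param float snr: mixing ratio between the test signal and the noise
+    :param float max_signal_amplitude: the max amplitude of the test signal
+    :param float parallel_to_cross_polarization_ratio: amplitude ratio between the parallel (xx,yy) and cross (xy,yx) polarizations
+    :param float num_phase_wraps: number of times the phase wraps around 2pi along the subband axis
+    :return: dict of sap_nr -> sap-data-dict (baselines, timestamps, central_frequencies, subbands, polarizations, visibilities, flagging, antenna_locations)
+    """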
+    data = {}
+    assert max_signal_amplitude > 1.0
+    logger.info('create_hypercube: num_saps=%s num_stations=%s num_timestamps=%s num_subbands_per_sap=%s snr=%s max_amplitude=%s pol_ratio=%s',
+                num_saps, num_stations, num_timestamps, num_subbands_per_sap, snr, max_signal_amplitude, parallel_to_cross_polarization_ratio)
+
+    if num_subbands_per_sap is None:
+        num_subbands_per_sap = {}
+        for sap_nr in range(num_saps):
+            num_subbands_per_sap[sap_nr] = 13*(sap_nr+1)
+
+    stations = ['CS%03d' % (i + 1) for i in range(num_stations)]
+    baselines = []
+    for idx, station1 in enumerate(stations):
+        for station2 in stations[idx:]:
+            baselines.append((station1, station2))
+
+    num_baselines = len(baselines)
+
+    for sap_nr in range(num_saps):
+        #generate nice test visibilities
+        num_subbands = num_subbands_per_sap[sap_nr]
+
+        #generate 'ticks' along the polarization-axes
+        polarizations = ['xx', 'xy', 'yx', 'yy']
+        parallel_pol_idxs = [0,3]
+        cross_pol_idxs = [1,2]
+
+        # create synthetic visibilities signal
+        baseline_visibilities_signal = np.zeros((num_timestamps, num_subbands, len(polarizations)), dtype=np.complex64)
+        for timestamp_idx in range(num_timestamps):
+            # timestamp_ratio ranges from 1.0 (inclusive) down to just-above-zero
+            # this prevents the visibility_value from becoming 0 (from which we cannot take the log)
+            timestamp_ratio = (num_timestamps - timestamp_idx) / float(num_timestamps) if num_timestamps > 1 else 1.0
+            for subband_idx in range(num_subbands):
+                # subband_ratio ranges from 0 up to (but not including) 1.0
+                # this ensures the phases start at 0rad, and sweep up to but not including 2PIrad
+                subband_ratio = subband_idx/float(num_subbands) if num_subbands > 1 else 1.0
+
+                # create synthetic visibility_value
+                # amplitude varies in time. make sure the smallest amplitude is >= 1.0,
+                # because otherwise we cannot store them with enough bits in dB's
+                amplitude = max(1.0, max_signal_amplitude * timestamp_ratio)
+                # phase varies in subband direction
+                phase = np.exp(1j * subband_ratio * 2.0 * np.pi * num_phase_wraps)
+                visibility_value_parallel = amplitude * phase
+                visibility_value_cross = max(1.0, amplitude/parallel_to_cross_polarization_ratio) * phase
+                baseline_visibilities_signal[timestamp_idx,subband_idx,parallel_pol_idxs] = visibility_value_parallel
+                baseline_visibilities_signal[timestamp_idx, subband_idx, cross_pol_idxs] = visibility_value_cross
+
+        # use/apply the same visibilities for each baseline
+        visibilities_signal = np.zeros((num_baselines, num_timestamps, num_subbands, len(polarizations)), dtype=np.complex64)
+        for i in range(num_baselines):
+            visibilities_signal[i,:,:,:] = baseline_visibilities_signal
+
+        # create some noise
+        visibilities_noise = np.zeros((num_baselines, num_timestamps, num_subbands, len(polarizations)), dtype=np.complex64)
+        visibilities_noise.real = np.random.normal(size=visibilities_noise.shape)
+        visibilities_noise.imag = np.random.normal(size=visibilities_noise.shape)
+        visibilities_noise *= max_signal_amplitude/np.max(np.abs(visibilities_noise))
+
+        # add signal and noise according to given ratio
+        visibilities = snr*visibilities_signal + (1.0-snr)*visibilities_noise
+
+        # and some empty flagging
+        flagging = np.zeros(visibilities.shape, dtype=np.bool)
+
+        # generate 'ticks' along the timestamp-axis
+        now = datetime.utcnow()
+        timestamps = [now+timedelta(minutes=i) for i in range(num_timestamps)]
+
+        # generate 'ticks' along the central_frequencies-axes
+        # fill the HBA frequency range of 120-240MHz
+        central_frequencies = [120e6+i*120e6/max(1,num_subbands-1) for i in range(num_subbands)]
+        sb_offset = sum([len(sap['subbands']) for sap in data.values()])
+        subbands = [i for i in range(sb_offset, sb_offset+num_subbands)]
+
+        # create some synthetic antenna locations
+        antenna_locations = {'XYZ': {}, 'PQR': {}, 'WGS84' : {}}
+        for i, station in enumerate(stations):
+            ratio = float(i)/len(stations)
+            xyz_pos = (np.cos(ratio*2*np.pi),np.sin(ratio*2*np.pi),0)
+            antenna_locations['XYZ'][station] = xyz_pos
+            antenna_locations['PQR'][station] = pqr_cs002_from_xyz(xyz_pos)
+            antenna_locations['WGS84'][station] = geographic_from_xyz(xyz_pos)
+
+        # combine all data in the dict
+        data[sap_nr] = { 'baselines':baselines,
+                         'timestamps':timestamps,
+                         'central_frequencies':central_frequencies,
+                         'subbands':subbands,
+                         'polarizations':polarizations,
+                         'visibilities':visibilities,
+                         'flagging':flagging,
+                         'antenna_locations': antenna_locations}
+    return data
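+
+# A minimal usage sketch (the file path is hypothetical):
+#   from lofar.qa.utils import create_hypercube
+#   from lofar.qa.hdf5_io import write_hypercube
+#   cube = create_hypercube(num_saps=1, num_stations=3, num_timestamps=16)
+#   write_hypercube('/tmp/test_cube.h5', cube)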
+
diff --git a/QA/QA_Common/test/CMakeLists.txt b/QA/QA_Common/test/CMakeLists.txt
index 5b0d0bfca76035e2a1fdbea9e79f4e12d7f769bb..82f42c7156d3787a4fbe8d4f165436ddacadaec3 100644
--- a/QA/QA_Common/test/CMakeLists.txt
+++ b/QA/QA_Common/test/CMakeLists.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -18,14 +18,6 @@
 # $Id$
 include(LofarCTest)
 
-IF(BUILD_TESTING)
-    # deploy test modules and scripts
-    python_install(test_utils.py
-                   DESTINATION lofar/qa/test)
-
-    lofar_add_bin_scripts(create_test_hypercube)
-ENDIF(BUILD_TESTING)
-
 lofar_add_test(t_cep4_utils)
 lofar_add_test(t_hdf5_io)
 
diff --git a/QA/QA_Common/test/t_cep4_utils.py b/QA/QA_Common/test/t_cep4_utils.py
index 1a0b970a515f042e558c5506c90f53d3bab9fbdb..8aa8f90fa73dbb69fde98a7c168a93b7db89ea04 100755
--- a/QA/QA_Common/test/t_cep4_utils.py
+++ b/QA/QA_Common/test/t_cep4_utils.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2018    ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -11,7 +11,7 @@
 #
 # The LOFAR software suite is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.    See the
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License along
@@ -37,31 +37,31 @@ class TestCep4Utils(unittest.TestCase):
         self.assertTrue(len(node_nrs) > 0)
 
     def test_03_wrap_command_in_cep4_random_cpu_node_ssh_call(self):
-        """
+        '''
         this test calls and tests the functionality of the following methods via
         wrap_command_in_cep4_random_cpu_node_ssh_call: get_cep4_available_cpu_nodes, wrap_command_in_cep4_cpu_node_ssh_call
-        """
+        '''
         cmd = wrap_command_in_cep4_random_cpu_node_ssh_call(['true'], via_head=True)
         logger.info('executing command: %s', ' '.join(cmd))
         self.assertEqual(0, call(cmd))
 
     def test_04_wrap_command_in_cep4_available_cpu_node_with_lowest_load_ssh_call(self):
-        """
+        '''
         this test calls and tests the functionality of the following methods via
         wrap_command_in_cep4_random_cpu_node_ssh_call:
         get_cep4_available_cpu_nodes, get_cep4_cpu_nodes_loads,
         get_cep4_available_cpu_nodes_sorted_ascending_by_load, wrap_command_in_cep4_cpu_node_ssh_call
-        """
+        '''
         cmd = wrap_command_in_cep4_available_cpu_node_with_lowest_load_ssh_call(['true'], via_head=True)
         logger.info('executing command: %s', ' '.join(cmd))
         self.assertEqual(0, call(cmd))
 
     def test_05_wrap_command_for_docker_in_cep4_head_node_ssh_call(self):
-        """
+        '''
         this test calls and tests the functionality of wrap_command_for_docker and
         wrap_command_in_cep4_head_node_ssh_call.
         It is assumed that a docker image is available on head.cep4.
-        """
+        '''
         #wrap the command in a docker call first, and then in an ssh call
         cmd = wrap_command_for_docker(['true'], 'adder', 'latest')
         cmd = wrap_command_in_cep4_head_node_ssh_call(cmd)
@@ -69,10 +69,10 @@ class TestCep4Utils(unittest.TestCase):
         self.assertEqual(0, call(cmd))
 
     def test_06_get_slurm_info_from_within_docker_via_cep4_head(self):
-        """
+        '''
         test to see if we can execute a command via ssh on the head node,
         from within a docker container, started via ssh on the head node (yes, that's multiple levels of indirection)
-        """
+        '''
         # use the slurm sinfo command (because it's available on the head nodes only)...
         cmd = ['sinfo']
         # ...called on cep4 headnode...
diff --git a/QA/QA_Common/test/t_cep4_utils.run b/QA/QA_Common/test/t_cep4_utils.run
index 4b5b4ef354d88b60635eb68d21c1e52f18e169ef..b8d03e94c099ffa993719a2665c2ee89cf7b027d 100755
--- a/QA/QA_Common/test/t_cep4_utils.run
+++ b/QA/QA_Common/test/t_cep4_utils.run
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-# Copyright (C) 2018    ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -11,7 +11,7 @@
 #
 # The LOFAR software suite is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.    See the
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License along
diff --git a/QA/QA_Common/test/t_cep4_utils.sh b/QA/QA_Common/test/t_cep4_utils.sh
index 044222a17bf7ba7886b928ada2660a61de01bf41..9298df51c1b5e4c48c03b7d15833c6e1806ed4ee 100755
--- a/QA/QA_Common/test/t_cep4_utils.sh
+++ b/QA/QA_Common/test/t_cep4_utils.sh
@@ -1,6 +1,6 @@
 #!/bin/sh
 
-# Copyright (C) 2018    ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -11,7 +11,7 @@
 #
 # The LOFAR software suite is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.    See the
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License along
diff --git a/QA/QA_Common/test/t_hdf5_io.py b/QA/QA_Common/test/t_hdf5_io.py
index 7c8ef4498331036a77c66971d54abe82e32fc4f9..b104eec0080285bf0a2c331dd70938b42d143ee8 100755
--- a/QA/QA_Common/test/t_hdf5_io.py
+++ b/QA/QA_Common/test/t_hdf5_io.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2018    ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -11,7 +11,7 @@
 #
 # The LOFAR software suite is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.    See the
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License along
@@ -29,12 +29,16 @@ from lofar.qa.hdf5_io import *
 from lofar.parameterset import *
 from lofar.common.datetimeutils import to_modified_julian_date_in_seconds
 
-from lofar.qa.test.test_utils import *
+from lofar.qa.utils import *
+
+np.set_printoptions(precision=2)
 
 logger = logging.getLogger(__name__)
 
 class TestHdf5_IO(unittest.TestCase):
     def test_write_and_read_again(self):
+        '''tests writing and reading an hdf5 file, and checks all parameters except for the visibility data.
+        See test_write_and_read_and_verify_data for elaborate data verification.'''
         logger.info('test_write_and_read_again')
 
         path = tempfile.mkstemp()[1]
@@ -61,20 +65,14 @@ class TestHdf5_IO(unittest.TestCase):
             for sap_nr, sap_out in result['saps'].items():
                 sap_in = saps_in[sap_nr]
 
-                self.assertTrue(sap_out['timestamps'])
+                self.assertTrue('timestamps' in sap_out)
+                self.assertEqual(len(sap_in['timestamps']), len(sap_out['timestamps']))
                 for t_in, t_out in zip(sap_in['timestamps'], sap_out['timestamps']):
                     self.assertEqual(t_in, t_out)
 
                 self.assertFalse(sap_out['visibilities_in_dB'])
                 self.assertEqual(sap_in['visibilities'].shape, sap_out['visibilities'].shape)
 
-                diff = sap_in['visibilities'] - sap_out['visibilities']
-                error = np.absolute(diff/sap_in['visibilities'])
-
-                median_error = np.median(error)
-                self.assertTrue(median_error < 0.05)
-                logger.info('median error %s < threshold %s', median_error, 0.05)
-
                 self.assertTrue('antenna_locations' in sap_out)
                 for coords_type in ['XYZ', 'PQR', 'WGS84']:
                     self.assertTrue(coords_type in sap_out['antenna_locations'])
@@ -92,16 +90,251 @@ class TestHdf5_IO(unittest.TestCase):
             logger.info('removing test file: %s', path)
             os.remove(path)
 
+    def test_write_and_read_and_verify_data(self):
+        '''extensive test to verify the correctness of all visibility amplitudes and phases
+        after they have been written and read back again, both in raw and in dB.'''
+        logger.info('test_write_and_read_and_verify_data')
+
+        path = tempfile.mkstemp()[1]
+
+        try:
+            # test over a wide range of possible numbers of saps, stations, timestamps, subbands and max_amplitude,
+            # because these parameters influence the data reduction applied when writing, and the reconstruction when reading
+            for num_saps in [1, 2]:
+                for num_stations in [1, 3, 10]:
+                    for num_timestamps in [1, 2, 10]:
+                        for num_subbands_per_sap in [1, 2, 10]:
+                            for max_amplitude in [100, 1000, 10000]:
+                                for pol_ratio in [1, 10, 50]:
+                                    # create a synthetic hypercube with known data which we can verify
+                                    # amplitude varies with time
+                                    # phase varies with subband
+                                    saps_in = create_hypercube(num_saps=num_saps,
+                                                               num_stations=num_stations, num_timestamps=num_timestamps,
+                                                               num_subbands_per_sap={sap_nr:num_subbands_per_sap for sap_nr in range(num_saps)},
+                                                               snr=1.0, max_signal_amplitude=max_amplitude,
+                                                               parallel_to_cross_polarization_ratio=pol_ratio)
+
+                                    for sap_nr, sap_in_raw in saps_in.items():
+                                        # test for correct input test data
+                                        max_amplitude_in = np.max(np.abs(sap_in_raw['visibilities']))
+                                        self.assertTrue(np.abs(max_amplitude - max_amplitude_in) < 1e-3*max_amplitude)
+
+                                    # write the hypercube into an h5 file...
+                                    write_hypercube(path, saps_in)
+
+                                    # ...and read the data back from file and compare it
+                                    # input visibilities are not in dB, so request the output visibilities not to be in dB either
+                                    # (side note: visibilities are stored in the h5 file in dB for better compression)
+                                    result_raw = read_hypercube(path, visibilities_in_dB=False, python_datetimes=True)
+                                    # but because we usually plot the visibilities in dB, read and check those as well
+                                    result_dB = read_hypercube(path, visibilities_in_dB=True, python_datetimes=True)
+
+                                    self.assertTrue('saps' in result_raw)
+                                    self.assertTrue('saps' in result_dB)
+                                    saps_out_raw = result_raw['saps']
+                                    saps_out_dB = result_dB['saps']
+                                    self.assertEqual(num_saps, len(saps_out_raw))
+                                    self.assertEqual(num_saps, len(saps_out_dB))
+
+                                    for sap_nr, sap_out_raw in saps_out_raw.items():
+                                        sap_in_raw = saps_in[sap_nr]
+                                        sap_out_dB = saps_out_dB[sap_nr]
+
+                                        # compare all in/out timestamps
+                                        self.assertTrue('timestamps' in sap_out_raw)
+                                        for t_in, t_out in zip(sap_in_raw['timestamps'], sap_out_raw['timestamps']):
+                                            self.assertEqual(t_in, t_out)
+
+                                        # compare all in/out subbands
+                                        self.assertTrue('subbands' in sap_out_raw)
+                                        for sb_in, sb_out in zip(sap_in_raw['subbands'], sap_out_raw['subbands']):
+                                            self.assertEqual(sb_in, sb_out)
+
+                                        # compare all in/out central_frequencies
+                                        self.assertTrue('central_frequencies' in sap_out_raw)
+                                        for freq_in, freq_out in zip(sap_in_raw['central_frequencies'], sap_out_raw['central_frequencies']):
+                                            self.assertEqual(freq_in, freq_out)
+
+                                        self.assertFalse(sap_out_raw['visibilities_in_dB'])
+                                        self.assertEqual(sap_in_raw['visibilities'].shape, sap_out_raw['visibilities'].shape)
+
+                                        # compare all in/out visibilities
+                                        vis_in_raw = sap_in_raw['visibilities']
+                                        vis_out_raw = sap_out_raw['visibilities']
+                                        vis_out_dB = sap_out_dB['visibilities']
+
+                                        # for the raw visibilities, comparison is easy...
+                                        # just check the differences in amplitude and in phase
+                                        abs_diff_raw = np.abs(vis_in_raw) - np.abs(vis_out_raw)
+                                        abs_phase_diff_raw = np.abs(np.unwrap(np.angle(vis_in_raw) - np.angle(vis_out_raw), axis=2))
+                                        # phase has no 'meaning' for small (insignificant) amplitudes,
+                                        # so just set the phase difference to zero there
+                                        abs_phase_diff_raw[np.abs(vis_in_raw) <= max(1.0, 1e-3*max_amplitude)] = 0
+                                        abs_phase_diff_raw[np.abs(vis_out_raw) <= max(1.0, 1e-3*max_amplitude)] = 0
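+                                        # note: np.unwrap along the subband axis removes spurious 2*pi jumps,
+                                        # so e.g. phases of +pi and -pi (the same physical angle)
+                                        # do not show up as a 2*pi difference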
+
+                                        # for the visibilities in dB, only the amplitudes are converted to dB;
+                                        # the phases are stored unchanged and should equal the input phases.
+                                        # the amplitudes, however, need conversion from dB back to raw first.
+                                        abs_vis_out_raw_from_dB = np.power(10, 0.1*np.abs(vis_out_dB))
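+                                        # (amplitude_dB = 10*log10(amplitude), so amplitude = 10**(0.1*amplitude_dB);
+                                        #  e.g. an amplitude stored as 40 dB maps back to 1e4)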
+                                        abs_diff_raw_dB = np.abs(vis_in_raw) - abs_vis_out_raw_from_dB
+                                        abs_phase_diff_raw_dB = np.abs(np.unwrap(np.angle(vis_in_raw) - np.angle(vis_out_dB), axis=2))
+                                        # phase has no 'meaning' for small (insignificant) amplitudes, so just set it to zero there
+                                        abs_phase_diff_raw_dB[np.abs(vis_in_raw) <= max(1.0, 1e-3*max_amplitude)] = 0
+                                        abs_phase_diff_raw_dB[abs_vis_out_raw_from_dB <= max(1.0, 1e-3*max_amplitude)] = 0
+
+                                        amplitude_threshold = 0.10 * max_amplitude
+                                        phase_threshold = 0.025 * 2 * np.pi
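+                                        # i.e. amplitudes may deviate up to 10% of the input maximum,
+                                        # and phases up to 2.5% of a full turn (9 degrees)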
+
+                                        for i in range(vis_in_raw.shape[0]):
+                                            for j in range(vis_in_raw.shape[1]):
+                                                for k in range(vis_in_raw.shape[2]):
+                                                    for l in range(vis_in_raw.shape[3]):
+                                                        self.assertLess(abs_diff_raw[i,j,k,l],    amplitude_threshold)
+                                                        self.assertLess(abs_diff_raw_dB[i,j,k,l], amplitude_threshold)
+                                                        self.assertLess(abs_phase_diff_raw[i,j,k,l],    phase_threshold)
+                                                        self.assertLess(abs_phase_diff_raw_dB[i,j,k,l], phase_threshold)
+        finally:
+            logger.info('removing test file: %s', path)
+            os.remove(path)
+
+    def test_12_to13_conversion(self):
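+        '''writes a current (v1.3) h5 file, manually reverts it to v1.2 with the old
+        incorrect phases and per-subband scale factors, and then checks that reading
+        triggers the automatic 1.2 -> 1.3 conversion with corrected phases.'''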
+        path = tempfile.mkstemp()[1]
+
+        try:
+            max_amplitude = 1000
+            saps_in = create_hypercube(num_saps=1,
+                                       num_stations=2, num_timestamps=32,
+                                       num_subbands_per_sap={0: 32},
+                                       snr=1.0, max_signal_amplitude=max_amplitude,
+                                       parallel_to_cross_polarization_ratio=1.0)
+
+            # write the hypercube into an h5 file...
+            # this currently results in a v1.3 file
+            write_hypercube(path, saps_in, sas_id=123456)
+
+            # check if version is 1.3
+            with h5py.File(path, "r") as file:
+                version_str = file['version'][0]
+                self.assertEqual('1.3', version_str)
+
+            # change version back to 1.2
+            # and modify visibility data to have the 1.2 incorrect phases
+            with h5py.File(path, "r+") as file:
+                # revert version...
+                file['version'][0] = '1.2'
+
+                # revert visibilities.
+                # Use saps_in's visibilities and the old hdf5_io code to compute and store the incorrect phases.
+                for sap_nr in sorted(saps_in.keys()):
+                    visibilities_in = saps_in[sap_nr]['visibilities']
+                    subbands = saps_in[sap_nr]['subbands']
+
+                    # this is v1.2's incorrect dB conversion messing up the phases
+                    visibilities_dB = 10.0 * np.log10(visibilities_in)
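+                    # (np.log10 of a complex number is log10(|z|) + 1j*angle(z)/ln(10),
+                    #  so the factor 10 also scales the phase by 10/ln(10) ~ 4.34: hence the corrupted phases)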
+                    abs_vis_dB = np.absolute(visibilities_dB)
+
+                    # this is v1.2's way of computing the scale factors: per subband only,
+                    # mapping the visibilities_dB of each subband from complex64 to 2xint8
+                    scale_factors = np.empty((len(subbands),), dtype=np.float32)
+                    for sb_nr in range(len(subbands)):
+                        # use the 99.9 percentile instead of the max to get rid of spikes
+                        max_abs_vis_sb = np.percentile(abs_vis_dB[:, :, sb_nr, :], 99.9)
+                        scale_factor = 127.0 / max_abs_vis_sb
+                        scale_factors[sb_nr] = 1.0 / scale_factor
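+                        # e.g. a subband whose 99.9 percentile amplitude is 63.5 dB gets
+                        # scale_factor 127/63.5 = 2.0 (its inverse, 0.5, is what is stored),
+                        # mapping that subband's values onto the full int8 range of +-127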
+
+                    # overwrite the visibility_scale_factors in the file with the v1.2 values
+                    sap_group = file['measurement/saps/%s'%(sap_nr,)]
+                    del sap_group['visibility_scale_factors']
+                    sap_group.create_dataset('visibility_scale_factors', data=scale_factors)
+
+                    extended_shape = visibilities_dB.shape[:] + (2,)
+                    scaled_visibilities = np.empty(extended_shape, dtype=np.int8)
+                    for sb_nr in range(len(subbands)):
+                        scale_factor = 1.0 / scale_factors[sb_nr]
+                        scaled_visibilities[:, :, sb_nr, :, 0] = scale_factor * visibilities_dB[:, :, sb_nr, :].real
+                        scaled_visibilities[:, :, sb_nr, :, 1] = scale_factor * visibilities_dB[:, :, sb_nr, :].imag
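+                    # each complex64 visibility (8 bytes) is thus stored as a (real, imag)
+                    # pair of int8's (2 bytes), a factor 4 size reduction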
+
+                    # overwrite the visibilities in the file with the v1.2 values
+                    del sap_group['visibilities']
+                    sap_group.create_dataset('visibilities', data=scaled_visibilities)
+
+            # check if version is 1.2
+            with h5py.File(path, "r") as file:
+                version_str = file['version'][0]
+                self.assertEqual('1.2', version_str)
+
+            # reading the 1.2 file should result in automatic conversion to 1.3 and correction of phases
+            result_raw = read_hypercube(path, visibilities_in_dB=False, python_datetimes=True)
+
+            # check if version is now 1.3
+            with h5py.File(path, "r") as file:
+                version_str = file['version'][0]
+                self.assertEqual('1.3', version_str)
+
+            # read in dB as well because we usually plot the visibilities in dB
+            result_dB = read_hypercube(path, visibilities_in_dB=True, python_datetimes=True)
+
+            saps_out_raw = result_raw['saps']
+            saps_out_dB = result_dB['saps']
+
+            for sap_nr, sap_in_raw in saps_in.items():
+                sap_out_raw = saps_out_raw[sap_nr]
+                sap_out_dB = saps_out_dB[sap_nr]
+
+                # compare all in/out visibilities
+                vis_in_raw = sap_in_raw['visibilities']
+                vis_out_raw = sap_out_raw['visibilities']
+                vis_out_dB = sap_out_dB['visibilities']
+
+                # for the raw visibilities, comparison is easy...
+                # just check the differences in amplitude and in phase
+                abs_diff_raw = np.abs(vis_in_raw) - np.abs(vis_out_raw)
+                abs_phase_diff_raw = np.abs(np.unwrap(np.angle(vis_in_raw) - np.angle(vis_out_raw), axis=2))
+                # phase has no 'meaning' for small (insignificant) amplitudes,
+                # so just set the phase difference to zero there
+                abs_phase_diff_raw[np.abs(vis_in_raw) <= max(1.0, 1e-3 * max_amplitude)] = 0
+                abs_phase_diff_raw[np.abs(vis_out_raw) <= max(1.0, 1e-3 * max_amplitude)] = 0
+
+                # for the visibilities in dB, only the amplitudes are converted to dB;
+                # the phases are stored unchanged and should equal the input phases.
+                # the amplitudes, however, need conversion from dB back to raw first.
+                abs_vis_out_raw_from_dB = np.power(10, 0.1 * np.abs(vis_out_dB))
+                abs_diff_raw_dB = np.abs(vis_in_raw) - abs_vis_out_raw_from_dB
+                abs_phase_diff_raw_dB = np.abs(np.unwrap(np.angle(vis_in_raw) - np.angle(vis_out_dB), axis=2))
+                # phase has no 'meaning' for small (insignificant) amplitudes, so just set it to zero there
+                abs_phase_diff_raw_dB[np.abs(vis_in_raw) <= max(1.0, 1e-3 * max_amplitude)] = 0
+                abs_phase_diff_raw_dB[abs_vis_out_raw_from_dB <= max(1.0, 1e-3 * max_amplitude)] = 0
+
+                amplitude_threshold = 0.10 * max_amplitude
+                phase_threshold = 0.025 * 2 * np.pi
+
+                for i in range(vis_in_raw.shape[0]):
+                    for j in range(vis_in_raw.shape[1]):
+                        for k in range(vis_in_raw.shape[2]):
+                            for l in range(vis_in_raw.shape[3]):
+                                self.assertLess(abs_diff_raw[i, j, k, l], amplitude_threshold)
+                                self.assertLess(abs_diff_raw_dB[i, j, k, l], amplitude_threshold)
+                                self.assertLess(abs_phase_diff_raw[i, j, k, l], phase_threshold)
+                                self.assertLess(abs_phase_diff_raw_dB[i, j, k, l], phase_threshold)
+        finally:
+            logger.info('removing test file: %s', path)
+            os.remove(path)
+
     def test_combine_hypercubes(self):
         logger.info('test_combine_hypercubes')
 
         paths = []
         try:
             logger.info('generating test data')
-            num_saps=3
-            num_stations=7
-            num_timestamps=11
-            saps_in = create_hypercube(num_saps=num_saps, num_stations=num_stations, num_timestamps=num_timestamps)
+            num_saps=2
+            num_stations=2
+            num_timestamps=2
+            MAX_AMPLITUDE = 100
+            saps_in = create_hypercube(num_saps=num_saps, num_stations=num_stations,
+                                       num_timestamps=num_timestamps, num_subbands_per_sap={0:1,1:1},
+                                       snr=1.0, max_signal_amplitude=MAX_AMPLITUDE)
 
             #write each sap to a seperate file
             for sap_nr, sap_in in saps_in.items():
@@ -134,8 +367,8 @@ class TestHdf5_IO(unittest.TestCase):
                 error = np.absolute(diff/sap_in['visibilities'])
 
                 median_error = np.median(error)
-                self.assertTrue(median_error < 0.05)
                 logger.info('median error %s < threshold %s', median_error, 0.05)
+                self.assertTrue(median_error < 0.05)
 
         finally:
             for path in paths:
diff --git a/QA/QA_Common/test/t_hdf5_io.run b/QA/QA_Common/test/t_hdf5_io.run
index 91d84bee6d0d464367988c328389f67ec7d0e59d..d0c3f18b8b6fcdb1dea8e85f97ca91d49a8f926b 100755
--- a/QA/QA_Common/test/t_hdf5_io.run
+++ b/QA/QA_Common/test/t_hdf5_io.run
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
diff --git a/QA/QA_Common/test/t_hdf5_io.sh b/QA/QA_Common/test/t_hdf5_io.sh
index 62cd139a238a701b6de2dde05de60af164de5e6e..d2a83354c226bab334dd7a0203e89ec69a7ad33e 100755
--- a/QA/QA_Common/test/t_hdf5_io.sh
+++ b/QA/QA_Common/test/t_hdf5_io.sh
@@ -1,6 +1,6 @@
 #!/bin/sh
 
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
diff --git a/QA/QA_Service/CMakeLists.txt b/QA/QA_Service/CMakeLists.txt
index 99225b799351411d067de8c4ee195a1375af9131..8f7fde3cc354bfb9e6c7a4fc4426f7b0d2bd6f1f 100644
--- a/QA/QA_Service/CMakeLists.txt
+++ b/QA/QA_Service/CMakeLists.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
diff --git a/QA/QA_Service/bin/CMakeLists.txt b/QA/QA_Service/bin/CMakeLists.txt
index 51b7f46c1124c5dd50fe2c9a9650c16ed262f2f6..f8cb80f52437e68456840952f4467a3d3cb6b330 100644
--- a/QA/QA_Service/bin/CMakeLists.txt
+++ b/QA/QA_Service/bin/CMakeLists.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -22,5 +22,6 @@ lofar_add_bin_scripts(qa_service)
 # supervisord config files
 install(FILES
   qa_service.ini
+  qa_webservice.ini
   DESTINATION etc/supervisord.d)
 
diff --git a/QA/QA_Service/bin/qa_service b/QA/QA_Service/bin/qa_service
index 64c50190c7bba7fba57d69ace8e5975a4f160f79..33e40bc973feb0d831b4bdddd6159728830e313a 100755
--- a/QA/QA_Service/bin/qa_service
+++ b/QA/QA_Service/bin/qa_service
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
diff --git a/QA/QA_Service/bin/qa_webservice.ini b/QA/QA_Service/bin/qa_webservice.ini
new file mode 100644
index 0000000000000000000000000000000000000000..b8a26b507d06b250f2dfb32c5c3341df33e92324
--- /dev/null
+++ b/QA/QA_Service/bin/qa_webservice.ini
@@ -0,0 +1,10 @@
+; supervisord ini file which starts the adder_clustering docker image on head.cep4, running the webservice for the adder inspection plots
+
+[program:qa_webservice]
+command=ssh -T -q -o StrictHostKeyChecking=no lofarsys@head.cep4.control.lofar docker run --rm --net=host -v /data/qa:/opt/adder/data/qa -u `id -u`:`id -g` -v /etc/passwd:/etc/passwd:ro -v /etc/group:/etc/group:ro -v $HOME:$HOME -e HOME=$HOME -e USER=$USER -w $HOME adder_clustering:latest webandmapvis.sh
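+; note: the -v options mount /data/qa, /etc/passwd, /etc/group and $HOME into the container,
+; and -u `id -u`:`id -g` runs it as the calling user, so files created inside docker
+; get the correct ownership on the host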
+user=lofarsys
+stopsignal=INT ; KeyboardInterrupt
+stopasgroup=true ; bash does not propagate signals
+stdout_logfile=%(program_name)s.log
+redirect_stderr=true
+stderr_logfile=NONE
diff --git a/QA/QA_Service/lib/CMakeLists.txt b/QA/QA_Service/lib/CMakeLists.txt
index ed5533b3b616680fa748d52b95f6807fc68a7ae5..a3e87e5a7cbc3ecbc3d8d0bf51eba64dd5f17dca 100644
--- a/QA/QA_Service/lib/CMakeLists.txt
+++ b/QA/QA_Service/lib/CMakeLists.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
diff --git a/QA/QA_Service/lib/__init__.py b/QA/QA_Service/lib/__init__.py
index 491a020ffc2353c60f86a2d734eb9d846bd22054..4f54da1af6a2548fa7ac163d34990380f2139bf9 100644
--- a/QA/QA_Service/lib/__init__.py
+++ b/QA/QA_Service/lib/__init__.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
diff --git a/QA/QA_Service/lib/config.py b/QA/QA_Service/lib/config.py
index dc54af1997319e566524eeefb75fec188cdd2618..ccd9e891949e7746da451e170e1f8488fd8d1928 100644
--- a/QA/QA_Service/lib/config.py
+++ b/QA/QA_Service/lib/config.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -21,3 +21,7 @@ from lofar.messaging import adaptNameToEnvironment
 
 DEFAULT_QA_NOTIFICATION_BUSNAME= adaptNameToEnvironment('lofar.qa.notification')
 DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX='QA'
+
+from lofar.sas.otdb.config import DEFAULT_OTDB_NOTIFICATION_BUSNAME
+DEFAULT_QA_OTDB_NOTIFICATION_BUSNAME = "%s.for.qa_service" % (DEFAULT_OTDB_NOTIFICATION_BUSNAME,)
+
diff --git a/QA/QA_Service/lib/qa_service.py b/QA/QA_Service/lib/qa_service.py
index c8fcbba860301db6bdb68c3fa813edaf4e21c323..7db9201fb8c08cf5014b57904205115ac0db5eff 100644
--- a/QA/QA_Service/lib/qa_service.py
+++ b/QA/QA_Service/lib/qa_service.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -23,7 +23,8 @@ from subprocess import call, Popen, PIPE, STDOUT
 from optparse import OptionParser, OptionGroup
 from lofar.common.util import waitForInterrupt
 from lofar.sas.otdb.OTDBBusListener import OTDBBusListener
-from lofar.sas.otdb.config import DEFAULT_OTDB_NOTIFICATION_BUSNAME, DEFAULT_OTDB_NOTIFICATION_SUBJECT
+from lofar.qa.service.config import DEFAULT_QA_OTDB_NOTIFICATION_BUSNAME
+from lofar.sas.otdb.config import DEFAULT_OTDB_NOTIFICATION_SUBJECT
 from lofar.messaging.messagebus import ToBus
 from lofar.messaging.messages import EventMessage
 from lofar.qa.service.config import DEFAULT_QA_NOTIFICATION_BUSNAME, DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX
@@ -34,15 +35,15 @@ logger = logging.getLogger(__name__)
 #TODO:  idea: convert periodically while observing?
 
 class QAService(OTDBBusListener):
-    """
+    '''
     QAService listens on the lofar otdb message bus for NotificationMessages and starts qa processes
     upon observation/pipeline completion. The qa processes convert MS (measurement sets) to hdf5 qa files,
     and then starts generating plots from the hdf5 file.
-    """
+    '''
     def __init__(self,
                  qa_notification_busname=DEFAULT_QA_NOTIFICATION_BUSNAME,
                  qa_notification_subject_prefix=DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX,
-                 otdb_notification_busname=DEFAULT_OTDB_NOTIFICATION_BUSNAME,
+                 otdb_notification_busname=DEFAULT_QA_OTDB_NOTIFICATION_BUSNAME,
                  otdb_notification_subject=DEFAULT_OTDB_NOTIFICATION_SUBJECT,
                  broker=None,
                  qa_base_dir = '/data/qa',
@@ -52,7 +53,7 @@ class QAService(OTDBBusListener):
         See also the superclass, OTDBBusListener.
         :param string qa_notification_busname: valid Qpid address (default: DEFAULT_QA_NOTIFICATION_BUSNAME)
         :param string qa_notification_subject: the subject to listen for. (default: DEFAULT_QA_NOTIFICATION_SUBJECT)
-        :param string otdb_notification_busname: valid Qpid address (default: DEFAULT_OTDB_NOTIFICATION_BUSNAME)
+        :param string otdb_notification_busname: valid Qpid address (default: DEFAULT_QA_OTDB_NOTIFICATION_BUSNAME)
         :param string otdb_notification_subject: the subject to listen for. (default: DEFAULT_OTDB_NOTIFICATION_SUBJECT)
         :param broker: valid Qpid broker host (default: None, which means localhost)
         """
@@ -66,36 +67,36 @@ class QAService(OTDBBusListener):
         self.qa_base_dir = qa_base_dir
 
     def start_listening(self, numthreads=None):
-        """
+        '''
         start listening and open event _send_bus. This method is called in __enter__ when using 'with' context.
-        """
+        '''
         super(QAService, self).start_listening(numthreads=numthreads)
         self._send_bus.open()
 
     def stop_listening(self):
-        """
+        '''
         stop listening and close event _send_bus. This method is called in __exit__ when using 'with' context.
-        """
+        '''
         super(QAService, self).stop_listening()
         self._send_bus.close()
 
     def onObservationCompleting(self, otdb_id, modificationTime):
-        """
+        '''
         this mehod is called automatically upon receiving a Completion NotificationMessage
         :param int otdb_id: the task's otdb database id
         :param datetime modificationTime: timestamp when the task's status changed to completing
         :return: None
-        """
+        '''
         logger.info("task with otdb_id %s completed.", otdb_id)
         self.do_qa(otdb_id=otdb_id)
 
     def do_qa(self, otdb_id):
-        """
+        '''
         try to do all qa (quality assurance) steps for the given otdb_id
         resulting in an h5 MS-extract file and inspection plots
         :param int otdb_id: observation/pipeline otdb id for which the conversion needs to be done.
         :return: None
-        """
+        '''
         hdf5_file_path = self._convert_ms2hdf5(otdb_id)
         if hdf5_file_path:
             self._cluster_h5_file(hdf5_file_path, otdb_id)
@@ -117,14 +118,14 @@ class QAService(OTDBBusListener):
             logger.error('Could not send event message: %s', e)
 
     def _convert_ms2hdf5(self, otdb_id):
-        """
+        '''
         convert the MS for the given otdb_id to an h5 MS-extract file.
         The conversion will run via ssh on cep4 with massive parellelization.
         When running on cep4, it is assumed that a docker image called adder exists on head.cep4
         When running locally, it is assumed that ms2hdf5 is installed locally.
         :param int otdb_id: observation/pipeline otdb id for which the conversion needs to be done.
         :return string: path to the generated h5 file.
-        """
+        '''
         try:
             logger.info('trying to convert MS uv dataset with otdb_id %s if any', otdb_id)
 
@@ -158,43 +159,45 @@ class QAService(OTDBBusListener):
         return None
 
     def _create_plots_for_h5_file(self, hdf5_path, otdb_id=None):
-        """
+        '''
         create plots for the given h5 file. The plots are created via an ssh call to cep4
         where the plots are created in parallel in the docker image.
         :param hdf5_path: the full path to the hdf5 file for which we want the plots.
         :param otdb_id: the otdb_id of the converted observation/pipeline (is used for logging only)
         :return: the full directory path to the directory containing the created plots.
-        """
+        '''
         try:
             #use default cep4 qa output dir.
             plot_dir_path = os.path.join(self.qa_base_dir, 'inspectionplots')
 
-            cmd = ['plot_hdf5_dynamic_spectra',
-                   '-o %s' % (plot_dir_path,),
-                   '--mixed',
-                   '-n', '0',
-                   '--force',
-                   hdf5_path]
-
-            # wrap the command in a cep4 ssh call to docker container
-            cmd = wrap_command_for_docker(cmd, 'adder', 'latest')
-            cmd = wrap_command_in_cep4_available_cpu_node_with_lowest_load_ssh_call(cmd)
-
-            logger.info('generating plots for otdb_id %s, executing: %s', otdb_id, ' '.join(cmd))
-
-            if call(cmd) == 0:
-                plot_dir_path = os.path.join(plot_dir_path, 'L%s' % otdb_id)
-                logger.info('generated plots for otdb_id %s in %s', otdb_id, plot_dir_path)
-
-                self._send_event_message('CreatedInspectionPlots', {'otdb_id': otdb_id,
-                                                                    'hdf5_file_path': hdf5_path,
-                                                                    'plot_dir_path': plot_dir_path})
-
-                return plot_dir_path
-            else:
-                msg = 'could not generate plots for otdb_id %s' % otdb_id
-                logger.error(msg)
-                self._send_event_message('Error', {'otdb_id': otdb_id, 'message': msg})
+            for plot_type in ['-1', '-4']: # plot dynspec and delay-rate
+                cmd = ['plot_hdf5_dynamic_spectra',
+                       '-o %s' % (plot_dir_path,),
+                       '--mixed', # plot amplitude autocors, and complex crosscors
+                       plot_type,
+                       '-n', '0',
+                       '--force',
+                       hdf5_path]
+
+                # wrap the command in a cep4 ssh call to docker container
+                cmd = wrap_command_for_docker(cmd, 'adder', 'latest')
+                cmd = wrap_command_in_cep4_available_cpu_node_with_lowest_load_ssh_call(cmd)
+
+                logger.info('generating plots for otdb_id %s, executing: %s', otdb_id, ' '.join(cmd))
+
+                if call(cmd) == 0:
+                    # use a separate variable for the task's plot dir; reassigning plot_dir_path
+                    # itself would make the second plot_type iteration join 'L<otdb_id>' twice
+                    task_plot_dir_path = os.path.join(plot_dir_path, 'L%s' % otdb_id)
+                    logger.info('generated plots for otdb_id %s in %s', otdb_id, task_plot_dir_path)
+
+                    self._send_event_message('CreatedInspectionPlots', {'otdb_id': otdb_id,
+                                                                        'hdf5_file_path': hdf5_path,
+                                                                        'plot_dir_path': task_plot_dir_path})
+                else:
+                    msg = 'could not generate plots for otdb_id %s' % otdb_id
+                    logger.error(msg)
+                    self._send_event_message('Error', {'otdb_id': otdb_id, 'message': msg})
+                    return None
+            return task_plot_dir_path
         except Exception as e:
             logging.exception('error in _create_plots_for_h5_file: %s', e)
             self._send_event_message('Error', {'otdb_id': otdb_id, 'message': e.message})
@@ -202,7 +205,7 @@ class QAService(OTDBBusListener):
 
 
     def _cluster_h5_file(self, hdf5_path, otdb_id=None):
-        """
+        '''
         Try to cluster the baselines based on visibilities in the h5 file
         using the clustering docker image developed by e-science.
         This method assumes the adder_clustering docker image is available on cep4. If not, or if anything else
@@ -214,7 +217,7 @@ class QAService(OTDBBusListener):
         :param hdf5_path: the full path to the hdf5 file for which we want the plots.
         :param otdb_id: the otdb_id of the converted observation/pipeline (is used for logging only)
         :return: None
-        """
+        '''
         try:
             # the command to cluster the given h5 file (executed in the e-science adder docker image)
             cmd = ['cluster_this.py', hdf5_path]
@@ -239,9 +242,9 @@ class QAService(OTDBBusListener):
 
 
 def main():
-    """
+    '''
     Run the qa service program with commandline arguments.
-    """
+    '''
 
     # Check the invocation arguments
     parser = OptionParser("%prog [options]",
@@ -251,7 +254,7 @@ def main():
     group = OptionGroup(parser, 'QPid Messaging options')
     group.add_option('-q', '--broker', dest='broker', type='string', default='localhost', help='Address of the qpid broker, default: %default')
     group.add_option("--otdb_notification_busname", dest="otdb_notification_busname", type="string",
-                      default=DEFAULT_OTDB_NOTIFICATION_BUSNAME,
+                      default=DEFAULT_QA_OTDB_NOTIFICATION_BUSNAME,
                       help="Bus or queue where the OTDB notifications are published. [default: %default]")
     group.add_option("--otdb_notification_subject", dest="otdb_notification_subject", type="string",
                       default=DEFAULT_OTDB_NOTIFICATION_SUBJECT,
@@ -265,7 +268,7 @@ def main():
     #start the qa service
     with QAService(otdb_notification_busname=options.otdb_notification_busname,
                    otdb_notification_subject=options.otdb_notification_subject,
-                   broker=options.broker):
+                   broker=options.broker) as service:
         #loop and wait for messages or interrupt.
         waitForInterrupt()
 
diff --git a/QA/QA_Service/test/CMakeLists.txt b/QA/QA_Service/test/CMakeLists.txt
index 7d5201dab79068a633d73ef93fd0f87e0a00302b..6c2f2e66afa83079482e3c08dd54d6a3981cbaa9 100644
--- a/QA/QA_Service/test/CMakeLists.txt
+++ b/QA/QA_Service/test/CMakeLists.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
diff --git a/QA/QA_Service/test/t_qa_service.py b/QA/QA_Service/test/t_qa_service.py
index 98ded88a4c58fdc82cd0e86ba5014ee694f6c29b..c1cf167781f25a4e22f7140f23f7620ee9a235f4 100755
--- a/QA/QA_Service/test/t_qa_service.py
+++ b/QA/QA_Service/test/t_qa_service.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
@@ -46,11 +46,11 @@ from lofar.sas.otdb.config import DEFAULT_OTDB_NOTIFICATION_SUBJECT
 # the tests below test is multi threaded (even multi process)
 # define a QABusListener-derivative to handle synchronization (set the *_events)
 class SynchronizingQABusListener(QABusListener):
-    """
+    '''
     the tests below test is multi threaded (even multi process)
     this QABusListener-derivative handles synchronization (set the *_events)
     and stores the msg_content results for expected result checking
-    """
+    '''
     def __init__(self, busname):
         super(SynchronizingQABusListener, self).__init__(busname=busname)
         self.converted_event = Event()
@@ -81,15 +81,15 @@ class SynchronizingQABusListener(QABusListener):
 
 
 class TestQAService(unittest.TestCase):
-    """
+    '''
     Tests for the QAService class
-    """
+    '''
     def setUp(self):
-        """
+        '''
         quite complicated setup to setup test qpid exhanges
         and mock away ssh calls to cep4
         and mock away dockerized commands
-        """
+        '''
         # setup broker connection
         self.connection = Connection.establish('127.0.0.1')
         self.broker = BrokerAgent(self.connection)
@@ -168,7 +168,7 @@ class TestQAService(unittest.TestCase):
             self.connection.close()
 
     def send_otdb_task_completing_event(self):
-        """helper method: create a ToBus and send a completing EventMessage"""
+        '''helper method: create a ToBus and send a completing EventMessage'''
         with ToBus(self.busname) as sender:
             msg = EventMessage(context=DEFAULT_OTDB_NOTIFICATION_SUBJECT,
                                content={"treeID": self.TEST_OTDB_ID,
@@ -177,12 +177,12 @@ class TestQAService(unittest.TestCase):
             sender.send(msg)
 
     def test_01_qa_service_for_expected_behaviour(self):
-        """
+        '''
         This test starts a QAService, triggers a test observation completing event,
         and tests if the generated h5 file and plots are as expected.
         It is an end-to-end test which does not check the intermediate results. It is assumed that
         the intermediate steps are tested in other tests/modules.
-        """
+        '''
 
         logger.info(' -- test_01_qa_service_for_expected_behaviour -- ')
 
@@ -196,19 +196,19 @@ class TestQAService(unittest.TestCase):
 
                 mocked_cmd = [create_test_hypercube_path, '-s 4', '-S 8', '-t 16',
                               '-o', str(self.TEST_OTDB_ID), self.TEST_H5_PATH]
-                logger.info("""mocked_wrap_command_for_docker returning mocked command to create test h5 file: '%s', instead of original command: '%s' """,
+                logger.info('''mocked_wrap_command_for_docker returning mocked command to create test h5 file: '%s', instead of original command: '%s' ''',
                             ' '.join(mocked_cmd), ' '.join(cmd))
                 return mocked_cmd
 
-            if 'cluster_this.py' in cmd or 'plot_hdf5_dynamic_spectra' in cmd:
+            if 'cluster_this.py' in cmd:
                 # replace the cluster command which runs normally in the docker container
                 # by a call to bash true, so the 'cluster_this' call returns 0 exit code
                 mocked_cmd = ['true']
-                logger.info("""mocked_wrap_command_for_docker returning mocked command: '%s', instead of original command: '%s' """,
+                logger.info('''mocked_wrap_command_for_docker returning mocked command: '%s', instead of original command: '%s' ''',
                             ' '.join(mocked_cmd), ' '.join(cmd))
                 return mocked_cmd
 
-            logger.info("""mocked_wrap_command_for_docker returning original command: '%s' """, ' '.join(cmd))
+            logger.info('''mocked_wrap_command_for_docker returning original command: '%s' ''', ' '.join(cmd))
             return cmd
 
         self.wrap_command_for_docker_mock.side_effect = mocked_wrap_command_for_docker
@@ -261,20 +261,18 @@ class TestQAService(unittest.TestCase):
                 # check if the output dirs/files exist
                 self.assertTrue(os.path.exists(qa_listener.plotted_msg_content['hdf5_file_path']))
                 logger.info(qa_listener.plotted_msg_content['plot_dir_path'])
+                self.assertTrue(os.path.exists(qa_listener.plotted_msg_content['plot_dir_path']))
+                plot_file_names = [f for f in os.listdir(qa_listener.plotted_msg_content['plot_dir_path'])
+                                   if f.endswith('png')]
+                self.assertEqual(10, len(plot_file_names))
 
-                #DISABLED checks for plots because we mock plot_hdf5_dynamic_spectra
-                #self.assertTrue(os.path.exists(qa_listener.plotted_msg_content['plot_dir_path']))
-                #plot_file_names = [f for f in os.listdir(qa_listener.plotted_msg_content['plot_dir_path'])
-                                   #if f.endswith('png')]
-                #self.assertEqual(10, len(plot_file_names))
-
-                #auto_correlation_plot_file_names = [f for f in plot_file_names
-                                                    #if 'auto' in f]
-                #self.assertEqual(4, len(auto_correlation_plot_file_names))
+                auto_correlation_plot_file_names = [f for f in plot_file_names
+                                                    if 'auto' in f]
+                self.assertEqual(4, len(auto_correlation_plot_file_names))
 
-                #complex_plot_file_names = [f for f in plot_file_names
-                                           #if 'complex' in f]
-                #self.assertEqual(6, len(complex_plot_file_names))
+                complex_plot_file_names = [f for f in plot_file_names
+                                           if 'complex' in f]
+                self.assertEqual(6, len(complex_plot_file_names))
 
                 # start waiting until QAFinished event message received (or timeout)
                 qa_listener.finished_event.wait(30)
@@ -294,12 +292,12 @@ class TestQAService(unittest.TestCase):
                 self.ssh_cmd_list_mock.assert_not_called()
 
     def test_02_qa_service_for_error_in_ms2hdf5(self):
-        """
+        '''
         This test starts a QAService, triggers a test observation completing event,
         and tests if the conversion from MS to hdf5 fails (by intention).
         It is an end-to-end test which does not check the intermediate results. It is assumed that
         the intermediate steps are tested in other tests/modules.
-        """
+        '''
 
         logger.info(' -- test_02_qa_service_for_error_in_ms2hdf5 -- ')
 
@@ -342,13 +340,13 @@ class TestQAService(unittest.TestCase):
                 self.ssh_cmd_list_mock.assert_not_called()
 
     def test_03_qa_service_for_error_in_creating_plots(self):
-        """
+        '''
         This test starts a QAService, triggers a test observation completing event,
         and tests if the conversion from MS to hdf5 works,
         but the plot generation fails (by intention).
         It is an end-to-end test which does not check the intermediate results. It is assumed that
         the intermediate steps are tested in other tests/modules.
-        """
+        '''
 
         logger.info(' -- test_03_qa_service_for_error_in_creating_plots -- ')
 
@@ -420,12 +418,12 @@ class TestQAService(unittest.TestCase):
                 self.ssh_cmd_list_mock.assert_not_called()
 
     def test_04_qa_service_for_error_ssh(self):
-        """
+        '''
         This test starts a QAService, triggers a test observation completing event,
         and tests if conversion fails due to an intentionally failing (mocked) ssh call.
         It is an end-to-end test which does not check the intermediate results. It is assumed that
         the intermediate steps are tested in other tests/modules.
-        """
+        '''
 
         logger.info(' -- test_04_qa_service_for_error_ssh -- ')
 
@@ -472,4 +470,4 @@ if __name__ == '__main__':
         exit(3)
 
     #run the unit tests
-    unittest.main()
+    unittest.main(defaultTest='TestQAService.test_01_qa_service_for_expected_behaviour')
diff --git a/QA/QA_Service/test/t_qa_service.run b/QA/QA_Service/test/t_qa_service.run
index e70f75e609424c6bdbd89ca326982b2c61312297..d7f90bc01c6ee65646e2fe5bded22f08ae6e235d 100755
--- a/QA/QA_Service/test/t_qa_service.run
+++ b/QA/QA_Service/test/t_qa_service.run
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.
diff --git a/QA/QA_Service/test/t_qa_service.sh b/QA/QA_Service/test/t_qa_service.sh
index 5276ddeed8424eec743b96a02ae6e3c47a87b848..be3cafb42ba4503ee5852669cb32c238c87f46af 100755
--- a/QA/QA_Service/test/t_qa_service.sh
+++ b/QA/QA_Service/test/t_qa_service.sh
@@ -1,6 +1,6 @@
 #!/bin/sh
 
-# Copyright (C) 2018  ASTRON (Netherlands Institute for Radio Astronomy)
+# Copyright (C) 2012-2015  ASTRON (Netherlands Institute for Radio Astronomy)
 # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
 #
 # This file is part of the LOFAR software suite.