diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/t_resource_estimator.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/t_resource_estimator.py
index 27a1ad8871f4f89ee05febadb27d0f0e7039048e..6934b88e9af55c0ed68dc157ed5a9a0f1d8ca180 100755
--- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/t_resource_estimator.py
+++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/t_resource_estimator.py
@@ -1,104 +1,146 @@
 #!/usr/bin/env python
 
-print "TODO: fix this test"
-exit(3)
-
 import unittest
 import uuid
 import datetime
 import logging
 from lofar.messaging import RPC, RPCException
+from lofar.sas.resourceassignment.resourceassignmentestimator.service import ResourceEstimatorHandler
 from lofar.sas.resourceassignment.resourceassignmentestimator.service import createService
 from lofar.sas.resourceassignment.resourceassignmentestimator.test.testset import TestSet
 
 logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
 logger = logging.getLogger(__name__)
 
-try:
-    from qpid.messaging import Connection
-    from qpidtoollibs import BrokerAgent
-except ImportError:
-    print 'Cannot run test without qpid tools'
-    print 'Please source qpid profile'
-    exit(3)
-
-try:
-    # setup broker connection
-    connection = Connection.establish('127.0.0.1')
-    broker = BrokerAgent(connection)
-
-    # add test service busname
-    busname = 'test-lofarbus-raestimator-%s' % (uuid.uuid1())
-    broker.addExchange('topic', busname)
-
-    class Test1(unittest.TestCase):
-        '''Test'''
-
-        def test(self):
-            '''basic test '''
-            self.maxDiff = None
-            ts = TestSet()
-
-            # test observation
-            ts.add_observation()
-            with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
-                result = rpc(ts.test_dict() )
-            self.assertEqual(result[0], ts.valid_dict())
-
-            # test add beams
-            ts.add_observation_beams()
-            with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
-                result = rpc( ts.test_dict() )
-            self.assertEqual(result[0], ts.valid_dict())
-
-            # test add flys_eye
-            ts.enabble_flys_eye()
-            with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
-                result = rpc( ts.test_dict() )
-            self.assertEqual(result[0], ts.valid_dict())
-
-            # test add coherent_stokes
-            ts.enable_observations_coherent_stokes()
-            with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
-                result = rpc( ts.test_dict() )
-            self.assertEqual(result[0], ts.valid_dict())
-
-            # test add incoherent_stokes
-            ts.enable_observations_incoherent_stokes()
-            with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
-                result = rpc( ts.test_dict() )
-            self.assertEqual(result[0], ts.valid_dict())
-
-            # test add calibration_pipeline
-            ts.enable_calibration_pipeline()
-            with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
-                result = rpc( ts.test_dict() )
-            self.assertEqual(result[0], ts.valid_dict())
-
-            # test add longbaseline_pipeline
-            ts.enable_longbaseline_pipeline()
-            with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
-                result = rpc( ts.test_dict() )
-            self.assertEqual(result[0], ts.valid_dict())
-
-            # test add pulsar_pipeline
-            ts.enable_pulsar_pipeline()
-            with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
-                result = rpc( ts.test_dict() )
-            self.assertEqual(result[0], ts.valid_dict())
-
-            # test add image_pipeline
-            ts.enable_image_pipeline()
-            with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
-                result = rpc( ts.test_dict() )
-            self.assertEqual(result[0], ts.valid_dict())
-
-    # create and run the service
-    with createService(busname=busname):
-        # and run all tests
-        unittest.main()
 
-finally:
-    # cleanup test bus and exit
-    broker.delExchange(busname)
-    connection.close()
+class TestEstimations(unittest.TestCase):
+    """
+    Collection of tests verifying that the unit under test (uut) meets the resource estimation requirements.
+    """
+    def setUp(self):
+        self.uut = ResourceEstimatorHandler()
+        self.specification_tree = {
+            'otdb_id': 0,
+            'task_type': '',
+            'task_subtype': '',
+            'predecessors': [],
+            'specification': {}
+        }
+
+    def test_get_subtree_estimate_for_observation(self):
+        pass
+
+    def test_get_subtree_estimate_for_calibration_pipeline(self):
+        pass
+
+    def test_get_subtree_estimate_for_image_pipeline(self):
+        pass
+
+    def test_get_subtree_estimate_for_longbaseline_pipeline(self):
+        pass
+
+    def test_get_subtree_estimate_for_pulsar_pipeline(self):
+        pass
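+
+    # A possible shape for these estimation tests once the expected estimates are
+    # pinned down. This is a sketch only: the 'handle_message' call and its payload
+    # are assumptions and may differ from the actual ResourceEstimatorHandler API.
+    #
+    #     ts = TestSet()
+    #     ts.add_observation()
+    #     self.specification_tree['task_type'] = 'observation'
+    #     self.specification_tree['specification'] = ts.test_dict()
+    #     result = self.uut.handle_message({'specification_tree': self.specification_tree})
+    #     self.assertEqual(result, ts.valid_dict())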
+
+class TestServices(unittest.TestCase):
+    """
+    Collection of tests verifying that the unit under test (uut) meets the service requirements, i.e. how it
+    communicates with the outside world.
+    """
+    def setUp(self):
+        pass
+
+    def test_rpc_interface(self):
+        pass
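+
+    # The commented-out legacy test below shows how this could eventually be
+    # exercised end-to-end over the bus. A minimal sketch, assuming a local qpid
+    # broker and reusing the imports already present in this module:
+    #
+    #     busname = 'test-lofarbus-raestimator-%s' % uuid.uuid1()
+    #     with createService(busname=busname):
+    #         with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
+    #             result = rpc(TestSet().test_dict())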
+
+# try:
+#     from qpid.messaging import Connection
+#     from qpidtoollibs import BrokerAgent
+# except ImportError:
+#     print 'Cannot run test without qpid tools'
+#     print 'Please source qpid profile'
+#     exit(3)
+#
+# try:
+#     # setup broker connection
+#     connection = Connection.establish('127.0.0.1')
+#     broker = BrokerAgent(connection)
+#
+#     # add test service busname
+#     busname = 'test-lofarbus-raestimator-%s' % (uuid.uuid1())
+#     broker.addExchange('topic', busname)
+#
+#     class TestRAEstimator(unittest.TestCase):
+#         '''Test'''
+#
+#         def test(self):
+#             '''basic test '''
+#             self.maxDiff = None
+#             ts = TestSet()
+#
+#             # test observation
+#             ts.add_observation()
+#             with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
+#                 result = rpc(ts.test_dict() )
+#             self.assertEqual(result[0], ts.valid_dict())
+#
+#             # test add beams
+#             ts.add_observation_beams()
+#             with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
+#                 result = rpc( ts.test_dict() )
+#             self.assertEqual(result[0], ts.valid_dict())
+#
+#             # test add flys_eye
+#             ts.enable_flys_eye()
+#             with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
+#                 result = rpc( ts.test_dict() )
+#             self.assertEqual(result[0], ts.valid_dict())
+#
+#             # test add coherent_stokes
+#             ts.enable_observations_coherent_stokes()
+#             with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
+#                 result = rpc( ts.test_dict() )
+#             self.assertEqual(result[0], ts.valid_dict())
+#
+#             # test add incoherent_stokes
+#             ts.enable_observations_incoherent_stokes()
+#             with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
+#                 result = rpc( ts.test_dict() )
+#             self.assertEqual(result[0], ts.valid_dict())
+#
+#             # test add calibration_pipeline
+#             ts.enable_calibration_pipeline()
+#             with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
+#                 result = rpc( ts.test_dict() )
+#             self.assertEqual(result[0], ts.valid_dict())
+#
+#             # test add longbaseline_pipeline
+#             ts.enable_longbaseline_pipeline()
+#             with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
+#                 result = rpc( ts.test_dict() )
+#             self.assertEqual(result[0], ts.valid_dict())
+#
+#             # test add pulsar_pipeline
+#             ts.enable_pulsar_pipeline()
+#             with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
+#                 result = rpc( ts.test_dict() )
+#             self.assertEqual(result[0], ts.valid_dict())
+#
+#             # test add image_pipeline
+#             ts.enable_image_pipeline()
+#             with RPC('ResourceEstimation', busname=busname, timeout=3) as rpc:
+#                 result = rpc( ts.test_dict() )
+#             self.assertEqual(result[0], ts.valid_dict())
+#
+#     # create and run the service
+#     with createService(busname=busname):
+#         # and run all tests
+#         unittest.main()
+#
+# finally:
+#     # cleanup test bus and exit
+#     broker.delExchange(busname)
+#     connection.close()
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/testset.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/testset.py
index b861309fae143187b275173d9eb2b4e1f185a44f..b75c8ad545c15198b5ae94fdcff951f6161f1ffb 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/testset.py
+++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/test/testset.py
@@ -1,237 +1,237 @@
-#TODO: fix test
-
-#""" test parameterset for resource estimator
-#"""
-
-#from lofar.sas.resourceassignment.resourceassignmentestimator.resource_estimators import ParameterSet
-
-#class TestSet(object):
-    #def __init__(self):
-        #self.check_set = ParameterSet()
-        #self.valid_set = ParameterSet()
-
-    #def clear(self):
-        #self.check_set.clear()
-        #self.valid_set.clear()
-
-    #def test_dict(self):
-        #return self.check_set.get_set()
-
-    #def valid_dict(self):
-        #return self.valid_set.get_set()
-
-    ## for observation
-    #def add_observation(self):
-        #checkset = """
-        #observation.sample_clock= 200
-        #observation.duration=3600
-        #observation.channels_per_subband= 61
-        #observation.intergration_time= 1
-        #observation.antenna_mode= HBA_DUAL
-        #observation.stations= [CS001, CS002, RS307, RS509]
-        #observation.flys_eye.enabled= false
-        #observation.nr_beams= 0
-
-        ## coherent_stokes.type can be: DATA_TYPE_XXYY or DATA_TYPE_STOKES_IQUV
-        #observation.output.coherent_stokes.enabled= false
-        #observation.output.coherent_stokes.type=
-        #observation.output.coherent_stokes.integration_factor=
-
-        ## incoherent_stokes.type can be: DATA_TYPE_STOKES_IQUV
-        #observation.output.incoherent_stokes.enabled= false
-        #observation.output.incoherent_stokes.type=
-
-        ## for calibration-pipeline
-        #dp.output.correlated.enabled= false
-        #dp.output.correlated.demixing_settings.freq_step=
-        #dp.output.correlated.demixing_settings.time_step=
-        #dp.output.instrument_model.enabled= false
-
-        ## for longbaseline-pipeline
-        #dp.output.longbaseline.enabled= false
-        #dp.output.longbaseline.subband_groups_per_ms=
-        #dp.output.longbaseline.subbands_per_subband_group=
-
-        ## for pulsar-pipeline
-        #dp.output.pulsar.enabled= false
-
-        ## for image-pipeline
-        #dp.output.skyimage.enabled= false
-        #dp.output.skyimage.slices_per_image=
-        #dp.output.skyimage.subbands_per_image=
-        #"""
-        #self.check_set.import_string(checkset)
-        #validset = """
-        #observation.total_data_size=
-        #observation.total_bandwidth=
-        #observation.output_files.dp_correlated_uv.nr_files=
-        #observation.output_files.dp_correlated_uv.file_size=
-        #observation.output_files.dp_coherent_stokes.nr_files=
-        #observation.output_files.dp_coherent_stokes.file_size=
-        #observation.output_files.dp_incoherent_stokes.nr_files=
-        #observation.output_files.dp_incoherent_stokes.file_size=
-        #"""
-        #self.valid_set.import_string(validset)
-
-
-    #def add_observation_beams(self):
-        #checkset = """
-        #observation.nr_beams= 2
-        #observation.beam[0].nr_subbands= 400
-        #observation.beam[0].nr_tab_rings= 4
-        #observation.beam[0].tied_array_beam[0].coherent= true
-        #observation.beam[0].tied_array_beam[1].coherent= true
-        #observation.beam[1].nr_subbands= 400
-        #observation.beam[1].nr_tab_rings= 4
-        #observation.beam[1].tied_array_beam[0].coherent= true
-        #observation.beam[1].tied_array_beam[1].coherent= true
-        #"""
-        #self.check_set.import_string(checkset)
-        #self.check_set.import_string(checkset)
-        #validset = """
-        #observation.total_data_size=
-        #observation.total_bandwidth=
-        #observation.output_files.dp_correlated_uv.nr_files=
-        #observation.output_files.dp_correlated_uv.file_size=
-        #observation.output_files.dp_coherent_stokes.nr_files=
-        #observation.output_files.dp_coherent_stokes.file_size=
-        #observation.output_files.dp_incoherent_stokes.nr_files=
-        #observation.output_files.dp_incoherent_stokes.file_size=
-        #"""
-        #self.valid_set.import_string(validset)
-
-
-    #def enable_observations_coherent_stokes(self):
-        #checkset = """
-        ## coherent_stokes.type can be: DATA_TYPE_XXYY or DATA_TYPE_STOKES_IQUV
-        #observation.output.coherent_stokes.enabled= true
-        #observation.output.coherent_stokes.type= DATA_TYPE_XXYY
-        #observation.output.coherent_stokes.integration_factor= 1
-        #"""
-        #self.check_set.import_string(checkset)
-        #self.check_set.import_string(checkset)
-        #validset = """
-        #observation.total_data_size=
-        #observation.total_bandwidth=
-        #observation.output_files.dp_correlated_uv.nr_files=
-        #observation.output_files.dp_correlated_uv.file_size=
-        #observation.output_files.dp_coherent_stokes.nr_files=
-        #observation.output_files.dp_coherent_stokes.file_size=
-        #observation.output_files.dp_incoherent_stokes.nr_files=
-        #observation.output_files.dp_incoherent_stokes.file_size=
-        #"""
-        #self.valid_set.import_string(validset)
-
-
-    #def enable_observations_incoherent_stokes(self):
-        #checkset = """
-        ## incoherent_stokes.type can be: DATA_TYPE_STOKES_IQUV
-        #observation.output.incoherent_stokes.enabled= true
-        #observation.output.incoherent_stokes.type= DATA_TYPE_STOKES_IQUV
-        #"""
-        #self.check_set.import_string(checkset)
-        #self.check_set.import_string(checkset)
-        #validset = """
-        #observation.total_data_size=
-        #observation.total_bandwidth=
-        #observation.output_files.dp_correlated_uv.nr_files=
-        #observation.output_files.dp_correlated_uv.file_size=
-        #observation.output_files.dp_coherent_stokes.nr_files=
-        #observation.output_files.dp_coherent_stokes.file_size=
-        #observation.output_files.dp_incoherent_stokes.nr_files=
-        #observation.output_files.dp_incoherent_stokes.file_size=
-        #"""
-        #self.valid_set.import_string(validset)
-
-
-    #def enabble_flys_eye(self):
-        #checkset = """
-        #observation.flys_eye.enabled= true
-        #"""
-        #self.check_set.import_string(checkset)
-        #self.check_set.import_string(checkset)
-        #validset = """
-        #observation.total_data_size=
-        #observation.total_bandwidth=
-        #observation.output_files.dp_correlated_uv.nr_files=
-        #observation.output_files.dp_correlated_uv.file_size=
-        #observation.output_files.dp_coherent_stokes.nr_files=
-        #observation.output_files.dp_coherent_stokes.file_size=
-        #observation.output_files.dp_incoherent_stokes.nr_files=
-        #observation.output_files.dp_incoherent_stokes.file_size=
-        #"""
-        #self.valid_set.import_string(validset)
-
-    ## for all pipelines
-    #def enable_calibration_pipeline(self):
-        #checkset = """
-        ## for calibration-pipeline
-        #dp.output.correlated.enabled= true
-        #dp.output.correlated.demixing_settings.freq_step= 60
-        #dp.output.correlated.demixing_settings.time_step= 10
-        #dp.output.instrument_model.enabled= true
-        #"""
-        #self.check_set.import_string(checkset)
-        #self.check_set.import_string(checkset)
-        #validset = """
-        #calibration_pipeline.total_data_size=
-        #calibration_pipeline.total_bandwidth=
-        #calibration_pipeline.dp_correlated_uv.nr_files=
-        #calibration_pipeline.dp_correlated_uv.file_size=
-        #calibration_pipeline.dp_instrument_model.nr_files=
-        #calibration_pipeline.dp_instrument_model.file_size=
-        #"""
-        #self.valid_set.import_string(validset)
-
-
-    #def enable_longbaseline_pipeline(self):
-        #checkset = """
-        ## for -pipeline
-        #dp.output.longbaseline.enabled= true
-        #dp.output.longbaseline.subband_groups_per_ms= 1
-        #dp.output.longbaseline.subbands_per_subband_group= 1
-        #"""
-        #self.check_set.import_string(checkset)
-        #self.check_set.import_string(checkset)
-        #validset = """
-        #longbaseline_pipeline.total_data_size=
-        #longbaseline_pipeline.total_bandwidth=
-        #longbaseline_pipeline.dp_correlated_uv.nr_files=
-        #longbaseline_pipeline.dp_correlated_uv.file_size=
-        #"""
-        #self.valid_set.import_string(validset)
-
-
-    #def enable_pulsar_pipeline(self):
-        #checkset = """
-        ## for pulsar-pipeline
-        #dp.output.pulsar.enabled= true
-        #"""
-        #self.check_set.import_string(checkset)
-        #self.check_set.import_string(checkset)
-        #validset = """
-        #pulsar_pipeline.total_data_size=
-        #pulsar_pipeline.total_bandwidth=
-        #pulsar_pipeline.dp_pulsar.nr_files=
-        #pulsar_pipeline.dp_pulsar.file_size=
-        #"""
-        #self.valid_set.import_string(validset)
-
-
-    #def enable_image_pipeline(self):
-        #checkset = """
-        ## for image-pipeline
-        #dp.output.skyimage.enabled= true
-        #dp.output.skyimage.slices_per_image= 1
-        #dp.output.skyimage.subbands_per_image= 2
-        #"""
-        #self.check_set.import_string(checkset)
-        #self.check_set.import_string(checkset)
-        #validset = """
-        #image_pipeline.total_data_size=
-        #image_pipeline.total_bandwidth=
-        #image_pipeline.dp_sky_image.nr_files=
-        #image_pipeline.dp_sky_image.file_size=
-        #"""
-        #self.valid_set.import_string(validset)
+"""
+Test parameterset for resource estimator
+"""
+
+from lofar.sas.resourceassignment.resourceassignmentestimator.resource_estimators.parameterset import ParameterSet
+
+
+class TestSet(object):
+    def __init__(self):
+        self.check_set = ParameterSet()
+        self.valid_set = ParameterSet()
+
+    def clear(self):
+        self.check_set.clear()
+        self.valid_set.clear()
+
+    def test_dict(self):
+        return self.check_set.get_set()
+
+    def valid_dict(self):
+        return self.valid_set.get_set()
+
+    # for observation
+    def add_observation(self):
+        checkset = """
+        observation.sample_clock= 200
+        observation.duration=3600
+        observation.channels_per_subband= 61
+        observation.intergration_time= 1
+        observation.antenna_mode= HBA_DUAL
+        observation.stations= [CS001, CS002, RS307, RS509]
+        observation.flys_eye.enabled= false
+        observation.nr_beams= 0
+
+        # coherent_stokes.type can be: DATA_TYPE_XXYY or DATA_TYPE_STOKES_IQUV
+        observation.output.coherent_stokes.enabled= false
+        observation.output.coherent_stokes.type=
+        observation.output.coherent_stokes.integration_factor=
+
+        # incoherent_stokes.type can be: DATA_TYPE_STOKES_IQUV
+        observation.output.incoherent_stokes.enabled= false
+        observation.output.incoherent_stokes.type=
+
+        # for calibration-pipeline
+        dp.output.correlated.enabled= false
+        dp.output.correlated.demixing_settings.freq_step=
+        dp.output.correlated.demixing_settings.time_step=
+        dp.output.instrument_model.enabled= false
+
+        # for longbaseline-pipeline
+        dp.output.longbaseline.enabled= false
+        dp.output.longbaseline.subband_groups_per_ms=
+        dp.output.longbaseline.subbands_per_subband_group=
+
+        # for pulsar-pipeline
+        dp.output.pulsar.enabled= false
+
+        # for image-pipeline
+        dp.output.skyimage.enabled= false
+        dp.output.skyimage.slices_per_image=
+        dp.output.skyimage.subbands_per_image=
+        """
+        self.check_set.import_string(checkset)
+        validset = """
+        observation.total_data_size=
+        observation.total_bandwidth=
+        observation.output_files.dp_correlated_uv.nr_files=
+        observation.output_files.dp_correlated_uv.file_size=
+        observation.output_files.dp_coherent_stokes.nr_files=
+        observation.output_files.dp_coherent_stokes.file_size=
+        observation.output_files.dp_incoherent_stokes.nr_files=
+        observation.output_files.dp_incoherent_stokes.file_size=
+        """
+        self.valid_set.import_string(validset)
+
+
+    def add_observation_beams(self):
+        checkset = """
+        observation.nr_beams= 2
+        observation.beam[0].nr_subbands= 400
+        observation.beam[0].nr_tab_rings= 4
+        observation.beam[0].tied_array_beam[0].coherent= true
+        observation.beam[0].tied_array_beam[1].coherent= true
+        observation.beam[1].nr_subbands= 400
+        observation.beam[1].nr_tab_rings= 4
+        observation.beam[1].tied_array_beam[0].coherent= true
+        observation.beam[1].tied_array_beam[1].coherent= true
+        """
+        self.check_set.import_string(checkset)
+        validset = """
+        observation.total_data_size=
+        observation.total_bandwidth=
+        observation.output_files.dp_correlated_uv.nr_files=
+        observation.output_files.dp_correlated_uv.file_size=
+        observation.output_files.dp_coherent_stokes.nr_files=
+        observation.output_files.dp_coherent_stokes.file_size=
+        observation.output_files.dp_incoherent_stokes.nr_files=
+        observation.output_files.dp_incoherent_stokes.file_size=
+        """
+        self.valid_set.import_string(validset)
+
+
+    def enable_observations_coherent_stokes(self):
+        checkset = """
+        # coherent_stokes.type can be: DATA_TYPE_XXYY or DATA_TYPE_STOKES_IQUV
+        observation.output.coherent_stokes.enabled= true
+        observation.output.coherent_stokes.type= DATA_TYPE_XXYY
+        observation.output.coherent_stokes.integration_factor= 1
+        """
+        self.check_set.import_string(checkset)
+        validset = """
+        observation.total_data_size=
+        observation.total_bandwidth=
+        observation.output_files.dp_correlated_uv.nr_files=
+        observation.output_files.dp_correlated_uv.file_size=
+        observation.output_files.dp_coherent_stokes.nr_files=
+        observation.output_files.dp_coherent_stokes.file_size=
+        observation.output_files.dp_incoherent_stokes.nr_files=
+        observation.output_files.dp_incoherent_stokes.file_size=
+        """
+        self.valid_set.import_string(validset)
+
+
+    def enable_observations_incoherent_stokes(self):
+        checkset = """
+        # incoherent_stokes.type can be: DATA_TYPE_STOKES_IQUV
+        observation.output.incoherent_stokes.enabled= true
+        observation.output.incoherent_stokes.type= DATA_TYPE_STOKES_IQUV
+        """
+        self.check_set.import_string(checkset)
+        validset = """
+        observation.total_data_size=
+        observation.total_bandwidth=
+        observation.output_files.dp_correlated_uv.nr_files=
+        observation.output_files.dp_correlated_uv.file_size=
+        observation.output_files.dp_coherent_stokes.nr_files=
+        observation.output_files.dp_coherent_stokes.file_size=
+        observation.output_files.dp_incoherent_stokes.nr_files=
+        observation.output_files.dp_incoherent_stokes.file_size=
+        """
+        self.valid_set.import_string(validset)
+
+
+    def enable_flys_eye(self):
+        checkset = """
+        observation.flys_eye.enabled= true
+        """
+        self.check_set.import_string(checkset)
+        validset = """
+        observation.total_data_size=
+        observation.total_bandwidth=
+        observation.output_files.dp_correlated_uv.nr_files=
+        observation.output_files.dp_correlated_uv.file_size=
+        observation.output_files.dp_coherent_stokes.nr_files=
+        observation.output_files.dp_coherent_stokes.file_size=
+        observation.output_files.dp_incoherent_stokes.nr_files=
+        observation.output_files.dp_incoherent_stokes.file_size=
+        """
+        self.valid_set.import_string(validset)
+
+    # for all pipelines
+    def enable_calibration_pipeline(self):
+        checkset = """
+        # for calibration-pipeline
+        dp.output.correlated.enabled= true
+        dp.output.correlated.demixing_settings.freq_step= 60
+        dp.output.correlated.demixing_settings.time_step= 10
+        dp.output.instrument_model.enabled= true
+        """
+        self.check_set.import_string(checkset)
+        validset = """
+        calibration_pipeline.total_data_size=
+        calibration_pipeline.total_bandwidth=
+        calibration_pipeline.dp_correlated_uv.nr_files=
+        calibration_pipeline.dp_correlated_uv.file_size=
+        calibration_pipeline.dp_instrument_model.nr_files=
+        calibration_pipeline.dp_instrument_model.file_size=
+        """
+        self.valid_set.import_string(validset)
+
+
+    def enable_longbaseline_pipeline(self):
+        checkset = """
+        # for longbaseline-pipeline
+        dp.output.longbaseline.enabled= true
+        dp.output.longbaseline.subband_groups_per_ms= 1
+        dp.output.longbaseline.subbands_per_subband_group= 1
+        """
+        self.check_set.import_string(checkset)
+        validset = """
+        longbaseline_pipeline.total_data_size=
+        longbaseline_pipeline.total_bandwidth=
+        longbaseline_pipeline.dp_correlated_uv.nr_files=
+        longbaseline_pipeline.dp_correlated_uv.file_size=
+        """
+        self.valid_set.import_string(validset)
+
+
+    def enable_pulsar_pipeline(self):
+        checkset = """
+        # for pulsar-pipeline
+        dp.output.pulsar.enabled= true
+        """
+        self.check_set.import_string(checkset)
+        validset = """
+        pulsar_pipeline.total_data_size=
+        pulsar_pipeline.total_bandwidth=
+        pulsar_pipeline.dp_pulsar.nr_files=
+        pulsar_pipeline.dp_pulsar.file_size=
+        """
+        self.valid_set.import_string(validset)
+
+
+    def enable_image_pipeline(self):
+        checkset = """
+        # for image-pipeline
+        dp.output.skyimage.enabled= true
+        dp.output.skyimage.slices_per_image= 1
+        dp.output.skyimage.subbands_per_image= 2
+        """
+        self.check_set.import_string(checkset)
+        validset = """
+        image_pipeline.total_data_size=
+        image_pipeline.total_bandwidth=
+        image_pipeline.dp_sky_image.nr_files=
+        image_pipeline.dp_sky_image.file_size=
+        """
+        self.valid_set.import_string(validset)
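+
+
+# Typical usage (a sketch, mirroring the commented-out RPC test in
+# t_resource_estimator.py):
+#
+#     ts = TestSet()
+#     ts.add_observation()        # build up the request parset step by step
+#     request = ts.test_dict()    # parset dict to feed to the resource estimator
+#     expected = ts.valid_dict()  # parset dict the estimator output is compared against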