diff --git a/.gitattributes b/.gitattributes
index cef4b4b8993d3f98b1fcddfa051251b3df925a96..54401450464fe195debd3f7e6e151f2e03115b78 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -4554,9 +4554,9 @@ SAS/MoM/MoMQueryService/MoMQueryServiceServer/momqueryservice -text
 SAS/MoM/MoMQueryService/MoMQueryServiceServer/momqueryservice.ini -text
 SAS/MoM/MoMQueryService/MoMQueryServiceServer/momqueryservice.py -text
 SAS/MoM/MoMQueryService/test/CMakeLists.txt -text
-SAS/MoM/MoMQueryService/test/test_momqueryservice.py -text
-SAS/MoM/MoMQueryService/test/test_momqueryservice.run -text
-SAS/MoM/MoMQueryService/test/test_momqueryservice.sh -text
+SAS/MoM/MoMQueryService/test/t_momqueryservice.py -text
+SAS/MoM/MoMQueryService/test/t_momqueryservice.run -text
+SAS/MoM/MoMQueryService/test/t_momqueryservice.sh -text
 SAS/OTB/OTB-distribution/assembly.xml -text
 SAS/OTB/OTB-distribution/pom.xml -text
 SAS/OTB/OTB/assembly.xml -text
diff --git a/SAS/MoM/MoMQueryService/MoMQueryServiceClient/momqueryrpc.py b/SAS/MoM/MoMQueryService/MoMQueryServiceClient/momqueryrpc.py
index 98ba9594d53337acd4f4e99913c57ab632b3efe0..01a59bcb525a19bbf07202092e6f7d5a8ac74356 100644
--- a/SAS/MoM/MoMQueryService/MoMQueryServiceClient/momqueryrpc.py
+++ b/SAS/MoM/MoMQueryService/MoMQueryServiceClient/momqueryrpc.py
@@ -242,6 +242,27 @@ class MoMQueryRPC(RPCWrapper):
         result = convertStringDigitKeysToInt(result)
         return result
 
+    def get_station_selection(self, mom_id):
+        """
+        Get the station selection, represented as resource groups with min/max values, for the given mom id.
+        :param mom_id: int
+        :return: list of dict
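+
+        Example result (shape as exercised by the accompanying tests):
+        [{"resourceGroup": "SuperTerp", "min": 1, "max": 3}]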
+        """
+        logger.info("Calling GetStationSelection for mom id "+str(mom_id))
+        station_selection = self.rpc('GetStationSelection', mom_id=mom_id)
+        return station_selection
+
+    def get_time_restrictions(self, mom_id):
+        """
+        Returns the min start time, max end time, and min/max duration for the given mom id.
+        :param mom_id: int
+        :return: dict
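+
+        Example result (shape as exercised by the accompanying tests):
+        {"minStartTime": "2017-01-01", "maxEndTime": "2017-01-02",
+         "minDuration": 300, "maxDuration": 600}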
+        """
+        logger.info("Calling GetTimeRestrictions for mom id "+str(mom_id))
+        time_restrictions = self.rpc('GetTimeRestrictions', mom_id=mom_id)
+        return time_restrictions
+
+
 def main():
     # Check the invocation arguments
     parser = OptionParser('%prog [options]',
diff --git a/SAS/MoM/MoMQueryService/MoMQueryServiceServer/momqueryservice.py b/SAS/MoM/MoMQueryService/MoMQueryServiceServer/momqueryservice.py
index 7ad39883217ee93524971a3ca97cb40b89650889..329530cf56c2562f1bcd4293137b13ae43bc23d2 100755
--- a/SAS/MoM/MoMQueryService/MoMQueryServiceServer/momqueryservice.py
+++ b/SAS/MoM/MoMQueryService/MoMQueryServiceServer/momqueryservice.py
@@ -92,7 +92,7 @@ def _toIdsString(ids):
     if not ids_list:
         raise ValueError("Could not find proper ids in: " + ids)
 
-    ids_str = ','.join([str(id) for id in ids_list])
+    ids_str = ','.join([str(ident) for ident in ids_list])
     return ids_str
 
 
@@ -121,12 +121,12 @@ class MoMDatabaseWrapper:
         # max of 3 tries, on success return result
         # use new connection for every query,
         # because on the flaky lofar network a connection may appear functional but returns improper results.
-        MAXTRIES=3
+        maxtries = 3
 
         if data is not None and type(data) not in (tuple, dict):
             raise ValueError('Need data as tuple or dict, got ' + str(type(data)))
 
-        for i in range(MAXTRIES):
+        for i in range(maxtries):
             try:
                 self._connect()
                 cursor = self.conn.cursor(dictionary=True)
@@ -135,16 +135,16 @@ class MoMDatabaseWrapper:
             except (OperationalError, AttributeError) as e:
                 logger.error(str(e))
 
-                if i+1 == MAXTRIES: raise e
+                if i+1 == maxtries: raise e
 
     def _executeInsertQuery(self, query, data=None):
         # try to execute query on flaky lofar mysql connection
         # max of 3 tries, on success return result
         # use new connection for every query,
         # because on the flaky lofar network a connection may appear functional but returns improper results.
-        MAXTRIES=3
+        maxtries = 3
 
-        for i in range(MAXTRIES):
+        for i in range(maxtries):
             try:
                 self._connect()
                 cursor = self.conn.cursor(dictionary=True)
@@ -154,7 +154,7 @@ class MoMDatabaseWrapper:
             except (OperationalError, AttributeError) as e:
                 logger.error(str(e))
 
-                if i+1 == MAXTRIES: raise e
+                if i+1 == maxtries: raise e
 
     def add_trigger(self, user_name, host_name, project_name, meta_data):
         logger.info("add_trigger for user_name: %s, host_name: %s, project_name: %s, meta_data: %s",
@@ -244,7 +244,8 @@ where mom2object.name = %s"""
         join """ + self.momprivilege_db + """.statustransitionrole as transition_role on system_role.systemroleid=transition_role.roleid
         join """ + self.momprivilege_db + """.statustransition as transition on transition_role.statustransitionid=transition.id
         join status as open_status on open_status.code='opened'
-        join status as status on status.id=transition.newstatusid and (transition.oldstatusid=0 or transition.oldstatusid=open_status.id)
+        join status as status on status.id=transition.newstatusid
+        and (transition.oldstatusid=0 or transition.oldstatusid=open_status.id)
         where status.code=%s and
         status.type='""" + status_type[job_type] + """' and
         open_status.type='""" + status_type[job_type] + """' and
@@ -266,7 +267,8 @@ where mom2object.name = %s"""
         join """ + self.momprivilege_db + """.statustransitionrole as transition_role on project_role.id=transition_role.roleid
         join """ + self.momprivilege_db + """.statustransition as transition on transition_role.statustransitionid=transition.id
         join status as open_status on open_status.code='opened'
-        join status as status on status.id=transition.newstatusid and (transition.oldstatusid=0 or transition.oldstatusid=open_status.id)
+        join status as status on status.id=transition.newstatusid
+        and (transition.oldstatusid=0 or transition.oldstatusid=open_status.id)
         where status.code=%s and
         status.type='""" + status_type[job_type] + """' and
         open_status.type='""" + status_type[job_type] + """' and
@@ -369,32 +371,12 @@ where mom2object.name = %s"""
         """
         logger.info("get_trigger_id for mom_id: %s", mom_id)
 
-        query = """SELECT mom.mom2id, mom.mom2objecttype, obs_spec.misc
-FROM mom2object as mom
-join lofar_observation as obs on mom.mom2objecttype = "LOFAR_OBSERVATION" and mom.id = obs.mom2objectid
-join lofar_observation_specification as obs_spec on
-  mom.mom2objecttype = "LOFAR_OBSERVATION" and obs.user_specification_id = obs_spec.id
-where mom.mom2id=%s
-union
-SELECT mom.mom2id, mom.mom2objecttype, pipeline.misc
-FROM mom2object as mom
-join lofar_pipeline as pipeline on mom.mom2objecttype like "%PIPELINE%" and mom.id = pipeline.mom2objectid
-where mom.mom2id=%s;"""
-        parameters = (mom_id, mom_id)
-
-        rows_trigger_ids = self._executeSelectQuery(query, parameters)
-
         trigger_id = None
-
-        if rows_trigger_ids:
-            misc_json = rows_trigger_ids[0]['misc']
-            if misc_json:
-                misc = json.loads(misc_json)
-                if 'trigger_id' in misc:
-                    trigger_id = misc['trigger_id']
+        misc = self._get_misc_contents(mom_id)
+        if misc and 'trigger_id' in misc:
+            trigger_id = misc['trigger_id']
 
         logger.info("get_trigger_id for mom_id (%s): %s", mom_id, trigger_id)
-
         return trigger_id
 
     def get_project_details(self, mom_id):
@@ -428,12 +410,11 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
         return result
 
-
     def get_project_priorities_for_objects(self, mom_ids):
-        ''' get the project priorities for given mom object mom_ids (observations/pipelines/reservations)
-        :param mixed mom_ids comma seperated string of mom2object id's, or list of ints
+        """ get the project priorities for given mom object mom_ids (observations/pipelines/reservations)
+        :param mom_ids: comma separated string of mom2object ids, or list of ints
         :rtype list of dict's key value pairs with the project priorities
-        '''
+        """
         if not mom_ids:
             return {}
 
@@ -467,12 +448,13 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         return result
 
     def getObjectDetails(self, mom_ids):
-        ''' get the object details (project_mom2id, project_name,
+        """
+        get the object details (project_mom2id, project_name,
         project_description, object_mom2id, object_name, object_description,
         object_type, object_group_id, object_group_name, object_status) for given mom object mom_ids
-        :param mixed mom_ids comma seperated string of mom2object id's, or list of ints
+        :param mom_ids: comma separated string of mom2object ids, or list of ints
         :rtype list of dict's key value pairs with the project details
-        '''
+        """
         if not mom_ids:
             return {}
 
@@ -484,10 +466,13 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         placeholder = (len(parameters)*'%s,')[:-1]
 
         # TODO: make a view for this query in momdb!
-        query = '''SELECT project.mom2id as project_mom2id, project.id as project_mom2objectid, project.name as project_name, project.description as project_description,
-        object.mom2id as object_mom2id, object.id as object_mom2objectid, object.name as object_name, object.description as object_description, object.mom2objecttype as object_type, status.code as object_status,
-        object.group_id as object_group_id, grp.id as object_group_mom2objectid, grp.name as object_group_name, grp.description as object_group_description,
-        parent_grp.id as parent_group_mom2objectid, parent_grp.mom2id as parent_group_mom2id, parent_grp.name as parent_group_name, parent_grp.description as parent_group_description
+        query = '''SELECT project.mom2id as project_mom2id, project.id as project_mom2objectid, project.name as
+        project_name, project.description as project_description, object.mom2id as object_mom2id,
+        object.id as object_mom2objectid, object.name as object_name, object.description as object_description,
+        object.mom2objecttype as object_type, status.code as object_status, object.group_id as object_group_id,
+        grp.id as object_group_mom2objectid, grp.name as object_group_name, grp.description as object_group_description,
+        parent_grp.id as parent_group_mom2objectid, parent_grp.mom2id as parent_group_mom2id,
+        parent_grp.name as parent_group_name, parent_grp.description as parent_group_description
         FROM mom2object as object
         left join mom2object as project on project.id = object.ownerprojectid
         left join mom2object as grp on grp.mom2id = object.group_id
@@ -513,11 +498,12 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         return result
 
     def getProjects(self):
-        ''' get the list of all projects with columns (project_mom2id, project_name,
+        """
+        get the list of all projects with columns (project_mom2id, project_name,
         project_description, status_name, status_id, last_user_id,
         last_user_name, statustime)
         :rtype list of dict's key value pairs with all projects
-        '''
+        """
         # TODO: make a view for this query in momdb!
         query = '''SELECT project.mom2id as mom2id, project.name as name, project.description as description,
                 statustype.code as status_name,  statustype.id as status_id,
@@ -535,10 +521,11 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         return result
 
     def getProject(self, project_mom2id):
-        ''' get project for the given project_mom2id with columns (project_mom2id, project_name,
+        """
+        get project for the given project_mom2id with columns (project_mom2id, project_name,
         project_description, status_name, status_id, last_user_id,
         last_user_name, statustime)
-        '''
+        """
         ids_str = _toIdsString(project_mom2id)
 
         # TODO: make a view for this query in momdb!
@@ -573,7 +560,7 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
         rows = self._executeSelectQuery(query, parameters)
 
-        result = { 'project_mom2id': project_mom2id, 'task_mom2ids': [r['mom2id'] for r in rows]}
+        result = {'project_mom2id': project_mom2id, 'task_mom2ids': [r['mom2id'] for r in rows]}
 
         logger.info('task ids for project: %s', result)
 
@@ -605,7 +592,7 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
             result[str(mom2id)] = pred_id_list
 
         for mom2id in ids_str.split(','):
-            if not mom2id in result:
+            if mom2id not in result:
                 result[mom2id] = []
 
         logger.info('predecessors: %s', result)
@@ -648,8 +635,11 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         return result
 
     def getTaskIdsGraph(self, mom2id):
-        '''Get the fully connected graph of interconnected tasks given any mom2id in that graph
-        returns: dict with mom2id:node as key value pairs, where each node is a dict with items node_mom2id, predecessor_ids, successor_ids'''
+        """
+        Get the fully connected graph of interconnected tasks given any mom2id in that graph.
+        returns: dict with mom2id:node as key value pairs, where each node is a dict with items
+        node_mom2id, predecessor_ids, successor_ids
+        """
 
         def extendGraphWithPredecessorsAndSuccessors(graph, current_node_id):
             node = graph[current_node_id]
@@ -657,7 +647,7 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
             new_node_ids = set()
 
-            node_pred_ids = self.getPredecessorIds(node_mom2id).get(str(node_mom2id),[])
+            node_pred_ids = self.getPredecessorIds(node_mom2id).get(str(node_mom2id), [])
             for pred_id in node_pred_ids:
                 if pred_id not in node['predecessor_ids']:
                     node['predecessor_ids'].append(pred_id)
@@ -665,13 +655,13 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
                 pred_node = graph.get(pred_id)
 
                 if not pred_node:
-                    graph[pred_id] = { 'node_mom2id': pred_id,
-                                       'predecessor_ids': [],
-                                       'successor_ids': [node_mom2id] }
+                    graph[pred_id] = {'node_mom2id': pred_id,
+                                      'predecessor_ids': [],
+                                      'successor_ids': [node_mom2id]}
 
                     new_node_ids.add(pred_id)
 
-            node_succ_ids = self.getSuccessorIds(node_mom2id).get(str(node_mom2id),[])
+            node_succ_ids = self.getSuccessorIds(node_mom2id).get(str(node_mom2id), [])
             for succ_id in node_succ_ids:
                 if succ_id not in node['successor_ids']:
                     node['successor_ids'].append(succ_id)
@@ -679,26 +669,24 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
                 succ_node = graph.get(succ_id)
 
                 if not succ_node:
-                    graph[succ_id] = { 'node_mom2id': succ_id,
-                                       'predecessor_ids': [node_mom2id],
-                                       'successor_ids': [] }
+                    graph[succ_id] = {'node_mom2id': succ_id,
+                                      'predecessor_ids': [node_mom2id],
+                                      'successor_ids': []}
 
                     new_node_ids.add(succ_id)
 
-            #recurse
+            # recurse
             for new_node_id in new_node_ids:
                 extendGraphWithPredecessorsAndSuccessors(graph, new_node_id)
 
         # start with simple graph with the given node_mom2id
-        the_graph = { mom2id: { 'node_mom2id': mom2id,
-                                'predecessor_ids': [],
-                                'successor_ids': [] } }
+        the_graph = {mom2id: {'node_mom2id': mom2id,
+                              'predecessor_ids': [],
+                              'successor_ids': []}}
 
         # recursively append next layers until done.
         extendGraphWithPredecessorsAndSuccessors(the_graph, mom2id)
 
-
-
         # the_graph is now complete, return it
         return the_graph
 
@@ -739,12 +727,13 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         logger.debug("getGroupsInParentGroup for mom parent group ids: %s", ids_str)
 
         query = '''SELECT parent.id as parent_mom2object_id, parent.mom2id as parent_mom2id,
-                    grp.mom2id as group_mom2id, grp.id as group_mom2object_id, grp.name as group_name, grp.description as group_description
+                    grp.mom2id as group_mom2id, grp.id as group_mom2object_id, grp.name as group_name,
+                    grp.description as group_description
                     from mom2object parent
                     inner join mom2object grp on parent.id = grp.parentid
                     where parent.mom2id in (%s)
                     and grp.group_id = grp.mom2id'''
-        parameters =  (ids_str, )
+        parameters = (ids_str, )
 
         rows = self._executeSelectQuery(query, parameters)
 
@@ -785,8 +774,11 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         return result
 
     def getMoMIdsForOTDBIds(self, otdb_ids):
-        '''reverse lookup from otdb_id(s) to mom2id(s)
-        returns: dict with otdb_id(s) in keys, mom2id(s) as values'''
+        """
+        reverse lookup from otdb_id(s) to mom2id(s)
+        returns: dict with otdb_id(s) as keys, mom2id(s) as values
+        """
+
         if not otdb_ids:
             return {}
 
@@ -794,9 +786,9 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
         logger.debug("getMoMIdsForOTDBIds for otdb ids: %s" % ids_str)
 
-        result = {int(otdb_id):None for otdb_id in ids_str.split(',')}
+        result = {int(otdb_id): None for otdb_id in ids_str.split(',')}
 
-        #first query all observations
+        # first query all observations
         query = '''SELECT obs.observation_id as otdb_id, mo.mom2id as mom2id
                    FROM lofar_observation obs
                    INNER JOIN mom2object mo on mo.id = obs.mom2objectid
@@ -806,10 +798,10 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         rows = self._executeSelectQuery(query)
 
         for row in rows:
-            if row['mom2id'] != None:
+            if row['mom2id'] is not None:
                 result[row['otdb_id']] = row['mom2id']
 
-        #then query all pipelines and combine the results
+        # then query all pipelines and combine the results
         query = '''SELECT pl.pipeline_id as otdb_id, mo.mom2id as mom2id
                    FROM lofar_pipeline pl
                    INNER JOIN mom2object mo on mo.id = pl.mom2objectid
@@ -819,16 +811,18 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         rows = self._executeSelectQuery(query)
 
         for row in rows:
-            if row['mom2id'] != None:
+            if row['mom2id'] is not None:
                 result[row['otdb_id']] = row['mom2id']
 
         logger.info("getMoMIdsForOTDBIds: %s" % result)
         return result
 
-
     def getOTDBIdsForMoMIds(self, mom_ids):
-        '''lookup from mom2id(s) to otdb_id(s)
-        returns: dict with mom2id(s) in keys, otdb_id(s) as values'''
+        """
+        lookup from mom2id(s) to otdb_id(s)
+        returns: dict with mom2id(s) as keys, otdb_id(s) as values
+        """
+
         if not mom_ids:
             return {}
 
@@ -836,9 +830,9 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
         logger.debug("getOTDBIdsForMoMIds for otdb ids: %s" % ids_str)
 
-        result = {int(mom_id):None for mom_id in ids_str.split(',')}
+        result = {int(mom_id): None for mom_id in ids_str.split(',')}
 
-        #first query all observations
+        # first query all observations
         query = '''SELECT obs.observation_id as otdb_id, mo.mom2id as mom2id
                    FROM lofar_observation obs
                    INNER JOIN mom2object mo on mo.id = obs.mom2objectid
@@ -848,10 +842,10 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         rows = self._executeSelectQuery(query)
 
         for row in rows:
-            if row['mom2id'] != None:
+            if row['mom2id'] is not None:
                 result[row['mom2id']] = row['otdb_id']
 
-        #then query all pipelines and combine the results
+        # then query all pipelines and combine the results
         query = '''SELECT pl.pipeline_id as otdb_id, mo.mom2id as mom2id
                    FROM lofar_pipeline pl
                    INNER JOIN mom2object mo on mo.id = pl.mom2objectid
@@ -861,7 +855,7 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         rows = self._executeSelectQuery(query)
 
         for row in rows:
-            if row['mom2id'] != None:
+            if row['mom2id'] is not None:
                 result[row['mom2id']] = row['otdb_id']
 
         logger.info("getOTDBIdsForMoMIds: %s" % result)
@@ -908,11 +902,129 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
         pass
 
+    def _get_misc_contents(self, mom_id):
+        """
+        Get the deserialized contents of the misc field for the given mom id. May be empty if the
+        mom_id exists but its misc field is empty. Returns None if no entry is found for the mom id.
+        :param mom_id: int
+        :return: dict or None
+        """
+        logger.info("getting misc for mom_id: %s", mom_id)
+
+        misc = None
+
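+        # the misc field lives in two places, depending on the object type:
+        # lofar_observation_specification for observations and lofar_pipeline
+        # for pipelines, hence the union of two selects below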
+        query = """SELECT mom.mom2id, mom.mom2objecttype, obs_spec.misc
+                FROM mom2object as mom
+                join lofar_observation as obs on mom.mom2objecttype = "LOFAR_OBSERVATION" and mom.id = obs.mom2objectid
+                join lofar_observation_specification as obs_spec on
+                  mom.mom2objecttype = "LOFAR_OBSERVATION" and obs.user_specification_id = obs_spec.id
+                where mom.mom2id=%s
+                union
+                SELECT mom.mom2id, mom.mom2objecttype, pipeline.misc
+                FROM mom2object as mom
+                join lofar_pipeline as pipeline on mom.mom2objecttype like "%PIPELINE%"
+                and mom.id = pipeline.mom2objectid
+                where mom.mom2id=%s;"""
+        parameters = (mom_id, mom_id)
+        rows_misc = self._executeSelectQuery(query, parameters)
+
+        if rows_misc:
+            misc = {}
+            misc_json = rows_misc[0]['misc']
+            if misc_json:
+                misc = json.loads(misc_json)
+
+        return misc
+
+    def get_time_restrictions(self, mom_id):
+        """
+        Returns the min start time, max end time, and min/max duration for the given mom id.
+        :param mom_id: int
+        :return: dict
+        """
+        logger.info("get_time_restrictions for mom_id: %s", mom_id)
+
+        time_restrictions = {}
+
+        # Note: this duplicates the query in _get_misc_contents(), but handling misc and duration
+        # together saves an additional (potentially expensive) query:
+        query = """SELECT mom.mom2id, mom.mom2objecttype, obs_spec.misc, obs_spec.spec_duration AS duration,
+                obs_spec.starttime,  obs_spec.endtime
+                FROM mom2object as mom
+                join lofar_observation as obs on mom.mom2objecttype = "LOFAR_OBSERVATION" and mom.id = obs.mom2objectid
+                join lofar_observation_specification as obs_spec on
+                  mom.mom2objecttype = "LOFAR_OBSERVATION" and obs.user_specification_id = obs_spec.id
+                where mom.mom2id=%s
+                union
+                SELECT mom.mom2id, mom.mom2objecttype, pipeline.misc, pipeline.duration AS duration,
+                pipeline.starttime, pipeline.endtime
+                FROM mom2object as mom
+                join lofar_pipeline as pipeline on mom.mom2objecttype like "%PIPELINE%"
+                and mom.id = pipeline.mom2objectid
+                where mom.mom2id=%s;"""
+        parameters = (mom_id, mom_id)
+        rows = self._executeSelectQuery(query, parameters)
+
+        if rows is None or len(rows) == 0:
+            raise ValueError("mom_id (%s) not found in MoM database" % mom_id)
+
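+        # precedence: values from the misc field's timeWindow win; the plain
+        # MoM starttime/endtime/duration columns only fill in missing entries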
+        # add timewindow to response, if present
+        misc_json = rows[0]['misc']
+        if misc_json is not None:
+            misc = json.loads(misc_json)
+            if 'timeWindow' in misc:
+                time_restrictions.update(misc['timeWindow'])
+
+        # use mom db duration and starttime to fill in missing info
+        if 'minDuration' not in time_restrictions:
+            duration = rows[0]['duration']
+            if duration:
+                time_restrictions['minDuration'] = duration
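+        # MoM stores a single duration, so it doubles as both the min and the
+        # max when the misc timeWindow does not specify them explicitly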
+        if 'maxDuration' not in time_restrictions:
+            if 'minDuration' in time_restrictions:  # may be absent if the duration entry was NULL
+                time_restrictions['maxDuration'] = time_restrictions['minDuration']
+        if 'minStartTime' not in time_restrictions:
+            starttime = rows[0]['starttime']
+            if starttime:
+                time_restrictions['minStartTime'] = starttime
+        if 'maxEndTime' not in time_restrictions:
+            endtime = rows[0]['endtime']
+            if endtime:
+                time_restrictions['maxEndTime'] = endtime
+
+        if len(time_restrictions) == 0:
+            raise ValueError("No time restrictions for mom_id (%s) in database" % mom_id)
+
+        logger.info("get_time_restrictions for mom_id (%s): %s", mom_id, time_restrictions)
+
+        return time_restrictions
+
+    def get_station_selection(self, mom_id):
+        """
+        Get the station selection, represented as resource groups with min/max values, for the given mom id.
+        :param mom_id: int
+        :return: list of dict
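+
+        Example result (shape as exercised by the accompanying tests):
+        [{"resourceGroup": "SuperTerp", "min": 1, "max": 3}]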
+        """
+        logger.info("get_station_selection for mom_id: %s", mom_id)
+
+        misc = self._get_misc_contents(mom_id)
+        if misc is None:
+            raise ValueError("mom_id (%s) not found in MoM database" % mom_id)
+        if 'stationSelection' not in misc:
+            raise ValueError("misc field for mom_id (%s) does not contain stationSelection" % mom_id)
+        station_selection = misc['stationSelection']
+
+        logger.info("get_station_selection for mom_id (%s): %s", mom_id, station_selection)
+
+        return station_selection
+
 
 class ProjectDetailsQueryHandler(MessageHandlerInterface):
-    '''handler class for details query in mom db
+    """
+    handler class for details query in mom db
     :param MoMDatabaseWrapper momdb inject database access via wrapper
-    '''
+    """
     def __init__(self, **kwargs):
         super(ProjectDetailsQueryHandler, self).__init__(**kwargs)
         self.dbcreds = kwargs.pop("dbcreds", None)
@@ -938,7 +1050,9 @@ class ProjectDetailsQueryHandler(MessageHandlerInterface):
             'GetMoMIdsForOTDBIds': self.getMoMIdsForOTDBIds,
             'GetOTDBIdsForMoMIds': self.getOTDBIdsForMoMIds,
             'GetTaskIdsGraph': self.getTaskIdsGraph,
-            'GetProjectPrioritiesForObjects': self.get_project_priorities_for_objects
+            'GetProjectPrioritiesForObjects': self.get_project_priorities_for_objects,
+            'GetStationSelection': self.get_station_selection,
+            'GetTimeRestrictions': self.get_time_restrictions
             }
 
     def prepare_loop(self):
@@ -1018,17 +1132,27 @@ class ProjectDetailsQueryHandler(MessageHandlerInterface):
     def getTaskIdsGraph(self, mom2id):
         return convertIntKeysToString(self.momdb.getTaskIdsGraph(mom2id))
 
+    def get_time_restrictions(self, mom_id):
+        return self.momdb.get_time_restrictions(mom_id)
+
+    def get_station_selection(self, mom_id):
+        return self.momdb.get_station_selection(mom_id)
+
+
 def createService(busname=DEFAULT_MOMQUERY_BUSNAME,
                   servicename=DEFAULT_MOMQUERY_SERVICENAME,
                   dbcreds=None,
                   handler=None,
                   broker=None):
-    '''create the GetObjectDetails on given busname
+    """
+    create the GetObjectDetails on given busname
     :param string busname: name of the bus on which this service listens
     :param string servicename: name of the service
     :param Credentials dbcreds: Credentials for the MoM database.
     :param ProjectDetailsQueryHandler handler: ProjectDetailsQueryHandler class Type, or mock like type
-    :rtype: lofar.messaging.Service'''
+    :param broker: address of the qpid broker
+    :rtype: lofar.messaging.Service
+    """
 
     if not handler:
         handler = ProjectDetailsQueryHandler
@@ -1040,18 +1164,23 @@ def createService(busname=DEFAULT_MOMQUERY_BUSNAME,
                    use_service_methods=True,
                    verbose=False,
                    broker=broker,
-                   handler_args={'dbcreds' : dbcreds})
+                   handler_args={'dbcreds': dbcreds})
 
 
 def main():
-    '''Starts the momqueryservice.GetObjectDetails service'''
+    """
+    Starts the momqueryservice.GetObjectDetails service
+    """
 
     # Check the invocation arguments
     parser = OptionParser("%prog [options]",
                           description='runs the momqueryservice')
-    parser.add_option('-q', '--broker', dest='broker', type='string', default=None, help='Address of the qpid broker, default: localhost')
-    parser.add_option("-b", "--busname", dest="busname", type="string", default=DEFAULT_MOMQUERY_BUSNAME, help="Name of the bus exchange on the qpid broker, [default: %default]")
-    parser.add_option("-s", "--servicename", dest="servicename", type="string", default=DEFAULT_MOMQUERY_SERVICENAME, help="Name for this service, [default: %default]")
+    parser.add_option('-q', '--broker', dest='broker', type='string', default=None,
+                      help='Address of the qpid broker, default: localhost')
+    parser.add_option("-b", "--busname", dest="busname", type="string", default=DEFAULT_MOMQUERY_BUSNAME,
+                      help="Name of the bus exchange on the qpid broker, [default: %default]")
+    parser.add_option("-s", "--servicename", dest="servicename", type="string", default=DEFAULT_MOMQUERY_SERVICENAME,
+                      help="Name for this service, [default: %default]")
     parser.add_option_group(dbcredentials.options_group(parser))
     parser.set_defaults(dbcredentials="MoM")
     (options, args) = parser.parse_args()
diff --git a/SAS/MoM/MoMQueryService/test/CMakeLists.txt b/SAS/MoM/MoMQueryService/test/CMakeLists.txt
index ef7eebef1f6a3add44022865bb631f366a84634f..8840218b722fbb67e8fd3e20f0baf604aabcf739 100644
--- a/SAS/MoM/MoMQueryService/test/CMakeLists.txt
+++ b/SAS/MoM/MoMQueryService/test/CMakeLists.txt
@@ -6,5 +6,5 @@ find_python_module(mysql)
 find_python_module(testing.mysqld)
 find_python_module(mock)
 
-lofar_add_test(test_momqueryservice)
+lofar_add_test(t_momqueryservice)
 
diff --git a/SAS/MoM/MoMQueryService/test/test_momqueryservice.py b/SAS/MoM/MoMQueryService/test/t_momqueryservice.py
similarity index 77%
rename from SAS/MoM/MoMQueryService/test/test_momqueryservice.py
rename to SAS/MoM/MoMQueryService/test/t_momqueryservice.py
index 8c7e57d3e5cfb3e1b432e8faaf50460e163f1579..452e017618fe3203e46280f5a28cb4dd5f11a88a 100755
--- a/SAS/MoM/MoMQueryService/test/test_momqueryservice.py
+++ b/SAS/MoM/MoMQueryService/test/t_momqueryservice.py
@@ -22,6 +22,7 @@ import unittest
 import uuid
 from mysql import connector
 import logging
+import json
 
 logger = logging.getLogger(__name__)
 
@@ -313,6 +314,7 @@ def populate_db(mysqld):
 
 Mysqld = testing.mysqld.MysqldFactory(cache_initialized_db=True, on_initialized=populate_db)
 
+
 def tearDownModule():
     # clear cached database at end of tests
     Mysqld.clear_cache()
@@ -335,7 +337,6 @@ class TestProjectDetailsQueryHandler(unittest.TestCase):
         self.addCleanup(mom_database_wrapper_patcher.stop)
         self.mom_database_wrapper_mock = mom_database_wrapper_patcher.start()
 
-
         self.project_details_query_handler = ProjectDetailsQueryHandler(dbcreds=self.database_credentials)
         self.project_details_query_handler.prepare_loop()
 
@@ -346,7 +347,7 @@ class TestProjectDetailsQueryHandler(unittest.TestCase):
 
         self.assertTrue(return_value['active'])
 
-    def test_IsProjectActive_returns_active_flase_when_mom_wrapper_returns_false(self):
+    def test_IsProjectActive_returns_active_false_when_mom_wrapper_returns_false(self):
         self.mom_database_wrapper_mock().is_project_active.return_value = False
 
         return_value = self.project_details_query_handler.is_project_active(self.project_name)
@@ -473,6 +474,37 @@ class TestProjectDetailsQueryHandler(unittest.TestCase):
 
         self.assertEqual(return_value["pi_email"], pi_email)
 
+    def test_get_time_restrictions_returns_what_the_mom_wrapper_returns(self):
+        min_start_time = "2017-01-01"
+        max_end_time = "2017-01-02"
+        min_duration = 300
+        max_duration = 600
+
+        self.mom_database_wrapper_mock().get_time_restrictions.return_value = \
+            {"minStartTime": min_start_time, "maxEndTime": max_end_time,
+             "minDuration": min_duration, "maxDuration": max_duration}
+
+        result = self.project_details_query_handler.get_time_restrictions(1234)
+
+        self.assertEqual(result["minStartTime"], min_start_time)
+        self.assertEqual(result["maxEndTime"], max_end_time)
+        self.assertEqual(result["minDuration"], min_duration)
+        self.assertEqual(result["maxDuration"], max_duration)
+
+    def test_get_station_selection_returns_what_the_mom_wrapper_returns(self):
+        resource_group = "SuperTerp"
+        rg_min = 1
+        rg_max = 3
+
+        self.mom_database_wrapper_mock().get_station_selection.return_value = \
+            [{"resourceGroup": resource_group, "min": rg_min, "max": rg_max}]
+
+        result = self.project_details_query_handler.get_station_selection(1234)
+
+        self.assertEqual(result[0]["resourceGroup"], resource_group)
+        self.assertEqual(result[0]["min"], rg_min)
+        self.assertEqual(result[0]["max"], rg_max)
+
 
 class TestMomQueryRPC(unittest.TestCase):
     test_id = 1234
@@ -549,32 +581,58 @@ class TestMomQueryRPC(unittest.TestCase):
                                               })
 
     qpid_message_add_trigger_row_id = 33
-    qpid_message_add_trigger  = QpidMessage({"row_id": qpid_message_add_trigger_row_id},
-                                             properties={
-                                                 "SystemName": "LOFAR",
-                                                 "MessageType": "ReplyMessage",
-                                                 "MessageId": message_id,
-                                                 "status": "OK"
-                                             })
+    qpid_message_add_trigger = QpidMessage({"row_id": qpid_message_add_trigger_row_id},
+                                           properties={
+                                               "SystemName": "LOFAR",
+                                               "MessageType": "ReplyMessage",
+                                               "MessageId": message_id,
+                                               "status": "OK"
+                                           })
 
     author_email = "author@example.com"
     pi_email = "pi@example.com"
     qpid_message_get_project_details = QpidMessage({"author_email": author_email, "pi_email": pi_email},
-                                             properties={
-                                                 "SystemName": "LOFAR",
-                                                 "MessageType": "ReplyMessage",
-                                                 "MessageId": message_id,
-                                                 "status": "OK"
-                                             })
+                                                   properties={
+                                                       "SystemName": "LOFAR",
+                                                       "MessageType": "ReplyMessage",
+                                                       "MessageId": message_id,
+                                                       "status": "OK"
+                                                   })
 
     test_priority = 42
     qpid_message_get_project_priorities_for_objects = QpidMessage({str(test_id): test_priority},
-                                             properties={
-                                                 "SystemName": "LOFAR",
-                                                 "MessageType": "ReplyMessage",
-                                                 "MessageId": message_id,
-                                                 "status": "OK"
-                                             })
+                                                                  properties={
+                                                                      "SystemName": "LOFAR",
+                                                                      "MessageType": "ReplyMessage",
+                                                                      "MessageId": message_id,
+                                                                      "status": "OK"
+                                                                  })
+
+    min_start_time = "2017-01-01"
+    max_end_time = "2017-01-02"
+    min_duration = 300
+    max_duration = 600
+    qpid_message_get_time_restrictions = QpidMessage({"minStartTime": min_start_time,
+                                                      "maxEndTime": max_end_time,
+                                                      "minDuration": min_duration,
+                                                      "maxDuration": max_duration},
+                                                     properties={
+                                                         "SystemName": "LOFAR",
+                                                         "MessageType": "ReplyMessage",
+                                                         "MessageId": message_id,
+                                                         "status": "OK"
+                                                     })
+
+    resourceGroup = "SuperTerp"
+    rg_min = 1
+    rg_max = 3
+    qpid_message_get_station_selection = QpidMessage([{"resourceGroup": resourceGroup, "min": rg_min, "max": rg_max}],
+                                                     properties={
+                                                         "SystemName": "LOFAR",
+                                                         "MessageType": "ReplyMessage",
+                                                         "MessageId": message_id,
+                                                         "status": "OK"
+                                                     })
 
     def setUp(self):
         # the mock library had difficulty mocking ToBus and FromBus, probably due to some weird naming issue.
@@ -917,7 +975,6 @@ class TestMomQueryRPC(unittest.TestCase):
         self.assertEqual(result["author_email"], self.author_email)
         self.assertEqual(result["pi_email"], self.pi_email)
 
-
     @mock.patch('lofar.messaging.messagebus.qpid.messaging')
     def test_get_project_priorities_for_objects_query(self, qpid_mock):
         self.receiver_mock.fetch.return_value = self.qpid_message_get_project_priorities_for_objects
@@ -932,6 +989,35 @@ class TestMomQueryRPC(unittest.TestCase):
         self.assertEquals(self.test_id, result.keys()[0])
         self.assertEqual(self.test_priority, result[self.test_id])
 
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_get_time_restrictions_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_get_time_restrictions
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.get_time_restrictions(self.test_id)
+
+        self.assertEqual(result["minStartTime"], self.min_start_time)
+        self.assertEqual(result["maxEndTime"], self.max_end_time)
+        self.assertEqual(result["minDuration"], self.min_duration)
+        self.assertEqual(result["maxDuration"], self.max_duration)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_get_station_selection_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_get_station_selection
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.get_station_selection(self.test_id)
+
+        self.assertEqual(result[0]["resourceGroup"], self.resourceGroup)
+        self.assertEqual(result[0]["min"], self.rg_min)
+        self.assertEqual(result[0]["max"], self.rg_max)
+
 
 class TestMoMDatabaseWrapper(unittest.TestCase):
     database_credentials = Credentials()
@@ -1152,7 +1238,7 @@ class TestMoMDatabaseWrapper(unittest.TestCase):
             self.user_name, self.host_name, self.project_name, self.meta_data)
 
     def test_add_trigger_logs_end_of_query(self):
-        self.mysql_mock.connect().cursor().lastrowid  = 34
+        self.mysql_mock.connect().cursor().lastrowid = 34
 
         result = self.mom_database_wrapper.add_trigger(
             self.user_name, self.host_name, self.project_name, self.meta_data)
@@ -1181,7 +1267,7 @@ class TestMoMDatabaseWrapper(unittest.TestCase):
         self.mysql_mock.connect().cursor().fetchall.return_value = \
             [{u'misc': '{"trigger_id": ' + str(self.trigger_id) + '}'}]
 
-        result = self.mom_database_wrapper.get_trigger_id(self.mom_id)
+        self.mom_database_wrapper.get_trigger_id(self.mom_id)
 
         self.logger_mock.info.assert_any_call("get_trigger_id for mom_id (%s): %s", self.mom_id, self.trigger_id)
 
@@ -1223,7 +1309,7 @@ class TestMoMDatabaseWrapper(unittest.TestCase):
         self.assertEqual(result, expected_result)
 
     def test_get_project_priorities_for_objects_returns_priorities(self):
-        expected_result = {self.mom_id:self.project_priority}
+        expected_result = {self.mom_id: self.project_priority}
         details_result = [{"project_priority": self.project_priority, "object_mom2id": self.mom_id}]
         self.mysql_mock.connect().cursor().fetchall.return_value = details_result
 
@@ -1231,6 +1317,91 @@ class TestMoMDatabaseWrapper(unittest.TestCase):
 
         self.assertEqual(result, expected_result)
 
+    def test_get_station_selection_returns_info_from_misc_field(self):
+        resource_group = "SuperTerp"
+        rg_min = 1
+        rg_max = 3
+        station_selection = [{"resourceGroup": resource_group, "min": rg_min, "max": rg_max}]
+
+        expected_result = station_selection
+        details_result = [{u"mom2id": self.mom_id,  u"mom2objecttype": self.job_type,
+                           u"misc": json.dumps({u"stationSelection": station_selection})}]
+        self.mysql_mock.connect().cursor().fetchall.return_value = details_result
+
+        result = self.mom_database_wrapper.get_station_selection(self.mom_id)
+        self.assertEqual(result, expected_result)
+
+    def test_get_station_selection_throws_ValueError_on_empty_query_result(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = []
+
+        with self.assertRaises(ValueError):
+            self.mom_database_wrapper.get_station_selection(1234)
+
+    def test_get_station_selection_throws_ValueError_if_station_selection_not_present_in_misc(self):
+        details_result = [{u"mom2id": self.mom_id, u"mom2objecttype": self.job_type,
+                           u"misc": json.dumps({u"timeWindow": {u'minDuration': 300, u'maxDuration': 300}})}]
+        self.mysql_mock.connect().cursor().fetchall.return_value = details_result
+
+        with self.assertRaises(ValueError):
+            self.mom_database_wrapper.get_station_selection(1234)
+
+    def test_get_time_restrictions_returns_misc_field_info_from_query_result(self):
+        min_start_time = u"2017-01-01"
+        max_end_time = u"2017-01-04"
+        mom_starttime = u"2017-01-02"
+        mom_endtime = u"2017-01-03"
+        min_duration = 300
+        max_duration = 600
+        mom_duration = 400
+
+        timewindow = {u"minStartTime": min_start_time,
+                      u"maxEndTime": max_end_time,
+                      u"minDuration": min_duration,
+                      u"maxDuration": max_duration}
+        expected_result = timewindow
+
+        details_result = [{u"mom2id": self.mom_id, u"mom2objecttype": self.job_type,
+                           u"misc": json.dumps({u"timeWindow": timewindow}), u"duration": mom_duration,
+                           u"starttime": mom_starttime, u"endtime": mom_endtime}]
+        self.mysql_mock.connect().cursor().fetchall.return_value = details_result
+
+        result = self.mom_database_wrapper.get_time_restrictions(self.mom_id)
+        self.assertEqual(result, expected_result)
+
+    def test_get_time_restrictions_returns_mom_info_if_misc_empty_in_query_result(self):
+        mom_starttime = u"2017-01-02"
+        mom_endtime = u"2017-01-03"
+        mom_duration = 400
+
+        expected_result = {u"minStartTime": mom_starttime,
+                           u"maxEndTime": mom_endtime,
+                           u"minDuration": mom_duration,
+                           u"maxDuration": mom_duration}
+
+        details_result = [{u"mom2id": self.mom_id, u"mom2objecttype": self.job_type,
+                           u"misc": None, u"duration": mom_duration,
+                           u"starttime": mom_starttime, u"endtime": mom_endtime}]
+        self.mysql_mock.connect().cursor().fetchall.return_value = details_result
+
+        result = self.mom_database_wrapper.get_time_restrictions(self.mom_id)
+        self.assertEqual(result, expected_result)
+
+    def test_get_time_restrictions_throws_ValueError_on_empty_query_result(self):
+
+        self.mysql_mock.connect().cursor().fetchall.return_value = []
+
+        with self.assertRaises(ValueError):
+            self.mom_database_wrapper.get_time_restrictions(1234)
+
+    def test_get_time_restrictions_throws_ValueError_if_no_time_restrictions_in_query_result(self):
+        details_result = [{u"mom2id": self.mom_id, u"mom2objecttype": self.job_type,
+                           u"misc": None, u"duration": None,
+                           u"starttime": None, u"endtime": None}]
+        self.mysql_mock.connect().cursor().fetchall.return_value = details_result
+
+        with self.assertRaises(ValueError):
+            self.mom_database_wrapper.get_time_restrictions(1234)
+
 
 @unittest.skip("Skipping integration test")
 class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
@@ -1254,7 +1425,7 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
     def setUp(self):
         logger.info('setting up test MoM database...')
 
-        self.mysqld = Mysqld() # for a fresh one, use: self.mysqld = testing.mysqld.Mysqld()
+        self.mysqld = Mysqld()  # for a fresh one, use: self.mysqld = testing.mysqld.Mysqld()
 
         # set up fresh connection to the mom (!) database.
         self.connection = connector.connect(**self.mysqld.dsn())
@@ -1441,8 +1612,8 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
         self.assertEqual(result, None)
 
     def test_get_trigger_id_returns_id_for_lofar_observation(self):
-        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', 'test-lofar', "
-                     "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', "
+                     "'test-lofar', NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
         # id, mom2objectid, observation_id, instrument, user_specification_id, system_specification_id, default_template, tbb_template, tbb_piggyback_allowed, parset, nr_output_correlated, nr_output_beamformed, nr_output_coherent_stokes, nr_output_incoherent_stokes, nr_output_flyseye, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_coherent_stokes_valid, nr_output_incoherent_stokes_valid, nr_output_flyseye_valid, feedback, aartfaac_piggyback_allowed, storage_cluster_id, processing_cluster_id, nico_testing
         self.execute("insert into lofar_observation values(83, 1, NULL, 'Interferometer', 47, 48, NULL, NULL, 0,"
                      " NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL)")
@@ -1457,8 +1628,8 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
         self.assertEqual(result, self.trigger_id)
 
     def test_get_trigger_id_returns_none_for_lofar_observation_with_empty_misc(self):
-        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', 'test-lofar', "
-                     "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', "
+                     "'test-lofar', NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
         # id, mom2objectid, observation_id, instrument, user_specification_id, system_specification_id, default_template, tbb_template, tbb_piggyback_allowed, parset, nr_output_correlated, nr_output_beamformed, nr_output_coherent_stokes, nr_output_incoherent_stokes, nr_output_flyseye, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_coherent_stokes_valid, nr_output_incoherent_stokes_valid, nr_output_flyseye_valid, feedback, aartfaac_piggyback_allowed, storage_cluster_id, processing_cluster_id, nico_testing
         self.execute("insert into lofar_observation values(83, 2, NULL, 'Interferometer', 47, 48, NULL, NULL, 0,"
                      " NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL)")
@@ -1473,8 +1644,8 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
         self.assertEqual(result, None)
 
     def test_get_trigger_id_returns_none_for_lofar_observation_with_empty_json(self):
-        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', 'test-lofar', "
-                     "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', "
+                     "'test-lofar', NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
         # id, mom2objectid, observation_id, instrument, user_specification_id, system_specification_id, default_template, tbb_template, tbb_piggyback_allowed, parset, nr_output_correlated, nr_output_beamformed, nr_output_coherent_stokes, nr_output_incoherent_stokes, nr_output_flyseye, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_coherent_stokes_valid, nr_output_incoherent_stokes_valid, nr_output_flyseye_valid, feedback, aartfaac_piggyback_allowed, storage_cluster_id, processing_cluster_id, nico_testing
         self.execute("insert into lofar_observation values(83, 2, NULL, 'Interferometer', 47, 48, NULL, NULL, 0,"
                      " NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL)")
@@ -1495,7 +1666,8 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
         # id, mom2objectid, starttime, endtime, pipeline_id, pending, template, runtimeDirectory, resultDirectory, workingDirectory, parset, nr_output_correlated, nr_output_beamformed, nr_output_instrument_model, nr_output_skyimage, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_instrument_model_valid, nr_output_skyimage_valid, feedback, demixing_parameters_id, bbs_parameters_id, duration, storage_cluster_id, processing_cluster_id, misc
         self.execute("insert into lofar_pipeline values(1761, 1, NULL, NULL, 63722, 0, "
                      "'Calibration Pipeline Calibrator', NULL, NULL, NULL, 'parset', 0, NULL, 244, NULL, 0, 0, NULL, 0,"
-                     " NULL, 3071, 3071, NULL, NULL, NULL, '{\"trigger_id\": %(trigger_id)s}')" % {"trigger_id": self.trigger_id})
+                     " NULL, 3071, 3071, NULL, NULL, NULL, '{\"trigger_id\": %(trigger_id)s}')"
+                     % {"trigger_id": self.trigger_id})
 
         result = self.mom_database_wrapper.get_trigger_id("2")
 
@@ -1508,12 +1680,13 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
 
     def test_get_project_details_returns_correct_emails_with_filled_database(self):
         self.execute("insert into mom2object "
-                     "values(111, NULL, NULL, 2334, 'PROJECT', 'CEP4tests', 'Project for CEP4 tests', NULL, 1725713, NULL, NULL, 0, NULL, NULL);")
+                     "values(111, NULL, NULL, 2334, 'PROJECT', 'CEP4tests', 'Project for CEP4 tests', "
+                     "NULL, 1725713, NULL, NULL, 0, NULL, NULL);")
 
         self.execute("insert into member "
-                    "values(1, 111, 0);")
+                     "values(1, 111, 0);")
         self.execute("insert into member "
-                    "values(2, 111, 0);")
+                     "values(2, 111, 0);")
 
         self.execute("insert into registeredmember "
                      "values(1, 1, 1);")
@@ -1553,16 +1726,16 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
         pname = 'myproject_'+str(1)
 
         self.execute("insert into mom2object values(%s, NULL, NULL, %s, 'PROJECT', '%s', 'x', "
-                         "NULL, %s, NULL, NULL, 0, 0, 0)"
-                         % (1, 1, pname, statusid))
+                     "NULL, %s, NULL, NULL, 0, 0, 0)"
+                     % (1, 1, pname, statusid))
 
         self.execute("insert into project values(%s, %s, '2012-09-14', FALSE, 0)"
-                         % (1, 1))
+                     % (1, 1))
 
         self.execute("insert into mom2object values(%s, NULL, NULL, %s , 'OBSERVATION', 'x', "
-                         "'x', %s, %s, 'x', 'x', 0, NULL,"
-                         " 0)"
-                         % (2, oid, 1, statusid))
+                     "'x', %s, %s, 'x', 'x', 0, NULL,"
+                     " 0)"
+                     % (2, oid, 1, statusid))
 
         self.execute("insert into status values(%s, '%s', 'OBSERVATION', %s)" % (statusid, status, statusid))
 
@@ -1576,8 +1749,6 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
         self.assertEqual(result[str(oid)]['object_status'], status)
         self.assertEqual(result[str(oid)]['project_name'], pname)
 
-
-
     def test_get_project_priorities_for_objects_returns_correct_priorities(self):
         object_ids = [3344, 1234, 7654]
         project_prios = [42, 24, 12]
@@ -1608,13 +1779,11 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
             prio = return_value[oid]
             self.assertEqual(prio, expected_prio)
 
-
     def test_get_project_priorities_for_objects_returns_empty_dict_on_empty_database(self):
 
         return_value = self.mom_database_wrapper.get_project_priorities_for_objects("1234")
         self.assertEqual(return_value, {})
 
-
     def test_get_project_priorities_for_objects_returns_only_priorities_of_existing_objects(self):
 
         object_ids = [380, 747]
@@ -1633,12 +1802,12 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
                          % (eid, eid, 'myproject_'+str(i)))
 
             self.execute("insert into project values(%s, %s, '2012-09-14', FALSE, %s)"
-                         % (eid, eid, prio)) # unique id in project table, refer to mom2object of our project
+                         % (eid, eid, prio))  # unique id in project table, refer to mom2object of our project
 
             self.execute("insert into mom2object values(%s, NULL, NULL, %s , 'PIPELINE', 'x', "
                          "'x', %s, NULL, 'x', 'x', 0, NULL,"
                          " 0)"
-                         % (eid+100, oid, eid)) # unique id for the pipeline, refer to project id
+                         % (eid+100, oid, eid))  # unique id for the pipeline, refer to project id
 
         return_value = self.mom_database_wrapper.get_project_priorities_for_objects(object_ids + [extra_id])
 
@@ -1646,6 +1815,124 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
             self.assertTrue(oid in return_value.keys())
         self.assertFalse(extra_id in return_value.keys())
 
+    def test_get_time_restrictions_throws_ValueError_on_empty_database(self):
+        with self.assertRaises(ValueError):
+            self.mom_database_wrapper.get_time_restrictions(1234)
+
+    def test_get_time_restrictions_throws_ValueError_if_no_time_restrictions_in_database(self):
+
+        self.execute(
+            "insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', 'test-lofar', "
+            "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        # id, mom2objectid, observation_id, instrument, user_specification_id, system_specification_id, default_template, tbb_template, tbb_piggyback_allowed, parset, nr_output_correlated, nr_output_beamformed, nr_output_coherent_stokes, nr_output_incoherent_stokes, nr_output_flyseye, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_coherent_stokes_valid, nr_output_incoherent_stokes_valid, nr_output_flyseye_valid, feedback, aartfaac_piggyback_allowed, storage_cluster_id, processing_cluster_id, nico_testing
+        self.execute("insert into lofar_observation values(83, 1, NULL, 'Interferometer', 47, 48, NULL, NULL, 0,"
+                     " NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL)")
+        # id, type, correlated_data, filtered_data, beamformed_data, coherent_stokes_data, incoherent_stokes_data, antenna, clock_mode, instrument_filter, integration_interval, channels_per_subband, cn_integration_steps, pencilbeams_flyseye, pencilbeams_nr_pencil_rings, pencilbeams_ring_size, stokes_selection, stokes_integrate_channels, stokes_integration_steps, station_set, timeframe, starttime, endtime, spec_duration, coherent_dedisperse_channels, dispersion_measure, subbands_per_file_cs, subbands_per_file_bf, collapsed_channels_cs, collapsed_channels_is, downsampling_steps_cs, downsampling_steps_is, which_cs, which_is, bypass_pff, enable_superterp, flyseye, tab_nr_rings, tab_ring_size, bits_per_sample, misc
+        self.execute("insert into lofar_observation_specification values(47, 'USER', 1, 0, 0, 0, 0, 'HBA Dual', "
+                     "'160 MHz', '170-230 MHz', 1, NULL, NULL, 0, NULL, NULL, NULL, 0, NULL, 'Custom', NULL, NULL, "
+                     "NULL, NULL, 0, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, 0, 0, NULL, NULL, "
+                     "16, NULL)")
+
+        with self.assertRaises(ValueError):
+            self.mom_database_wrapper.get_time_restrictions(2)
+
+    def test_get_time_restrictions_returns_correct_time_restrictions(self):
+        min_start_time = "2017-01-01"
+        max_end_time = "2017-01-02"
+        min_duration = 300
+        max_duration = 600
+
+        self.execute(
+            "insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', 'test-lofar', "
+            "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        # id, mom2objectid, observation_id, instrument, user_specification_id, system_specification_id, default_template, tbb_template, tbb_piggyback_allowed, parset, nr_output_correlated, nr_output_beamformed, nr_output_coherent_stokes, nr_output_incoherent_stokes, nr_output_flyseye, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_coherent_stokes_valid, nr_output_incoherent_stokes_valid, nr_output_flyseye_valid, feedback, aartfaac_piggyback_allowed, storage_cluster_id, processing_cluster_id, nico_testing
+        self.execute("insert into lofar_observation values(83, 1, NULL, 'Interferometer', 47, 48, NULL, NULL, 0,"
+                     " NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL)")
+        # id, type, correlated_data, filtered_data, beamformed_data, coherent_stokes_data, incoherent_stokes_data, antenna, clock_mode, instrument_filter, integration_interval, channels_per_subband, cn_integration_steps, pencilbeams_flyseye, pencilbeams_nr_pencil_rings, pencilbeams_ring_size, stokes_selection, stokes_integrate_channels, stokes_integration_steps, station_set, timeframe, starttime, endtime, spec_duration, coherent_dedisperse_channels, dispersion_measure, subbands_per_file_cs, subbands_per_file_bf, collapsed_channels_cs, collapsed_channels_is, downsampling_steps_cs, downsampling_steps_is, which_cs, which_is, bypass_pff, enable_superterp, flyseye, tab_nr_rings, tab_ring_size, bits_per_sample, misc
+        self.execute("insert into lofar_observation_specification values(47, 'USER', 1, 0, 0, 0, 0, 'HBA Dual', "
+                     "'160 MHz', '170-230 MHz', 1, NULL, NULL, 0, NULL, NULL, NULL, 0, NULL, 'Custom', NULL, NULL, "
+                     "NULL, %s, 0, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, 0, 0, NULL, NULL, "
+                     "16, '{\"timeWindow\":{\"minStartTime\": \"%s\", \"maxEndTime\": \"%s\", \"minDuration\": %s, "
+                     "\"maxDuration\": %s}}')"
+                     % (min_duration, min_start_time, max_end_time, min_duration, max_duration))
+
+        result = self.mom_database_wrapper.get_time_restrictions(2)
+
+        self.assertEqual(result["minStartTime"], min_start_time)
+        self.assertEqual(result["maxEndTime"], max_end_time)
+        self.assertEqual(result["minDuration"], min_duration)
+        self.assertEqual(result["maxDuration"], max_duration)
+
+    def test_get_time_restrictions_returns_mom_duration_if_misc_empty(self):
+        duration = 300
+
+        self.execute(
+            "insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', 'test-lofar', "
+            "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        # id, mom2objectid, observation_id, instrument, user_specification_id, system_specification_id, default_template, tbb_template, tbb_piggyback_allowed, parset, nr_output_correlated, nr_output_beamformed, nr_output_coherent_stokes, nr_output_incoherent_stokes, nr_output_flyseye, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_coherent_stokes_valid, nr_output_incoherent_stokes_valid, nr_output_flyseye_valid, feedback, aartfaac_piggyback_allowed, storage_cluster_id, processing_cluster_id, nico_testing
+        self.execute("insert into lofar_observation values(83, 1, NULL, 'Interferometer', 47, 48, NULL, NULL, 0,"
+                     " NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL)")
+        # id, type, correlated_data, filtered_data, beamformed_data, coherent_stokes_data, incoherent_stokes_data, antenna, clock_mode, instrument_filter, integration_interval, channels_per_subband, cn_integration_steps, pencilbeams_flyseye, pencilbeams_nr_pencil_rings, pencilbeams_ring_size, stokes_selection, stokes_integrate_channels, stokes_integration_steps, station_set, timeframe, starttime, endtime, spec_duration, coherent_dedisperse_channels, dispersion_measure, subbands_per_file_cs, subbands_per_file_bf, collapsed_channels_cs, collapsed_channels_is, downsampling_steps_cs, downsampling_steps_is, which_cs, which_is, bypass_pff, enable_superterp, flyseye, tab_nr_rings, tab_ring_size, bits_per_sample, misc
+        self.execute("insert into lofar_observation_specification values(47, 'USER', 1, 0, 0, 0, 0, 'HBA Dual', "
+                     "'160 MHz', '170-230 MHz', 1, NULL, NULL, 0, NULL, NULL, NULL, 0, NULL, 'Custom', NULL, NULL, "
+                     "NULL, %s, 0, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, 0, 0, NULL, NULL, "
+                     "16, NULL)" % duration)
+
+        result = self.mom_database_wrapper.get_time_restrictions(2)
+
+        self.assertEqual(result["minDuration"], duration)
+        self.assertEqual(result["maxDuration"], duration)
+
+    def test_get_station_selection_throws_ValueError_on_empty_database(self):
+        with self.assertRaises(ValueError):
+            self.mom_database_wrapper.get_station_selection(1234)
+
+    def test_get_station_selection_throws_ValueError_if_not_present_in_misc(self):
+
+        self.execute(
+            "insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', 'test-lofar', "
+            "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        # id, mom2objectid, observation_id, instrument, user_specification_id, system_specification_id, default_template, tbb_template, tbb_piggyback_allowed, parset, nr_output_correlated, nr_output_beamformed, nr_output_coherent_stokes, nr_output_incoherent_stokes, nr_output_flyseye, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_coherent_stokes_valid, nr_output_incoherent_stokes_valid, nr_output_flyseye_valid, feedback, aartfaac_piggyback_allowed, storage_cluster_id, processing_cluster_id, nico_testing
+        self.execute("insert into lofar_observation values(83, 1, NULL, 'Interferometer', 47, 48, NULL, NULL, 0,"
+                     " NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL)")
+        # id, type, correlated_data, filtered_data, beamformed_data, coherent_stokes_data, incoherent_stokes_data, antenna, clock_mode, instrument_filter, integration_interval, channels_per_subband, cn_integration_steps, pencilbeams_flyseye, pencilbeams_nr_pencil_rings, pencilbeams_ring_size, stokes_selection, stokes_integrate_channels, stokes_integration_steps, station_set, timeframe, starttime, endtime, spec_duration, coherent_dedisperse_channels, dispersion_measure, subbands_per_file_cs, subbands_per_file_bf, collapsed_channels_cs, collapsed_channels_is, downsampling_steps_cs, downsampling_steps_is, which_cs, which_is, bypass_pff, enable_superterp, flyseye, tab_nr_rings, tab_ring_size, bits_per_sample, misc
+        self.execute("insert into lofar_observation_specification values(47, 'USER', 1, 0, 0, 0, 0, 'HBA Dual', "
+                     "'160 MHz', '170-230 MHz', 1, NULL, NULL, 0, NULL, NULL, NULL, 0, NULL, 'Custom', NULL, NULL, "
+                     "NULL, NULL, 0, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, 0, 0, NULL, NULL, "
+                     "16, '{\"timeWindow\":{\"minDuration\": 300, \"maxDuration\": 600}}')")
+
+        with self.assertRaises(ValueError):
+            self.mom_database_wrapper.get_station_selection(1234)
+
+    def test_get_station_selection_returns_correct_station_selection(self):
+        resource_group = "SuperTerp"
+        rg_min = 4
+        rg_max = 9
+        resource_group2 = "CS001"
+        rg_min2 = 1
+
+        self.execute(
+            "insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', 'test-lofar', "
+            "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        # id, mom2objectid, observation_id, instrument, user_specification_id, system_specification_id, default_template, tbb_template, tbb_piggyback_allowed, parset, nr_output_correlated, nr_output_beamformed, nr_output_coherent_stokes, nr_output_incoherent_stokes, nr_output_flyseye, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_coherent_stokes_valid, nr_output_incoherent_stokes_valid, nr_output_flyseye_valid, feedback, aartfaac_piggyback_allowed, storage_cluster_id, processing_cluster_id, nico_testing
+        self.execute("insert into lofar_observation values(83, 1, NULL, 'Interferometer', 47, 48, NULL, NULL, 0,"
+                     " NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL)")
+        # id, type, correlated_data, filtered_data, beamformed_data, coherent_stokes_data, incoherent_stokes_data, antenna, clock_mode, instrument_filter, integration_interval, channels_per_subband, cn_integration_steps, pencilbeams_flyseye, pencilbeams_nr_pencil_rings, pencilbeams_ring_size, stokes_selection, stokes_integrate_channels, stokes_integration_steps, station_set, timeframe, starttime, endtime, spec_duration, coherent_dedisperse_channels, dispersion_measure, subbands_per_file_cs, subbands_per_file_bf, collapsed_channels_cs, collapsed_channels_is, downsampling_steps_cs, downsampling_steps_is, which_cs, which_is, bypass_pff, enable_superterp, flyseye, tab_nr_rings, tab_ring_size, bits_per_sample, misc
+        self.execute("insert into lofar_observation_specification values(47, 'USER', 1, 0, 0, 0, 0, 'HBA Dual', "
+                     "'160 MHz', '170-230 MHz', 1, NULL, NULL, 0, NULL, NULL, NULL, 0, NULL, 'Custom', NULL, NULL, "
+                     "NULL, NULL, 0, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, 0, 0, NULL, NULL, "
+                     "16, '{\"stationSelection\": [{\"resourceGroup\": \"%s\", \"min\": %s, \"max\": %s}, "
+                     "{\"resourceGroup\": \"%s\", \"min\": %s}]}')" % (resource_group, rg_min, rg_max,
+                                                                       resource_group2, rg_min2))
+
+        result = self.mom_database_wrapper.get_station_selection(2)
+
+        self.assertEqual(result[0]["resourceGroup"], resource_group)
+        self.assertEqual(result[0]["min"], rg_min)
+        self.assertEqual(result[0]["max"], rg_max)
+        self.assertEqual(result[1]["resourceGroup"], resource_group2)
+        self.assertEqual(result[1]["min"], rg_min2)
+
 
 if __name__ == "__main__":
     logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
diff --git a/SAS/MoM/MoMQueryService/test/t_momqueryservice.run b/SAS/MoM/MoMQueryService/test/t_momqueryservice.run
new file mode 100755
index 0000000000000000000000000000000000000000..0e4120028ae4d3a46a2d22d07884ccfbaa911a4c
--- /dev/null
+++ b/SAS/MoM/MoMQueryService/test/t_momqueryservice.run
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+# Run the unit test
+source python-coverage.sh
+python_coverage_test "MoMQueryService/*" t_momqueryservice.py
diff --git a/SAS/MoM/MoMQueryService/test/t_momqueryservice.sh b/SAS/MoM/MoMQueryService/test/t_momqueryservice.sh
new file mode 100755
index 0000000000000000000000000000000000000000..cd4cc2f294db2ecdd82faa9d95966e5f313c65ec
--- /dev/null
+++ b/SAS/MoM/MoMQueryService/test/t_momqueryservice.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+./runctest.sh t_momqueryservice
diff --git a/SAS/MoM/MoMQueryService/test/test_momqueryservice.run b/SAS/MoM/MoMQueryService/test/test_momqueryservice.run
deleted file mode 100755
index 9553d2fdcfe704c6655b35b6b10d60229ca084d8..0000000000000000000000000000000000000000
--- a/SAS/MoM/MoMQueryService/test/test_momqueryservice.run
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-# Run the unit test
-source python-coverage.sh
-python_coverage_test "MoMQueryService/*" test_momqueryservice.py
diff --git a/SAS/MoM/MoMQueryService/test/test_momqueryservice.sh b/SAS/MoM/MoMQueryService/test/test_momqueryservice.sh
deleted file mode 100755
index 488e7f88ef29f3efbda2cda3da7aa43560f23374..0000000000000000000000000000000000000000
--- a/SAS/MoM/MoMQueryService/test/test_momqueryservice.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh
-
-./runctest.sh test_momqueryservice