From c48b058edbb2bcba62faf914f02c2b84a55528ca Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?J=C3=B6rn=20K=C3=BCnsem=C3=B6ller?=
 <jkuensem@physik.uni-bielefeld.de>
Date: Wed, 12 Jul 2017 14:32:50 +0000
Subject: [PATCH] Task #11037 - processed review comments

---
 .../MoMQueryServiceServer/momqueryservice.py  | 190 +++++++-------
 .../MoMQueryService/test/t_momqueryservice.py | 234 +++++++++++-------
 2 files changed, 244 insertions(+), 180 deletions(-)

diff --git a/SAS/MoM/MoMQueryService/MoMQueryServiceServer/momqueryservice.py b/SAS/MoM/MoMQueryService/MoMQueryServiceServer/momqueryservice.py
index 81f27fcd943..329530cf56c 100755
--- a/SAS/MoM/MoMQueryService/MoMQueryServiceServer/momqueryservice.py
+++ b/SAS/MoM/MoMQueryService/MoMQueryServiceServer/momqueryservice.py
@@ -45,7 +45,6 @@ from optparse import OptionParser
 from mysql import connector
 from mysql.connector.errors import OperationalError
 import json
-from datetime import datetime, timedelta
 
 #from django.db.models import query
 from lofar.messaging import Service
@@ -93,7 +92,7 @@ def _toIdsString(ids):
     if not ids_list:
         raise ValueError("Could not find proper ids in: " + ids)
 
-    ids_str = ','.join([str(id) for id in ids_list])
+    ids_str = ','.join([str(ident) for ident in ids_list])
     return ids_str
 
 
@@ -122,12 +121,12 @@ class MoMDatabaseWrapper:
         # max of 3 tries, on success return result
         # use new connection for every query,
         # because on the flaky lofar network a connection may appear functional but returns improper results.
-        MAXTRIES=3
+        maxtries = 3
 
         if data is not None and type(data) not in (tuple, dict):
             raise ValueError('Need data as tuple or dict, got ' + str(type(data)))
 
-        for i in range(MAXTRIES):
+        for i in range(maxtries):
             try:
                 self._connect()
                 cursor = self.conn.cursor(dictionary=True)
@@ -136,16 +135,16 @@ class MoMDatabaseWrapper:
             except (OperationalError, AttributeError) as e:
                 logger.error(str(e))
 
-                if i+1 == MAXTRIES: raise e
+                if i+1 == maxtries: raise e
 
     def _executeInsertQuery(self, query, data=None):
         # try to execute query on flaky lofar mysql connection
         # max of 3 tries, on success return result
         # use new connection for every query,
         # because on the flaky lofar network a connection may appear functional but returns improper results.
-        MAXTRIES=3
+        maxtries = 3
 
-        for i in range(MAXTRIES):
+        for i in range(maxtries):
             try:
                 self._connect()
                 cursor = self.conn.cursor(dictionary=True)
@@ -155,7 +154,7 @@ class MoMDatabaseWrapper:
             except (OperationalError, AttributeError) as e:
                 logger.error(str(e))
 
-                if i+1 == MAXTRIES: raise e
+                if i+1 == maxtries: raise e
 
     def add_trigger(self, user_name, host_name, project_name, meta_data):
         logger.info("add_trigger for user_name: %s, host_name: %s, project_name: %s, meta_data: %s",
@@ -245,7 +244,8 @@ where mom2object.name = %s"""
         join """ + self.momprivilege_db + """.statustransitionrole as transition_role on system_role.systemroleid=transition_role.roleid
         join """ + self.momprivilege_db + """.statustransition as transition on transition_role.statustransitionid=transition.id
         join status as open_status on open_status.code='opened'
-        join status as status on status.id=transition.newstatusid and (transition.oldstatusid=0 or transition.oldstatusid=open_status.id)
+        join status as status on status.id=transition.newstatusid
+        and (transition.oldstatusid=0 or transition.oldstatusid=open_status.id)
         where status.code=%s and
         status.type='""" + status_type[job_type] + """' and
         open_status.type='""" + status_type[job_type] + """' and
@@ -267,7 +267,8 @@ where mom2object.name = %s"""
         join """ + self.momprivilege_db + """.statustransitionrole as transition_role on project_role.id=transition_role.roleid
         join """ + self.momprivilege_db + """.statustransition as transition on transition_role.statustransitionid=transition.id
         join status as open_status on open_status.code='opened'
-        join status as status on status.id=transition.newstatusid and (transition.oldstatusid=0 or transition.oldstatusid=open_status.id)
+        join status as status on status.id=transition.newstatusid
+        and (transition.oldstatusid=0 or transition.oldstatusid=open_status.id)
         where status.code=%s and
         status.type='""" + status_type[job_type] + """' and
         open_status.type='""" + status_type[job_type] + """' and
@@ -372,7 +373,7 @@ where mom2object.name = %s"""
 
         trigger_id = None
         misc = self._get_misc_contents(mom_id)
-        if 'trigger_id' in misc:
+        if misc and 'trigger_id' in misc:
             trigger_id = misc['trigger_id']
 
         logger.info("get_trigger_id for mom_id (%s): %s", mom_id, trigger_id)
@@ -409,12 +410,11 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
         return result
 
-
     def get_project_priorities_for_objects(self, mom_ids):
-        ''' get the project priorities for given mom object mom_ids (observations/pipelines/reservations)
-        :param mixed mom_ids comma seperated string of mom2object id's, or list of ints
+        """ get the project priorities for given mom object mom_ids (observations/pipelines/reservations)
+        :param mom_ids: comma separated string of mom2object ids, or list of ints
         :rtype list of dict's key value pairs with the project priorities
-        '''
+        """
         if not mom_ids:
             return {}
 
@@ -448,12 +448,13 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         return result
 
     def getObjectDetails(self, mom_ids):
-        ''' get the object details (project_mom2id, project_name,
+        """
+        get the object details (project_mom2id, project_name,
         project_description, object_mom2id, object_name, object_description,
         object_type, object_group_id, object_group_name, object_status) for given mom object mom_ids
-        :param mixed mom_ids comma seperated string of mom2object id's, or list of ints
+        :param mom_ids: comma separated string of mom2object ids, or list of ints
         :rtype list of dict's key value pairs with the project details
-        '''
+        """
         if not mom_ids:
             return {}
 
@@ -465,10 +466,13 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         placeholder = (len(parameters)*'%s,')[:-1]
 
         # TODO: make a view for this query in momdb!
-        query = '''SELECT project.mom2id as project_mom2id, project.id as project_mom2objectid, project.name as project_name, project.description as project_description,
-        object.mom2id as object_mom2id, object.id as object_mom2objectid, object.name as object_name, object.description as object_description, object.mom2objecttype as object_type, status.code as object_status,
-        object.group_id as object_group_id, grp.id as object_group_mom2objectid, grp.name as object_group_name, grp.description as object_group_description,
-        parent_grp.id as parent_group_mom2objectid, parent_grp.mom2id as parent_group_mom2id, parent_grp.name as parent_group_name, parent_grp.description as parent_group_description
+        query = '''SELECT project.mom2id as project_mom2id, project.id as project_mom2objectid, project.name as
+        project_name, project.description as project_description, object.mom2id as object_mom2id,
+        object.id as object_mom2objectid, object.name as object_name, object.description as object_description,
+        object.mom2objecttype as object_type, status.code as object_status, object.group_id as object_group_id,
+        grp.id as object_group_mom2objectid, grp.name as object_group_name, grp.description as object_group_description,
+        parent_grp.id as parent_group_mom2objectid, parent_grp.mom2id as parent_group_mom2id,
+        parent_grp.name as parent_group_name, parent_grp.description as parent_group_description
         FROM mom2object as object
         left join mom2object as project on project.id = object.ownerprojectid
         left join mom2object as grp on grp.mom2id = object.group_id
@@ -494,11 +498,12 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         return result
 
     def getProjects(self):
-        ''' get the list of all projects with columns (project_mom2id, project_name,
+        """
+        get the list of all projects with columns (project_mom2id, project_name,
         project_description, status_name, status_id, last_user_id,
         last_user_name, statustime)
         :rtype list of dict's key value pairs with all projects
-        '''
+        """
         # TODO: make a view for this query in momdb!
         query = '''SELECT project.mom2id as mom2id, project.name as name, project.description as description,
                 statustype.code as status_name,  statustype.id as status_id,
@@ -516,10 +521,11 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         return result
 
     def getProject(self, project_mom2id):
-        ''' get project for the given project_mom2id with columns (project_mom2id, project_name,
+        """
+        get project for the given project_mom2id with columns (project_mom2id, project_name,
         project_description, status_name, status_id, last_user_id,
         last_user_name, statustime)
-        '''
+        """
         ids_str = _toIdsString(project_mom2id)
 
         # TODO: make a view for this query in momdb!
@@ -554,7 +560,7 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
         rows = self._executeSelectQuery(query, parameters)
 
-        result = { 'project_mom2id': project_mom2id, 'task_mom2ids': [r['mom2id'] for r in rows]}
+        result = {'project_mom2id': project_mom2id, 'task_mom2ids': [r['mom2id'] for r in rows]}
 
         logger.info('task ids for project: %s', result)
 
@@ -586,7 +592,7 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
             result[str(mom2id)] = pred_id_list
 
         for mom2id in ids_str.split(','):
-            if not mom2id in result:
+            if mom2id not in result:
                 result[mom2id] = []
 
         logger.info('predecessors: %s', result)
@@ -629,8 +635,11 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         return result
 
     def getTaskIdsGraph(self, mom2id):
-        '''Get the fully connected graph of interconnected tasks given any mom2id in that graph
-        returns: dict with mom2id:node as key value pairs, where each node is a dict with items node_mom2id, predecessor_ids, successor_ids'''
+        """
+        Get the fully connected graph of interconnected tasks given any mom2id in that graph
+        returns: dict with mom2id:node as key value pairs, where each node is a dict with items
+        node_mom2id, predecessor_ids, successor_ids
+        """
 
         def extendGraphWithPredecessorsAndSuccessors(graph, current_node_id):
             node = graph[current_node_id]
@@ -638,7 +647,7 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
             new_node_ids = set()
 
-            node_pred_ids = self.getPredecessorIds(node_mom2id).get(str(node_mom2id),[])
+            node_pred_ids = self.getPredecessorIds(node_mom2id).get(str(node_mom2id), [])
             for pred_id in node_pred_ids:
                 if pred_id not in node['predecessor_ids']:
                     node['predecessor_ids'].append(pred_id)
@@ -646,13 +655,13 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
                 pred_node = graph.get(pred_id)
 
                 if not pred_node:
-                    graph[pred_id] = { 'node_mom2id': pred_id,
-                                       'predecessor_ids': [],
-                                       'successor_ids': [node_mom2id] }
+                    graph[pred_id] = {'node_mom2id': pred_id,
+                                      'predecessor_ids': [],
+                                      'successor_ids': [node_mom2id]}
 
                     new_node_ids.add(pred_id)
 
-            node_succ_ids = self.getSuccessorIds(node_mom2id).get(str(node_mom2id),[])
+            node_succ_ids = self.getSuccessorIds(node_mom2id).get(str(node_mom2id), [])
             for succ_id in node_succ_ids:
                 if succ_id not in node['successor_ids']:
                     node['successor_ids'].append(succ_id)
@@ -660,26 +669,24 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
                 succ_node = graph.get(succ_id)
 
                 if not succ_node:
-                    graph[succ_id] = { 'node_mom2id': succ_id,
-                                       'predecessor_ids': [node_mom2id],
-                                       'successor_ids': [] }
+                    graph[succ_id] = {'node_mom2id': succ_id,
+                                      'predecessor_ids': [node_mom2id],
+                                      'successor_ids': []}
 
                     new_node_ids.add(succ_id)
 
-            #recurse
+            # recurse
             for new_node_id in new_node_ids:
                 extendGraphWithPredecessorsAndSuccessors(graph, new_node_id)
 
         # start with simple graph with the given node_mom2id
-        the_graph = { mom2id: { 'node_mom2id': mom2id,
-                                'predecessor_ids': [],
-                                'successor_ids': [] } }
+        the_graph = {mom2id: {'node_mom2id': mom2id,
+                              'predecessor_ids': [],
+                              'successor_ids': []}}
 
         # recursively append next layers until done.
         extendGraphWithPredecessorsAndSuccessors(the_graph, mom2id)
 
-
-
         # the_graph is now complete, return it
         return the_graph
 
@@ -720,12 +727,13 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         logger.debug("getGroupsInParentGroup for mom parent group ids: %s", ids_str)
 
         query = '''SELECT parent.id as parent_mom2object_id, parent.mom2id as parent_mom2id,
-                    grp.mom2id as group_mom2id, grp.id as group_mom2object_id, grp.name as group_name, grp.description as group_description
+                    grp.mom2id as group_mom2id, grp.id as group_mom2object_id, grp.name as group_name,
+                    grp.description as group_description
                     from mom2object parent
                     inner join mom2object grp on parent.id = grp.parentid
                     where parent.mom2id in (%s)
                     and grp.group_id = grp.mom2id'''
-        parameters =  (ids_str, )
+        parameters = (ids_str, )
 
         rows = self._executeSelectQuery(query, parameters)
 
@@ -766,8 +774,11 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         return result
 
     def getMoMIdsForOTDBIds(self, otdb_ids):
-        '''reverse lookup from otdb_id(s) to mom2id(s)
-        returns: dict with otdb_id(s) in keys, mom2id(s) as values'''
+        """
+        reverse lookup from otdb_id(s) to mom2id(s)
+        returns: dict with otdb_id(s) as keys, mom2id(s) as values
+        """
+
         if not otdb_ids:
             return {}
 
@@ -775,9 +786,9 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
         logger.debug("getMoMIdsForOTDBIds for otdb ids: %s" % ids_str)
 
-        result = {int(otdb_id):None for otdb_id in ids_str.split(',')}
+        result = {int(otdb_id): None for otdb_id in ids_str.split(',')}
 
-        #first query all observations
+        # first query all observations
         query = '''SELECT obs.observation_id as otdb_id, mo.mom2id as mom2id
                    FROM lofar_observation obs
                    INNER JOIN mom2object mo on mo.id = obs.mom2objectid
@@ -787,10 +798,10 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         rows = self._executeSelectQuery(query)
 
         for row in rows:
-            if row['mom2id'] != None:
+            if row['mom2id'] is not None:
                 result[row['otdb_id']] = row['mom2id']
 
-        #then query all pipelines and combine the results
+        # then query all pipelines and combine the results
         query = '''SELECT pl.pipeline_id as otdb_id, mo.mom2id as mom2id
                    FROM lofar_pipeline pl
                    INNER JOIN mom2object mo on mo.id = pl.mom2objectid
@@ -800,16 +811,18 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         rows = self._executeSelectQuery(query)
 
         for row in rows:
-            if row['mom2id'] != None:
+            if row['mom2id'] is not None:
                 result[row['otdb_id']] = row['mom2id']
 
         logger.info("getMoMIdsForOTDBIds: %s" % result)
         return result
 
-
     def getOTDBIdsForMoMIds(self, mom_ids):
-        '''lookup from mom2id(s) to otdb_id(s)
-        returns: dict with mom2id(s) in keys, otdb_id(s) as values'''
+        """
+        lookup from mom2id(s) to otdb_id(s)
+        returns: dict with mom2id(s) as keys, otdb_id(s) as values
+        """
+
         if not mom_ids:
             return {}
 
@@ -817,9 +830,9 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
         logger.debug("getOTDBIdsForMoMIds for otdb ids: %s" % ids_str)
 
-        result = {int(mom_id):None for mom_id in ids_str.split(',')}
+        result = {int(mom_id): None for mom_id in ids_str.split(',')}
 
-        #first query all observations
+        # first query all observations
         query = '''SELECT obs.observation_id as otdb_id, mo.mom2id as mom2id
                    FROM lofar_observation obs
                    INNER JOIN mom2object mo on mo.id = obs.mom2objectid
@@ -829,10 +842,10 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         rows = self._executeSelectQuery(query)
 
         for row in rows:
-            if row['mom2id'] != None:
+            if row['mom2id'] is not None:
                 result[row['mom2id']] = row['otdb_id']
 
-        #then query all pipelines and combine the results
+        # then query all pipelines and combine the results
         query = '''SELECT pl.pipeline_id as otdb_id, mo.mom2id as mom2id
                    FROM lofar_pipeline pl
                    INNER JOIN mom2object mo on mo.id = pl.mom2objectid
@@ -842,7 +855,7 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         rows = self._executeSelectQuery(query)
 
         for row in rows:
-            if row['mom2id'] != None:
+            if row['mom2id'] is not None:
                 result[row['mom2id']] = row['otdb_id']
 
         logger.info("getOTDBIdsForMoMIds: %s" % result)
@@ -891,13 +904,14 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
     def _get_misc_contents(self, mom_id):
         """
-        Get deserialized contents of misc field for given obs id
+        Get the deserialized contents of the misc field for the given obs id. May be an empty dict if the
+        mom_id exists but its misc field is empty. Returns None if no entry is found for the mom_id.
         :param mom_id: int
-        :return: dict
+        :return: dict or None
         """
         logger.info("getting misc for mom_id: %s", mom_id)
 
-        misc = {}
+        misc = None
 
         query = """SELECT mom.mom2id, mom.mom2objecttype, obs_spec.misc
                 FROM mom2object as mom
@@ -915,13 +929,13 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
         rows_misc = self._executeSelectQuery(query, parameters)
 
         if rows_misc:
+            misc = {}
             misc_json = rows_misc[0]['misc']
             if misc_json:
                 misc = json.loads(misc_json)
 
         return misc
 
-
     def get_time_restrictions(self, mom_id):
         """
         Returns min start and max end times and min/max duration for given mom id.
@@ -934,14 +948,16 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
         # Note: this duplicates the _get_misc_contents(), but we save a (quite expensive?) additional query if we
         # handle this together with duration:
-        query = """SELECT mom.mom2id, mom.mom2objecttype, obs_spec.misc, obs_spec.spec_duration AS duration, obs_spec.starttime,  obs_spec.endtime
+        query = """SELECT mom.mom2id, mom.mom2objecttype, obs_spec.misc, obs_spec.spec_duration AS duration,
+                obs_spec.starttime,  obs_spec.endtime
                 FROM mom2object as mom
                 join lofar_observation as obs on mom.mom2objecttype = "LOFAR_OBSERVATION" and mom.id = obs.mom2objectid
                 join lofar_observation_specification as obs_spec on
                   mom.mom2objecttype = "LOFAR_OBSERVATION" and obs.user_specification_id = obs_spec.id
                 where mom.mom2id=%s
                 union
-                SELECT mom.mom2id, mom.mom2objecttype, pipeline.misc, pipeline.duration AS duration, pipeline.starttime, pipeline.endtime
+                SELECT mom.mom2id, mom.mom2objecttype, pipeline.misc, pipeline.duration AS duration,
+                pipeline.starttime, pipeline.endtime
                 FROM mom2object as mom
                 join lofar_pipeline as pipeline on mom.mom2objecttype like "%PIPELINE%"
                 and mom.id = pipeline.mom2objectid
@@ -965,7 +981,7 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
             if duration:
                 time_restrictions['minDuration'] = duration
         if 'maxDuration' not in time_restrictions:
-            if 'minDuration' in time_restrictions: # might not be if duration entry was NULL
+            if 'minDuration' in time_restrictions:  # might not be present if the duration entry was NULL
                 time_restrictions['maxDuration'] = time_restrictions['minDuration']
         if 'minStartTime' not in time_restrictions:
             starttime = rows[0]['starttime']
@@ -983,15 +999,6 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
         return time_restrictions
 
-    def _mysql_datetimestring_to_python_datetime(self, mysql_datetimestring):
-        format = '%Y-%m-%d %H:%M:%S'
-        return datetime.datetime.strptime(mysql_datetimestring, format)
-
-    def _datetime_to_mysql_datetimestring(self, dtime):
-        format = '%Y-%m-%d %H:%M:%S'
-        dtime.strftime(format)
-
-
     def get_station_selection(self, mom_id):
         """
         Get the station selection represented as resource groups with min/max values for given mom id.
@@ -1003,7 +1010,7 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
         misc = self._get_misc_contents(mom_id)
         if misc is None:
-            raise ValueError("No misc contents for mom_id (%s) in database" % mom_id)
+            raise ValueError("mom_id (%s) not found in MoM database" % mom_id)
         if 'stationSelection' not in misc:
             raise ValueError("misc field for mom_id (%s) does not contain stationSelection" % mom_id)
         station_selection = misc['stationSelection']
@@ -1014,9 +1021,10 @@ where project.mom2id = %s and (project_role.name = "Pi" or project_role.name = "
 
 
 class ProjectDetailsQueryHandler(MessageHandlerInterface):
-    '''handler class for details query in mom db
+    """
+    handler class for details query in mom db
     :param MoMDatabaseWrapper momdb inject database access via wrapper
-    '''
+    """
     def __init__(self, **kwargs):
         super(ProjectDetailsQueryHandler, self).__init__(**kwargs)
         self.dbcreds = kwargs.pop("dbcreds", None)
@@ -1136,12 +1144,15 @@ def createService(busname=DEFAULT_MOMQUERY_BUSNAME,
                   dbcreds=None,
                   handler=None,
                   broker=None):
-    '''create the GetObjectDetails on given busname
+    """
+    create the GetObjectDetails on given busname
     :param string busname: name of the bus on which this service listens
     :param string servicename: name of the service
     :param Credentials dbcreds: Credentials for the MoM database.
     :param ProjectDetailsQueryHandler handler: ProjectDetailsQueryHandler class Type, or mock like type
-    :rtype: lofar.messaging.Service'''
+    :param broker: address of the qpid broker
+    :rtype: lofar.messaging.Service
+    """
 
     if not handler:
         handler = ProjectDetailsQueryHandler
@@ -1153,18 +1164,23 @@ def createService(busname=DEFAULT_MOMQUERY_BUSNAME,
                    use_service_methods=True,
                    verbose=False,
                    broker=broker,
-                   handler_args={'dbcreds' : dbcreds})
+                   handler_args={'dbcreds': dbcreds})
 
 
 def main():
-    '''Starts the momqueryservice.GetObjectDetails service'''
+    """
+    Starts the momqueryservice.GetObjectDetails service
+    """
 
     # Check the invocation arguments
     parser = OptionParser("%prog [options]",
                           description='runs the momqueryservice')
-    parser.add_option('-q', '--broker', dest='broker', type='string', default=None, help='Address of the qpid broker, default: localhost')
-    parser.add_option("-b", "--busname", dest="busname", type="string", default=DEFAULT_MOMQUERY_BUSNAME, help="Name of the bus exchange on the qpid broker, [default: %default]")
-    parser.add_option("-s", "--servicename", dest="servicename", type="string", default=DEFAULT_MOMQUERY_SERVICENAME, help="Name for this service, [default: %default]")
+    parser.add_option('-q', '--broker', dest='broker', type='string', default=None,
+                      help='Address of the qpid broker, default: localhost')
+    parser.add_option("-b", "--busname", dest="busname", type="string", default=DEFAULT_MOMQUERY_BUSNAME,
+                      help="Name of the bus exchange on the qpid broker, [default: %default]")
+    parser.add_option("-s", "--servicename", dest="servicename", type="string", default=DEFAULT_MOMQUERY_SERVICENAME,
+                      help="Name for this service, [default: %default]")
     parser.add_option_group(dbcredentials.options_group(parser))
     parser.set_defaults(dbcredentials="MoM")
     (options, args) = parser.parse_args()
diff --git a/SAS/MoM/MoMQueryService/test/t_momqueryservice.py b/SAS/MoM/MoMQueryService/test/t_momqueryservice.py
index 260aff398f2..2cbbe428b7f 100755
--- a/SAS/MoM/MoMQueryService/test/t_momqueryservice.py
+++ b/SAS/MoM/MoMQueryService/test/t_momqueryservice.py
@@ -314,6 +314,7 @@ def populate_db(mysqld):
 
 Mysqld = testing.mysqld.MysqldFactory(cache_initialized_db=True, on_initialized=populate_db)
 
+
 def tearDownModule():
     # clear cached database at end of tests
     Mysqld.clear_cache()
@@ -336,7 +337,6 @@ class TestProjectDetailsQueryHandler(unittest.TestCase):
         self.addCleanup(mom_database_wrapper_patcher.stop)
         self.mom_database_wrapper_mock = mom_database_wrapper_patcher.start()
 
-
         self.project_details_query_handler = ProjectDetailsQueryHandler(dbcreds=self.database_credentials)
         self.project_details_query_handler.prepare_loop()
 
@@ -475,33 +475,33 @@ class TestProjectDetailsQueryHandler(unittest.TestCase):
         self.assertEqual(return_value["pi_email"], pi_email)
 
     def test_get_time_restrictions_returns_what_the_mom_wrapper_returns(self):
-        minStartTime = "2017-01-01"
-        maxEndTime = "2017-01-02"
-        minDuration = 300
-        maxDuration = 600
+        min_start_time = "2017-01-01"
+        max_end_time = "2017-01-02"
+        min_duration = 300
+        max_duration = 600
 
         self.mom_database_wrapper_mock().get_time_restrictions.return_value = \
-            {"minStartTime": minStartTime, "maxEndTime": maxEndTime, "minDuration": minDuration, "maxDuration": maxDuration}
+            {"minStartTime": min_start_time, "maxEndTime": max_end_time,
+             "minDuration": min_duration, "maxDuration": max_duration}
 
         result = self.project_details_query_handler.get_time_restrictions(1234)
 
-        self.assertEqual(result["minStartTime"], minStartTime)
-        self.assertEqual(result["maxEndTime"], maxEndTime)
-        self.assertEqual(result["minDuration"], minDuration)
-        self.assertEqual(result["maxDuration"], maxDuration)
+        self.assertEqual(result["minStartTime"], min_start_time)
+        self.assertEqual(result["maxEndTime"], max_end_time)
+        self.assertEqual(result["minDuration"], min_duration)
+        self.assertEqual(result["maxDuration"], max_duration)
 
     def test_get_station_selection_returns_what_the_mom_wrapper_returns(self):
-        resourceGroup = "SuperTerp"
+        resource_group = "SuperTerp"
         rg_min = 1
         rg_max = 3
 
         self.mom_database_wrapper_mock().get_station_selection.return_value = \
-            [{"resourceGroup": resourceGroup, "min": rg_min, "max": rg_max }]
+            [{"resourceGroup": resource_group, "min": rg_min, "max": rg_max}]
 
         result = self.project_details_query_handler.get_station_selection(1234)
 
-
-        self.assertEqual(result[0]["resourceGroup"], resourceGroup)
+        self.assertEqual(result[0]["resourceGroup"], resource_group)
         self.assertEqual(result[0]["min"], rg_min)
         self.assertEqual(result[0]["max"], rg_max)
 
@@ -581,55 +581,58 @@ class TestMomQueryRPC(unittest.TestCase):
                                               })
 
     qpid_message_add_trigger_row_id = 33
-    qpid_message_add_trigger  = QpidMessage({"row_id": qpid_message_add_trigger_row_id},
-                                             properties={
-                                                 "SystemName": "LOFAR",
-                                                 "MessageType": "ReplyMessage",
-                                                 "MessageId": message_id,
-                                                 "status": "OK"
-                                             })
+    qpid_message_add_trigger = QpidMessage({"row_id": qpid_message_add_trigger_row_id},
+                                           properties={
+                                               "SystemName": "LOFAR",
+                                               "MessageType": "ReplyMessage",
+                                               "MessageId": message_id,
+                                               "status": "OK"
+                                           })
 
     author_email = "author@example.com"
     pi_email = "pi@example.com"
     qpid_message_get_project_details = QpidMessage({"author_email": author_email, "pi_email": pi_email},
-                                             properties={
-                                                 "SystemName": "LOFAR",
-                                                 "MessageType": "ReplyMessage",
-                                                 "MessageId": message_id,
-                                                 "status": "OK"
-                                             })
+                                                   properties={
+                                                       "SystemName": "LOFAR",
+                                                       "MessageType": "ReplyMessage",
+                                                       "MessageId": message_id,
+                                                       "status": "OK"
+                                                   })
 
     test_priority = 42
     qpid_message_get_project_priorities_for_objects = QpidMessage({str(test_id): test_priority},
-                                             properties={
-                                                 "SystemName": "LOFAR",
-                                                 "MessageType": "ReplyMessage",
-                                                 "MessageId": message_id,
-                                                 "status": "OK"
-                                             })
+                                                                  properties={
+                                                                      "SystemName": "LOFAR",
+                                                                      "MessageType": "ReplyMessage",
+                                                                      "MessageId": message_id,
+                                                                      "status": "OK"
+                                                                  })
 
     min_start_time = "2017-01-01"
     max_end_time = "2017-01-02"
     min_duration = 300
     max_duration = 600
-    qpid_message_get_time_restrictions = QpidMessage({"minStartTime": min_start_time, "maxEndTime": max_end_time, "minDuration": min_duration, "maxDuration": max_duration},
-                                             properties={
-                                                 "SystemName": "LOFAR",
-                                                 "MessageType": "ReplyMessage",
-                                                 "MessageId": message_id,
-                                                 "status": "OK"
-                                             })
+    qpid_message_get_time_restrictions = QpidMessage({"minStartTime": min_start_time,
+                                                      "maxEndTime": max_end_time,
+                                                      "minDuration": min_duration,
+                                                      "maxDuration": max_duration},
+                                                     properties={
+                                                         "SystemName": "LOFAR",
+                                                         "MessageType": "ReplyMessage",
+                                                         "MessageId": message_id,
+                                                         "status": "OK"
+                                                     })
 
     resourceGroup = "SuperTerp"
     rg_min = 1
     rg_max = 3
-    qpid_message_get_station_selection = QpidMessage([{ "resourceGroup": resourceGroup, "min": rg_min, "max": rg_max }],
-                                             properties={
-                                                 "SystemName": "LOFAR",
-                                                 "MessageType": "ReplyMessage",
-                                                 "MessageId": message_id,
-                                                 "status": "OK"
-                                             })
+    qpid_message_get_station_selection = QpidMessage([{"resourceGroup": resourceGroup, "min": rg_min, "max": rg_max}],
+                                                     properties={
+                                                         "SystemName": "LOFAR",
+                                                         "MessageType": "ReplyMessage",
+                                                         "MessageId": message_id,
+                                                         "status": "OK"
+                                                     })
 
     def setUp(self):
         # the mock library had difficulty to mock ToBus and FromBus probably to some weir naming issue.
@@ -972,7 +975,6 @@ class TestMomQueryRPC(unittest.TestCase):
         self.assertEqual(result["author_email"], self.author_email)
         self.assertEqual(result["pi_email"], self.pi_email)
 
-
     @mock.patch('lofar.messaging.messagebus.qpid.messaging')
     def test_get_project_priorities_for_objects_query(self, qpid_mock):
         self.receiver_mock.fetch.return_value = self.qpid_message_get_project_priorities_for_objects
@@ -1017,7 +1019,6 @@ class TestMomQueryRPC(unittest.TestCase):
         self.assertEqual(result[0]["max"], self.rg_max)
 
 
-
 class TestMoMDatabaseWrapper(unittest.TestCase):
     database_credentials = Credentials()
     database_credentials.host = "localhost"
@@ -1237,7 +1238,7 @@ class TestMoMDatabaseWrapper(unittest.TestCase):
             self.user_name, self.host_name, self.project_name, self.meta_data)
 
     def test_add_trigger_logs_end_of_query(self):
-        self.mysql_mock.connect().cursor().lastrowid  = 34
+        self.mysql_mock.connect().cursor().lastrowid = 34
 
         result = self.mom_database_wrapper.add_trigger(
             self.user_name, self.host_name, self.project_name, self.meta_data)
@@ -1266,7 +1267,7 @@ class TestMoMDatabaseWrapper(unittest.TestCase):
         self.mysql_mock.connect().cursor().fetchall.return_value = \
             [{u'misc': '{"trigger_id": ' + str(self.trigger_id) + '}'}]
 
-        result = self.mom_database_wrapper.get_trigger_id(self.mom_id)
+        self.mom_database_wrapper.get_trigger_id(self.mom_id)
 
         self.logger_mock.info.assert_any_call("get_trigger_id for mom_id (%s): %s", self.mom_id, self.trigger_id)
 
@@ -1308,7 +1309,7 @@ class TestMoMDatabaseWrapper(unittest.TestCase):
         self.assertEqual(result, expected_result)
 
     def test_get_project_priorities_for_objects_returns_priorities(self):
-        expected_result = {self.mom_id:self.project_priority}
+        expected_result = {self.mom_id: self.project_priority}
         details_result = [{"project_priority": self.project_priority, "object_mom2id": self.mom_id}]
         self.mysql_mock.connect().cursor().fetchall.return_value = details_result
 
@@ -1317,10 +1318,10 @@ class TestMoMDatabaseWrapper(unittest.TestCase):
         self.assertEqual(result, expected_result)
 
     def test_get_station_selection_returns_info_from_misc_field(self):
-        resourceGroup = "SuperTerp"
+        resource_group = "SuperTerp"
         rg_min = 1
         rg_max = 3
-        station_selection = [{"resourceGroup": resourceGroup, "min": rg_min, "max": rg_max}]
+        station_selection = [{"resourceGroup": resource_group, "min": rg_min, "max": rg_max}]
 
         expected_result = station_selection
         details_result = [{u"mom2id": self.mom_id,  u"mom2objecttype": self.job_type,
@@ -1330,21 +1331,38 @@ class TestMoMDatabaseWrapper(unittest.TestCase):
         result = self.mom_database_wrapper.get_station_selection(self.mom_id)
         self.assertEqual(result, expected_result)
 
+    def test_get_station_selection_throws_ValueError_on_empty_query_result(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = []
+
+        with self.assertRaises(ValueError):
+            self.mom_database_wrapper.get_station_selection(1234)
+
+    def test_get_station_selection_throws_ValueError_if_station_selection_not_present_in_misc(self):
+        details_result = [{u"mom2id": self.mom_id, u"mom2objecttype": self.job_type,
+                           u"misc": json.dumps({u"timeWindow": {u'minDuration': 300, u'maxDuration': 300}})}]
+        self.mysql_mock.connect().cursor().fetchall.return_value = details_result
+
+        with self.assertRaises(ValueError):
+            self.mom_database_wrapper.get_station_selection(1234)
+
     def test_get_time_restrictions_returns_misc_field_info_from_query_result(self):
         min_start_time = u"2017-01-01"
         max_end_time = u"2017-01-04"
-        mom_starttime =  u"2017-01-02"
+        mom_starttime = u"2017-01-02"
         mom_endtime = u"2017-01-03"
         min_duration = 300
         max_duration = 600
         mom_duration = 400
 
-        timewindow = {u"minStartTime": min_start_time, u"maxEndTime": max_end_time, u"minDuration":min_duration, u"maxDuration":max_duration}
+        timewindow = {u"minStartTime": min_start_time,
+                      u"maxEndTime": max_end_time,
+                      u"minDuration": min_duration,
+                      u"maxDuration": max_duration}
         expected_result = timewindow
 
         details_result = [{u"mom2id": self.mom_id, u"mom2objecttype": self.job_type,
-                           u"misc": json.dumps({u"timeWindow": timewindow}), u"duration":mom_duration,
-                           u"starttime":mom_starttime, u"endtime":mom_endtime}]
+                           u"misc": json.dumps({u"timeWindow": timewindow}), u"duration": mom_duration,
+                           u"starttime": mom_starttime, u"endtime": mom_endtime}]
         self.mysql_mock.connect().cursor().fetchall.return_value = details_result
 
         result = self.mom_database_wrapper.get_time_restrictions(self.mom_id)
@@ -1355,21 +1373,37 @@ class TestMoMDatabaseWrapper(unittest.TestCase):
         mom_endtime = u"2017-01-03"
         mom_duration = 400
 
-        # case 2) time window has no min/max duration: mom duration is filled in
-        timewindow = {}
-        expected_result = {u"minStartTime": mom_starttime, u"maxEndTime": mom_endtime, u"minDuration":mom_duration, u"maxDuration":mom_duration}
+        expected_result = {u"minStartTime": mom_starttime,
+                           u"maxEndTime": mom_endtime,
+                           u"minDuration": mom_duration,
+                           u"maxDuration": mom_duration}
 
         details_result = [{u"mom2id": self.mom_id, u"mom2objecttype": self.job_type,
-                           u"misc": json.dumps({u"timeWindow": timewindow}), u"duration": mom_duration,
-                           u"starttime":mom_starttime, u"endtime":mom_endtime}]
+                           u"misc": None, u"duration": mom_duration,
+                           u"starttime": mom_starttime, u"endtime": mom_endtime}]
         self.mysql_mock.connect().cursor().fetchall.return_value = details_result
 
         result = self.mom_database_wrapper.get_time_restrictions(self.mom_id)
         self.assertEqual(result, expected_result)
 
+    def test_get_time_restrictions_throws_ValueError_on_empty_query_result(self):
+
+        self.mysql_mock.connect().cursor().fetchall.return_value = []
+
+        with self.assertRaises(ValueError):
+            self.mom_database_wrapper.get_time_restrictions(1234)
+
+    def test_get_time_restrictions_throws_ValueError_if_no_time_restrictions_in_query_result(self):
+        details_result = [{u"mom2id": self.mom_id, u"mom2objecttype": self.job_type,
+                           u"misc": None, u"duration": None,
+                           u"starttime": None, u"endtime": None}]
+        self.mysql_mock.connect().cursor().fetchall.return_value = details_result
+
+        with self.assertRaises(ValueError):
+            self.mom_database_wrapper.get_time_restrictions(1234)
 
 
-#@unittest.skip("Skipping integration test")
+@unittest.skip("Skipping integration test")
 class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
     database_credentials = Credentials()
     database_credentials.host = "localhost"
@@ -1391,7 +1425,7 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
     def setUp(self):
         logger.info('setting up test MoM database...')
 
-        self.mysqld = Mysqld() # for a fresh one, use: self.mysqld = testing.mysqld.Mysqld()
+        self.mysqld = Mysqld()  # for a fresh one, use: self.mysqld = testing.mysqld.Mysqld()
 
         # set up fresh connection to the mom (!) database.
         self.connection = connector.connect(**self.mysqld.dsn())
@@ -1578,8 +1612,8 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
         self.assertEqual(result, None)
 
     def test_get_trigger_id_returns_id_for_lofar_observation(self):
-        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', 'test-lofar', "
-                     "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', "
+                     "'test-lofar', NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
         # id, mom2objectid, observation_id, instrument, user_specification_id, system_specification_id, default_template, tbb_template, tbb_piggyback_allowed, parset, nr_output_correlated, nr_output_beamformed, nr_output_coherent_stokes, nr_output_incoherent_stokes, nr_output_flyseye, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_coherent_stokes_valid, nr_output_incoherent_stokes_valid, nr_output_flyseye_valid, feedback, aartfaac_piggyback_allowed, storage_cluster_id, processing_cluster_id, nico_testing
         self.execute("insert into lofar_observation values(83, 1, NULL, 'Interferometer', 47, 48, NULL, NULL, 0,"
                      " NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL)")
@@ -1594,8 +1628,8 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
         self.assertEqual(result, self.trigger_id)
 
     def test_get_trigger_id_returns_none_for_lofar_observation_with_empty_misc(self):
-        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', 'test-lofar', "
-                     "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', "
+                     "'test-lofar', NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
         # id, mom2objectid, observation_id, instrument, user_specification_id, system_specification_id, default_template, tbb_template, tbb_piggyback_allowed, parset, nr_output_correlated, nr_output_beamformed, nr_output_coherent_stokes, nr_output_incoherent_stokes, nr_output_flyseye, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_coherent_stokes_valid, nr_output_incoherent_stokes_valid, nr_output_flyseye_valid, feedback, aartfaac_piggyback_allowed, storage_cluster_id, processing_cluster_id, nico_testing
         self.execute("insert into lofar_observation values(83, 2, NULL, 'Interferometer', 47, 48, NULL, NULL, 0,"
                      " NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL)")
@@ -1610,8 +1644,8 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
         self.assertEqual(result, None)
 
     def test_get_trigger_id_returns_none_for_lofar_observation_with_empty_json(self):
-        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', 'test-lofar', "
-                     "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', "
+                     "'test-lofar', NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
         # id, mom2objectid, observation_id, instrument, user_specification_id, system_specification_id, default_template, tbb_template, tbb_piggyback_allowed, parset, nr_output_correlated, nr_output_beamformed, nr_output_coherent_stokes, nr_output_incoherent_stokes, nr_output_flyseye, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_coherent_stokes_valid, nr_output_incoherent_stokes_valid, nr_output_flyseye_valid, feedback, aartfaac_piggyback_allowed, storage_cluster_id, processing_cluster_id, nico_testing
         self.execute("insert into lofar_observation values(83, 2, NULL, 'Interferometer', 47, 48, NULL, NULL, 0,"
                      " NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL)")
@@ -1632,7 +1666,8 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
         # id, mom2objectid, starttime, endtime, pipeline_id, pending, template, runtimeDirectory, resultDirectory, workingDirectory, parset, nr_output_correlated, nr_output_beamformed, nr_output_instrument_model, nr_output_skyimage, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_instrument_model_valid, nr_output_skyimage_valid, feedback, demixing_parameters_id, bbs_parameters_id, duration, storage_cluster_id, processing_cluster_id, misc
         self.execute("insert into lofar_pipeline values(1761, 1, NULL, NULL, 63722, 0, "
                      "'Calibration Pipeline Calibrator', NULL, NULL, NULL, 'parset', 0, NULL, 244, NULL, 0, 0, NULL, 0,"
-                     " NULL, 3071, 3071, NULL, NULL, NULL, '{\"trigger_id\": %(trigger_id)s}')" % {"trigger_id": self.trigger_id})
+                     " NULL, 3071, 3071, NULL, NULL, NULL, '{\"trigger_id\": %(trigger_id)s}')"
+                     % {"trigger_id": self.trigger_id})
 
         result = self.mom_database_wrapper.get_trigger_id("2")
 
@@ -1645,12 +1680,13 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
 
     def test_get_project_details_returns_correct_emails_with_filled_database(self):
         self.execute("insert into mom2object "
-                     "values(111, NULL, NULL, 2334, 'PROJECT', 'CEP4tests', 'Project for CEP4 tests', NULL, 1725713, NULL, NULL, 0, NULL, NULL);")
+                     "values(111, NULL, NULL, 2334, 'PROJECT', 'CEP4tests', 'Project for CEP4 tests', "
+                     "NULL, 1725713, NULL, NULL, 0, NULL, NULL);")
 
         self.execute("insert into member "
-                    "values(1, 111, 0);")
+                     "values(1, 111, 0);")
         self.execute("insert into member "
-                    "values(2, 111, 0);")
+                     "values(2, 111, 0);")
 
         self.execute("insert into registeredmember "
                      "values(1, 1, 1);")
@@ -1690,16 +1726,16 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
         pname = 'myproject_'+str(1)
 
         self.execute("insert into mom2object values(%s, NULL, NULL, %s, 'PROJECT', '%s', 'x', "
-                         "NULL, %s, NULL, NULL, 0, 0, 0)"
-                         % (1, 1, pname, statusid))
+                     "NULL, %s, NULL, NULL, 0, 0, 0)"
+                     % (1, 1, pname, statusid))
 
         self.execute("insert into project values(%s, %s, '2012-09-14', FALSE, 0)"
-                         % (1, 1))
+                     % (1, 1))
 
         self.execute("insert into mom2object values(%s, NULL, NULL, %s , 'OBSERVATION', 'x', "
-                         "'x', %s, %s, 'x', 'x', 0, NULL,"
-                         " 0)"
-                         % (2, oid, 1, statusid))
+                     "'x', %s, %s, 'x', 'x', 0, NULL,"
+                     " 0)"
+                     % (2, oid, 1, statusid))
 
         self.execute("insert into status values(%s, '%s', 'OBSERVATION', %s)" % (statusid, status, statusid))
 
@@ -1713,8 +1749,6 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
         self.assertEqual(result[str(oid)]['object_status'], status)
         self.assertEqual(result[str(oid)]['project_name'], pname)
 
-
-
     def test_get_project_priorities_for_objects_returns_correct_priorities(self):
         object_ids = [3344, 1234, 7654]
         project_prios = [42, 24, 12]
@@ -1745,13 +1779,11 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
             prio = return_value[oid]
             self.assertEqual(prio, expected_prio)
 
-
     def test_get_project_priorities_for_objects_returns_empty_dict_on_empty_database(self):
 
         return_value = self.mom_database_wrapper.get_project_priorities_for_objects("1234")
         self.assertEqual(return_value, {})
 
-
     def test_get_project_priorities_for_objects_returns_only_priorities_of_existing_objects(self):
 
         object_ids = [380, 747]
@@ -1770,12 +1802,12 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
                          % (eid, eid, 'myproject_'+str(i)))
 
             self.execute("insert into project values(%s, %s, '2012-09-14', FALSE, %s)"
-                         % (eid, eid, prio)) # unique id in project table, refer to mom2object of our project
+                         % (eid, eid, prio))  # unique id in project table, refer to mom2object of our project
 
             self.execute("insert into mom2object values(%s, NULL, NULL, %s , 'PIPELINE', 'x', "
                          "'x', %s, NULL, 'x', 'x', 0, NULL,"
                          " 0)"
-                         % (eid+100, oid, eid)) # unique id for the pipeline, refer to project id
+                         % (eid+100, oid, eid))  # unique id for the pipeline, refer to project id
 
         return_value = self.mom_database_wrapper.get_project_priorities_for_objects(object_ids + [extra_id])
 
@@ -1783,12 +1815,10 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
             self.assertTrue(oid in return_value.keys())
         self.assertFalse(extra_id in return_value.keys())
 
-
     def test_get_time_restrictions_throws_ValueError_on_empty_database(self):
         with self.assertRaises(ValueError):
             self.mom_database_wrapper.get_time_restrictions(1234)
 
-
     def test_get_time_restrictions_throws_ValueError_if_no_time_restrictions_in_database(self):
 
         self.execute(
@@ -1822,8 +1852,9 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
         self.execute("insert into lofar_observation_specification values(47, 'USER', 1, 0, 0, 0, 0, 'HBA Dual', "
                      "'160 MHz', '170-230 MHz', 1, NULL, NULL, 0, NULL, NULL, NULL, 0, NULL, 'Custom', NULL, NULL, "
                      "NULL, %s, 0, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, 0, 0, NULL, NULL, "
-                     "16, '{\"timeWindow\":{\"minStartTime\": \"%s\", \"maxEndTime\": \"%s\", \"minDuration\": %s, \"maxDuration\": %s}}')" % (
-                     min_duration, min_start_time, max_end_time, min_duration, max_duration))
+                     "16, '{\"timeWindow\":{\"minStartTime\": \"%s\", \"maxEndTime\": \"%s\", \"minDuration\": %s, "
+                     "\"maxDuration\": %s}}')"
+                     % (min_duration, min_start_time, max_end_time, min_duration, max_duration))
 
         result = self.mom_database_wrapper.get_time_restrictions(2)
 
@@ -1845,7 +1876,7 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
         self.execute("insert into lofar_observation_specification values(47, 'USER', 1, 0, 0, 0, 0, 'HBA Dual', "
                      "'160 MHz', '170-230 MHz', 1, NULL, NULL, 0, NULL, NULL, NULL, 0, NULL, 'Custom', NULL, NULL, "
                      "NULL, %s, 0, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, 0, 0, NULL, NULL, "
-                     "16, NULL)" % (duration))
+                     "16, NULL)" % duration)
 
         result = self.mom_database_wrapper.get_time_restrictions(2)
 
@@ -1854,7 +1885,24 @@ class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
 
     def test_get_station_selection_throws_ValueError_on_empty_database(self):
         with self.assertRaises(ValueError):
-            self.mom_database_wrapper.get_time_restrictions(1234)
+            self.mom_database_wrapper.get_station_selection(1234)
+
+    def test_get_station_selection_throws_ValueError_if_not_present_in_misc(self):
+
+        self.execute(
+            "insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', 'test-lofar', "
+            "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        # id, mom2objectid, observation_id, instrument, user_specification_id, system_specification_id, default_template, tbb_template, tbb_piggyback_allowed, parset, nr_output_correlated, nr_output_beamformed, nr_output_coherent_stokes, nr_output_incoherent_stokes, nr_output_flyseye, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_coherent_stokes_valid, nr_output_incoherent_stokes_valid, nr_output_flyseye_valid, feedback, aartfaac_piggyback_allowed, storage_cluster_id, processing_cluster_id, nico_testing
+        self.execute("insert into lofar_observation values(83, 1, NULL, 'Interferometer', 47, 48, NULL, NULL, 0,"
+                     " NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL)")
+        # id, type, correlated_data, filtered_data, beamformed_data, coherent_stokes_data, incoherent_stokes_data, antenna, clock_mode, instrument_filter, integration_interval, channels_per_subband, cn_integration_steps, pencilbeams_flyseye, pencilbeams_nr_pencil_rings, pencilbeams_ring_size, stokes_selection, stokes_integrate_channels, stokes_integration_steps, station_set, timeframe, starttime, endtime, spec_duration, coherent_dedisperse_channels, dispersion_measure, subbands_per_file_cs, subbands_per_file_bf, collapsed_channels_cs, collapsed_channels_is, downsampling_steps_cs, downsampling_steps_is, which_cs, which_is, bypass_pff, enable_superterp, flyseye, tab_nr_rings, tab_ring_size, bits_per_sample, misc
+        self.execute("insert into lofar_observation_specification values(47, 'USER', 1, 0, 0, 0, 0, 'HBA Dual', "
+                     "'160 MHz', '170-230 MHz', 1, NULL, NULL, 0, NULL, NULL, NULL, 0, NULL, 'Custom', NULL, NULL, "
+                     "NULL, NULL, 0, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, 0, 0, NULL, NULL, "
+                     "16, '{\"timeWindow\":{\"minDuration\": 300, \"maxDuration\": 600}}')")
+
+        with self.assertRaises(ValueError):
+            self.mom_database_wrapper.get_station_selection(1234)
 
     def test_get_station_selection_returns_correct_station_selection(self):
         resource_group = "SuperTerp"
-- 
GitLab