diff --git a/.gitattributes b/.gitattributes
index c9e3eb6c827846489daf86c6f0ddc2d120d3808c..a0a7fbab12c8340fbf56ae4c0068f53399f3b964 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -5154,6 +5154,15 @@ SAS/ResourceAssignment/SystemStatusService/test/CMakeLists.txt -text
 SAS/ResourceAssignment/SystemStatusService/test/test_datamonitorqueueservice_and_rpc.py -text
 SAS/ResourceAssignment/SystemStatusService/test/test_datamonitorqueueservice_and_rpc.run -text
 SAS/ResourceAssignment/SystemStatusService/test/test_datamonitorqueueservice_and_rpc.sh -text
+SAS/ResourceAssignment/TaskPrescheduler/CMakeLists.txt -text
+SAS/ResourceAssignment/TaskPrescheduler/__init__.py -text
+SAS/ResourceAssignment/TaskPrescheduler/prescheduler.py -text
+SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler -text
+SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler.ini -text
+SAS/ResourceAssignment/TaskPrescheduler/test/CMakeLists.txt -text
+SAS/ResourceAssignment/TaskPrescheduler/test/test_taskprescheduler.py -text
+SAS/ResourceAssignment/TaskPrescheduler/test/test_taskprescheduler.run -text
+SAS/ResourceAssignment/TaskPrescheduler/test/test_taskprescheduler.sh -text
 SAS/Scheduler/src/.default_settings.set -text
 SAS/Scheduler/src/LOFAR_libScheduler.pro -text
 SAS/Scheduler/src/conflictdialog.ui -text
diff --git a/SAS/ResourceAssignment/CMakeLists.txt b/SAS/ResourceAssignment/CMakeLists.txt
index a391f0f99f467ee90aaed5d7d010bed5b4e313d7..ff87bb14e4be37737b3186977ba466ba6241aaea 100644
--- a/SAS/ResourceAssignment/CMakeLists.txt
+++ b/SAS/ResourceAssignment/CMakeLists.txt
@@ -11,5 +11,5 @@ lofar_add_package(SystemStatusDatabase)
 lofar_add_package(SystemStatusService)
 lofar_add_package(OTDBtoRATaskStatusPropagator)
 lofar_add_package(RAScripts)
-
+lofar_add_package(TaskPrescheduler)
 
diff --git a/SAS/ResourceAssignment/RATaskSpecifiedService/lib/RATaskSpecified.py b/SAS/ResourceAssignment/RATaskSpecifiedService/lib/RATaskSpecified.py
index e8bb6e443ab7deccb44866292dbe472a7f92d490..23383a9e410f2bb65d408e243b26401cbb5d6835 100755
--- a/SAS/ResourceAssignment/RATaskSpecifiedService/lib/RATaskSpecified.py
+++ b/SAS/ResourceAssignment/RATaskSpecifiedService/lib/RATaskSpecified.py
@@ -275,6 +275,7 @@ class RATaskSpecified(OTDBBusListener):
         key = PARSET_PREFIX + "Observation.processSubtype"
         result['task_type'], result['task_subtype'] = convertSchedulerProcessSubtype(parset.get(key, ""))
 
+        #TODO: we probably only need to do this when state=prescheduled, but then the function needs a different name?
         logger.info("Processing predecessors")
         predecessor_ids = self.get_predecessors(parset)
         for id in predecessor_ids:
diff --git a/SAS/ResourceAssignment/TaskPrescheduler/CMakeLists.txt b/SAS/ResourceAssignment/TaskPrescheduler/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..67263176c28472b62e252371cdfa41796d26a4cf
--- /dev/null
+++ b/SAS/ResourceAssignment/TaskPrescheduler/CMakeLists.txt
@@ -0,0 +1,21 @@
+# $Id$
+
+lofar_package(TaskPrescheduler 1.0 DEPENDS PyMessaging ResourceAssignmentService OTDB_Services)
+
+lofar_find_package(Python 2.6 REQUIRED)
+include(PythonInstall)
+
+set(_py_files
+  __init__.py
+  prescheduler.py
+)
+
+python_install(${_py_files} DESTINATION lofar/sas/resourceassignment/taskprescheduler)
+
+lofar_add_bin_scripts(taskprescheduler)
+
+# supervisord config files
+install(FILES
+  taskprescheduler.ini
+  DESTINATION etc/supervisord.d)
+
diff --git a/SAS/ResourceAssignment/TaskPrescheduler/__init__.py b/SAS/ResourceAssignment/TaskPrescheduler/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..fbbab2d1199b1de168c7f25fb5e7eac727b3d066
--- /dev/null
+++ b/SAS/ResourceAssignment/TaskPrescheduler/__init__.py
@@ -0,0 +1 @@
+# $Id$
diff --git a/SAS/ResourceAssignment/TaskPrescheduler/prescheduler.py b/SAS/ResourceAssignment/TaskPrescheduler/prescheduler.py
new file mode 100644
index 0000000000000000000000000000000000000000..0ea6548e84b0b0f4aa3f7e25c6fa3bb1763be0ba
--- /dev/null
+++ b/SAS/ResourceAssignment/TaskPrescheduler/prescheduler.py
@@ -0,0 +1,114 @@
+#!/usr/bin/python
+# $Id$
+
+'''
+The TaskPrescheduler listens for OTDB task status changes and, for tasks that were generated
+by a trigger, updates their specification and puts them on prescheduled.
+'''
+import logging
+from datetime import datetime, timedelta
+from lofar.sas.otdb.OTDBBusListener import OTDBBusListener
+from lofar.sas.otdb.otdbrpc import OTDBRPC
+from lofar.sas.otdb.config import DEFAULT_OTDB_SERVICE_BUSNAME, DEFAULT_OTDB_SERVICENAME
+from lofar.sas.otdb.config import DEFAULT_OTDB_NOTIFICATION_BUSNAME, DEFAULT_OTDB_NOTIFICATION_SUBJECT
+from lofar.mom.momqueryservice.momqueryrpc import MoMQueryRPC
+from lofar.mom.momqueryservice.config import DEFAULT_MOMQUERY_BUSNAME, DEFAULT_MOMQUERY_SERVICENAME
+
+logger = logging.getLogger(__name__)
+
+class TaskPrescheduler(OTDBBusListener):
+    def __init__(self,
+                 otdb_notification_busname=DEFAULT_OTDB_NOTIFICATION_BUSNAME,
+                 otdb_notification_subject=DEFAULT_OTDB_NOTIFICATION_SUBJECT,
+                 otdb_service_busname=DEFAULT_OTDB_SERVICE_BUSNAME,
+                 otdb_service_subject=DEFAULT_OTDB_SERVICENAME,
+                 mom_service_busname=DEFAULT_MOMQUERY_BUSNAME,
+                 mom_service_subject=DEFAULT_MOMQUERY_SERVICENAME,
+                 broker=None, **kwargs):
+        super(TaskPrescheduler, self).__init__(busname=otdb_notification_busname,
+                                               subject=otdb_notification_subject,
+                                               broker=broker,
+                                               **kwargs)
+
+        self.otdb = OTDBRPC(busname=otdb_service_busname, servicename=otdb_service_subject, broker=broker) ## , ForwardExceptions=True hardcoded in RPCWrapper right now
+        self.momquery = MoMQueryRPC(busname=mom_service_busname, servicename=mom_service_subject, timeout=10, broker=broker)
+
+    def start_listening(self, **kwargs):
+        self.otdb.open()
+        self.momquery.open()
+        super(TaskPrescheduler, self).start_listening(**kwargs)
+
+    def stop_listening(self, **kwargs):
+        self.otdb.close()
+        self.momquery.close()
+        super(TaskPrescheduler, self).stop_listening(**kwargs)
+
+    def onObservationApproved(self, treeId, modificationTime):
+        """Updates the task specification and puts the task on prescheduled if it was generated by a trigger."""
+        #TODO might work for all tasks in the future
+        try:
+            otdb_id = treeId
+            mom_ids = self.momquery.getMoMIdsForOTDBIds([otdb_id])
+            if mom_ids:
+                mom_id = mom_ids[0]
+            else:
+                mom_id = None
+            if len(mom_ids) > 1:
+                logger.warning('Found multiple mom_ids %s', mom_ids)
+            if mom_id:
+                response = self.momquery.get_trigger_id(mom_id)
+                if response['status'] == 'OK':
+                    logger.info('Found a task mom_id=%s with a trigger_id=%s', mom_id, response['trigger_id'])
+                    
+                    #TODO, check for stations and other resources, start/endtime, target position, then update specification
+                    #self.otdb.taskSetSpecification(otdb_id, otdb_info)
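+                    # For example (hypothetical keys; the actual parset keys/values still need to be worked out):
+                    #   otdb_info = {'LOFAR.ObsSW.Observation.startTime': '2016-01-22 12:00:00',
+                    #                'LOFAR.ObsSW.Observation.stopTime': '2016-01-22 12:10:00'}
+                    #   self.otdb.taskSetSpecification(otdb_id, otdb_info)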
+                    
+                    logger.info('prescheduling otdb_id=%s because it was generated by trigger_id=%s', otdb_id, response['trigger_id'])
+                    self.otdb.taskSetStatus(otdb_id, 'prescheduled')
+                else:
+                    logger.info('Did not find a trigger for task mom_id=%s, because %s', mom_id, response['errors'])
+            else:
+                logger.info('Did not find a mom_id for task otdb_id=%s', otdb_id)
+        except Exception as e:
+            logger.error(e)
+
+def main():
+    from optparse import OptionParser
+    from lofar.messaging import setQpidLogLevel
+    from lofar.common.util import waitForInterrupt
+
+    # make sure we run in UTC timezone
+    import os
+    os.environ['TZ'] = 'UTC'
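+    # note: changing TZ mid-process typically also requires time.tzset() (POSIX only) before the time module picks it up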
+
+    # Check the invocation arguments
+    parser = OptionParser("%prog [options]", description='runs the task prescheduler service')
+    parser.add_option('-q', '--broker', dest='broker', type='string', default=None, help='Address of the qpid broker, default: localhost')
+    parser.add_option('-V', '--verbose', dest='verbose', action='store_true', help='verbose logging')
+    parser.add_option("--otdb_notification_busname", dest="otdb_notification_busname", type="string",
+                      default=DEFAULT_OTDB_NOTIFICATION_BUSNAME,
+                      help="Bus or queue where the OTDB notifications are published. [default: %default]")
+    parser.add_option("--otdb_notification_subject", dest="otdb_notification_subject", type="string",
+                      default=DEFAULT_OTDB_NOTIFICATION_SUBJECT,
+                      help="Subject of OTDB notifications on otdb_notification_busname. [default: %default]")
+    parser.add_option("--momquery_busname", dest="momquery_busname", type="string",
+                      default=DEFAULT_MOMQUERY_BUSNAME,
+                      help="Name of the momquery bus exchange on the qpid broker. [default: %default]")
+    parser.add_option("--momquery_servicename", dest="momquery_servicename", type="string",
+                      default=DEFAULT_MOMQUERY_SERVICENAME,
+                      help="Name of the momquery service. [default: %default]")
+    (options, args) = parser.parse_args()
+
+    setQpidLogLevel(logging.INFO)
+    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
+                        level=logging.DEBUG if options.verbose else logging.INFO)
+
+    with TaskPrescheduler(otdb_notification_busname=options.otdb_notification_busname,
+                          otdb_notification_subject=options.otdb_notification_subject,
+                          mom_service_busname=options.momquery_busname,
+                          mom_service_subject=options.momquery_servicename,
+                          broker=options.broker):
+        waitForInterrupt()
+
+if __name__ == '__main__':
+    main()
diff --git a/SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler b/SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler
new file mode 100644
index 0000000000000000000000000000000000000000..cc1bcc87a52128fe4b1b9a0faa602416a8251543
--- /dev/null
+++ b/SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler
@@ -0,0 +1,10 @@
+#!/usr/bin/python
+# $Id: taskprescheduler 33373 2016-01-22 11:01:15Z schaap $
+
+'''
+runs the task prescheduler service
+'''
+from lofar.sas.resourceassignment.taskprescheduler.prescheduler import main
+
+if __name__ == '__main__':
+    main()
diff --git a/SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler.ini b/SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler.ini
new file mode 100644
index 0000000000000000000000000000000000000000..f52ef85c19347460db70c597ea1abe60753cb96b
--- /dev/null
+++ b/SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler.ini
@@ -0,0 +1,8 @@
+[program:taskprescheduler]
+command=/bin/bash -c 'source $LOFARROOT/lofarinit.sh;exec taskprescheduler'
+user=lofarsys
+stopsignal=INT ; KeyboardInterrupt
+stopasgroup=true ; bash does not propagate signals
+stdout_logfile=%(program_name)s.log
+redirect_stderr=true
+stderr_logfile=NONE
diff --git a/SAS/ResourceAssignment/TaskPrescheduler/test/CMakeLists.txt b/SAS/ResourceAssignment/TaskPrescheduler/test/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..07d41747d3cd2de62ec384f123495cb5f695b124
--- /dev/null
+++ b/SAS/ResourceAssignment/TaskPrescheduler/test/CMakeLists.txt
@@ -0,0 +1,8 @@
+# $Id: CMakeLists.txt 32679 2015-10-26 09:31:56Z schaap $
+include(LofarCTest)
+include(FindPythonModule)
+
+find_python_module(mock REQUIRED)
+
+lofar_add_test(test_taskprescheduler)
+
diff --git a/SAS/ResourceAssignment/TaskPrescheduler/test/test_taskprescheduler.py b/SAS/ResourceAssignment/TaskPrescheduler/test/test_taskprescheduler.py
new file mode 100755
index 0000000000000000000000000000000000000000..7d4472e9ec037549b72db6d1c4bc3c40ea9152e1
--- /dev/null
+++ b/SAS/ResourceAssignment/TaskPrescheduler/test/test_taskprescheduler.py
@@ -0,0 +1,1298 @@
+#!/usr/bin/python
+
+# Copyright (C) 2012-2015    ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.    See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+# $Id:  $
+import unittest
+import uuid
+
+try:
+    import mock
+except ImportError as e:
+    print str(e)
+    print 'Please install python package mock: sudo apt-get install python-mock'
+    exit(3) #special lofar test exit code: skipped test
+
+from lofar.common.dbcredentials import Credentials
+from lofar.mom.momqueryservice.momqueryrpc import MoMQueryRPC
+from lofar.mom.momqueryservice.config import DEFAULT_MOMQUERY_SERVICENAME
+from lofar.mom.momqueryservice.momqueryservice import MoMDatabaseWrapper, ProjectDetailsQueryHandler
+from qpid.messaging.message import Message as QpidMessage
+
+
+class TestProjectDetailsQueryHandler(unittest.TestCase):
+    database_credentials = Credentials()
+    database_credentials.host = "localhost"
+    database_credentials.user = "root"
+    database_credentials.database = "testdb"
+    database_credentials.password = None
+    database_credentials.config = {"useradministration_database": "useradministration",
+                                   "momprivilege_database": "momprivilege"}
+
+    project_name = "project name"
+    folder = "/project/folder"
+
+    def setUp(self):
+        mom_database_wrapper_patcher = mock.patch('lofar.mom.momqueryservice.momqueryservice.MoMDatabaseWrapper')
+        self.addCleanup(mom_database_wrapper_patcher.stop)
+        self.mom_database_wrapper_mock = mom_database_wrapper_patcher.start()
+
+
+        self.project_details_query_handler = ProjectDetailsQueryHandler(dbcreds=self.database_credentials)
+        self.project_details_query_handler.prepare_loop()
+
+    def test_IsProjectActive_returns_active_true_when_mom_wrapper_returns_true(self):
+        self.mom_database_wrapper_mock().is_project_active.return_value = True
+
+        return_value = self.project_details_query_handler.is_project_active(self.project_name)
+
+        self.assertTrue(return_value['active'])
+
+    def test_IsProjectActive_returns_active_false_when_mom_wrapper_returns_false(self):
+        self.mom_database_wrapper_mock().is_project_active.return_value = False
+
+        return_value = self.project_details_query_handler.is_project_active(self.project_name)
+
+        self.assertFalse(return_value['active'])
+
+    def test_FolderExists_return_exists_true_when_mom_wrapper_returns_true(self):
+        self.mom_database_wrapper_mock().folder_exists.return_value = True
+
+        return_value = self.project_details_query_handler.folder_exists(self.folder)
+
+        self.assertTrue(return_value['exists'])
+
+    def test_FolderExists_return_exists_false_when_mom_wrapper_returns_false(self):
+        self.mom_database_wrapper_mock().folder_exists.return_value = False
+
+        return_value = self.project_details_query_handler.folder_exists(self.folder)
+
+        self.assertFalse(return_value['exists'])
+
+    def test_authorized_add_with_status_returns_authorized_false_when_mom_wrapper_returns_false(self):
+        user_name = "user"
+        project_name = "project"
+        job_type = "observation"
+        status = "approved"
+
+        self.mom_database_wrapper_mock().authorized_add_with_status.return_value = False
+
+        return_value = self.project_details_query_handler.authorized_add_with_status(user_name, project_name, job_type,
+                                                                                     status)
+
+        self.assertFalse(return_value['authorized'])
+
+    def test_allows_triggers_returns_allows_true_when_mom_wrapper_returns_true(self):
+        project_name = "project"
+
+        self.mom_database_wrapper_mock().allows_triggers.return_value = True
+
+        return_value = self.project_details_query_handler.allows_triggers(project_name)
+
+        self.assertTrue(return_value['allows'])
+
+    def test_allows_triggers_returns_allows_false_when_mom_wrapper_returns_false(self):
+        project_name = "project"
+
+        self.mom_database_wrapper_mock().allows_triggers.return_value = False
+
+        return_value = self.project_details_query_handler.allows_triggers(project_name)
+
+        self.assertFalse(return_value['allows'])
+
+    def test_get_project_priority_returns_priority_that_the_mom_wrapper_returns(self):
+        project_name = "project"
+
+        self.mom_database_wrapper_mock().get_project_priority.return_value = 1000
+
+        return_value = self.project_details_query_handler.get_project_priority(project_name)
+
+        self.assertEqual(return_value['priority'], 1000)
+
+    def test_add_trigger_returns_row_id_that_the_mom_wrapper_returns(self):
+        project_name = "project"
+        host_name = "host name"
+        user_name = "user name"
+        meta_data = "meta data"
+
+        row_id = 55
+
+        self.mom_database_wrapper_mock().add_trigger.return_value = row_id
+
+        return_value = self.project_details_query_handler.add_trigger(user_name, host_name, project_name, meta_data)
+
+        self.assertEqual(return_value['row_id'], row_id)
+
+    def test_get_trigger_id_returns_trigger_id_when_mom_wrapper_returns_an_id(self):
+        trigger_id = 1234
+
+        self.mom_database_wrapper_mock().get_trigger_id.return_value = trigger_id
+
+        return_value = self.project_details_query_handler.get_trigger_id(5432)
+
+        self.assertEqual(return_value['trigger_id'], trigger_id)
+
+    def test_get_trigger_id_returns_status_ok_when_mom_wrapper_returns_an_id(self):
+        trigger_id = 1234
+
+        self.mom_database_wrapper_mock().get_trigger_id.return_value = trigger_id
+
+        return_value = self.project_details_query_handler.get_trigger_id(5432)
+
+        self.assertEqual(return_value['status'], "OK")
+
+    def test_get_trigger_id_returns_status_error_when_mom_wrapper_returns_none(self):
+        self.mom_database_wrapper_mock().get_trigger_id.return_value = None
+
+        return_value = self.project_details_query_handler.get_trigger_id(5432)
+
+        self.assertEqual(return_value['status'], "Error")
+
+    def test_get_trigger_id_returns_error_when_mom_wrapper_returns_none(self):
+        mom_id = 5432
+
+        self.mom_database_wrapper_mock().get_trigger_id.return_value = None
+
+        return_value = self.project_details_query_handler.get_trigger_id(mom_id)
+
+        self.assertEqual(return_value['errors'][0], "No trigger_id for mom_id: " + str(mom_id))
+
+
+class TestMomQueryRPC(unittest.TestCase):
+    test_id = 1234
+    message_id = str(uuid.uuid4())
+    folder = "/project/folder"
+    user_name = "user name"
+    project_name = "project name"
+    meta_data = "meta data"
+    host_name = "host name"
+    job_type = "observation"
+    status = "opened"
+
+    qpid_message = QpidMessage({
+        str(test_id): {
+                'project_mom2id': '4567',
+                'project_name': 'foo',
+                'project_description': 'bar',
+                'object_mom2id': str(test_id)
+            }
+        },
+        properties={
+            "SystemName": "LOFAR",
+            "MessageType": "ReplyMessage",
+            "MessageId": message_id,
+            "status": "OK"
+        })
+
+    qpid_message_is_project_active_true = QpidMessage({"active": True},
+                                                      properties={
+                                                          "SystemName": "LOFAR",
+                                                          "MessageType": "ReplyMessage",
+                                                          "MessageId": message_id,
+                                                          "status": "OK"
+                                                      })
+
+    qpid_message_project_exists_true = QpidMessage({"exists": True},
+                                                   properties={
+                                                       "SystemName": "LOFAR",
+                                                       "MessageType": "ReplyMessage",
+                                                       "MessageId": message_id,
+                                                       "status": "OK"
+                                                   })
+    qpid_message_authorized_true = QpidMessage({"authorized": True},
+                                               properties={
+                                                   "SystemName": "LOFAR",
+                                                   "MessageType": "ReplyMessage",
+                                                   "MessageId": message_id,
+                                                   "status": "OK"
+                                               })
+
+    qpid_message_allows_true = QpidMessage({"allows": True},
+                                           properties={
+                                               "SystemName": "LOFAR",
+                                               "MessageType": "ReplyMessage",
+                                               "MessageId": message_id,
+                                               "status": "OK"
+                                           })
+
+    qpid_message_priority_1000 = QpidMessage({"priority": 1000},
+                                             properties={
+                                                 "SystemName": "LOFAR",
+                                                 "MessageType": "ReplyMessage",
+                                                 "MessageId": message_id,
+                                                 "status": "OK"
+                                             })
+
+    trigger_id = 12345
+    qpid_message_get_trigger_id = QpidMessage({"trigger_id": trigger_id, "status": "OK"},
+                                              properties={
+                                                  "SystemName": "LOFAR",
+                                                  "MessageType": "ReplyMessage",
+                                                  "MessageId": message_id,
+                                                  "status": "OK"
+                                              })
+
+    qpid_message_add_trigger_row_id = 33
+    qpid_message_add_trigger  = QpidMessage({"row_id": qpid_message_add_trigger_row_id},
+                                             properties={
+                                                 "SystemName": "LOFAR",
+                                                 "MessageType": "ReplyMessage",
+                                                 "MessageId": message_id,
+                                                 "status": "OK"
+                                             })
+
+    def setUp(self):
+        # the mock library had difficulty mocking ToBus and FromBus, probably due to some weird naming issue,
+        # so the mocking is done at the QPID messaging level.
+
+        self.momrpc = MoMQueryRPC('busname', DEFAULT_MOMQUERY_SERVICENAME)
+        self.sender_mock = mock.MagicMock()
+
+        self.receiver_mock = mock.MagicMock()
+
+        logger_patcher = mock.patch('lofar.mom.momqueryservice.momqueryrpc.logger')
+        self.addCleanup(logger_patcher.stop)
+        self.logger_mock = logger_patcher.start()
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_project_details_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.getProjectDetails(self.test_id)
+
+        self.assertEquals(1, len(result.keys()))
+        self.assertEquals(self.test_id, result.keys()[0])
+        self.assertTrue('project_mom2id' in result[self.test_id])
+        self.assertTrue('project_name' in result[self.test_id])
+        self.assertTrue('project_description' in result[self.test_id])
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_is_project_active_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_is_project_active_true
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.isProjectActive(self.project_name)
+
+        self.assertTrue(result['active'])
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_is_project_active_logs_before_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_is_project_active_true
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        self.momrpc.isProjectActive(self.project_name)
+
+        self.logger_mock.info.assert_any_call("Requesting if project: %s is active", self.project_name)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_is_project_active_logs_after_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_is_project_active_true
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.isProjectActive(self.project_name)
+
+        self.logger_mock.info.assert_any_call("Received Project is active: %s", result)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_folder_exists_active_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_project_exists_true
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.folderExists(self.folder)
+
+        self.assertTrue(result['exists'])
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_is_project_logs_before_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_project_exists_true
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        self.momrpc.folderExists(self.folder)
+
+        self.logger_mock.info.assert_any_call("Requesting folder: %s exists", self.folder)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_is_project_logs_after_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_project_exists_true
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.folderExists(self.folder)
+
+        self.logger_mock.info.assert_any_call("Received folder exists: %s", result)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_authorized_add_with_status_logs_before_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_authorized_true
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        self.momrpc.authorized_add_with_status(self.user_name, self.project_name, self.job_type, self.status)
+
+        self.logger_mock.info.assert_any_call(
+            "Requesting AutorizedAddWithStatus for user_name: %s project_name: %s job_type: %s status: %s",
+            self.user_name, self.project_name, self.job_type, self.status)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_authorized_add_with_status_logs_after_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_authorized_true
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.authorized_add_with_status(self.user_name, self.project_name, self.job_type, self.status)
+
+        self.logger_mock.info.assert_any_call(
+            "Received AutorizedAddWithStatus for user_name: %s project_name: %s job_type: %s status: %s result: %s",
+            self.user_name, self.project_name, self.job_type, self.status, result)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_authorized_add_with_status_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_authorized_true
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.authorized_add_with_status(self.user_name, self.project_name, self.job_type, self.status)
+
+        self.assertTrue(result['authorized'])
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_allows_triggers_logs_before_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_allows_true
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        self.momrpc.allows_triggers(self.project_name)
+
+        self.logger_mock.info.assert_any_call("Requesting AllowsTriggers for project_name: %s", self.project_name)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_allows_triggers_logs_after_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_allows_true
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.allows_triggers(self.project_name)
+
+        self.logger_mock.info.assert_any_call(
+            "Received AllowsTriggers for project_name (%s): %s", self.project_name, result)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_allows_triggers_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_allows_true
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.allows_triggers(self.project_name)
+
+        self.assertTrue(result['allows'])
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_get_project_priority_logs_before_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_priority_1000
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        self.momrpc.get_project_priority(self.project_name)
+
+        self.logger_mock.info.assert_any_call("Requestion GetProjectPriority for project_name: %s", self.project_name)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_get_project_priority_logs_after_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_priority_1000
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.get_project_priority(self.project_name)
+
+        self.logger_mock.info.assert_any_call(
+            "Received GetProjectPriority for project_name (%s): %s", self.project_name, result)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_get_project_priority_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_priority_1000
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.get_project_priority(self.project_name)
+
+        self.assertEqual(result['priority'], 1000)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_add_trigger_logs_before_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_add_trigger
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        self.momrpc.add_trigger(self.user_name, self.host_name, self.project_name, self.meta_data)
+
+        self.logger_mock.info.assert_any_call(
+            "Requestion AddTrigger for user_name: %s, host_name: %s, project_name: %s and meta_data: %s",
+            self.user_name, self.host_name, self.project_name, self.meta_data)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_add_trigger_logs_after_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_add_trigger
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.add_trigger(self.user_name, self.host_name, self.project_name, self.meta_data)
+
+        self.logger_mock.info.assert_any_call(
+            "Received AddTrigger for user_name (%s), host_name(%s), project_name(%s) and meta_data(%s): %s",
+            self.user_name, self.host_name, self.project_name, self.meta_data, result)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_add_trigger_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_add_trigger
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.add_trigger(self.user_name, self.host_name, self.project_name, self.meta_data)
+
+        self.assertEqual(result['row_id'], self.qpid_message_add_trigger_row_id)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_get_trigger_id_logs_before_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_get_trigger_id
+
+        mom_id = 6789
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        self.momrpc.get_trigger_id(mom_id)
+
+        self.logger_mock.info.assert_any_call("Requesting GetTriggerId for mom_id: %s", mom_id)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_get_trigger_id_logs_after_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_get_trigger_id
+
+        mom_id = 6789
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.get_trigger_id(mom_id)
+
+        self.logger_mock.info.assert_any_call("Received trigger_id: %s", result)
+
+    @mock.patch('lofar.messaging.messagebus.qpid.messaging')
+    def test_get_trigger_id_query(self, qpid_mock):
+        self.receiver_mock.fetch.return_value = self.qpid_message_get_trigger_id
+
+        mom_id = 6789
+
+        qpid_mock.Message = QpidMessage
+        qpid_mock.Connection().session().senders = [self.sender_mock]
+        qpid_mock.Connection().session().next_receiver.return_value = self.receiver_mock
+
+        result = self.momrpc.get_trigger_id(mom_id)
+
+        self.assertEqual(result["trigger_id"], self.trigger_id)
+        self.assertEqual(result["status"], "OK")
+
+
+class TestMoMDatabaseWrapper(unittest.TestCase):
+    database_credentials = Credentials()
+    database_credentials.host = "localhost"
+    database_credentials.user = "root"
+    database_credentials.database = "testdb"
+    database_credentials.password = None
+    database_credentials.config = {"useradministration_database": "useradministration",
+                                   "momprivilege_database": "momprivilege"}
+
+    project_name = "project name"
+    folder = "/project/folder1/folder2"
+
+    user_name = "user name"
+    meta_data = "meta data"
+    host_name = "host name"
+    job_type = "observation"
+    status = "opened"
+
+    mom_id = 84903
+    trigger_id = 294093
+
+    def setUp(self):
+        logger_patcher = mock.patch('lofar.mom.momqueryservice.momqueryservice.logger')
+        self.addCleanup(logger_patcher.stop)
+        self.logger_mock = logger_patcher.start()
+
+        mysql_patcher = mock.patch('lofar.mom.momqueryservice.momqueryservice.connector')
+        self.addCleanup(mysql_patcher.stop)
+        self.mysql_mock = mysql_patcher.start()
+
+        self.mom_database_wrapper = MoMDatabaseWrapper(self.database_credentials)
+
+    def test_is_project_active_logs_start_of_query(self):
+        self.mom_database_wrapper.is_project_active(self.project_name)
+
+        self.logger_mock.info.assert_any_call("is_project_active for project name: %s", self.project_name)
+
+    def test_is_project_active_logs_end_of_query(self):
+        is_active = False
+
+        self.mom_database_wrapper.is_project_active(self.project_name)
+
+        self.logger_mock.info.assert_any_call("is_project_active for project (%s): %s", self.project_name, is_active)
+
+    def test_is_project_active_return_true_when_query_returns_rows(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = [{u'1': 1}]
+
+        return_value = self.mom_database_wrapper.is_project_active(self.project_name)
+
+        self.assertTrue(return_value)
+
+    def test_is_project_active_return_false_when_query_returns_no_rows(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = []
+
+        return_value = self.mom_database_wrapper.is_project_active(self.project_name)
+
+        self.assertFalse(return_value)
+
+    def test_folder_exists_logs_start_of_query(self):
+        self.mom_database_wrapper.folder_exists(self.folder)
+
+        self.logger_mock.info.assert_any_call("folder_exists for folder: %s", self.folder)
+
+    def test_folder_exists_logs_stop_of_query(self):
+        exists = False
+
+        self.mom_database_wrapper.folder_exists(self.folder)
+
+        self.logger_mock.info.assert_any_call("folder_exists for folder (%s): %s", self.folder, exists)
+
+    def test_folder_exists_returns_true_when_query_returns_rows(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = [{u'1': 1}]
+
+        return_value = self.mom_database_wrapper.folder_exists(self.folder)
+
+        self.assertTrue(return_value)
+
+    def test_folder_exists_returns_false_when_query_returns_no_rows(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = []
+
+        return_value = self.mom_database_wrapper.folder_exists(self.folder)
+
+        self.assertFalse(return_value)
+
+    def test_folder_exists_returns_false_on_empty_folder_path(self):
+        empty_path = ""
+
+        self.assertFalse(self.mom_database_wrapper.folder_exists(empty_path))
+
+    def test_folder_exists_logs_error_on_empty_folder_path(self):
+        empty_path = ""
+
+        self.mom_database_wrapper.folder_exists(empty_path)
+
+        self.logger_mock.error.assert_any_call(
+            "Folder path is incorrect: %s", "Folder path () does not start with a /")
+
+    def test_folder_exists_returns_false_on_folder_path_with_no_parent(self):
+        no_parent_path = "/"
+
+        self.assertFalse(self.mom_database_wrapper.folder_exists(no_parent_path))
+
+    def test_folder_exists_logs_error_on_folder_path_with_no_parent(self):
+        no_parent_path = "/"
+
+        self.mom_database_wrapper.folder_exists(no_parent_path)
+
+        self.logger_mock.error.assert_any_call(
+            "Folder path is incorrect: %s", "Folder path (%s) should minimally have a project")
+
+    def test_authorized_add_with_status_logs_start_of_query(self):
+        self.mom_database_wrapper.authorized_add_with_status(self.user_name, self.project_name, self.job_type,
+                                                             self.status)
+
+        self.logger_mock.info.assert_any_call(
+            "authorized_add_with_status for user_name: %s project_name: %s job_type: %s status: %s",
+            self.user_name, self.project_name, self.job_type, self.status)
+
+    def test_authorized_add_with_status_logs_stop_of_query(self):
+        authorized = False
+
+        self.mom_database_wrapper.authorized_add_with_status(self.user_name, self.project_name, self.job_type,
+                                                             self.status)
+
+        self.logger_mock.info.assert_any_call(
+            "authorized_add_with_status for user_name: %s project_name: %s job_type: %s status: %s result: %s",
+            self.user_name, self.project_name, self.job_type, self.status, authorized)
+
+    def test_authorized_add_with_status_returns_true_when_query_returns_rows(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = [{u'1': 1}]
+
+        return_value = self.mom_database_wrapper.authorized_add_with_status(self.user_name, self.project_name,
+                                                                            self.job_type, self.status)
+        self.assertTrue(return_value)
+
+    def test_authorized_add_with_status_returns_false_when_query_returns_no_rows(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = []
+
+        return_value = self.mom_database_wrapper.authorized_add_with_status(self.user_name, self.project_name,
+                                                                            self.job_type, self.status)
+
+        self.assertFalse(return_value)
+
+    def test_authorized_add_with_status_throws_ValueError_when_status_is_not_approved_or_opened(self):
+        with self.assertRaises(ValueError) as exception:
+            self.mom_database_wrapper.authorized_add_with_status(self.user_name, self.project_name,
+                                                                 self.job_type, "aborted")
+
+        self.assertEqual(exception.exception.message, "status should be either 'opened' or 'approved'")
+
+    def test_authorized_add_with_status_throws_ValueError_when_job_type_is_not_observation_or_pipeline_ingest(self):
+        with self.assertRaises(ValueError) as exception:
+            self.mom_database_wrapper.authorized_add_with_status(self.user_name, self.project_name,
+                                                                 "measurment", self.status)
+
+        self.assertEqual(exception.exception.message, "job_type should be either 'observation', 'ingest' or 'pipeline'")
+
+    def test_allows_triggers_logs_start_of_query(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = [{u'allowtriggers': True}]
+
+        self.mom_database_wrapper.allows_triggers(self.project_name)
+
+        self.logger_mock.info.assert_any_call("allows_triggers for project_name: %s", self.project_name)
+
+    def test_allows_triggers_logs_end_of_query(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = [{u'allowtriggers': True}]
+
+        result = self.mom_database_wrapper.allows_triggers(self.project_name)
+
+        self.logger_mock.info.assert_any_call(
+            "allows_triggers for project_name (%s) result: %s", self.project_name, result)
+
+    def test_allows_triggers_throws_exception_when_query_returns_no_rows(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = []
+
+        with self.assertRaises(ValueError) as exception:
+            self.mom_database_wrapper.allows_triggers(self.project_name)
+
+        self.assertEqual(exception.exception.message, "project name (%s) not found in MoM database" % self.project_name)
+
+    def test_allows_triggers_returns_true_when_query_returns_rows(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = [{u'allowtriggers': True}]
+
+        return_value = self.mom_database_wrapper.allows_triggers(self.project_name)
+
+        self.assertTrue(return_value)
+
+    def test_get_project_priority_logs_start_of_query(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = [{u'priority': 1000}]
+
+        self.mom_database_wrapper.get_project_priority(self.project_name)
+
+        self.logger_mock.info.assert_any_call("get_project_priority for project_name: %s", self.project_name)
+
+    def test_get_project_priority_logs_end_of_query(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = [{u'priority': 1000}]
+
+        return_value = self.mom_database_wrapper.get_project_priority(self.project_name)
+
+        self.logger_mock.info.assert_any_call(
+            "get_project_priority for project_name (%s): %s", self.project_name, return_value)
+
+    def test_get_project_priority_returns_priority_when_query_returns_a_row(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = [{u'priority': 1000}]
+
+        return_value = self.mom_database_wrapper.get_project_priority(self.project_name)
+
+        self.assertEqual(return_value, 1000)
+
+    def test_get_project_priority_throws_exception_when_query_returns_no_row(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = []
+
+        with self.assertRaises(ValueError) as exception:
+            self.mom_database_wrapper.get_project_priority(self.project_name)
+
+        self.assertEqual(exception.exception.message, "project name (%s) not found in MoM database" % self.project_name)
+
+    def test_add_trigger_logs_start_of_query(self):
+        self.mysql_mock.connect().cursor().lastrowid = 34
+
+        self.mom_database_wrapper.add_trigger(self.user_name, self.host_name, self.project_name, self.meta_data)
+
+        self.logger_mock.info.assert_any_call(
+            "add_trigger for user_name: %s, host_name: %s, project_name: %s, meta_data: %s",
+            self.user_name, self.host_name, self.project_name, self.meta_data)
+
+    def test_add_trigger_logs_end_of_query(self):
+        self.mysql_mock.connect().cursor().lastrowid  = 34
+
+        result = self.mom_database_wrapper.add_trigger(
+            self.user_name, self.host_name, self.project_name, self.meta_data)
+
+        self.logger_mock.info.assert_any_call(
+            "add_trigger for user_name(%s), host_name(%s), project_name(%s), meta_data(%s): %s",
+            self.user_name, self.host_name, self.project_name, self.meta_data, result)
+
+    def test_add_trigger_returns_row_id_from_query(self):
+        self.mysql_mock.connect().cursor().lastrowid = 34
+
+        result = self.mom_database_wrapper.add_trigger(
+            self.user_name, self.host_name, self.project_name, self.meta_data)
+
+        self.assertEqual(result, 34)
+
+    def test_get_trigger_id_logs_start_of_query(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = \
+            [{u'misc': '{"trigger_id": ' + str(self.trigger_id) + '}'}]
+
+        self.mom_database_wrapper.get_trigger_id(self.mom_id)
+
+        self.logger_mock.info.assert_any_call("get_trigger_id for mom_id: %s", self.mom_id)
+
+    def test_get_trigger_id_logs_end_of_query(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = \
+            [{u'misc': '{"trigger_id": ' + str(self.trigger_id) + '}'}]
+
+        result = self.mom_database_wrapper.get_trigger_id(self.mom_id)
+
+        self.logger_mock.info.assert_any_call("get_trigger_id for mom_id (%s): %s", self.mom_id, self.trigger_id)
+
+    def test_get_trigger_id_returns_row_id_from_query(self):
+        self.mysql_mock.connect().cursor().fetchall.return_value = \
+            [{u'misc': '{"trigger_id": ' + str(self.trigger_id) + '}'}]
+
+        result = self.mom_database_wrapper.get_trigger_id(self.mom_id)
+
+        self.assertEqual(result, self.trigger_id)
+
+
+@unittest.skip("Skipping integration test")
+class IntegrationTestMoMDatabaseWrapper(unittest.TestCase):
+    database_credentials = Credentials()
+    database_credentials.host = "localhost"
+    database_credentials.user = "root"
+    database_credentials.database = "mom"
+    database_credentials.password = None
+    database_credentials.config = {"useradministration_database": "useradministration",
+                                   "momprivilege_database": "momprivilege"}
+
+    project_name = "project name"
+    folder = "/project/folder1/folder2"
+
+    user_name = "lofar"
+    job_type = "observation"
+    status = "opened"
+
+    trigger_id = 1002
+
+    def setUp(self):
+        self.mysqld = testing.mysqld.Mysqld()
+
+        self.database_credentials.port = self.mysqld.my_cnf['port']
+        self.connection = connector.connect(**self.mysqld.dsn())
+
+        cursor = self.connection.cursor(dictionary=True)
+        # useradmin db
+        cursor.execute("CREATE DATABASE useradministration")
+        cursor.execute("CREATE TABLE useradministration.useraccount ( "
+                       "id int(11) NOT NULL AUTO_INCREMENT, "
+                       "userid int(11) NOT NULL DEFAULT '0', "
+                       "username varchar(20) NOT NULL DEFAULT '', "
+                       "password varchar(32) NOT NULL DEFAULT '', "
+                       "publickey varchar(32) DEFAULT NULL, "
+                       "PRIMARY KEY (id), "
+                       "UNIQUE KEY useraccount_UNIQ (username) "
+                       ") ENGINE=InnoDB AUTO_INCREMENT=1787 DEFAULT CHARSET=latin1")
+        cursor.execute("CREATE TABLE useradministration.useraccountsystemrole ("
+                       "id int(11) NOT NULL AUTO_INCREMENT, "
+                       "useraccountid int(11) NOT NULL DEFAULT '0', "
+                       "systemroleid int(11) NOT NULL DEFAULT '0', "
+                       "indexid int(11) NOT NULL DEFAULT '0', "
+                       "PRIMARY KEY (id), "
+                       "KEY useraccount_useraccountsystemrole_IND (useraccountid), "
+                       "KEY systemrole_useraccountsystemrole_IND (systemroleid), "
+                       "KEY useraccount_index_useraccountsystemrole_IND (indexid) "
+                       ") ENGINE=InnoDB AUTO_INCREMENT=3413 DEFAULT CHARSET=latin1")
+        # mom database
+        cursor.execute("CREATE DATABASE mom")
+        cursor.execute("USE mom")
+        cursor.execute("CREATE TABLE mom2objectstatus ( "
+                       "id int(11) NOT NULL AUTO_INCREMENT, "
+                       "name varchar(255) DEFAULT NULL, "
+                       "roles varchar(512) DEFAULT NULL, "
+                       "userid int(11) DEFAULT NULL, "
+                       "statusid int(11) DEFAULT NULL, "
+                       "mom2objectid int(11) DEFAULT NULL, "
+                       "indexid int(11) DEFAULT NULL, "
+                       "statustime datetime NOT NULL DEFAULT '1000-01-01 00:00:00.000000', "
+                       "pending tinyint(1) DEFAULT 0, "
+                       "PRIMARY KEY (id) "
+                       ") ENGINE=InnoDB AUTO_INCREMENT=1725902 DEFAULT CHARSET=latin1")
+        cursor.execute("CREATE TABLE mom2object ("
+                       "id int(11) NOT NULL AUTO_INCREMENT, "
+                       "parentid int(11) DEFAULT NULL, "
+                       "indexid int(11) DEFAULT NULL, "
+                       "mom2id int(11) NOT NULL DEFAULT 0, "
+                       "mom2objecttype char(25) NOT NULL, "
+                       "name varchar(100) NOT NULL DEFAULT '', "
+                       "description varchar(255) DEFAULT NULL, "
+                       "ownerprojectid int(11) DEFAULT NULL, "
+                       "currentstatusid int(11) DEFAULT NULL, "
+                       "topology varchar(100) DEFAULT NULL, "
+                       "predecessor varchar(512) DEFAULT NULL, "
+                       "topology_parent tinyint(1) DEFAULT 0, "
+                       "group_id int(11) DEFAULT 0, "
+                       "datasize bigint(20) DEFAULT 0, "
+                       "PRIMARY KEY (id), "
+                       "UNIQUE KEY mom2object_UNIQ (mom2id) "
+                       ") ENGINE=InnoDB AUTO_INCREMENT=331855 DEFAULT CHARSET=latin1")
+        cursor.execute("CREATE TABLE status ("
+                       "id int(11) NOT NULL AUTO_INCREMENT, "
+                       "code char(15) NOT NULL DEFAULT '', "
+                       "type char(20) NOT NULL, "
+                       "description varchar(100) DEFAULT NULL, "
+                       "PRIMARY KEY (id), "
+                       "UNIQUE KEY status_UNIQ (code,type) "
+                       ") ENGINE=InnoDB AUTO_INCREMENT=712 DEFAULT CHARSET=latin1")
+        cursor.execute("CREATE TABLE member ( "
+                       "id int(11) NOT NULL AUTO_INCREMENT, "
+                       "projectid int(11) DEFAULT NULL, "
+                       "indexid int(11) DEFAULT NULL, "
+                       "PRIMARY KEY (id), "
+                       "KEY mom2object_member_FK (projectid), "
+                       "KEY indexid_IND (indexid) "
+                       ") ENGINE=InnoDB AUTO_INCREMENT=1010 DEFAULT CHARSET=latin1")
+        cursor.execute("CREATE TABLE registeredmember ( "
+                       "id int(11) NOT NULL AUTO_INCREMENT, "
+                       "memberid int(11) DEFAULT NULL, "
+                       "userid int(11) DEFAULT NULL, "
+                       "PRIMARY KEY (id), "
+                       "KEY member_registeredmember_FK (memberid), "
+                       "KEY userid_IND (userid) "
+                       ") ENGINE=InnoDB AUTO_INCREMENT=768 DEFAULT CHARSET=latin1")
+        cursor.execute("CREATE TABLE memberprojectrole ( "
+                       "id int(11) NOT NULL AUTO_INCREMENT, "
+                       "memberid int(11) DEFAULT NULL, "
+                       "indexid int(11) DEFAULT NULL, "
+                       "projectroleid int(11) DEFAULT NULL, "
+                       "PRIMARY KEY (id), "
+                       "KEY member_memberprojectrole_FK (memberid), "
+                       "KEY projectrole_memberprojectrole_FK (projectroleid), "
+                       "KEY indexid_IND (indexid) "
+                       ") ENGINE=InnoDB AUTO_INCREMENT=1167 DEFAULT CHARSET=latin1")
+        cursor.execute("CREATE TABLE projectrole ( "
+                       "id int(11) NOT NULL AUTO_INCREMENT, "
+                       "name char(15) NOT NULL DEFAULT '', "
+                       "description varchar(100) DEFAULT NULL, "
+                       "PRIMARY KEY (id), "
+                       "UNIQUE KEY projectrole_UNIQ (name) "
+                       ") ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=latin1")
+        cursor.execute("CREATE TABLE project ( "
+                       "id int(11) NOT NULL AUTO_INCREMENT, "
+                       "mom2objectid int(11) DEFAULT NULL, "
+                       "releasedate date DEFAULT NULL, "
+                       "PRIMARY KEY (id), "
+                       "KEY mom2object_IND (mom2objectid) "
+                       ") ENGINE=InnoDB AUTO_INCREMENT=149 DEFAULT CHARSET=latin1")
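+        # extend the project table with the trigger-related columns that back
+        # the allows_triggers() and get_project_priority() tests below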
+        cursor.execute("ALTER TABLE project "
+                       "ADD allowtriggers BOOLEAN NOT NULL DEFAULT FALSE AFTER releasedate, "
+                       "ADD priority int(11) NOT NULL DEFAULT 1000 AFTER allowtriggers")
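+        # lofar_trigger is the table in which add_trigger() registers received triggers
+        # (see the test_add_trigger_* tests below)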
+        cursor.execute("CREATE TABLE lofar_trigger ( "
+                       "id int(11) NOT NULL AUTO_INCREMENT, "
+                       "username varchar(120) NOT NULL DEFAULT '', "
+                       "hostname varchar(128) NOT NULL DEFAULT '', "
+                       "arrivaltime datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, "
+                       "projectname varchar(100) NOT NULL DEFAULT '', "
+                       "metadata TEXT NOT NULL, "
+                       "PRIMARY KEY (id), "
+                       "FOREIGN KEY (username) REFERENCES useradministration.useraccount(username)"
+                       ") ")
+        cursor.execute("CREATE TABLE lofar_observation ( "
+                       "id int(11) NOT NULL AUTO_INCREMENT, "
+                       "mom2objectid int(11) DEFAULT NULL, "
+                       "observation_id int(11) DEFAULT NULL, "
+                       "instrument char(32) DEFAULT NULL, "
+                       "user_specification_id int(11) DEFAULT NULL, "
+                       "system_specification_id int(11) DEFAULT NULL,"
+                       "default_template varchar(50) DEFAULT NULL,"
+                       "tbb_template varchar(50) DEFAULT NULL,"
+                       "tbb_piggyback_allowed tinyint(1) DEFAULT '0',"
+                       "parset mediumtext,"
+                       "nr_output_correlated int(11) DEFAULT NULL,"
+                       "nr_output_beamformed int(11) DEFAULT NULL,"
+                       "nr_output_coherent_stokes int(11) DEFAULT NULL,"
+                       "nr_output_incoherent_stokes int(11) DEFAULT NULL,"
+                       "nr_output_flyseye int(11) DEFAULT NULL,"
+                       "nr_output_correlated_valid int(11) DEFAULT NULL,"
+                       "nr_output_beamformed_valid int(11) DEFAULT NULL,"
+                       "nr_output_coherent_stokes_valid int(11) DEFAULT NULL,"
+                       "nr_output_incoherent_stokes_valid int(11) DEFAULT NULL,"
+                       "nr_output_flyseye_valid int(11) DEFAULT NULL,"
+                       "feedback text,"
+                       "aartfaac_piggyback_allowed bit(1) DEFAULT b'1',"
+                       "storage_cluster_id int(11) DEFAULT NULL,"
+                       "processing_cluster_id int(11) DEFAULT NULL,"
+                       "nico_testing int(11) DEFAULT NULL,"
+                       "PRIMARY KEY (id),"
+                       "KEY lofar_observation_observation_id_IND (observation_id),"
+                       "KEY mom2object_lofar_observation_FK (mom2objectid),"
+                       "KEY user_specification_lofar_observation_FK (user_specification_id),"
+                       "KEY system_specification_lofar_observation_FK (system_specification_id)"
+                       ") ENGINE=InnoDB AUTO_INCREMENT=52874 DEFAULT CHARSET=latin1")
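+        # for observations, the trigger_id returned by get_trigger_id() is stored as JSON in the
+        # specification's misc column (see test_get_trigger_id_returns_id_for_lofar_observation)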
+        cursor.execute("CREATE TABLE lofar_observation_specification ("
+                       "id int(11) NOT NULL AUTO_INCREMENT,"
+                       "type char(8) DEFAULT NULL,"
+                       "correlated_data tinyint(1) DEFAULT '1',"
+                       "filtered_data tinyint(1) DEFAULT '0',"
+                       "beamformed_data tinyint(1) DEFAULT '0',"
+                       "coherent_stokes_data tinyint(1) DEFAULT '0',"
+                       "incoherent_stokes_data tinyint(1) DEFAULT '0',"
+                       "antenna char(20) DEFAULT NULL,"
+                       "clock_mode char(10) DEFAULT NULL,"
+                       "instrument_filter char(15) DEFAULT NULL,"
+                       "integration_interval double DEFAULT NULL,"
+                       "channels_per_subband int(11) DEFAULT NULL,"
+                       "cn_integration_steps int(11) DEFAULT NULL,"
+                       "pencilbeams_flyseye tinyint(1) DEFAULT '0',"
+                       "pencilbeams_nr_pencil_rings int(11) DEFAULT NULL,"
+                       "pencilbeams_ring_size double DEFAULT NULL,"
+                       "stokes_selection char(4) DEFAULT NULL,"
+                       "stokes_integrate_channels tinyint(1) DEFAULT NULL,"
+                       "stokes_integration_steps int(11) unsigned DEFAULT NULL,"
+                       "station_set char(15) DEFAULT NULL,"
+                       "timeframe char(4) DEFAULT NULL,"
+                       "starttime datetime DEFAULT NULL,"
+                       "endtime datetime DEFAULT NULL,"
+                       "spec_duration double DEFAULT NULL,"
+                       "coherent_dedisperse_channels tinyint(1) DEFAULT '0',"
+                       "dispersion_measure float DEFAULT NULL,"
+                       "subbands_per_file_cs int(11) DEFAULT NULL,"
+                       "subbands_per_file_bf int(11) DEFAULT NULL,"
+                       "collapsed_channels_cs int(11) DEFAULT NULL,"
+                       "collapsed_channels_is int(11) DEFAULT NULL,"
+                       "downsampling_steps_cs int(11) DEFAULT NULL,"
+                       "downsampling_steps_is int(11) DEFAULT NULL,"
+                       "which_cs char(4) DEFAULT NULL,"
+                       "which_is char(4) DEFAULT NULL,"
+                       "bypass_pff tinyint(1) DEFAULT '0',"
+                       "enable_superterp tinyint(1) DEFAULT '0',"
+                       "flyseye tinyint(1) DEFAULT '0',"
+                       "tab_nr_rings int(11) DEFAULT NULL,"
+                       "tab_ring_size float DEFAULT NULL,"
+                       "bits_per_sample int(11) DEFAULT NULL,"
+                       "misc text,"
+                       "PRIMARY KEY (id),"
+                       "KEY lofar_observation_specification_type_IND (type)"
+                       ") ENGINE=InnoDB AUTO_INCREMENT=105645 DEFAULT CHARSET=latin1")
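+        # for pipelines, the trigger_id returned by get_trigger_id() is stored as JSON in the
+        # misc column of lofar_pipeline (see test_get_trigger_id_returns_id_for_lofar_pipeline)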
+        cursor.execute("CREATE TABLE lofar_pipeline ("
+                       "id int(11) NOT NULL AUTO_INCREMENT,"
+                       "mom2objectid int(11) DEFAULT NULL,"
+                       "starttime datetime DEFAULT NULL,"
+                       "endtime datetime DEFAULT NULL,"
+                       "pipeline_id int(11) DEFAULT NULL,"
+                       "pending tinyint(1) DEFAULT '0',"
+                       "template varchar(100) DEFAULT NULL,"
+                       "runtimeDirectory varchar(255) DEFAULT NULL,"
+                       "resultDirectory varchar(255) DEFAULT NULL,"
+                       "workingDirectory varchar(255) DEFAULT NULL,"
+                       "parset longtext,"
+                       "nr_output_correlated int(11) DEFAULT NULL,"
+                       "nr_output_beamformed int(11) DEFAULT NULL,"
+                       "nr_output_instrument_model int(11) DEFAULT NULL,"
+                       "nr_output_skyimage int(11) DEFAULT NULL,"
+                       "nr_output_correlated_valid int(11) DEFAULT NULL,"
+                       "nr_output_beamformed_valid int(11) DEFAULT NULL,"
+                       "nr_output_instrument_model_valid int(11) DEFAULT NULL,"
+                       "nr_output_skyimage_valid int(11) DEFAULT NULL,"
+                       "feedback text,"
+                       "demixing_parameters_id int(11) DEFAULT NULL,"
+                       "bbs_parameters_id int(11) DEFAULT NULL,"
+                       "duration double DEFAULT NULL,"
+                       "storage_cluster_id int(11) DEFAULT NULL,"
+                       "processing_cluster_id int(11) DEFAULT NULL,"
+                       "misc text,"
+                       "PRIMARY KEY (id),"
+                       "KEY lofar_pipeline_pipeline_id_IND (pipeline_id),"
+                       "KEY mom2object_lofar_pipeline_FK (mom2objectid),"
+                       "KEY demixing_parameters_FK (demixing_parameters_id),"
+                       "KEY bbs_parameters_FK (bbs_parameters_id)"
+                       ") ENGINE=InnoDB AUTO_INCREMENT=75471 DEFAULT CHARSET=latin1")
+        # momprivilege database: status transitions and the roles allowed to perform them
+        cursor.execute("CREATE DATABASE momprivilege")
+        cursor.execute("CREATE TABLE momprivilege.statustransitionrole ( "
+                       "id int(11) NOT NULL AUTO_INCREMENT, "
+                       "statustransitionid int(11) DEFAULT NULL, "
+                       "roleid int(11) NOT NULL, "
+                       "roletype char(100) NOT NULL, "
+                       "PRIMARY KEY (id), "
+                       "KEY roletype_IND (roleid,roletype), "
+                       "KEY statustransition_statustransitionrole_FK (statustransitionid) "
+                       ") ENGINE=InnoDB AUTO_INCREMENT=8572 DEFAULT CHARSET=latin1")
+        cursor.execute("CREATE TABLE momprivilege.statustransition ("
+                       "id int(11) NOT NULL AUTO_INCREMENT, "
+                       "oldstatusid int(11) NOT NULL, "
+                       "newstatusid int(11) NOT NULL, "
+                       "PRIMARY KEY (id), "
+                       "KEY oldstatus_IND (oldstatusid), "
+                       "KEY newstatus_IND (newstatusid) "
+                       ") ENGINE=InnoDB AUTO_INCREMENT=1272 DEFAULT CHARSET=latin1")
+
+        self.mom_database_wrapper = MoMDatabaseWrapper(self.database_credentials)
+
+    def tearDown(self):
+        self.mysqld.stop()
+
+    def execute(self, query):
+        cursor = self.connection.cursor(dictionary=True)
+        cursor.execute(query)
+        self.connection.commit()
+        cursor.close()
+
+    def test_is_project_active_returns_false_on_empty_mom2object_table(self):
+        self.assertFalse(self.mom_database_wrapper.is_project_active("project_name"))
+
+    def test_is_project_active_returns_true_when_project_with_correct_name_and_status_is_available(self):
+        self.execute("insert into mom2object values(169900, NULL, NULL, 183526, 'PROJECT', 'LC0_011', "
+                     "'Pulsar timing with LOFAR', NULL, 966855, NULL, NULL, 0, 0, 0)")
+        self.execute("insert into mom2objectstatus values(966855, 'Pizzo, Dr. Roberto Francesco', "
+                     "'Administrative, LTA User, manager, Operator, Prospective, Review Manager, Reviewer, Scientist, "
+                     "System Scientist, Telescope Astronomer', 531, 7, 169900, 0, '2012-12-18 09:47:50', 0)")
+
+        self.assertTrue(self.mom_database_wrapper.is_project_active("LC0_011"))
+
+    def test_folder_exists_returns_false_on_empty_table(self):
+        self.assertFalse(self.mom_database_wrapper.folder_exists("/project/folder1/folder2"))
+
+    def test_folder_exists_returns_true_when_folder_exists(self):
+        self.execute("insert into mom2object values(1, NULL, NULL, 11, 'PROJECT', 'project', "
+                     "'Pulsar timing with LOFAR', NULL, 966855, NULL, NULL, 0, 0, 0)")
+        self.execute("insert into mom2object values(2, 1, NULL, 22, 'FOLDER', 'folder1', "
+                     "'Pulsar timing with LOFAR', NULL, 966855, NULL, NULL, 0, 0, 0)")
+        self.execute("insert into mom2object values(3, 2, NULL, 33, 'FOLDER', 'folder2', "
+                     "'Pulsar timing with LOFAR', NULL, 966855, NULL, NULL, 0, 0, 0)")
+
+        self.assertTrue(self.mom_database_wrapper.folder_exists(self.folder))
+
+    def test_folder_exists_returns_true_when_folder_exists_and_path_ends_on_forward_slash(self):
+        self.execute("insert into mom2object values(1, NULL, NULL, 11, 'PROJECT', 'project', "
+                     "'Pulsar timing with LOFAR', NULL, 966855, NULL, NULL, 0, 0, 0)")
+        self.execute("insert into mom2object values(2, 1, NULL, 22, 'FOLDER', 'folder1', "
+                     "'Pulsar timing with LOFAR', NULL, 966855, NULL, NULL, 0, 0, 0)")
+        self.execute("insert into mom2object values(3, 2, NULL, 33, 'FOLDER', 'folder2', "
+                     "'Pulsar timing with LOFAR', NULL, 966855, NULL, NULL, 0, 0, 0)")
+
+        self.assertTrue(self.mom_database_wrapper.folder_exists(self.folder + "/"))
+
+    def test_authorized_add_with_status_returns_false_on_empty_db(self):
+        self.assertFalse(self.mom_database_wrapper.authorized_add_with_status(self.user_name, self.project_name,
+                                                                              self.job_type, self.status))
+
+    def test_authorized_add_with_status_returns_true_when_rights_are_on_system_role(self):
+        # insert user
+        self.execute("insert into useradministration.useraccount "
+                     "values(1, 1, '%s', '26dcf77e2de89027e8895baea8e45057', 'sNgmwwN7fk')"
+                     % self.user_name)
+        # setup status
+        self.execute("insert into status values(101, 'opened', 'OBSERVATION', '')")
+        self.execute("insert into status values(104, 'approved', 'OBSERVATION', "
+                     "'The specification is in accordance with wishes of the PI.')")
+        # setup status transitions
+        self.execute("insert into momprivilege.statustransition values(1003, 0, 101)")
+        self.execute("insert into momprivilege.statustransition values(1059, 101, 104)")
+        # setup transition role
+        self.execute("insert into momprivilege.statustransitionrole "
+                     "values(1, 1003, 9, 'nl.astron.useradministration.data.entities.SystemRole')")
+        self.execute("insert into momprivilege.statustransitionrole "
+                     "values(2, 1059, 9, 'nl.astron.useradministration.data.entities.SystemRole')")
+        # user account system role
+        self.execute("insert into useradministration.useraccountsystemrole values(533, 1, 9, 0)")
+
+        self.assertTrue(self.mom_database_wrapper.authorized_add_with_status(self.user_name, self.project_name,
+                                                                             'observation', "approved"))
+        self.assertTrue(self.mom_database_wrapper.authorized_add_with_status(self.user_name, self.project_name,
+                                                                             'observation', "opened"))
+
+    def test_authorized_add_with_status_returns_true_when_rights_are_on_project_role(self):
+        # insert user
+        self.execute("insert into useradministration.useraccount "
+                     "values(1, 1, '%s', '26dcf77e2de89027e8895baea8e45057', 'sNgmwwN7fk')"
+                     % self.user_name)
+        # setup status
+        self.execute("insert into status values(101, 'opened', 'OBSERVATION', '')")
+        self.execute("insert into status values(104, 'approved', 'OBSERVATION', "
+                     "'The specification is in accordance with wishes of the PI.')")
+        # setup status transitions
+        self.execute("insert into momprivilege.statustransition values(1003, 0, 101)")
+        self.execute("insert into momprivilege.statustransition values(1059, 101, 104)")
+        # setup transition role
+        self.execute("insert into momprivilege.statustransitionrole "
+                     "values(1, 1003, 1, 'nl.astron.mom2.data.entities.ProjectRole')")
+        self.execute("insert into momprivilege.statustransitionrole "
+                     "values(2, 1059, 1, 'nl.astron.mom2.data.entities.ProjectRole')")
+        # setup project role
+        self.execute("insert into projectrole values(1, 'Pi', NULL)")
+        # setup member project role
+        self.execute("insert into memberprojectrole values(1, 1, 0, 1)")
+        # setup registered member
+        self.execute("insert into registeredmember values(1, 1, 1)")
+        # setup member
+        self.execute("insert into member values(1, 1, 0)")
+        # setup project
+        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'PROJECT', '%(project_name)s', 'test-lofar', "
+                     "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+
+        self.assertTrue(self.mom_database_wrapper.authorized_add_with_status(self.user_name, self.project_name,
+                                                                             'observation', 'approved'))
+        self.assertTrue(self.mom_database_wrapper.authorized_add_with_status(self.user_name, self.project_name,
+                                                                             'observation', 'opened'))
+
+    def test_allows_triggers_raises_exception_on_empty_db(self):
+        with self.assertRaises(ValueError) as exception:
+            self.mom_database_wrapper.allows_triggers(self.project_name)
+
+        self.assertEqual(exception.exception.message, "project name (%s) not found in MoM database" % self.project_name)
+
+    def test_allows_triggers_returns_true_when_project_allows_triggers(self):
+        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'PROJECT', '%(project_name)s', 'test-lofar', "
+                     "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        self.execute("insert into project values(1, 1, '2012-09-14', TRUE, 1000)")
+
+        self.assertTrue(self.mom_database_wrapper.allows_triggers(self.project_name))
+
+    def test_allows_triggers_returns_false_when_project_does_not_allow_triggers(self):
+        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'PROJECT', '%(project_name)s', 'test-lofar', "
+                     "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        self.execute("insert into project values(1, 1, '2012-09-14', FALSE, 1000)")
+
+        self.assertFalse(self.mom_database_wrapper.allows_triggers(self.project_name))
+
+    def test_get_project_priority_raises_exception_on_empty_database(self):
+        with self.assertRaises(ValueError) as exception:
+            self.mom_database_wrapper.get_project_priority(self.project_name)
+
+        self.assertEqual(exception.exception.message, "project name (%s) not found in MoM database" % self.project_name)
+
+    def test_get_project_priority_returns_priority_of_project(self):
+        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'PROJECT', '%(project_name)s', 'test-lofar', "
+                     "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        self.execute("insert into project values(1, 1, '2012-09-14', FALSE, 5000)")
+
+        priority = self.mom_database_wrapper.get_project_priority(self.project_name)
+
+        self.assertEqual(priority, 5000)
+
+    def test_add_trigger_returns_row_id_1_on_empty_table(self):
+        self.execute("insert into useradministration.useraccount "
+                     "values(1, 1, '%s', '26dcf77e2de89027e8895baea8e45057', 'sNgmwwN7fk')"
+                     % self.user_name)
+        result = self.mom_database_wrapper.add_trigger(self.user_name, "host name", "project name", "meta data")
+
+        self.assertEqual(result, 1)
+
+    def test_add_trigger_returns_row_id_2_on_insert_delete_insert_on_empty_database(self):
+        # Triggers are unlikely to be deleted in practice, but the code should be able to handle it
+        self.execute("insert into useradministration.useraccount "
+                     "values(1, 1, '%s', '26dcf77e2de89027e8895baea8e45057', 'sNgmwwN7fk')"
+                     % self.user_name)
+
+        self.mom_database_wrapper.add_trigger(self.user_name, "host name", "project name", "meta data")
+        self.execute("delete from lofar_trigger "
+                     "where id = 1")
+        result = self.mom_database_wrapper.add_trigger(self.user_name, "host name", "project name", "meta data")
+
+        self.assertEqual(result, 2)
+
+    def test_get_trigger_id_returns_None_on_empty_database(self):
+        result = self.mom_database_wrapper.get_trigger_id("1")
+
+        self.assertEqual(result, None)
+
+    def test_get_trigger_id_returns_id_for_lofar_observation(self):
+        self.execute("insert into mom2object values(1, NULL, NULL, 2, 'LOFAR_OBSERVATION', '%(project_name)s', 'test-lofar', "
+                     "NULL, 1704653, NULL, NULL, 0, 0, 0)" % {"project_name": self.project_name})
+        # lofar_observation column order:
+        # id, mom2objectid, observation_id, instrument, user_specification_id, system_specification_id,
+        # default_template, tbb_template, tbb_piggyback_allowed, parset, nr_output_correlated, nr_output_beamformed,
+        # nr_output_coherent_stokes, nr_output_incoherent_stokes, nr_output_flyseye, nr_output_correlated_valid,
+        # nr_output_beamformed_valid, nr_output_coherent_stokes_valid, nr_output_incoherent_stokes_valid,
+        # nr_output_flyseye_valid, feedback, aartfaac_piggyback_allowed, storage_cluster_id, processing_cluster_id,
+        # nico_testing
+        self.execute("insert into lofar_observation values(83, 2, NULL, 'Interferometer', 47, 48, NULL, NULL, 0,"
+                     " NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 1, NULL, NULL, NULL)")
+        # lofar_observation_specification column order:
+        # id, type, correlated_data, filtered_data, beamformed_data, coherent_stokes_data, incoherent_stokes_data,
+        # antenna, clock_mode, instrument_filter, integration_interval, channels_per_subband, cn_integration_steps,
+        # pencilbeams_flyseye, pencilbeams_nr_pencil_rings, pencilbeams_ring_size, stokes_selection,
+        # stokes_integrate_channels, stokes_integration_steps, station_set, timeframe, starttime, endtime,
+        # spec_duration, coherent_dedisperse_channels, dispersion_measure, subbands_per_file_cs, subbands_per_file_bf,
+        # collapsed_channels_cs, collapsed_channels_is, downsampling_steps_cs, downsampling_steps_is, which_cs,
+        # which_is, bypass_pff, enable_superterp, flyseye, tab_nr_rings, tab_ring_size, bits_per_sample, misc
+        self.execute("insert into lofar_observation_specification values(47, 'USER', 1, 0, 0, 0, 0, 'HBA Dual', "
+                     "'160 MHz', '170-230 MHz', 1, NULL, NULL, 0, NULL, NULL, NULL, 0, NULL, 'Custom', NULL, NULL, "
+                     "NULL, NULL, 0, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, 0, 0, NULL, NULL, "
+                     "16, '{\"trigger_id\": %(trigger_id)s}')" % {"trigger_id": self.trigger_id})
+
+        result = self.mom_database_wrapper.get_trigger_id("2")
+
+        self.assertEqual(result, self.trigger_id)
+
+    def test_get_trigger_id_returns_id_for_lofar_pipeline(self):
+        self.execute("insert into mom2object values(1, 104711, 4, 2, 'CALIBRATION_PIPELINE', 'Target Pipeline 1.3', "
+                     "'Target Pipeline 1.3 [1.P3]', 1, 1722446, 'mom_msss_117430.1.P3', 'M117434,M117435', 0, 117430,"
+                     " 0)")
+        # lofar_pipeline column order:
+        # id, mom2objectid, starttime, endtime, pipeline_id, pending, template, runtimeDirectory, resultDirectory,
+        # workingDirectory, parset, nr_output_correlated, nr_output_beamformed, nr_output_instrument_model,
+        # nr_output_skyimage, nr_output_correlated_valid, nr_output_beamformed_valid, nr_output_instrument_model_valid,
+        # nr_output_skyimage_valid, feedback, demixing_parameters_id, bbs_parameters_id, duration, storage_cluster_id,
+        # processing_cluster_id, misc
+        self.execute("insert into lofar_pipeline values(1761, 2, NULL, NULL, 63722, 0, "
+                     "'Calibration Pipeline Calibrator', NULL, NULL, NULL, 'parset', 0, NULL, 244, NULL, 0, 0, NULL, 0,"
+                     " NULL, 3071, 3071, NULL, NULL, NULL, '{\"trigger_id\": %(trigger_id)s}')" % {"trigger_id": self.trigger_id})
+
+        result = self.mom_database_wrapper.get_trigger_id("2")
+
+        self.assertEqual(result, self.trigger_id)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/SAS/ResourceAssignment/TaskPrescheduler/test/test_taskprescheduler.run b/SAS/ResourceAssignment/TaskPrescheduler/test/test_taskprescheduler.run
new file mode 100755
index 0000000000000000000000000000000000000000..758c2e6eb767a52e85be2381224c0ff4d8c8f86a
--- /dev/null
+++ b/SAS/ResourceAssignment/TaskPrescheduler/test/test_taskprescheduler.run
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+# Run the unit test
+source python-coverage.sh
+python_coverage_test "TaskPrescheduler/*" test_taskprescheduler.py
diff --git a/SAS/ResourceAssignment/TaskPrescheduler/test/test_taskprescheduler.sh b/SAS/ResourceAssignment/TaskPrescheduler/test/test_taskprescheduler.sh
new file mode 100755
index 0000000000000000000000000000000000000000..e10662a2c6a9b5834840d4b85a299358c9c53f85
--- /dev/null
+++ b/SAS/ResourceAssignment/TaskPrescheduler/test/test_taskprescheduler.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+./runctest.sh test_taskprescheduler