diff --git a/LCS/PyCommon/test/CMakeLists.txt b/LCS/PyCommon/test/CMakeLists.txt
index b7c00cd2d37fcfd4ab607d4b5ffb8dd136fec65c..2494ca8ef8b34a62fae5bc2b0a9abe6c3d29d686 100644
--- a/LCS/PyCommon/test/CMakeLists.txt
+++ b/LCS/PyCommon/test/CMakeLists.txt
@@ -1,16 +1,30 @@
 # $Id$
 
-include(LofarCTest)
-
-file(COPY
-  ${CMAKE_CURRENT_SOURCE_DIR}/python-coverage.sh
-  DESTINATION ${CMAKE_BINARY_DIR}/bin)
-
-lofar_add_test(t_cache)
-lofar_add_test(t_dbcredentials)
-lofar_add_test(t_defaultmailaddresses)
-lofar_add_test(t_methodtrigger)
-lofar_add_test(t_util)
-lofar_add_test(t_test_utils)
-lofar_add_test(t_cep4_utils)
-lofar_add_test(t_postgres)
+IF(BUILD_TESTING)
+    lofar_find_package(Python 3.4 REQUIRED)
+
+    include(PythonInstall)
+
+    set(_py_files
+      postgres.py)
+
+    python_install(${_py_files} DESTINATION lofar/common/testing)
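+    # postgres.py is installed as the module lofar.common.testing.postgres,
+    # so other packages' tests can reuse these database fixtures as well.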
+
+    include(FindPythonModule)
+    find_python_module(testing.postgresql)
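+    # note: testing.postgresql is imported at runtime by lofar.common.testing.postgres
+    # to spin up an isolated postgres server instance per test run.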
+
+    include(LofarCTest)
+
+    file(COPY
+      ${CMAKE_CURRENT_SOURCE_DIR}/python-coverage.sh
+      DESTINATION ${CMAKE_BINARY_DIR}/bin)
+
+    lofar_add_test(t_cache)
+    lofar_add_test(t_dbcredentials)
+    lofar_add_test(t_defaultmailaddresses)
+    lofar_add_test(t_methodtrigger)
+    lofar_add_test(t_util)
+    lofar_add_test(t_test_utils)
+    lofar_add_test(t_cep4_utils)
+    lofar_add_test(t_postgres)
+ENDIF()
diff --git a/LCS/PyCommon/test/postgres.py b/LCS/PyCommon/test/postgres.py
new file mode 100755
index 0000000000000000000000000000000000000000..50c98efc5dfa6bfbc407af8079b90b11a2bc499e
--- /dev/null
+++ b/LCS/PyCommon/test/postgres.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2012-2015    ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.    See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+# $Id$
+import psycopg2
+import os, sys
+import logging
+
+logger = logging.getLogger(__name__)
+
+import testing.postgresql
+from lofar.common.dbcredentials import Credentials
+from lofar.common.postgres import PostgresDatabaseConnection
+
+class PostgresTestDatabaseInstance():
+    '''A helper class which instantiates a running postgres server that does not interfere with any other test/production postgres servers.
+    Best used in a 'with'-context so the server is destroyed automagically.
+    Derive your own sub-class and implement apply_database_schema with your own sql schema to set up your type of database.
+    A minimal usage sketch is given below.
+    '''
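+
+    # A minimal usage sketch (MyTestDBInstance is a hypothetical name; see
+    # t_postgres.py and common_test_ltastoragedb.py for real subclasses):
+    #
+    #   class MyTestDBInstance(PostgresTestDatabaseInstance):
+    #       def apply_database_schema(self):
+    #           with PostgresDatabaseConnection(self.dbcreds) as db:
+    #               db.executeQuery("CREATE TABLE foo (id serial PRIMARY KEY);")
+    #               db.commit()
+    #
+    #   with MyTestDBInstance() as test_db:
+    #       with test_db.create_database_connection() as db:
+    #           db.executeQuery("SELECT * FROM foo;")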
+
+    def __init__(self, user_name: str = 'test_user') -> None:
+        self._postgresql = None
+        self.dbcreds = Credentials()
+        self.dbcreds.user = user_name
+        self.dbcreds.password = 'test_password'  # cannot be empty...
+
+    def __enter__(self):
+        '''create/instantiate the postgres server'''
+        try:
+            self.create()
+        except Exception as e:
+            logger.exception(e)
+            # clean up the half-created instance, then re-raise so callers never get a broken instance
+            self.destroy()
+            raise
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        '''destroy the running postgres server'''
+        self.destroy()
+
+    def create(self):
+        '''instantiate the isolated postgres server'''
+        logger.info('creating test-database instance...')
+
+        self._postgresql = testing.postgresql.PostgresqlFactory(cache_initialized_db=True)()
+
+        # fill the credentials from the dynamically created postgres instance (e.g. the port changes each time)
+        dsn = self._postgresql.dsn()
+        self.dbcreds.host = dsn['host']
+        self.dbcreds.database = dsn['database']
+        self.dbcreds.port = dsn['port']
+
+        # connect to db as root
+        conn = psycopg2.connect(**dsn)
+        try:
+            with conn.cursor() as cursor:
+                # create user role
+                query = "CREATE USER %s WITH SUPERUSER PASSWORD '%s'" % (self.dbcreds.user, self.dbcreds.password)
+                cursor.execute(query)
+            # commit only on success; the finally-clause just closes the connection
+            conn.commit()
+
+            logger.info('Created test-database instance. It is available at: %s', self.dbcreds.stringWithHiddenPassword())
+        finally:
+            conn.close()
+
+        logger.info('Applying test-database schema...')
+        self.apply_database_schema()
+
+    def destroy(self):
+        '''destroy the running postgres server'''
+        try:
+            if self._postgresql:
+                logger.info('removing test-database instance at %s', self.dbcreds.stringWithHiddenPassword())
+                self._postgresql.stop()
+                logger.info('test-database instance removed')
+        except Exception as e:
+            logger.error('error while removing test-database instance at %s: %s', self.dbcreds.stringWithHiddenPassword(), e)
+
+    def apply_database_schema(self):
+        ''' Override and implement this method. Open a connection to the database specified by self.dbcreds, and apply your database's sql schema.'''
+        raise NotImplementedError("Please override PostgresTestDatabaseInstance.apply_database_schema and setup your database with an sql schema.")
+
+    def create_database_connection(self) -> PostgresDatabaseConnection:
+        ''' Factory method to create a PostgresDatabaseConnection to the testing-database.
+        Override this method if you want to return your own PostgresDatabaseConnection-subclass, constructed with the given self.dbcreds.
+        Note: you have to connect/disconnect the connection yourself, so the recommended usage is in a 'with'-context.'''
+        return PostgresDatabaseConnection(self.dbcreds)
+
+    def print_database_instance_log(self):
+        '''print the log of the testing-database instance (can help when debugging sql statements)'''
+        try:
+            if self._postgresql:
+                db_log_file_name = os.path.join(self._postgresql.base_dir, '%s.log' % self._postgresql.name)
+                logger.info('Printing test-postgres-database server log for reference: %s', db_log_file_name)
+                with open(db_log_file_name, 'r') as db_log_file:
+                    for line in db_log_file:
+                        print("  postgres log: %s" % line.strip(), file=sys.stderr)
+        except Exception as e:
+            logger.error("Error while printing test-postgres-database server log: %s", e)
+
+class PostgresTestMixin():
+    '''
+    A common test mixin class from which you can/should derive to get a freshly set-up postgres testing instance with your sql setup scripts applied.
+    It implements the unittest setUpClass/tearDownClass methods as a template-method pattern, doing all the testing-database setup/teardown work for you.
+    A minimal usage sketch is given below.
+    '''
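+
+    # A minimal usage sketch (MyTests and MyTestDBInstance are hypothetical
+    # names; see t_postgres.py for a real example):
+    #
+    #   class MyTests(PostgresTestMixin, unittest.TestCase):
+    #       @classmethod
+    #       def create_test_db_instance(cls) -> PostgresTestDatabaseInstance:
+    #           return MyTestDBInstance()
+    #
+    #       def test_something(self):
+    #           self.db.executeQuery("SELECT 1;")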
+
+    # class variables are initialized in setUpClass
+    _test_db_instance  = None
+    db                 = None
+
+    @classmethod
+    def create_test_db_instance(cls) -> PostgresTestDatabaseInstance:
+        raise NotImplementedError("Please implement create_test_db_instance in your subclass and return your preferred PostgresTestDatabaseInstance-subclass")
+
+    @classmethod
+    def setUpClass(cls):
+        # create a running isolated test database instance
+        cls._test_db_instance = cls.create_test_db_instance()
+        cls._test_db_instance.create()
+
+        # create a single PostgresDatabaseConnection for the entire test suite
+        logger.info('Creating PostgresDatabaseConnection to test-database...')
+        cls.db = cls._test_db_instance.create_database_connection()
+        cls.db.connect()
+        logger.info('PostgresDatabaseConnection to test-database %s is ready to be used.', cls.db)
+
+    @classmethod
+    def tearDownClass(cls):
+        cls.db.disconnect()
+        cls._test_db_instance.print_database_instance_log()
+        cls._test_db_instance.destroy()
+
+    @property
+    def dbcreds(self) -> Credentials:
+        return self._test_db_instance.dbcreds
diff --git a/LCS/PyCommon/test/t_postgres.py b/LCS/PyCommon/test/t_postgres.py
index 29368d849be44e33dde4387c9b4ff9a0397b1fb9..7a2ba4fc67e917512a8b42e38a48110c897fb0af 100755
--- a/LCS/PyCommon/test/t_postgres.py
+++ b/LCS/PyCommon/test/t_postgres.py
@@ -3,65 +3,43 @@
 import unittest
 from unittest import mock
 from lofar.common.postgres import *
-import testing.postgresql
+from lofar.common.testing.postgres import PostgresTestDatabaseInstance, PostgresTestMixin
 import psycopg2
 import signal
-from lofar.common.dbcredentials import Credentials
+from copy import deepcopy
 
 import logging
 logger = logging.getLogger(__name__)
 
-def setUpModule():
-    pass
-
-def tearDownModule():
-    pass
-
-class TestPostgres(unittest.TestCase):
-    def setUp(self):
-        logger.debug("starting test-postgres-database-instance...")
-        self.pgfactory = testing.postgresql.PostgresqlFactory()
-        self.pginstance = self.pgfactory()
-
-        self.dbcreds = Credentials()
-        self.dbcreds.user = 'test_pg_user'
-        self.dbcreds.password = 'test_pg_password' # cannot be empty...
-
-        dsn = self.pginstance.dsn()
-
-        # update credentials from current testing pginstance (e.g. port changes for each test)
-        self.dbcreds.host = dsn['host']
-        self.dbcreds.database = dsn['database']
-        self.dbcreds.port = dsn['port']
-
+class MyPostgresTestDatabaseInstance(PostgresTestDatabaseInstance):
+    def apply_database_schema(self):
         # use 'normal' psycopg2 API to connect and setup the database,
         # not the PostgresDatabaseConnection class, because that's the object-under-test.
         logger.debug("connecting to %s test-postgres-database-instance...", self.dbcreds.stringWithHiddenPassword())
-        with psycopg2.connect(**dsn) as connection:
+        with psycopg2.connect(**self.dbcreds.psycopg2_connect_options()) as connection:
             with connection.cursor() as cursor:
                 logger.debug("creating database user and tables in %s test-postgres-database-instance...", self.dbcreds.stringWithHiddenPassword())
-                cursor.execute("CREATE USER %s WITH SUPERUSER PASSWORD '%s';" % (self.dbcreds.user,self.dbcreds.password))
                 cursor.execute("CREATE TABLE IF NOT EXISTS foo (id serial NOT NULL, bar text NOT NULL, PRIMARY KEY (id));")
                 connection.commit()
 
-        logger.info("created and started %s test-postgres-database-instance", self.dbcreds.stringWithHiddenPassword())
+class MyPostgresTestMixin(PostgresTestMixin):
+    @classmethod
+    def create_test_db_instance(cls) -> PostgresTestDatabaseInstance:
+        return MyPostgresTestDatabaseInstance()
 
-    def tearDown(self):
-        logger.debug("stopping %s test-postgres-database-instance...", self.dbcreds.stringWithHiddenPassword())
-        self.pginstance.stop()
-        logger.info("stopped %s test-postgres-database-instance", self.dbcreds.stringWithHiddenPassword())
 
-    def test_connection_error_on_stopped_pginstance(self):
-        # force to pginstance to stop so we cannot connect to it.
-        self.pginstance.stop()
-        logger.info("stopped %s test-postgres-database-instance", self.dbcreds.stringWithHiddenPassword())
+class TestPostgres(MyPostgresTestMixin, unittest.TestCase):
+    def test_connection_error_with_incorrect_dbcreds(self):
+        # connect to an incorrect port -> should result in a PostgresDBConnectionError
+        incorrect_dbcreds = deepcopy(self.dbcreds)
+        incorrect_dbcreds.port += 1
 
         # test if connecting fails
         with mock.patch('lofar.common.postgres.logger') as mocked_logger:
             with self.assertRaises(PostgresDBConnectionError):
                 NUM_CONNECT_RETRIES = 2
-                db = PostgresDatabaseConnection(dbcreds=self.dbcreds, connect_retry_interval=0.1, num_connect_retries=NUM_CONNECT_RETRIES)
-                db.connect()
+                with PostgresDatabaseConnection(dbcreds=incorrect_dbcreds, connect_retry_interval=0.1, num_connect_retries=NUM_CONNECT_RETRIES) as db:
+                    pass
 
             # check logging
             self.assertEqual(NUM_CONNECT_RETRIES, len([ca for ca in mocked_logger.info.call_args_list if 'retrying to connect' in ca[0][0]]))
@@ -92,7 +70,7 @@ class TestPostgres(unittest.TestCase):
                         self.pginstance.start()
                         logger.info("restarted test-postgres-database-instance")
 
-        with HelperPostgresDatabaseConnection(dbcreds=self.dbcreds, pginstance=self.pginstance) as db:
+        with HelperPostgresDatabaseConnection(dbcreds=self.dbcreds, pginstance=self._test_db_instance._postgresql) as db:
             # insert some test data
             db.executeQuery("INSERT INTO foo (bar) VALUES ('my_value');")
             db.commit()
@@ -102,7 +80,8 @@ class TestPostgres(unittest.TestCase):
             self.assertEqual([{'id':1, 'bar': 'my_value'}], result)
 
             # terminate the pginstance (simulating a production database malfunction)
-            self.pginstance.terminate(signal.SIGTERM)
+            logger.info("terminating %s test-postgres-database-instance...", self.dbcreds.stringWithHiddenPassword())
+            self._test_db_instance._postgresql.terminate(signal.SIGTERM)
             logger.info("terminated %s test-postgres-database-instance", self.dbcreds.stringWithHiddenPassword())
 
             # prove that the database is down by trying to connect which results in a PostgresDBConnectionError
@@ -131,11 +110,6 @@ class TestPostgres(unittest.TestCase):
                 db.executeQuery("SELECT * FROM error_func();")
 
 
-
-
-
-
-
 logging.basicConfig(format='%(asctime)s %(process)s %(threadName)s %(levelname)s %(message)s', level=logging.DEBUG)
 
 if __name__ == "__main__":
diff --git a/LTA/ltastorageoverview/test/CMakeLists.txt b/LTA/ltastorageoverview/test/CMakeLists.txt
index cf5f5379e5c432a0d9976a6b4130c1eb4603a864..7bd5d7f69fbb1a303b67fca083e527848c967d5e 100644
--- a/LTA/ltastorageoverview/test/CMakeLists.txt
+++ b/LTA/ltastorageoverview/test/CMakeLists.txt
@@ -1,12 +1,23 @@
 # $Id$
-include(LofarCTest)
 
-include(FindPythonModule) 
-find_python_module(flask.testing REQUIRED)
+IF(BUILD_TESTING)
+    lofar_find_package(Python 3.4 REQUIRED)
+    include(PythonInstall)
 
-lofar_add_test(test_store)
-lofar_add_test(test_scraper)
-lofar_add_test(test_lso_webservice)
-lofar_add_test(test_ingesteventhandler)
+    set(_py_files
+      common_test_ltastoragedb.py)
 
-lofar_add_test(integration_test_store)
+    python_install(${_py_files} DESTINATION lofar/lta/ltastorageoverview/testing)
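+    # common_test_ltastoragedb.py is installed as the module
+    # lofar.lta.ltastorageoverview.testing.common_test_ltastoragedb,
+    # so e.g. db_performance_test.py can import the test mixin from an installed location.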
+
+    include(FindPythonModule)
+    find_python_module(flask.testing REQUIRED)
+
+    include(LofarCTest)
+
+    lofar_add_test(test_store)
+    lofar_add_test(test_scraper)
+    lofar_add_test(test_lso_webservice)
+    lofar_add_test(test_ingesteventhandler)
+
+    lofar_add_test(integration_test_store)
+ENDIF()
diff --git a/LTA/ltastorageoverview/test/common_test_ltastoragedb.py b/LTA/ltastorageoverview/test/common_test_ltastoragedb.py
index 5a43cb1e81622f8b9c6b7060a457e792cf8cc85b..ac0342a9567508abbed1396916e27c38e4514531 100755
--- a/LTA/ltastorageoverview/test/common_test_ltastoragedb.py
+++ b/LTA/ltastorageoverview/test/common_test_ltastoragedb.py
@@ -17,57 +17,31 @@
 
 # $Id$
 
-import unittest
 import logging
 import os, os.path
-import psycopg2
-import lofar.common.dbcredentials as dbc
-
-try:
-    import testing.postgresql
-except ImportError as e:
-    print(str(e))
-    print('Please install python3 package testing.postgresql: sudo pip3 install testing.postgresql')
-    exit(3)    # special lofar test exit code: skipped test
 
+from lofar.common.testing.postgres import PostgresTestDatabaseInstance, PostgresTestMixin, PostgresDatabaseConnection
+from lofar.lta.ltastorageoverview import store
 logger = logging.getLogger(__name__)
 
-class CommonLTAStorageDbTest(unittest.TestCase):
-    def setUp(self):
-        logger.info('setting up test LTASO database server...')
-
-        # create a test db
-        logger.info('  creating test postgres server')
-        self.test_psql = testing.postgresql.Postgresql()
-        dsn = self.test_psql.dsn()
-        logger.info('  created test postgres server, dsn=%s', dsn)
-
-        self.dbcreds = dbc.Credentials()
-        self.dbcreds.user = 'test_user'
-        self.dbcreds.password = 'test_password'
+class LTAStorageDbTestInstance(PostgresTestDatabaseInstance):
+    def apply_database_schema(self):
+        create_script_path = os.path.normpath(os.path.join(os.environ['LOFARROOT'], 'share', 'ltaso', 'create_db_ltastorageoverview.sql'))
+        logger.info('  running ltaso create script create_script=%s', create_script_path)
+        with open(create_script_path, 'r') as script:
+            with PostgresDatabaseConnection(self.dbcreds) as db:
+                db.executeQuery(script.read())
 
-        with psycopg2.connect(**dsn) as conn:
-            cursor = conn.cursor()
-            # use same user/pass as stored in local dbcreds
-            query = "CREATE USER %s WITH SUPERUSER PASSWORD '%s'" % (self.dbcreds.user, self.dbcreds.password)
-            cursor.execute(query)
+    def create_database_connection(self) -> store.LTAStorageDb:
+        return store.LTAStorageDb(self.dbcreds)
 
-            create_script_path = os.path.normpath(os.path.join(os.environ['LOFARROOT'], 'share', 'ltaso', 'create_db_ltastorageoverview.sql'))
-            logger.info('  running ltaso create script create_script=%s', create_script_path)
-            with open(create_script_path, 'r') as script:
-                cursor.execute(script.read())
-            logger.info('  completed ltaso create script')
-
-        # copy the test postgres server settings into dbcreds
-        # we can use these dbcreds in each test method to connect to the testing ltaso database
-        self.dbcreds.host = dsn['host']
-        self.dbcreds.database = dsn['database']
-        self.dbcreds.port = dsn['port']
-
-        logger.info('finished setting up test LTASO database')
-
-    def tearDown(self):
-        logger.info('removing test LTASO database server...')
-        self.test_psql.stop()
-        logger.info('removed test LTASO database server')
+class LTAStorageDbTestMixin(PostgresTestMixin):
+    @classmethod
+    def create_test_db_instance(cls) -> PostgresTestDatabaseInstance:
+        return LTAStorageDbTestInstance()
 
+    def setUp(self):
+        # wipe all tables by truncating lta.site, which cascades into the rest.
+        logger.debug("setUp: Wiping tables for each unittest.")
+        self.db.executeQuery("TRUNCATE TABLE lta.site CASCADE;")
+        self.db.commit()
diff --git a/LTA/ltastorageoverview/test/db_performance_test.py b/LTA/ltastorageoverview/test/db_performance_test.py
index 5c03e116ecd2fdb0d9c083a5b2f9812081c64d49..b3d1f7a23a9815f49cb7dbc40bace2c2facd2030 100755
--- a/LTA/ltastorageoverview/test/db_performance_test.py
+++ b/LTA/ltastorageoverview/test/db_performance_test.py
@@ -19,61 +19,45 @@
 
 import logging
 from datetime import datetime, timedelta
-import os
 
 from lofar.lta.ltastorageoverview import store
 from lofar.common.datetimeutils import totalSeconds
+from lofar.lta.ltastorageoverview.testing.common_test_ltastoragedb import LTAStorageDbTestMixin
 
 logger = logging.getLogger()
 
+class LTAStorageDbTestInstance():
+    '''Helper class which uses the LTAStorageDbTestMixin without a unittest.TestCase to set up/tear down a test LTAStorageDb instance.
+    Note: despite the similar name, this is a standalone context-manager helper, not the PostgresTestDatabaseInstance-subclass from common_test_ltastoragedb.'''
+    def __init__(self):
+        self._db_creator = LTAStorageDbTestMixin()
+
+    @property
+    def dbcreds(self):
+        return self._db_creator.dbcreds
+
+    def __enter__(self):
+        self._db_creator.setUpClass()
+        self._db_creator.setUp()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self._db_creator.tearDownClass()
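+
+    # Intended usage (main() below does exactly this):
+    #
+    #   with LTAStorageDbTestInstance() as test_db:
+    #       db = store.LTAStorageDb(test_db.dbcreds)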
+
+
 def main():
     from optparse import OptionParser
     from lofar.common import dbcredentials
-    import testing.postgresql
-    import psycopg2
 
     # Check the invocation arguments
     parser = OptionParser("%prog [options]", description='execute a performance test by inserting many files on an empty test database.')
-    parser.add_option('-V', '--verbose', dest='verbose', action='store_true', help='verbose logging')
     (options, args) = parser.parse_args()
 
-    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
-                        level=logging.DEBUG if options.verbose else logging.INFO)
-
-
-    # create a test webservice.db
-    logger.info('  creating test postgres server')
-    with testing.postgresql.Postgresql() as test_psql:
-        dsn = test_psql.dsn()
-        logger.info('  created test postgres server, dsn=%s', dsn)
-
-        dbcreds = dbcredentials.Credentials()
-        dbcreds.user = 'test_user'
-        dbcreds.password = 'test_password'
-
-        with psycopg2.connect(**dsn) as conn:
-            cursor = conn.cursor()
-            #use same user/pass as stored in local webservice.dbcreds
-            query = "CREATE USER %s WITH SUPERUSER PASSWORD '%s'" % (dbcreds.user, dbcreds.password)
-            cursor.execute(query)
-
-            create_script_path = os.path.normpath(os.path.join(os.environ['LOFARROOT'], 'share', 'ltaso', 'create_db_ltastorageoverview.sql'))
-            logger.info('  running ltaso create script create_script=%s', create_script_path)
-            with open(create_script_path, 'r') as script:
-                cursor.execute(script.read())
-            logger.info('  completed ltaso create script')
-
-        # copy the test postgres server settings into webservice.dbcreds
-        # we can use these webservice.dbcreds in each test method to connect to the testing ltaso database
-        dbcreds.host = dsn['host']
-        dbcreds.database = dsn['database']
-        dbcreds.port = dsn['port']
-
-        logger.info('finished setting up test LTASO database')
+    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
 
+    with LTAStorageDbTestInstance() as test_db:
         base_date = datetime.utcnow()
 
-        db = store.LTAStorageDb(dbcreds, options.verbose)
+        db = store.LTAStorageDb(test_db.dbcreds)
 
         db.insertSiteIfNotExists('sara', 'srm://srm.siteA.nl:8444')
         rootdir_id = db.insertRootDirectory('sara', '/pnfs/grid.siteA.nl/data/lofar/ops')
diff --git a/LTA/ltastorageoverview/test/integration_test_store.py b/LTA/ltastorageoverview/test/integration_test_store.py
index d7450eebd2cef7b04393cfaff3c5f8c70052a837..10ecbf8a236e5be0b2e69b564bd9c77e52502111 100755
--- a/LTA/ltastorageoverview/test/integration_test_store.py
+++ b/LTA/ltastorageoverview/test/integration_test_store.py
@@ -21,13 +21,14 @@
 
 import logging
 from datetime import datetime, timedelta
-import time
-from common_test_ltastoragedb import *
-from lofar.lta.ltastorageoverview import store
+import unittest
+from lofar.lta.ltastorageoverview.testing.common_test_ltastoragedb import LTAStorageDbTestMixin
 
 logger = logging.getLogger(__name__)
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
 
-class IntegrationTestLTAStorageDb(CommonLTAStorageDbTest):
+
+class IntegrationTestLTAStorageDb(LTAStorageDbTestMixin, unittest.TestCase):
     """
     Bigger tests for the lofar.lta.ltastorageoverview.store.LTAStorageDb
     which test more complex behaviour with bigger amounts of data.
@@ -40,76 +41,41 @@ class IntegrationTestLTAStorageDb(CommonLTAStorageDbTest):
         2) test if the automatically computed tree- and dirstats are correct.
         """
 
-        with store.LTAStorageDb(self.dbcreds) as db:
-            base_time = datetime.utcnow()
-            base_time -= timedelta(seconds=base_time.second, microseconds=base_time.microsecond)
-
-            ###########################################################
-            # STAGE 1: insertion and check phase.
-            # insert the sites, directories, and files
-            # and check the dir- and tree stats directly after insertion
-            ###########################################################
-            NUM_SITES = 2
-            NUM_PROJECTS = 3
-            NUM_PROJECT_SUBDIRS = 4
-            NUM_SUB_SUBDIRS = 5
-
-            # helper dict to store all subdir id's for each dir.
-            dir2subdir = {}
-
-            for site_nr in range(NUM_SITES):
-                site_name = 'site%d' % site_nr
-                site_url = 'srm://%s.org' % site_name
-                db.insertSiteIfNotExists(site_name, site_url)
-
-                for project_nr in range(NUM_PROJECTS):
-                    rootDir_id = db.insertRootDirectory(site_name, 'rootDir_%d' % project_nr)
-                    dir2subdir[rootDir_id] = []
-
-                    for subdir_nr in range(NUM_PROJECT_SUBDIRS):
-                        subDir_id = db.insertSubDirectory('subDir_%d' % subdir_nr, rootDir_id)
-                        dir2subdir[subDir_id] = []
-                        dir2subdir[rootDir_id].append(subDir_id)
-                        for file_nr in range(project_nr*subdir_nr):
-                            db.insertFileInfo('file_%d' % file_nr, 271*(file_nr+1), base_time + timedelta(days=10*site_nr+project_nr, hours=subdir_nr, seconds=file_nr), subDir_id)
-
-                            dir_files = db.filesInDirectory(subDir_id)
-                            dir_stats = db.directoryTreeStats(subDir_id)
-
-                            self.assertEqual(sum(f['size'] for f in dir_files), dir_stats['dir_total_file_size'])
-                            self.assertEqual(len(dir_files), dir_stats['dir_num_files'])
-                            if dir_files:
-                                self.assertEqual(min(f['size'] for f in dir_files), dir_stats['dir_min_file_size'])
-                                self.assertEqual(max(f['size'] for f in dir_files), dir_stats['dir_max_file_size'])
-                                self.assertEqual(min(f['creation_date'] for f in dir_files), dir_stats['dir_min_file_creation_date'])
-                                self.assertEqual(max(f['creation_date'] for f in dir_files), dir_stats['dir_max_file_creation_date'])
+        base_time = datetime.utcnow()
+        base_time -= timedelta(seconds=base_time.second, microseconds=base_time.microsecond)
+
+        ###########################################################
+        # STAGE 1: insertion and check phase.
+        # insert the sites, directories, and files
+        # and check the dir- and tree stats directly after insertion
+        ###########################################################
+        NUM_SITES = 2
+        NUM_PROJECTS = 3
+        NUM_PROJECT_SUBDIRS = 4
+        NUM_SUB_SUBDIRS = 5
+
+        # helper dict to store all subdir id's for each dir.
+        dir2subdir = {}
+
+        for site_nr in range(NUM_SITES):
+            site_name = 'site%d' % site_nr
+            site_url = 'srm://%s.org' % site_name
+            self.db.insertSiteIfNotExists(site_name, site_url)
+
+            for project_nr in range(NUM_PROJECTS):
+                rootDir_id = self.db.insertRootDirectory(site_name, 'rootDir_%d' % project_nr)
+                dir2subdir[rootDir_id] = []
+
+                for subdir_nr in range(NUM_PROJECT_SUBDIRS):
+                    subDir_id = self.db.insertSubDirectory('subDir_%d' % subdir_nr, rootDir_id)
+                    dir2subdir[subDir_id] = []
+                    dir2subdir[rootDir_id].append(subDir_id)
+                    for file_nr in range(project_nr*subdir_nr):
+                        self.db.insertFileInfo('file_%d' % file_nr, 271*(file_nr+1), base_time + timedelta(days=10*site_nr+project_nr, hours=subdir_nr, seconds=file_nr), subDir_id)
+
+                        dir_files = self.db.filesInDirectory(subDir_id)
+                        dir_stats = self.db.directoryTreeStats(subDir_id)
 
-                        for subsubdir_nr in range(NUM_SUB_SUBDIRS):
-                            subsubDir_id = db.insertSubDirectory('subsubDir_%d' % subsubdir_nr, subDir_id)
-                            dir2subdir[subsubDir_id] = []
-                            dir2subdir[subDir_id].append(subsubDir_id)
-                            for kk in range(project_nr*subdir_nr*subsubdir_nr):
-                                db.insertFileInfo('file_%d_%d' % (subdir_nr,kk), 314*(kk+1), base_time + timedelta(days=10*site_nr+project_nr, hours=10*subdir_nr+subsubdir_nr+2, seconds=kk), subsubDir_id)
-
-                                dir_files = db.filesInDirectory(subsubDir_id)
-                                dir_stats = db.directoryTreeStats(subsubDir_id)
-
-                                self.assertEqual(sum(f['size'] for f in dir_files), dir_stats['dir_total_file_size'])
-                                self.assertEqual(len(dir_files), dir_stats['dir_num_files'])
-                                if dir_files:
-                                    self.assertEqual(min(f['size'] for f in dir_files), dir_stats['dir_min_file_size'])
-                                    self.assertEqual(max(f['size'] for f in dir_files), dir_stats['dir_max_file_size'])
-                                    self.assertEqual(min(f['creation_date'] for f in dir_files), dir_stats['dir_min_file_creation_date'])
-                                    self.assertEqual(max(f['creation_date'] for f in dir_files), dir_stats['dir_max_file_creation_date'])
-
-                                    tree_totals = db.totalFileSizeAndNumFilesInTree(subDir_id, dir_stats['dir_min_file_creation_date'], dir_stats['dir_max_file_creation_date'])
-                                    self.assertEqual(tree_totals['tree_num_files'], dir_stats['dir_num_files'])
-                                    self.assertEqual(tree_totals['tree_total_file_size'], dir_stats['dir_total_file_size'])
-
-                        # test 1st level subdir again, and also check inclusion of 2nd level subdirs in tree stats
-                        dir_files = db.filesInDirectory(subDir_id)
-                        dir_stats = db.directoryTreeStats(subDir_id)
-                        # this dir only...
                         self.assertEqual(sum(f['size'] for f in dir_files), dir_stats['dir_total_file_size'])
                         self.assertEqual(len(dir_files), dir_stats['dir_num_files'])
                         if dir_files:
@@ -118,87 +84,118 @@ class IntegrationTestLTAStorageDb(CommonLTAStorageDbTest):
                             self.assertEqual(min(f['creation_date'] for f in dir_files), dir_stats['dir_min_file_creation_date'])
                             self.assertEqual(max(f['creation_date'] for f in dir_files), dir_stats['dir_max_file_creation_date'])
 
-                        # including subdirs in tree...
-                        self.assertEqual(sum(f['file_size'] for f in db.filesInTree(subDir_id)), dir_stats['tree_total_file_size'])
-                        self.assertEqual(len(db.filesInTree(subDir_id)), dir_stats['tree_num_files'])
-
-            ####################################################################################
-            # STAGE 2: reporting phase.
-            # loop over the sites, directories, and files now that the database has been filled.
-            # and check the dir- and tree stats totals
-            ####################################################################################
-            for site in db.sites():
-                site_id = site['id']
-
-                rootDirs = db.rootDirectoriesForSite(site_id)
-                self.assertEquals(NUM_PROJECTS, len(rootDirs))
-
-                for root_dir_id in [x['root_dir_id'] for x in rootDirs]:
-                    subDirs = db.subDirectories(root_dir_id, 1, False)
-                    self.assertEquals(NUM_PROJECT_SUBDIRS, len(subDirs))
-
-                    for subDir in subDirs:
-                        subDir_parent_id = subDir['parent_dir_id']
-                        self.assertEquals(root_dir_id, subDir_parent_id)
-                        self.assertTrue(subDir['id'] in dir2subdir[root_dir_id])
-
-                        subsubDirs = db.subDirectories(subDir['id'], 1, False)
-                        self.assertEquals(NUM_SUB_SUBDIRS, len(subsubDirs))
-
-                        for subsubDir in subsubDirs:
-                            subsubDir_parent_id = subsubDir['parent_dir_id']
-                            self.assertEquals(subDir['id'], subsubDir_parent_id)
-                            self.assertTrue(subsubDir['id'] in dir2subdir[subDir['id']])
-
-                    # check various selects of files in the tree, for each file
-                    tree_files = sorted(db.filesInTree(root_dir_id), key=lambda f: f['file_creation_date'])
-                    for file in tree_files:
-                        # check if filesInTree return this one file when time delimited for this specific file_creation_date
-                        file_creation_date = file['file_creation_date']
-                        selected_tree_files = db.filesInTree(root_dir_id, file_creation_date, file_creation_date)
-                        self.assertEqual(1, len(selected_tree_files))
-                        self.assertEqual(file['file_creation_date'], selected_tree_files[0]['file_creation_date'])
-                        self.assertEqual(file['file_size'], selected_tree_files[0]['file_size'])
-
-                        # get the 'totals' for this root_dir, but select only this file by date.
-                        # should return 1 file.
-                        tree_totals = db.totalFileSizeAndNumFilesInTree(root_dir_id, file_creation_date, file_creation_date)
-                        self.assertEqual(1, tree_totals['tree_num_files'])
-                        self.assertEqual(file['file_size'], tree_totals['tree_total_file_size'])
-
-                    # check some ranges files/times
-                    for idx, file in enumerate(tree_files):
-                        file_creation_date = file['file_creation_date']
-
-                        #select any file >= file_creation_date
-                        expected_selected_tree_files = tree_files[idx:]
-                        selected_tree_files = db.filesInTree(root_dir_id, file_creation_date, None)
-                        self.assertEqual(len(expected_selected_tree_files), len(selected_tree_files))
-                        selected_tree_files_ids = set([f['file_id'] for f in selected_tree_files])
-                        for expected_file in expected_selected_tree_files:
-                            self.assertTrue(expected_file['file_id'] in selected_tree_files_ids)
-
-                        # and check the totals as well
-                        tree_totals = db.totalFileSizeAndNumFilesInTree(root_dir_id, file_creation_date, None)
-                        self.assertEqual(len(expected_selected_tree_files), tree_totals['tree_num_files'])
-                        self.assertEqual(sum(f['file_size'] for f in expected_selected_tree_files), tree_totals['tree_total_file_size'])
-
-                        #select any file <= file_creation_date
-                        expected_selected_tree_files = tree_files[:idx+1]
-                        selected_tree_files = db.filesInTree(root_dir_id, None, file_creation_date)
-                        self.assertEqual(len(expected_selected_tree_files), len(selected_tree_files))
-                        selected_tree_files_ids = set([f['file_id'] for f in selected_tree_files])
-                        for expected_file in expected_selected_tree_files:
-                            self.assertTrue(expected_file['file_id'] in selected_tree_files_ids)
-
-                        # and check the totals as well
-                        tree_totals = db.totalFileSizeAndNumFilesInTree(root_dir_id, None, file_creation_date)
-                        self.assertEqual(len(expected_selected_tree_files), tree_totals['tree_num_files'])
-                        self.assertEqual(sum(f['file_size'] for f in expected_selected_tree_files), tree_totals['tree_total_file_size'])
+                    for subsubdir_nr in range(NUM_SUB_SUBDIRS):
+                        subsubDir_id = self.db.insertSubDirectory('subsubDir_%d' % subsubdir_nr, subDir_id)
+                        dir2subdir[subsubDir_id] = []
+                        dir2subdir[subDir_id].append(subsubDir_id)
+                        for kk in range(project_nr*subdir_nr*subsubdir_nr):
+                            self.db.insertFileInfo('file_%d_%d' % (subdir_nr,kk), 314*(kk+1), base_time + timedelta(days=10*site_nr+project_nr, hours=10*subdir_nr+subsubdir_nr+2, seconds=kk), subsubDir_id)
+
+                            dir_files = self.db.filesInDirectory(subsubDir_id)
+                            dir_stats = self.db.directoryTreeStats(subsubDir_id)
+
+                            self.assertEqual(sum(f['size'] for f in dir_files), dir_stats['dir_total_file_size'])
+                            self.assertEqual(len(dir_files), dir_stats['dir_num_files'])
+                            if dir_files:
+                                self.assertEqual(min(f['size'] for f in dir_files), dir_stats['dir_min_file_size'])
+                                self.assertEqual(max(f['size'] for f in dir_files), dir_stats['dir_max_file_size'])
+                                self.assertEqual(min(f['creation_date'] for f in dir_files), dir_stats['dir_min_file_creation_date'])
+                                self.assertEqual(max(f['creation_date'] for f in dir_files), dir_stats['dir_max_file_creation_date'])
+
+                                tree_totals = self.db.totalFileSizeAndNumFilesInTree(subDir_id, dir_stats['dir_min_file_creation_date'], dir_stats['dir_max_file_creation_date'])
+                                self.assertEqual(tree_totals['tree_num_files'], dir_stats['dir_num_files'])
+                                self.assertEqual(tree_totals['tree_total_file_size'], dir_stats['dir_total_file_size'])
+
+                    # test 1st level subdir again, and also check inclusion of 2nd level subdirs in tree stats
+                    dir_files = self.db.filesInDirectory(subDir_id)
+                    dir_stats = self.db.directoryTreeStats(subDir_id)
+                    # this dir only...
+                    self.assertEqual(sum(f['size'] for f in dir_files), dir_stats['dir_total_file_size'])
+                    self.assertEqual(len(dir_files), dir_stats['dir_num_files'])
+                    if dir_files:
+                        self.assertEqual(min(f['size'] for f in dir_files), dir_stats['dir_min_file_size'])
+                        self.assertEqual(max(f['size'] for f in dir_files), dir_stats['dir_max_file_size'])
+                        self.assertEqual(min(f['creation_date'] for f in dir_files), dir_stats['dir_min_file_creation_date'])
+                        self.assertEqual(max(f['creation_date'] for f in dir_files), dir_stats['dir_max_file_creation_date'])
+
+                    # including subdirs in tree...
+                    self.assertEqual(sum(f['file_size'] for f in self.db.filesInTree(subDir_id)), dir_stats['tree_total_file_size'])
+                    self.assertEqual(len(self.db.filesInTree(subDir_id)), dir_stats['tree_num_files'])
+
+        ####################################################################################
+        # STAGE 2: reporting phase.
+        # loop over the sites, directories, and files now that the database has been filled.
+        # and check the dir- and tree stats totals
+        ####################################################################################
+        for site in self.db.sites():
+            site_id = site['id']
+
+            rootDirs = self.db.rootDirectoriesForSite(site_id)
+            self.assertEqual(NUM_PROJECTS, len(rootDirs))
+
+            for root_dir_id in [x['root_dir_id'] for x in rootDirs]:
+                subDirs = self.db.subDirectories(root_dir_id, 1, False)
+                self.assertEqual(NUM_PROJECT_SUBDIRS, len(subDirs))
+
+                for subDir in subDirs:
+                    subDir_parent_id = subDir['parent_dir_id']
+                    self.assertEqual(root_dir_id, subDir_parent_id)
+                    self.assertTrue(subDir['id'] in dir2subdir[root_dir_id])
+
+                    subsubDirs = self.db.subDirectories(subDir['id'], 1, False)
+                    self.assertEqual(NUM_SUB_SUBDIRS, len(subsubDirs))
+
+                    for subsubDir in subsubDirs:
+                        subsubDir_parent_id = subsubDir['parent_dir_id']
+                        self.assertEqual(subDir['id'], subsubDir_parent_id)
+                        self.assertTrue(subsubDir['id'] in dir2subdir[subDir['id']])
+
+                # check various selects of files in the tree, for each file
+                tree_files = sorted(self.db.filesInTree(root_dir_id), key=lambda f: f['file_creation_date'])
+                for file in tree_files:
+                    # check if filesInTree returns just this one file when time-delimited to this specific file_creation_date
+                    file_creation_date = file['file_creation_date']
+                    selected_tree_files = self.db.filesInTree(root_dir_id, file_creation_date, file_creation_date)
+                    self.assertEqual(1, len(selected_tree_files))
+                    self.assertEqual(file['file_creation_date'], selected_tree_files[0]['file_creation_date'])
+                    self.assertEqual(file['file_size'], selected_tree_files[0]['file_size'])
+
+                    # get the 'totals' for this root_dir, but select only this file by date.
+                    # should return 1 file.
+                    tree_totals = self.db.totalFileSizeAndNumFilesInTree(root_dir_id, file_creation_date, file_creation_date)
+                    self.assertEqual(1, tree_totals['tree_num_files'])
+                    self.assertEqual(file['file_size'], tree_totals['tree_total_file_size'])
+
+                # check some ranges files/times
+                for idx, file in enumerate(tree_files):
+                    file_creation_date = file['file_creation_date']
+
+                    # select any file >= file_creation_date
+                    expected_selected_tree_files = tree_files[idx:]
+                    selected_tree_files = self.db.filesInTree(root_dir_id, file_creation_date, None)
+                    self.assertEqual(len(expected_selected_tree_files), len(selected_tree_files))
+                    selected_tree_files_ids = set([f['file_id'] for f in selected_tree_files])
+                    for expected_file in expected_selected_tree_files:
+                        self.assertTrue(expected_file['file_id'] in selected_tree_files_ids)
+
+                    # and check the totals as well
+                    tree_totals = self.db.totalFileSizeAndNumFilesInTree(root_dir_id, file_creation_date, None)
+                    self.assertEqual(len(expected_selected_tree_files), tree_totals['tree_num_files'])
+                    self.assertEqual(sum(f['file_size'] for f in expected_selected_tree_files), tree_totals['tree_total_file_size'])
+
+                    # select any file <= file_creation_date
+                    expected_selected_tree_files = tree_files[:idx+1]
+                    selected_tree_files = self.db.filesInTree(root_dir_id, None, file_creation_date)
+                    self.assertEqual(len(expected_selected_tree_files), len(selected_tree_files))
+                    selected_tree_files_ids = set([f['file_id'] for f in selected_tree_files])
+                    for expected_file in expected_selected_tree_files:
+                        self.assertTrue(expected_file['file_id'] in selected_tree_files_ids)
+
+                    # and check the totals as well
+                    tree_totals = self.db.totalFileSizeAndNumFilesInTree(root_dir_id, None, file_creation_date)
+                    self.assertEqual(len(expected_selected_tree_files), tree_totals['tree_num_files'])
+                    self.assertEqual(sum(f['file_size'] for f in expected_selected_tree_files), tree_totals['tree_total_file_size'])
 
 # run tests if main
 if __name__ == '__main__':
-    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
-                        level=logging.INFO)
-
     unittest.main()
diff --git a/LTA/ltastorageoverview/test/test_ingesteventhandler.py b/LTA/ltastorageoverview/test/test_ingesteventhandler.py
index 893d9a9bfa56eaa416f457a81d203a8bfdf80c56..5fc33386b15ee8175da53a32816b7a1268d68eab 100755
--- a/LTA/ltastorageoverview/test/test_ingesteventhandler.py
+++ b/LTA/ltastorageoverview/test/test_ingesteventhandler.py
@@ -20,203 +20,195 @@
 # $Id$
 
 from datetime import datetime
+import unittest
 
-from common_test_ltastoragedb import *
-from lofar.lta.ltastorageoverview import store
+from lofar.lta.ltastorageoverview.testing.common_test_ltastoragedb import LTAStorageDbTestMixin
 from lofar.lta.ltastorageoverview.ingesteventhandler import LTASOIngestEventHandler
 
 import logging
 logger = logging.getLogger(__name__)
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
 
-class TestLTASOIngestEventHandler(CommonLTAStorageDbTest):
+
+class TestLTASOIngestEventHandler(LTAStorageDbTestMixin, unittest.TestCase):
     def setUp(self):
-        # allow superclass to setup empty database
-        super(TestLTASOIngestEventHandler, self).setUp()
+        super().setUp()
+        self.fill_with_test_data()
 
-        # fill empty database with simple sites and root dirs
-        with store.LTAStorageDb(self.dbcreds) as db:
-            db.insertSiteIfNotExists('siteA', 'srm://siteA.foo.bar:8443')
-            db.insertSiteIfNotExists('siteB', 'srm://siteB.foo.bar:8443')
+    def fill_with_test_data(self):
+        self.db.insertSiteIfNotExists('siteA', 'srm://siteA.foo.bar:8443')
+        self.db.insertSiteIfNotExists('siteB', 'srm://siteB.foo.bar:8443')
 
-            db.insertRootDirectory('siteA', '/root_dir_1')
-            db.insertRootDirectory('siteA', '/root_dir_2')
-            db.insertRootDirectory('siteA', '/long/path/to/root_dir_3')
-            db.insertRootDirectory('siteB', '/root_dir_1')
+        self.db.insertRootDirectory('siteA', '/root_dir_1')
+        self.db.insertRootDirectory('siteA', '/root_dir_2')
+        self.db.insertRootDirectory('siteA', '/long/path/to/root_dir_3')
+        self.db.insertRootDirectory('siteB', '/root_dir_1')
 
         self._markAllDirectoriesRecentlyVisited()
 
     def _markAllDirectoriesRecentlyVisited(self):
         """pretend that all dirs were recently visited
         """
-        with store.LTAStorageDb(self.dbcreds) as db:
-            db.executeQuery('''update scraper.last_directory_visit
-                               set visit_date=%s;''', (datetime.utcnow(), ))
-            db.commit()
+        self.db.executeQuery('''update scraper.last_directory_visit set visit_date=%s;''', (datetime.utcnow(), ))
+        self.db.commit()
 
     def test_01_schedule_srmurl_for_visit_unknown_site(self):
         """ try to schedule some unknown site's surl. Should raise.
         """
-        with store.LTAStorageDb(self.dbcreds) as db:
-            handler = LTASOIngestEventHandler(dbcreds=self.dbcreds)
+        handler = LTASOIngestEventHandler(dbcreds=self.dbcreds)
 
-            with self.assertRaises(LookupError) as context:
-                surl = 'srm://foo.bar:1234/fdjsalfja5h43535h3oiu/5u905u3f'
-                handler._schedule_srmurl_for_visit(surl)
-            self.assertTrue('Could not find site' in str(context.exception))
+        with self.assertRaises(LookupError) as context:
+            surl = 'srm://foo.bar:1234/fdjsalfja5h43535h3oiu/5u905u3f'
+            handler._schedule_srmurl_for_visit(surl)
+        self.assertTrue('Could not find site' in str(context.exception))
 
     def test_02_mark_directory_for_a_visit(self):
         """ Test core method _mark_directory_for_a_visit for all known root dirs.
         Should set the last visit time for each dir way in the past.
         """
-        with store.LTAStorageDb(self.dbcreds) as db:
-            handler = LTASOIngestEventHandler(dbcreds=self.dbcreds)
-            now = datetime.utcnow()
+        handler = LTASOIngestEventHandler(dbcreds=self.dbcreds)
+        now = datetime.utcnow()
 
-            for site in db.sites():
-                for root_dir in db.rootDirectoriesForSite(site['id']):
-                    dir_id = root_dir['root_dir_id']
-                    # make sure the dir's last visit time is recent
-                    db.updateDirectoryLastVisitTime(dir_id, now)
-                    timestamp_before_mark = db.directoryLastVisitTime(dir_id)
-                    self.assertEqual(now, timestamp_before_mark)
+        for site in self.db.sites():
+            for root_dir in self.db.rootDirectoriesForSite(site['id']):
+                dir_id = root_dir['root_dir_id']
+                # make sure the dir's last visit time is recent
+                self.db.updateDirectoryLastVisitTime(dir_id, now)
+                timestamp_before_mark = self.db.directoryLastVisitTime(dir_id)
+                self.assertEqual(now, timestamp_before_mark)
 
-                    # let the handler mark the dir for a next visit...
-                    handler._mark_directory_for_a_visit(dir_id)
+                # let the handler mark the dir for a next visit...
+                handler._mark_directory_for_a_visit(dir_id)
 
-                    # by marking the dir for a next visit, the dir's last visit time is set way in the past.
-                    timestamp_after_mark = db.directoryLastVisitTime(dir_id)
-                    self.assertLess(timestamp_after_mark, timestamp_before_mark)
+                # by marking the dir for a next visit, the dir's last visit time is set way in the past.
+                timestamp_after_mark = self.db.directoryLastVisitTime(dir_id)
+                self.assertLess(timestamp_after_mark, timestamp_before_mark)
 
     def test_03_insert_missing_directory_tree_if_needed(self):
         """ Test core method _insert_missing_directory_tree_if_needed for all known root dirs.
         Should result in new directory entries in the database for the new sub directories only.
         """
-        with store.LTAStorageDb(self.dbcreds) as db:
-            handler = LTASOIngestEventHandler(dbcreds=self.dbcreds)
-
-            for site in db.sites():
-                site_surl = site['url']
-                site_id = site['id']
-                for root_dir in db.rootDirectoriesForSite(site_id):
-                    dir_path = root_dir['dir_name']
+        handler = LTASOIngestEventHandler(dbcreds=self.dbcreds)
+
+        for site in self.db.sites():
+            site_surl = site['url']
+            site_id = site['id']
+            for root_dir in self.db.rootDirectoriesForSite(site_id):
+                dir_path = root_dir['dir_name']
+                surl = site_surl + dir_path
+
+                # root dir should already exist
+                dir = self.db.directoryByName(dir_path, site_id)
+                self.assertIsNotNone(dir)
+
+                # let the handler insert the not-so-missing dirs.
+                # nothing should happen, because the root dir already exists
+                new_dir_ids = handler._insert_missing_directory_tree_if_needed(surl)
+                self.assertEqual(0, len(new_dir_ids))
+
+                # now insert some new subdirs, with multiple levels.
+                for subdir_path in ['/foo', '/bar/xyz']:
+                    dir_path = root_dir['dir_name'] + subdir_path
                     surl = site_surl + dir_path
+                    # dir should not exist yet
+                    self.assertIsNone(self.db.directoryByName(dir_path, site_id))
 
-                    # root dir should already exist
-                    dir = db.directoryByName(dir_path, site_id)
-                    self.assertIsNotNone(dir)
-
-                    # let the handler insert the not-so-missing dirs.
-                    # nothing should happen, because the root dir already exists
-                    new_dir_ids = handler._insert_missing_directory_tree_if_needed(surl)
-                    self.assertEqual(0, len(new_dir_ids))
-
-                    # now insert some new subdirs, with multiple levels.
-                    for subdir_path in ['/foo', '/bar/xyz']:
-                        dir_path = root_dir['dir_name'] + subdir_path
-                        surl = site_surl + dir_path
-                        # dir should not exist yet
-                        self.assertIsNone(db.directoryByName(dir_path, site_id))
-
-                        # let the handler insert the missing dirs.
-                        handler._insert_missing_directory_tree_if_needed(surl)
+                    # let the handler insert the missing dirs.
+                    handler._insert_missing_directory_tree_if_needed(surl)
 
-                        # dir should exist now
-                        dir = db.directoryByName(dir_path, site_id)
-                        self.assertIsNotNone(dir)
+                    # dir should exist now
+                    dir = self.db.directoryByName(dir_path, site_id)
+                    self.assertIsNotNone(dir)
 
-                        # check if new dir has expected root dir
-                        parents = db.parentDirectories(dir['dir_id'])
-                        self.assertEqual(root_dir['root_dir_id'], parents[0]['id'])
+                    # check if new dir has expected root dir
+                    parents = self.db.parentDirectories(dir['dir_id'])
+                    self.assertEqual(root_dir['root_dir_id'], parents[0]['id'])
 
     def test_04_insert_missing_directory_tree_if_needed_for_path_with_unknown_rootdir(self):
         """ Test core method _insert_missing_directory_tree_if_needed for a path with an unknown root dir
         Should raise LookupError.
         """
-        with store.LTAStorageDb(self.dbcreds) as db:
-            handler = LTASOIngestEventHandler(dbcreds=self.dbcreds)
+        handler = LTASOIngestEventHandler(dbcreds=self.dbcreds)
 
-            for site in db.sites():
-                with self.assertRaises(LookupError) as context:
-                    surl = site['url'] + '/fdjsalfja5h43535h3oiu/5u905u3f'
-                    handler._insert_missing_directory_tree_if_needed(surl)
-                self.assertTrue('Could not find parent root dir' in str(context.exception))
+        for site in self.db.sites():
+            with self.assertRaises(LookupError) as context:
+                surl = site['url'] + '/fdjsalfja5h43535h3oiu/5u905u3f'
+                handler._insert_missing_directory_tree_if_needed(surl)
+            self.assertTrue('Could not find parent root dir' in str(context.exception))
 
     def test_05_schedule_srmurl_for_visit_for_root_dir(self):
         """ Test higher level method _schedule_srmurl_for_visit for all known root dirs.
         Should result in marking the dir matching the surl as being the dir which should be visited next.
         """
-        with store.LTAStorageDb(self.dbcreds) as db:
-            handler = LTASOIngestEventHandler(dbcreds=self.dbcreds)
+        handler = LTASOIngestEventHandler(dbcreds=self.dbcreds)
 
-            for site in db.sites():
-                for root_dir in db.rootDirectoriesForSite(site['id']):
-                    self._markAllDirectoriesRecentlyVisited()
-                    now = datetime.utcnow()
+        for site in self.db.sites():
+            for root_dir in self.db.rootDirectoriesForSite(site['id']):
+                self._markAllDirectoriesRecentlyVisited()
+                now = datetime.utcnow()
 
-                    dir_id = root_dir['root_dir_id']
-                    surl = site['url'] + root_dir['dir_name']
-                    handler._schedule_srmurl_for_visit(surl)
+                dir_id = root_dir['root_dir_id']
+                surl = site['url'] + root_dir['dir_name']
+                handler._schedule_srmurl_for_visit(surl)
 
-                    # surl was scheduled for a visit, so this dir should be the least_recent_visited_dir
-                    site_visit_stats = db.visitStats(datetime.utcnow())[site['name']]
-                    self.assertEqual(dir_id, site_visit_stats['least_recent_visited_dir_id'])
+                # surl was scheduled for a visit, so this dir should be the least_recent_visited_dir
+                site_visit_stats = self.db.visitStats(datetime.utcnow())[site['name']]
+                self.assertEqual(dir_id, site_visit_stats['least_recent_visited_dir_id'])
 
-                    # mimick a directory visit by the scraper, by setting the last visit time to now.
-                    db.updateDirectoryLastVisitTime(dir_id, now)
+                # mimic a directory visit by the scraper by setting the last visit time to now.
+                self.db.updateDirectoryLastVisitTime(dir_id, now)
 
-                    # we faked a visit, so this dir should not be the least_recent_visited_dir anymore
-                    site_visit_stats = db.visitStats(now)[site['name']]
-                    self.assertNotEqual(dir_id, site_visit_stats.get('least_recent_visited_dir_id'))
+                # we faked a visit, so this dir should not be the least_recent_visited_dir anymore
+                site_visit_stats = self.db.visitStats(now)[site['name']]
+                self.assertNotEqual(dir_id, site_visit_stats.get('least_recent_visited_dir_id'))
 
     def test_06_schedule_srmurl_for_visit_for_new_root_sub_dir(self):
         """ Test higher level method _schedule_srmurl_for_visit for all new unknown subdirs of the known root dirs.
         Should result in marking the dir matching the surl as being the dir which should be visited next.
         """
-        with store.LTAStorageDb(self.dbcreds) as db:
-            handler = LTASOIngestEventHandler(dbcreds=self.dbcreds)
+        handler = LTASOIngestEventHandler(dbcreds=self.dbcreds)
 
-            for site in db.sites():
-                for root_dir in db.rootDirectoriesForSite(site['id']):
-                    self._markAllDirectoriesRecentlyVisited()
-                    now = datetime.utcnow()
+        for site in self.db.sites():
+            for root_dir in self.db.rootDirectoriesForSite(site['id']):
+                self._markAllDirectoriesRecentlyVisited()
+                now = datetime.utcnow()
 
-                    # create the subdir surl
-                    sub_dir_name = '/foo'
-                    sub_dir_path = root_dir['dir_name'] + sub_dir_name
-                    surl = site['url'] + sub_dir_path
+                # create the subdir surl
+                sub_dir_name = '/foo'
+                sub_dir_path = root_dir['dir_name'] + sub_dir_name
+                surl = site['url'] + sub_dir_path
 
-                    # call the method under test
-                    handler._schedule_srmurl_for_visit(surl)
+                # call the method under test
+                handler._schedule_srmurl_for_visit(surl)
 
-                    # surl was scheduled for a visit, all other dir's were marked as visited already...
-                    # so there should be a new dir for this surl, and it should be the least_recent_visited_dir
-                    site_visit_stats = db.visitStats(datetime.utcnow())[site['name']]
+                # surl was scheduled for a visit, all other dirs were marked as visited already...
+                # so there should be a new dir for this surl, and it should be the least_recent_visited_dir
+                site_visit_stats = self.db.visitStats(datetime.utcnow())[site['name']]
 
-                    least_recent_visited_dir_id = site_visit_stats.get('least_recent_visited_dir_id')
-                    self.assertIsNotNone(least_recent_visited_dir_id)
+                least_recent_visited_dir_id = site_visit_stats.get('least_recent_visited_dir_id')
+                self.assertIsNotNone(least_recent_visited_dir_id)
 
-                    least_recent_visited_dir = db.directory(least_recent_visited_dir_id)
-                    self.assertEqual(sub_dir_path, least_recent_visited_dir['dir_name'])
+                least_recent_visited_dir = self.db.directory(least_recent_visited_dir_id)
+                self.assertEqual(sub_dir_path, least_recent_visited_dir['dir_name'])
 
-                    # mimick a directory visit by the scraper, by setting the last visit time to now.
-                    db.updateDirectoryLastVisitTime(least_recent_visited_dir_id, now)
+                # mimic a directory visit by the scraper by setting the last visit time to now.
+                self.db.updateDirectoryLastVisitTime(least_recent_visited_dir_id, now)
 
-                    # we faked a visit, so this dir should not be the least_recent_visited_dir anymore
-                    site_visit_stats = db.visitStats(now)[site['name']]
-                    self.assertNotEqual(least_recent_visited_dir_id, site_visit_stats.get('least_recent_visited_dir_id'))
+                # we faked a visit, so this dir should not be the least_recent_visited_dir anymore
+                site_visit_stats = self.db.visitStats(now)[site['name']]
+                self.assertNotEqual(least_recent_visited_dir_id, site_visit_stats.get('least_recent_visited_dir_id'))
 
     def test_07_schedule_srmurl_for_visit_for_path_with_unknown_rootdir(self):
         """ Test higher level method _schedule_srmurl_for_visit for a path with an unknown root dir
         Should raise LookupError.
         """
-        with store.LTAStorageDb(self.dbcreds) as db:
-            handler = LTASOIngestEventHandler(dbcreds=self.dbcreds)
+        handler = LTASOIngestEventHandler(dbcreds=self.dbcreds)
 
-            for site in db.sites():
-                with self.assertRaises(LookupError) as context:
-                    surl = site['url'] + '/fdjsalfja5h43535h3oiu/5u905u3f'
-                    handler._schedule_srmurl_for_visit(surl)
-                self.assertTrue('Could not find parent root dir' in str(context.exception))
+        for site in self.db.sites():
+            with self.assertRaises(LookupError) as context:
+                surl = site['url'] + '/fdjsalfja5h43535h3oiu/5u905u3f'
+                handler._schedule_srmurl_for_visit(surl)
+            self.assertTrue('Could not find parent root dir' in str(context.exception))
 
     def test_08_integration_test_with_messagebus(self):
         """ Full blown integration test listening for notifications on the bus,
@@ -254,34 +246,33 @@ class TestLTASOIngestEventHandler(CommonLTAStorageDbTest):
                     sync_event.set()
 
             with SyncedLTASOIngestEventHandler(self.dbcreds, busname=busname):
-                with store.LTAStorageDb(self.dbcreds) as db:
-                    for site in db.sites():
-                        for root_dir in db.rootDirectoriesForSite(site['id']):
-                            self._markAllDirectoriesRecentlyVisited()
+                for site in self.db.sites():
+                    for root_dir in self.db.rootDirectoriesForSite(site['id']):
+                        self._markAllDirectoriesRecentlyVisited()
 
-                            # create the subdir surl
-                            sub_dir_name = '/foo'
-                            sub_dir_path = root_dir['dir_name'] + sub_dir_name
-                            surl = site['url'] + sub_dir_path
+                        # create the subdir surl
+                        sub_dir_name = '/foo'
+                        sub_dir_path = root_dir['dir_name'] + sub_dir_name
+                        surl = site['url'] + sub_dir_path
 
-                            with ToBus(busname) as sender:
-                                msg = EventMessage(subject=DEFAULT_INGEST_NOTIFICATION_PREFIX+"TaskFinished",
-                                                   content={'srm_url': surl})
-                                sender.send(msg)
+                        with ToBus(busname) as sender:
+                            msg = EventMessage(subject=DEFAULT_INGEST_NOTIFICATION_PREFIX+"TaskFinished",
+                                               content={'srm_url': surl})
+                            sender.send(msg)
 
-                            # wait for the handler to have processed the message
-                            self.assertTrue(sync_event.wait(2))
-                            sync_event.clear()
+                        # wait for the handler to have processed the message
+                        self.assertTrue(sync_event.wait(2))
+                        sync_event.clear()
 
-                            # surl should have been scheduled for a visit, all other dir's were marked as visited already...
-                            # so there should be a new dir for this surl, and it should be the least_recent_visited_dir
-                            site_visit_stats = db.visitStats(datetime.utcnow())[site['name']]
+                        # surl should have been scheduled for a visit, all other dirs were marked as visited already...
+                        # so there should be a new dir for this surl, and it should be the least_recent_visited_dir
+                        site_visit_stats = self.db.visitStats(datetime.utcnow())[site['name']]
 
-                            least_recent_visited_dir_id = site_visit_stats.get('least_recent_visited_dir_id')
-                            self.assertIsNotNone(least_recent_visited_dir_id)
+                        least_recent_visited_dir_id = site_visit_stats.get('least_recent_visited_dir_id')
+                        self.assertIsNotNone(least_recent_visited_dir_id)
 
-                            least_recent_visited_dir = db.directory(least_recent_visited_dir_id)
-                            self.assertEqual(sub_dir_path, least_recent_visited_dir['dir_name'])
+                        least_recent_visited_dir = self.db.directory(least_recent_visited_dir_id)
+                        self.assertEqual(sub_dir_path, least_recent_visited_dir['dir_name'])
 
         except ImportError as e:
             logger.warning("skipping test due to: %s", e)
@@ -297,7 +288,4 @@ class TestLTASOIngestEventHandler(CommonLTAStorageDbTest):
 
 # run tests if main
 if __name__ == '__main__':
-    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
-                        level=logging.INFO)
-
     unittest.main()
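
Net effect of the hunk above: tests no longer open their own store.LTAStorageDb connection per test method; the mixin owns a single self.db for the whole test class. A minimal sketch of a new test written against that convention (hypothetical class and site names; assumes the mixin exposes self.db and self.dbcreds as used above):

    import unittest
    from lofar.lta.ltastorageoverview.testing.common_test_ltastoragedb import LTAStorageDbTestMixin

    class ExampleDbTest(LTAStorageDbTestMixin, unittest.TestCase):
        def test_site_roundtrip(self):
            # self.db replaces the former 'with store.LTAStorageDb(self.dbcreds) as db:' blocks
            site_id = self.db.insertSiteIfNotExists('siteX', 'srm://siteX.org')
            self.assertEqual('siteX', self.db.site(site_id)['name'])

    if __name__ == '__main__':
        unittest.main()
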
diff --git a/LTA/ltastorageoverview/test/test_lso_webservice.py b/LTA/ltastorageoverview/test/test_lso_webservice.py
index 7375d9b282d075ef293d50e7fa3066766ae7592b..ef44296d582a4ab3da1d8ab4bfd07d864ebda479 100755
--- a/LTA/ltastorageoverview/test/test_lso_webservice.py
+++ b/LTA/ltastorageoverview/test/test_lso_webservice.py
@@ -19,102 +19,55 @@
 
 # $Id$
 
-from unittest.mock import MagicMock
-from unittest.mock import patch
-from flask import Flask
-import testing.postgresql
 
 import unittest
-import sys
-import os
-import time
-import os.path
-import tempfile
 import urllib.request, urllib.error, urllib.parse
 import json
 import datetime
-import psycopg2
 from io import StringIO
-import lofar.common.dbcredentials as dbc
 from lofar.lta.ltastorageoverview import store
+from lofar.lta.ltastorageoverview.testing.common_test_ltastoragedb import LTAStorageDbTestMixin
 from lofar.lta.ltastorageoverview.webservice import webservice as webservice
 from flask_testing import LiveServerTestCase as FlaskLiveTestCase
-import testing.postgresql
 
 import logging
 logger = logging.getLogger(__name__)
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
 
-test_psql = None
-
-def setUpModule():
-    logger.info('setting up test LTASO database server...')
-
-    # create a test webservice.db
-    logger.info('  creating test postgres server')
-    global test_psql
-    test_psql = testing.postgresql.Postgresql()
-    dsn = test_psql.dsn()
-    logger.info('  created test postgres server, dsn=%s', dsn)
-
-    dbcreds = dbc.Credentials()
-    dbcreds.user = 'test_user'
-    dbcreds.password = 'test_password'
-
-    with psycopg2.connect(**dsn) as conn:
-        cursor = conn.cursor()
-        #use same user/pass as stored in local webservice.dbcreds
-        query = "CREATE USER %s WITH SUPERUSER PASSWORD '%s'" % (dbcreds.user, dbcreds.password)
-        cursor.execute(query)
-
-        create_script_path = os.path.normpath(os.path.join(os.environ['LOFARROOT'], 'share', 'ltaso', 'create_db_ltastorageoverview.sql'))
-        logger.info('  running ltaso create script create_script=%s', create_script_path)
-        with open(create_script_path, 'r') as script:
-            cursor.execute(script.read())
-        logger.info('  completed ltaso create script')
-
-    # copy the test postgres server settings into webservice.dbcreds
-    # we can use these webservice.dbcreds in each test method to connect to the testing ltaso database
-    dbcreds.host = dsn['host']
-    dbcreds.database = dsn['database']
-    dbcreds.port = dsn['port']
-
-    logger.info('finished setting up test LTASO database')
-
-    webservice.db = store.LTAStorageDb(dbcreds)
-
-    logger.info('filling test LTASO database with test data')
-    webservice.db.insertSiteIfNotExists('siteA', 'srm://siteA.org')
-    webservice.db.insertSiteIfNotExists('siteB', 'srm://siteB.org')
+class TestLTAStorageWebService(LTAStorageDbTestMixin, FlaskLiveTestCase):
+    def create_app(self):
+        webservice.db = self.db
+        return webservice.app
 
-    rootDir_ids = []
-    rootDir_ids.append(webservice.db.insertRootDirectory('siteA', 'rootDir1'))
-    rootDir_ids.append(webservice.db.insertRootDirectory('siteA', 'rootDir2'))
-    rootDir_ids.append(webservice.db.insertRootDirectory('siteB', 'path/to/rootDir3'))
+    def setUp(self):
+        super().setUp()
+        self.fill_with_test_data()
 
-    for rootDir_id in rootDir_ids:
-        for j in range(2):
-            subDir_id = webservice.db.insertSubDirectory('subDir_%d' % j, rootDir_id)
+    def fill_with_test_data(self):
+        logger.info('filling test LTASO database with test data')
+        self.db.insertSiteIfNotExists('siteA', 'srm://siteA.org')
+        self.db.insertSiteIfNotExists('siteB', 'srm://siteB.org')
 
-            if j == 0:
-                webservice.db.insertFileInfo('file_%d' % j, 271 * (j + 1), datetime.datetime.utcnow(), subDir_id)
+        rootDir_ids = []
+        rootDir_ids.append(self.db.insertRootDirectory('siteA', 'rootDir1'))
+        rootDir_ids.append(self.db.insertRootDirectory('siteA', 'rootDir2'))
+        rootDir_ids.append(self.db.insertRootDirectory('siteB', 'path/to/rootDir3'))
 
-            for k in range(2):
-                subsubDir_id = webservice.db.insertSubDirectory('subsubDir_%d' % k, subDir_id)
+        for rootDir_id in rootDir_ids:
+            for j in range(2):
+                subDir_id = self.db.insertSubDirectory('subDir_%d' % j, rootDir_id)
 
-                for l in range((j + 1) * (k + 1)):
-                    webservice.db.insertFileInfo('file_%d' % l, 314 * (l + 1), datetime.datetime.utcnow(), subsubDir_id)
+                if j == 0:
+                    self.db.insertFileInfo('file_%d' % j, 271 * (j + 1), datetime.datetime.utcnow(), subDir_id)
 
-    logger.info('finished filling test LTASO database with test data')
+                for k in range(2):
+                    subsubDir_id = self.db.insertSubDirectory('subsubDir_%d' % k, subDir_id)
 
-def tearDownModule():
-    logger.info('removing test LTASO database server...')
-    test_psql.stop()
-    logger.info('removed test LTASO database server')
+                    for l in range((j + 1) * (k + 1)):
+                        self.db.insertFileInfo('file_%d' % l, 314 * (l + 1), datetime.datetime.utcnow(), subsubDir_id)
 
+        logger.info('finished filling test LTASO database with test data')
 
-class TestLTAStorageWebService(FlaskLiveTestCase):
-    def create_app(self):
-        return webservice.app
 
     def testSites(self):
         response = urllib.request.urlopen('http://localhost:5000/rest/sites/')
@@ -161,11 +114,6 @@ class TestLTAStorageWebService(FlaskLiveTestCase):
         self.assertEqual('siteB', rootDirsDict['path/to/rootDir3']['site_name'])
 
 
-def main(argv):
-    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
-                        level=logging.INFO)
-    unittest.main()
-
 # run tests if main
 if __name__ == '__main__':
-    main(sys.argv[1:])
+    unittest.main()
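
The pattern above is worth spelling out: FlaskLiveTestCase calls create_app() before it boots the live server, so assigning webservice.db there injects the mixin's test database into the webservice module just in time. A condensed sketch of that wiring (endpoint and port taken from the tests above; the JSON check is illustrative):

    import json
    import urllib.request

    class ExampleWebServiceTest(LTAStorageDbTestMixin, FlaskLiveTestCase):
        def create_app(self):
            webservice.db = self.db  # hand the test database to the webservice
            return webservice.app    # FlaskLiveTestCase serves this app

        def test_sites_endpoint(self):
            response = urllib.request.urlopen('http://localhost:5000/rest/sites/')
            self.assertEqual(200, response.code)
            json.loads(response.read().decode('utf-8'))  # body should parse as JSON
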
diff --git a/LTA/ltastorageoverview/test/test_scraper.py b/LTA/ltastorageoverview/test/test_scraper.py
index 12278b4968088ef31e5f93d43b065dc97b998bcb..6e9c60845db2d61c90aeed826162c66d4e21b829 100755
--- a/LTA/ltastorageoverview/test/test_scraper.py
+++ b/LTA/ltastorageoverview/test/test_scraper.py
@@ -22,10 +22,12 @@
 import logging
 
 import unittest
-from common_test_ltastoragedb import *
+from lofar.lta.ltastorageoverview.testing.common_test_ltastoragedb import LTAStorageDbTestMixin
 from lofar.lta.ltastorageoverview import scraper
 
 logger = logging.getLogger(__name__)
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+
 
 class TestLocation(unittest.TestCase):
     def test_isRoot(self):
@@ -45,12 +47,10 @@ class TestLocation(unittest.TestCase):
             self.assertTrue('malformed directory' in str(context.exception))
 
 
-class TestScraper(CommonLTAStorageDbTest):
+class TestScraper(LTAStorageDbTestMixin, unittest.TestCase):
+    # TODO: implement unittests
     pass
 
 # run tests if main
 if __name__ == '__main__':
-    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
-                        level=logging.INFO)
-
     unittest.main()
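
The base-class order in TestScraper is deliberate: listing LTAStorageDbTestMixin before unittest.TestCase puts the mixin first in the MRO, so the unittest runner picks up the mixin's setUpClass/setUp hooks (which cooperatively call super()). Any future scraper test should keep that shape; a sketch (hypothetical test name, assertion grounded in rootDirectoriesForSite's behaviour for unknown sites):

    class TestScraperVisits(LTAStorageDbTestMixin, unittest.TestCase):
        def test_unknown_site_has_no_root_dirs(self):
            # self.db is provided by the mixin's class-level setup
            self.assertEqual([], self.db.rootDirectoriesForSite(999))
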
diff --git a/LTA/ltastorageoverview/test/test_store.py b/LTA/ltastorageoverview/test/test_store.py
index d4095f5465ab50cfa86a1ba016f5278da8a6c7b1..efe84f268d04b4d8bc3fd8bfa7fdd58530e0c22c 100755
--- a/LTA/ltastorageoverview/test/test_store.py
+++ b/LTA/ltastorageoverview/test/test_store.py
@@ -22,230 +22,220 @@
 from datetime import datetime
 import time
 from pprint import pformat
+import unittest
 
-from common_test_ltastoragedb import *
-from lofar.lta.ltastorageoverview import store
+from lofar.lta.ltastorageoverview.testing.common_test_ltastoragedb import LTAStorageDbTestMixin
 from lofar.common.postgres import FETCH_ALL, PostgresDBQueryExecutionError
 
 import logging
 logger = logging.getLogger(__name__)
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
 
 
-class TestLTAStorageDb(CommonLTAStorageDbTest):
+class TestLTAStorageDb(LTAStorageDbTestMixin, unittest.TestCase):
     def testSites(self):
-        with store.LTAStorageDb(self.dbcreds) as db:
-            siteA_id = db.insertSiteIfNotExists('siteA', 'srm://siteA.org')
-            siteB_id = db.insertSiteIfNotExists('siteB', 'srm://siteB.org')
+        siteA_id = self.db.insertSiteIfNotExists('siteA', 'srm://siteA.org')
+        siteB_id = self.db.insertSiteIfNotExists('siteB', 'srm://siteB.org')
 
-            sites = db.sites()
-            siteNames = [x['name'] for x in sites]
-            self.assertEquals(2, len(siteNames))
-            self.assertTrue('siteA' in siteNames)
-            self.assertTrue('siteB' in siteNames)
+        sites = self.db.sites()
+        siteNames = [x['name'] for x in sites]
+        self.assertEqual(2, len(siteNames))
+        self.assertTrue('siteA' in siteNames)
+        self.assertTrue('siteB' in siteNames)
 
-            site = db.site(siteA_id)
-            self.assertEqual('siteA', site['name'])
+        site = self.db.site(siteA_id)
+        self.assertEqual('siteA', site['name'])
 
-            site = db.site(siteB_id)
-            self.assertEqual('siteB', site['name'])
+        site = self.db.site(siteB_id)
+        self.assertEqual('siteB', site['name'])
 
     def testRootDirs(self):
-        with store.LTAStorageDb(self.dbcreds) as db:
-            siteA_id = db.insertSiteIfNotExists('siteA', 'srm://siteA.org')
-            siteB_id = db.insertSiteIfNotExists('siteB', 'srm://siteB.org')
+        siteA_id = self.db.insertSiteIfNotExists('siteA', 'srm://siteA.org')
+        siteB_id = self.db.insertSiteIfNotExists('siteB', 'srm://siteB.org')
 
-            dirA1_id = db.insertRootDirectory('siteA', 'rootDir1')
-            dirA2_id = db.insertRootDirectory('siteA', 'rootDir2')
-            dirA3_id = db.insertRootDirectory('siteA', 'path/to/rootDir3')
+        dirA1_id = self.db.insertRootDirectory('siteA', 'rootDir1')
+        dirA2_id = self.db.insertRootDirectory('siteA', 'rootDir2')
+        dirA3_id = self.db.insertRootDirectory('siteA', 'path/to/rootDir3')
 
-            dirB1_id = db.insertRootDirectory('siteB', 'rootDir1')
-            dirB2_id = db.insertRootDirectory('siteB', 'path/to/otherRootDir')
+        dirB1_id = self.db.insertRootDirectory('siteB', 'rootDir1')
+        dirB2_id = self.db.insertRootDirectory('siteB', 'path/to/otherRootDir')
 
-            rootDirs = db.rootDirectories()
-            self.assertEquals(5, len(rootDirs))
+        rootDirs = self.db.rootDirectories()
+        self.assertEqual(5, len(rootDirs))
 
-            rootDirsDict = {rd['root_dir_id']:rd for rd in rootDirs}
+        rootDirsDict = {rd['root_dir_id']:rd for rd in rootDirs}
 
-            self.assertEqual('rootDir1', rootDirsDict[dirA1_id]['dir_name'])
-            self.assertEqual(siteA_id, rootDirsDict[dirA1_id]['site_id'])
-            self.assertEqual('siteA', rootDirsDict[dirA1_id]['site_name'])
+        self.assertEqual('rootDir1', rootDirsDict[dirA1_id]['dir_name'])
+        self.assertEqual(siteA_id, rootDirsDict[dirA1_id]['site_id'])
+        self.assertEqual('siteA', rootDirsDict[dirA1_id]['site_name'])
 
-            self.assertEqual('rootDir2', rootDirsDict[dirA2_id]['dir_name'])
-            self.assertEqual(siteA_id, rootDirsDict[dirA2_id]['site_id'])
-            self.assertEqual('siteA', rootDirsDict[dirA2_id]['site_name'])
+        self.assertEqual('rootDir2', rootDirsDict[dirA2_id]['dir_name'])
+        self.assertEqual(siteA_id, rootDirsDict[dirA2_id]['site_id'])
+        self.assertEqual('siteA', rootDirsDict[dirA2_id]['site_name'])
 
-            self.assertEqual('path/to/rootDir3', rootDirsDict[dirA3_id]['dir_name'])
-            self.assertEqual(siteA_id, rootDirsDict[dirA3_id]['site_id'])
-            self.assertEqual('siteA', rootDirsDict[dirA3_id]['site_name'])
+        self.assertEqual('path/to/rootDir3', rootDirsDict[dirA3_id]['dir_name'])
+        self.assertEqual(siteA_id, rootDirsDict[dirA3_id]['site_id'])
+        self.assertEqual('siteA', rootDirsDict[dirA3_id]['site_name'])
 
-            self.assertEqual('rootDir1', rootDirsDict[dirB1_id]['dir_name'])
-            self.assertEqual(siteB_id, rootDirsDict[dirB1_id]['site_id'])
-            self.assertEqual('siteB', rootDirsDict[dirB1_id]['site_name'])
+        self.assertEqual('rootDir1', rootDirsDict[dirB1_id]['dir_name'])
+        self.assertEqual(siteB_id, rootDirsDict[dirB1_id]['site_id'])
+        self.assertEqual('siteB', rootDirsDict[dirB1_id]['site_name'])
 
-            self.assertEqual('path/to/otherRootDir', rootDirsDict[dirB2_id]['dir_name'])
-            self.assertEqual(siteB_id, rootDirsDict[dirB2_id]['site_id'])
-            self.assertEqual('siteB', rootDirsDict[dirB2_id]['site_name'])
+        self.assertEqual('path/to/otherRootDir', rootDirsDict[dirB2_id]['dir_name'])
+        self.assertEqual(siteB_id, rootDirsDict[dirB2_id]['site_id'])
+        self.assertEqual('siteB', rootDirsDict[dirB2_id]['site_name'])
 
-            root_dir_ids_siteA = set(d['root_dir_id'] for d in db.rootDirectoriesForSite(siteA_id))
-            self.assertEqual(set([dirA1_id, dirA2_id, dirA3_id]), root_dir_ids_siteA)
+        root_dir_ids_siteA = set(d['root_dir_id'] for d in self.db.rootDirectoriesForSite(siteA_id))
+        self.assertEqual(set([dirA1_id, dirA2_id, dirA3_id]), root_dir_ids_siteA)
 
-            root_dir_ids_siteB = set(d['root_dir_id'] for d in db.rootDirectoriesForSite(siteB_id))
-            self.assertEqual(set([dirB1_id, dirB2_id]), root_dir_ids_siteB)
+        root_dir_ids_siteB = set(d['root_dir_id'] for d in self.db.rootDirectoriesForSite(siteB_id))
+        self.assertEqual(set([dirB1_id, dirB2_id]), root_dir_ids_siteB)
 
-            root_dirs_non_existing_site = db.rootDirectoriesForSite(999)
-            self.assertEqual([], root_dirs_non_existing_site)
+        root_dirs_non_existing_site = self.db.rootDirectoriesForSite(999)
+        self.assertEqual([], root_dirs_non_existing_site)
 
     def testNonExistingDir(self):
-        with store.LTAStorageDb(self.dbcreds) as db:
-            dir = db.directoryByName('fjsdka;58432aek5843rfsjd8-sa')
-            self.assertEqual(None, dir)
+        dir = self.db.directoryByName('fjsdka;58432aek5843rfsjd8-sa')
+        self.assertEqual(None, dir)
 
     def testLeastRecentlyVisitedDirectory(self):
-        with store.LTAStorageDb(self.dbcreds) as db:
-            db.insertSiteIfNotExists('siteA', 'srm://siteA.org')
+        self.db.insertSiteIfNotExists('siteA', 'srm://siteA.org')
 
-            dir_ids = []
-            for i in range(3):
-                dir_id = db.insertRootDirectory('siteA', 'rootDir_%d' % i)
-                dir_ids.append(dir_id)
+        dir_ids = []
+        for i in range(3):
+            dir_id = self.db.insertRootDirectory('siteA', 'rootDir_%d' % i)
+            dir_ids.append(dir_id)
 
-                db.updateDirectoryLastVisitTime(dir_id, datetime.utcnow())
-                time.sleep(0.002)
+            self.db.updateDirectoryLastVisitTime(dir_id, datetime.utcnow())
+            time.sleep(0.002)
 
-            visitStats = db.visitStats()
-            self.assertTrue('siteA' in visitStats)
-            self.assertTrue('least_recent_visited_dir_id' in visitStats['siteA'])
+        visitStats = self.db.visitStats()
+        self.assertTrue('siteA' in visitStats)
+        self.assertTrue('least_recent_visited_dir_id' in visitStats['siteA'])
 
-            lvr_dir_id = visitStats['siteA']['least_recent_visited_dir_id']
-            self.assertEquals(dir_ids[0], lvr_dir_id)
+        lvr_dir_id = visitStats['siteA']['least_recent_visited_dir_id']
+        self.assertEqual(dir_ids[0], lvr_dir_id)
 
-            db.updateDirectoryLastVisitTime(dir_ids[0], datetime.utcnow())
-            db.updateDirectoryLastVisitTime(dir_ids[1], datetime.utcnow())
+        self.db.updateDirectoryLastVisitTime(dir_ids[0], datetime.utcnow())
+        self.db.updateDirectoryLastVisitTime(dir_ids[1], datetime.utcnow())
 
-            visitStats = db.visitStats()
-            lvr_dir_id = visitStats['siteA']['least_recent_visited_dir_id']
-            self.assertEquals(dir_ids[2], lvr_dir_id)
+        visitStats = self.db.visitStats()
+        lvr_dir_id = visitStats['siteA']['least_recent_visited_dir_id']
+        self.assertEqual(dir_ids[2], lvr_dir_id)
 
     def testDuplicateSubDirs(self):
-        with store.LTAStorageDb(self.dbcreds) as db:
-            db.insertSiteIfNotExists('siteA', 'srm://siteA.org')
-            db.insertSiteIfNotExists('siteB', 'srm://siteB.org')
+        self.db.insertSiteIfNotExists('siteA', 'srm://siteA.org')
+        self.db.insertSiteIfNotExists('siteB', 'srm://siteB.org')
 
-            dirA_id = db.insertRootDirectory('siteA', 'rootDir1')
-            dirB_id = db.insertRootDirectory('siteB', 'rootDir1')
+        dirA_id = self.db.insertRootDirectory('siteA', 'rootDir1')
+        dirB_id = self.db.insertRootDirectory('siteB', 'rootDir1')
 
-            subDirA1_id = db.insertSubDirectory('foo', dirA_id)
-            subDirA2_id = db.insertSubDirectory('bar', dirA_id)
-            subDirB1_id = db.insertSubDirectory('foo', dirB_id)
+        subDirA1_id = self.db.insertSubDirectory('foo', dirA_id)
+        subDirA2_id = self.db.insertSubDirectory('bar', dirA_id)
+        subDirB1_id = self.db.insertSubDirectory('foo', dirB_id)
 
-            self.assertNotEquals(None, subDirA1_id)
-            self.assertNotEquals(None, subDirA2_id)
-            self.assertNotEquals(None, subDirB1_id)
+        self.assertIsNotNone(subDirA1_id)
+        self.assertIsNotNone(subDirA2_id)
+        self.assertIsNotNone(subDirB1_id)
 
-            with self.assertRaises(PostgresDBQueryExecutionError):
-                db.insertSubDirectory('foo', dirA_id)
+        with self.assertRaises(PostgresDBQueryExecutionError):
+            self.db.insertSubDirectory('foo', dirA_id)
 
-    def _fill_test_db_with_sites_and_root_dirs(self, db):
+    def _fill_test_db_with_sites_and_root_dirs(self):
         """
         helper method to fill empty database with simple sites and root dirs
         """
-        db.insertSiteIfNotExists('siteA', 'srm://siteA.foo.bar:8443')
-        db.insertSiteIfNotExists('siteB', 'srm://siteB.foo.bar:8443')
+        self.db.insertSiteIfNotExists('siteA', 'srm://siteA.foo.bar:8443')
+        self.db.insertSiteIfNotExists('siteB', 'srm://siteB.foo.bar:8443')
 
-        db.insertRootDirectory('siteA', '/root_dir_1')
-        db.insertRootDirectory('siteA', '/root_dir_2')
-        db.insertRootDirectory('siteA', '/long/path/to/root_dir_3')
-        db.insertRootDirectory('siteB', '/root_dir_1')
+        self.db.insertRootDirectory('siteA', '/root_dir_1')
+        self.db.insertRootDirectory('siteA', '/root_dir_2')
+        self.db.insertRootDirectory('siteA', '/long/path/to/root_dir_3')
+        self.db.insertRootDirectory('siteB', '/root_dir_1')
 
 
     def test_insert_missing_directory_tree_if_needed(self):
         """ Test core method _insertMissingDirectoryTreeIfNeeded for all known root dirs.
         Should result in new directory entries in the database for the new sub directories only.
         """
-        with store.LTAStorageDb(self.dbcreds) as db:
-            self._fill_test_db_with_sites_and_root_dirs(db)
+        self._fill_test_db_with_sites_and_root_dirs()
 
-            for site in db.sites():
-                site_id = site['id']
-                for root_dir in db.rootDirectoriesForSite(site_id):
-                    root_dir_path = root_dir['dir_name']
+        for site in self.db.sites():
+            site_id = site['id']
+            for root_dir in self.db.rootDirectoriesForSite(site_id):
+                root_dir_path = root_dir['dir_name']
 
-                    # root dir should already exist
-                    dir = db.directoryByName(root_dir_path, site_id)
-                    self.assertIsNotNone(dir)
+                # root dir should already exist
+                dir = self.db.directoryByName(root_dir_path, site_id)
+                self.assertIsNotNone(dir)
 
-                    # insert the not-so-missing root dir.
-                    # nothing should happen, because the root dir already exists
-                    new_dir_ids = db.insert_missing_directory_tree_if_needed(root_dir_path, site_id)
-                    self.assertEqual(0, len(new_dir_ids))
+                # insert the not-so-missing root dir.
+                # nothing should happen, because the root dir already exists
+                new_dir_ids = self.db.insert_missing_directory_tree_if_needed(root_dir_path, site_id)
+                self.assertEqual(0, len(new_dir_ids))
 
-                    # now insert some new subdirs, with multiple levels.
-                    for subdir_path in ['/foo', '/bar/xyz']:
-                        dir_path = root_dir_path + subdir_path
-                        # dir should not exist yet
-                        self.assertIsNone(db.directoryByName(dir_path, site_id))
+                # now insert some new subdirs, with multiple levels.
+                for subdir_path in ['/foo', '/bar/xyz']:
+                    dir_path = root_dir_path + subdir_path
+                    # dir should not exist yet
+                    self.assertIsNone(self.db.directoryByName(dir_path, site_id))
 
-                        # let the handler insert the missing dirs.
-                        db.insert_missing_directory_tree_if_needed(dir_path, site_id)
+                    # let the handler insert the missing dirs.
+                    self.db.insert_missing_directory_tree_if_needed(dir_path, site_id)
 
-                        # dir should exist now
-                        dir = db.directoryByName(dir_path, site_id)
-                        self.assertIsNotNone(dir)
+                    # dir should exist now
+                    dir = self.db.directoryByName(dir_path, site_id)
+                    self.assertIsNotNone(dir)
 
-                        # check if new dir has expected root dir
-                        parents = db.parentDirectories(dir['dir_id'])
-                        self.assertEqual(root_dir['root_dir_id'], parents[0]['id'])
+                    # check if new dir has expected root dir
+                    parents = self.db.parentDirectories(dir['dir_id'])
+                    self.assertEqual(root_dir['root_dir_id'], parents[0]['id'])
 
     def test_insert_missing_directory_tree_if_needed_for_path_with_unknown_rootdir(self):
         """ Test core method _insertMissingDirectoryTreeIfNeeded for a path with an unknown root dir
         Should raise LookupError.
         """
-        with store.LTAStorageDb(self.dbcreds) as db:
-            self._fill_test_db_with_sites_and_root_dirs(db)
+        self._fill_test_db_with_sites_and_root_dirs()
 
-            for site in db.sites():
-                site_id = site['id']
-                with self.assertRaises(LookupError) as context:
-                    incorrect_dir_path = '/fdjsalfja5h43535h3oiu/5u905u3f'
-                    db.insert_missing_directory_tree_if_needed(incorrect_dir_path, site_id)
-                self.assertTrue('Could not find parent root dir' in str(context.exception))
+        for site in self.db.sites():
+            site_id = site['id']
+            with self.assertRaises(LookupError) as context:
+                incorrect_dir_path = '/fdjsalfja5h43535h3oiu/5u905u3f'
+                self.db.insert_missing_directory_tree_if_needed(incorrect_dir_path, site_id)
+            self.assertTrue('Could not find parent root dir' in str(context.exception))
 
     def testProjectsAndObservations(self):
-        with store.LTAStorageDb(self.dbcreds) as db:
-            #first insert a lot of data...
-            db.insertSiteIfNotExists('juelich', 'srm://lofar-srm.fz-juelich.de:8443')
-            db.insertSiteIfNotExists('sara', 'srm://srm.grid.sara.nl:8443')
-
-            juelich_root_dir_id = db.insertRootDirectory('juelich', '/pnfs/fz-juelich.de/data/lofar/ops/')
-            sara_root_dir_id = db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/ops')
-
-            juelich_projects_dir_id = db.insertSubDirectory('/pnfs/fz-juelich.de/data/lofar/ops/projects', juelich_root_dir_id)
-            sara_projects_dir_id = db.insertSubDirectory('/pnfs/grid.sara.nl/data/lofar/ops/projects', sara_root_dir_id)
-
-            for project_nr, project_name in enumerate(['lc8_001', '2017lofarobs', 'ddt5_001']):
-                # projects are sometimes stored at multiple sites
-                for projects_dir_id in [juelich_projects_dir_id, sara_projects_dir_id]:
-                    project_dir_id = db.insertSubDirectory('/pnfs/fz-juelich.de/data/lofar/ops/projects/' + project_name,
-                                                           projects_dir_id)
-                    for obs_nr in range(3):
-                        obs_name = 'L%06d' % ((project_nr+1)*1000 + obs_nr)
-                        obs_dir_id = db.insertSubDirectory('/pnfs/fz-juelich.de/data/lofar/ops/projects/' + project_name + '/' + obs_name,
-                                                           project_dir_id)
-
-                        for sb_nr in range(244):
-                            file_name = '%s_SB%03d.MS.tar' % (obs_name, sb_nr)
-                            db.insertFileInfo(file_name, 1, datetime.utcnow(), obs_dir_id, False)
-                        db.commit()
-
-            # then check the results
-            # TODO check the results
-            logger.info(pformat(db.executeQuery('select * from metainfo.project_directory', fetch=FETCH_ALL)))
-            logger.info(pformat(db.executeQuery('select * from metainfo.project_stats', fetch=FETCH_ALL)))
-            logger.info(pformat(db.executeQuery('select * from metainfo.project_observation_dataproduct', fetch=FETCH_ALL)))
+        #first insert a lot of data...
+        self.db.insertSiteIfNotExists('juelich', 'srm://lofar-srm.fz-juelich.de:8443')
+        self.db.insertSiteIfNotExists('sara', 'srm://srm.grid.sara.nl:8443')
+
+        juelich_root_dir_id = self.db.insertRootDirectory('juelich', '/pnfs/fz-juelich.de/data/lofar/ops/')
+        sara_root_dir_id = self.db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/ops')
+
+        juelich_projects_dir_id = self.db.insertSubDirectory('/pnfs/fz-juelich.de/data/lofar/ops/projects', juelich_root_dir_id)
+        sara_projects_dir_id = self.db.insertSubDirectory('/pnfs/grid.sara.nl/data/lofar/ops/projects', sara_root_dir_id)
+
+        for project_nr, project_name in enumerate(['lc8_001', '2017lofarobs', 'ddt5_001']):
+            # projects are sometimes stored at multiple sites
+            for projects_dir_id in [juelich_projects_dir_id, sara_projects_dir_id]:
+                project_dir_id = self.db.insertSubDirectory('/pnfs/fz-juelich.de/data/lofar/ops/projects/' + project_name,
+                                                            projects_dir_id)
+                for obs_nr in range(3):
+                    obs_name = 'L%06d' % ((project_nr+1)*1000 + obs_nr)
+                    obs_dir_id = self.db.insertSubDirectory('/pnfs/fz-juelich.de/data/lofar/ops/projects/' + project_name + '/' + obs_name,
+                                                            project_dir_id)
+
+                    for sb_nr in range(244):
+                        file_name = '%s_SB%03d.MS.tar' % (obs_name, sb_nr)
+                        self.db.insertFileInfo(file_name, 1, datetime.utcnow(), obs_dir_id, False)
+                    self.db.commit()
+
+        # then check the results
+        # TODO check the results
+        logger.info(pformat(self.db.executeQuery('select * from metainfo.project_directory', fetch=FETCH_ALL)))
+        logger.info(pformat(self.db.executeQuery('select * from metainfo.project_stats', fetch=FETCH_ALL)))
+        logger.info(pformat(self.db.executeQuery('select * from metainfo.project_observation_dataproduct', fetch=FETCH_ALL)))
 
 # run tests if main
 if __name__ == '__main__':
-    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
-                        level=logging.INFO)
-
     unittest.main()
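
testProjectsAndObservations still only logs the metainfo views (see the TODO). A hedged sketch of what a follow-up assertion could look like; the expected count follows from the loops above (3 projects stored at 2 sites gives 6 project directories), while "one row per project directory" is an assumption about the metainfo.project_directory view:

    # hypothetical follow-up for the TODO: assert instead of only logging
    rows = self.db.executeQuery('select * from metainfo.project_directory', fetch=FETCH_ALL)
    self.assertEqual(6, len(rows))  # assumes one row per project directory in the view
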
diff --git a/SAS/OTDB_Services/TreeStatusEvents.py b/SAS/OTDB_Services/TreeStatusEvents.py
index d6589a99126bc0d526d497a5d8b654e44f150a2f..064e42451644740f89153c9dfbe05fb87b36c654 100755
--- a/SAS/OTDB_Services/TreeStatusEvents.py
+++ b/SAS/OTDB_Services/TreeStatusEvents.py
@@ -98,7 +98,7 @@ def main():
 
     create_service(options.busname, dbcreds)
 
-def create_service(busname, dbcreds):
+def create_service(busname, dbcreds, state_file_path='~/.lofar/otdb_treestatusevent_state'):
     alive = True
     connected = False
     otdb_connection = None
@@ -124,7 +124,7 @@ def create_service(busname, dbcreds):
             if connected:
                 # Get start_time (= creation time of last retrieved record if any)
                 try:
-                    treestatuseventfilename = os.path.expanduser('~/.lofar/otdb_treestatusevent_state')
+                    treestatuseventfilename = os.path.expanduser(state_file_path)
                     with open(treestatuseventfilename, 'r') as f:
                         line = f.readline()
                         if line.rfind('.') > 0:
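
The new state_file_path parameter keeps the production default but lets tests point the service at a throwaway state file. The file's first line holds the timestamp of the last processed status change; a minimal sketch of seeding one (this mirrors what t_TreeStatusEvents.py does further below):

    from datetime import datetime, timedelta
    from tempfile import NamedTemporaryFile

    with NamedTemporaryFile(mode='w+') as state_file:
        # start two seconds in the past so fresh status changes are picked up immediately
        state_file.write((datetime.utcnow() - timedelta(seconds=2)).strftime("%Y-%m-%d %H:%M:%S"))
        state_file.flush()
        # create_service(busname, dbcreds, state_file.name) would resume from this timestamp
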
diff --git a/SAS/OTDB_Services/test/CMakeLists.txt b/SAS/OTDB_Services/test/CMakeLists.txt
index 279c28690e825ae4b41dca0b4a85b5c51636e2d1..638cc329d86cd972c6745f1bcf56f70c53a7c345 100644
--- a/SAS/OTDB_Services/test/CMakeLists.txt
+++ b/SAS/OTDB_Services/test/CMakeLists.txt
@@ -1,10 +1,19 @@
 # $Id: CMakeLists.txt 1576 2015-09-29 15:22:28Z loose $
 
-include(LofarCTest)
+if(BUILD_TESTING)
+    include(LofarCTest)
+
+    include(FindPythonModule)
+    find_python_module(dateutil)
+
+    include(PythonInstall)
+    python_install(otdb_common_testing.py DESTINATION lofar/sas/otdb/testing)
+
+    lofar_add_test(t_TreeService)
+    lofar_add_test(t_TreeStatusEvents)
+endif()
+
 
-lofar_find_package(Python 3.4 REQUIRED)
-find_python_module(testing.postgresql)
 
-lofar_add_test(t_TreeService)
-lofar_add_test(t_TreeStatusEvents)
 
diff --git a/SAS/OTDB_Services/test/otdb_common_testing.py b/SAS/OTDB_Services/test/otdb_common_testing.py
new file mode 100755
index 0000000000000000000000000000000000000000..c568187695f03f07a5a676d6d6b957c875bd8ffa
--- /dev/null
+++ b/SAS/OTDB_Services/test/otdb_common_testing.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2012-2015    ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.    See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+# $Id$
+import subprocess
+import os
+import logging
+
+logger = logging.getLogger(__name__)
+
+from lofar.common.testing.postgres import PostgresTestDatabaseInstance
+
+
+class OTDBTestInstance(PostgresTestDatabaseInstance):
+    '''Helper class to set up and tear down a disposable test OTDB postgres instance with the given gzipped schema dump applied'''
+    def __init__(self, gzipped_schema_dump_filename):
+        super().__init__()
+        self.gzipped_schema_dump_filename = gzipped_schema_dump_filename
+
+    def apply_database_schema(self):
+        logger.info('applying OTDB sql schema to %s', self.dbcreds)
+
+        cmd1 = ['gzip', '-dc', self.gzipped_schema_dump_filename]
+
+        cmd2 = ['psql', '-U', self.dbcreds.user, '-h', self.dbcreds.host,
+                '-p', str(self.dbcreds.port), self.dbcreds.database]
+
+        logger.info('executing: %s', ' '.join(cmd1))
+        logger.info('executing: %s', ' '.join(cmd2))
+
+        proc1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE)
+        proc2 = subprocess.Popen(cmd2, stdin=proc1.stdout)
+        proc1.wait(timeout=60)
+        proc2.wait(timeout=60)
+
+        # stderr is not captured above, so report the exit code on failure
+        if proc1.returncode != 0:
+            raise RuntimeError("Could not execute cmd: '%s' returncode=%s" % (' '.join(cmd1), proc1.returncode))
+
+        if proc2.returncode != 0:
+            raise RuntimeError("Could not execute cmd: '%s' returncode=%s" % (' '.join(cmd2), proc2.returncode))
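
Because OTDBTestInstance inherits the context-manager behaviour of PostgresTestDatabaseInstance, spinning up a disposable OTDB takes a single with-statement; this is exactly how the two tests below use it:

    # usage sketch: the context manager creates a throwaway postgres server,
    # applies the gzipped OTDB dump, and tears everything down again on exit
    with OTDBTestInstance('t_TreeService.in.unittest_db.dump.gz') as test_db:
        print(test_db.dbcreds)  # credentials for connecting to the temporary OTDB
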
diff --git a/SAS/OTDB_Services/test/t_TreeService.py b/SAS/OTDB_Services/test/t_TreeService.py
index 598fa2c1c9dc0422099d004a6bf1dc52cd8a11a6..02070ccdb8f89b7ae88dfafbe103695d2924598d 100644
--- a/SAS/OTDB_Services/test/t_TreeService.py
+++ b/SAS/OTDB_Services/test/t_TreeService.py
@@ -28,49 +28,15 @@ KeyUpdateCommand        : function to update the value of multiple (existing) ke
 StatusUpdateCommand     : finction to update the status of a tree.
 """
 
-import logging
-import testing.postgresql
-import psycopg2
-import subprocess
 from lofar.sas.otdb.TreeService import create_service
-from lofar.common.dbcredentials import Credentials
-from lofar.messaging import TemporaryExchange, RPCClient
+from lofar.messaging import TemporaryExchange, RPCClient, BusListenerJanitor
+from lofar.sas.otdb.testing.otdb_common_testing import OTDBTestInstance
 
-logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+import logging
 logger = logging.getLogger(__name__)
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
 
-try:
-    postgresql = testing.postgresql.PostgresqlFactory()()
-
-    database_credentials = Credentials()
-    database_credentials.host = postgresql.dsn()['host']
-    database_credentials.database = postgresql.dsn()['database']
-    database_credentials.port = postgresql.dsn()['port']
-
-    # connect to test-db as root
-    conn = psycopg2.connect(**postgresql.dsn())
-    cursor = conn.cursor()
-
-    # set credentials to be used during tests
-    database_credentials.user = 'otdb_test_user'
-    database_credentials.password = 'otdb_test_password'  # cannot be empty...
-
-    # create user role
-    query = "CREATE USER %s WITH SUPERUSER PASSWORD '%s'" % (database_credentials.user, database_credentials.password)
-    cursor.execute(query)
-    conn.commit()
-    conn.close()
-
-    cmd1 = ['gzip', '-dc', 't_TreeService.in.unittest_db.dump.gz']
-
-    cmd2 = ['psql', '-U', database_credentials.user, '-h', database_credentials.host,
-            '-p', str(database_credentials.port), database_credentials.database]
-
-    proc1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE)
-    proc2 = subprocess.Popen(cmd2, stdin=proc1.stdout)
-    proc1.wait(timeout=60)
-    proc2.wait(timeout=60)
-
+with OTDBTestInstance('t_TreeService.in.unittest_db.dump.gz') as test_db:
     def do_rpc_catch_exception(exc_text, rpc_instance, method_name, arg_dict):
         try:
             print("** Executing {0}({1})...".format(method_name, arg_dict))
@@ -90,7 +56,7 @@ try:
     with TemporaryExchange(__name__) as tmp_exchange:
         exchange = tmp_exchange.address
 
-        with create_service(exchange=exchange, dbcreds=database_credentials) as service:
+        with BusListenerJanitor(create_service(exchange=exchange, dbcreds=test_db.dbcreds)) as service:
 
             with RPCClient(service_name=service.service_name, exchange=exchange, timeout=10) as otdbRPC:    # Existing: otdb_id:1099268, mom_id:353713
                 do_rpc(otdbRPC, "TaskGetIDs", {'OtdbID': 1099268, 'MomID': 353713 })
@@ -190,5 +156,3 @@ try:
                 do_rpc_catch_exception('on invalid key', otdbRPC, "TaskSetSpecification", {'OtdbID':1099266,
                        'Specification':{'LOFAR.ObsSW.Observation.ObservationControl.PythonControl.NoSuchKey':'NameOfTestHost'}})
 
-finally:
-    postgresql.stop()
diff --git a/SAS/OTDB_Services/test/t_TreeStatusEvents.py b/SAS/OTDB_Services/test/t_TreeStatusEvents.py
index 0b070818a72eaa82b7a11afa068d022809d9d91c..f29a484f76cffc4fdfc511a4f844c9f7e9a1a334 100644
--- a/SAS/OTDB_Services/test/t_TreeStatusEvents.py
+++ b/SAS/OTDB_Services/test/t_TreeStatusEvents.py
@@ -28,69 +28,41 @@ KeyUpdateCommand        : function to update the value of multiple (existing) ke
 StatusUpdateCommand     : finction to update the status of a tree.
 """
 
-import sys, pg
-import logging
-import testing.postgresql
-import psycopg2
-import subprocess
 from lofar.messaging.messagebus import *
 from lofar.sas.otdb.TreeStatusEvents import create_service
-from lofar.common.dbcredentials import Credentials
 import threading
+import sys
+from datetime import datetime, timedelta
+from tempfile import NamedTemporaryFile
 
-logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+import logging
 logger = logging.getLogger(__name__)
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
 
-try:
-    postgresql = testing.postgresql.PostgresqlFactory()()
-
-    database_credentials = Credentials()
-    database_credentials.host = postgresql.dsn()['host']
-    database_credentials.database = postgresql.dsn()['database']
-    database_credentials.port = postgresql.dsn()['port']
-
-    # connect to test-db as root
-    conn = psycopg2.connect(**postgresql.dsn())
-    cursor = conn.cursor()
-
-    # set credentials to be used during tests
-    database_credentials.user = 'otdb_test_user'
-    database_credentials.password = 'otdb_test_password'  # cannot be empty...
-
-    # create user role
-    query = "CREATE USER %s WITH SUPERUSER PASSWORD '%s'" % (database_credentials.user, database_credentials.password)
-    cursor.execute(query)
-    conn.commit()
-    conn.close()
-
-    cmd1 = ['gzip', '-dc', 't_TreeStatusEvents.in.unittest_db.dump.gz']
-
-    cmd2 = ['psql', '-U', database_credentials.user, '-h', database_credentials.host,
-            '-p', str(database_credentials.port), database_credentials.database]
-
-    proc1 = subprocess.Popen(cmd1, stdout=subprocess.PIPE)
-    proc2 = subprocess.Popen(cmd2, stdin=proc1.stdout)
-    proc1.wait(timeout=60)
-    proc2.wait(timeout=60)
-
-    otdb_connection = pg.connect(**database_credentials.pg_connect_options())
+from lofar.sas.otdb.testing.otdb_common_testing import OTDBTestInstance
 
+with OTDBTestInstance('t_TreeStatusEvents.in.unittest_db.dump.gz') as test_db:
     with TemporaryExchange(__name__) as tmp_exchange:
-        with TemporaryQueue(__name__, exchange=tmp_exchange.address) as tmp_queue:
+        with tmp_exchange.create_temporary_queue() as tmp_queue:
             with tmp_queue.create_frombus() as frombus:
 
-                t = threading.Thread(target=create_service, args=(tmp_exchange.address, database_credentials))
-                t.daemon = True
-                t.start()
+                with NamedTemporaryFile(mode='w+') as state_file:
+                    state_file.file.write((datetime.utcnow()-timedelta(seconds=2)).strftime("%Y-%m-%d %H:%M:%S"))
+                    state_file.file.flush()
+
+                    t = threading.Thread(target=create_service, args=(tmp_exchange.address, test_db.dbcreds, state_file.name))
+                    t.daemon = True
+                    t.start()
+
+                    with test_db.create_database_connection() as db:
+                        db.executeQuery("select setTreeState(1, %d, %d::INT2,'%s'::boolean);" % (1099266, 500, False))
+                        db.commit()
 
-                otdb_connection.query("select setTreeState(1, %d, %d::INT2,'%s')" % (1099266, 500, False))
-                msg = frombus.receive(timeout=5, acknowledge=True)	  # TreeStateEVent are send every 2 seconds
-                logger.info(msg)
-                try:
-                    ok = (msg.content['treeID'] == 1099266 and msg.content['state'] == 'queued')
-                except IndexError:
-                    ok = False
-    sys.exit(not ok)   # 0 = success
+                    msg = frombus.receive(timeout=500, acknowledge=True)  # TreeStateEvents are sent every 2 seconds
+                    logger.info(msg)
+                    try:
+                        ok = (msg.content['treeID'] == 1099266 and msg.content['state'] == 'queued')
+                    except (KeyError, AttributeError):  # missing content keys, or no message received
+                        ok = False
 
-finally:
-    postgresql.stop()
+sys.exit(not ok)   # 0 = success
diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/CMakeLists.txt b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/CMakeLists.txt
index ccac9fb675a0aa71459daa90738baa0ccb2b9256..9c8006f1f22ee8522d9795544941aa1f916f682c 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/CMakeLists.txt
+++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/CMakeLists.txt
@@ -1,13 +1,12 @@
 # $Id: CMakeLists.txt 32679 2015-10-26 09:31:56Z schaap $
-include(LofarCTest)
 
 if(BUILD_TESTING)
-    include(PythonInstall)
-    include(FindPythonModule)
+    include(LofarCTest)
 
-    find_python_module(testing.postgresql)
+    include(FindPythonModule)
     find_python_module(dateutil)
 
+    include(PythonInstall)
     python_install(radb_common_testing.py DESTINATION lofar/sas/resourceassignment/database/testing)
 
     lofar_add_test(t_radb_functionality)
diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py
index e1d89daaf30d188d0da93daa25011c5c9e87802c..a5ae1f88d1b9e87b24f3812b6a62cede608ea907 100755
--- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py
+++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py
@@ -27,93 +27,64 @@ import logging
 
 logger = logging.getLogger(__name__)
 
-import testing.postgresql
-from lofar.common.dbcredentials import Credentials
 from lofar.common.postgres import PostgresDatabaseConnection, FETCH_ALL
+from lofar.common.testing.postgres import PostgresTestMixin, PostgresTestDatabaseInstance
 from lofar.sas.resourceassignment.database.radb import RADatabase
 
-class RADBCommonTestMixin():
+class RADBTestDatabaseInstance(PostgresTestDatabaseInstance):
     '''
-    A common test mixin class from which you can derive to get a freshly setup postgres testing instance with the latest RADB sql setup scripts applied.
+    A freshly set-up postgres test database instance with the latest RADB sql setup scripts applied.
     '''
 
-    @classmethod
-    def setUpClass(cls):
-        logger.info('setting up test database instance...')
-        # connect to shared test db
-        cls.postgresql = testing.postgresql.PostgresqlFactory(cache_initialized_db=True)()
-        cls.dbcreds    = Credentials()
+    def __init__(self) -> None:
+        super().__init__(user_name='resourceassignment')
 
-        # update credentials (e.g. port changes for each test)
-        cls.dbcreds.host = cls.postgresql.dsn()['host']
-        cls.dbcreds.database = cls.postgresql.dsn()['database']
-        cls.dbcreds.port = cls.postgresql.dsn()['port']
+    def apply_database_schema(self):
+        logger.info('applying RADB sql schema to %s', self.dbcreds)
 
-        # connect to db as root
-        conn = psycopg2.connect(**cls.postgresql.dsn())
-        cursor = conn.cursor()
+        with PostgresDatabaseConnection(self.dbcreds) as db:
+            # populate db tables
+            # These are applied in given order to set up test db
+            # Note: cannot use create_and_populate_database.sql since '\i' is not understood by cursor.execute()
+            sql_basepath = os.environ['LOFARROOT'] + "/share/radb/sql/"
+            sql_createdb_paths = [sql_basepath + "create_database.sql",
+                                  sql_basepath + "/add_resource_allocation_statics.sql",
+                                  sql_basepath + "/add_virtual_instrument.sql",
+                                  sql_basepath + "/add_notifications.sql",
+                                  sql_basepath + "/add_functions_and_triggers.sql"]
 
-        # set credentials to be used during tests
-        cls.dbcreds.user = 'resourceassignment'
-        cls.dbcreds.password = 'secret'    # cannot be empty...
+            for sql_path in sql_createdb_paths:
+                logger.debug("setting up database. applying sql file: %s", sql_path)
+                with open(sql_path) as sql:
+                    db.executeQuery(sql.read())
+                    db.commit()
 
-        # create user role
-        # Note: NOSUPERUSER currently raises "permission denied for schema virtual_instrument"
-        # Maybe we want to sort out user creation and proper permissions in the sql scripts?
-        query = "CREATE USER %s WITH SUPERUSER PASSWORD '%s'" % (cls.dbcreds.user, cls.dbcreds.password)
-        cursor.execute(query)
+    def create_database_connection(self) -> RADatabase:
+        self.radb = RADatabase(self.dbcreds)
+        return self.radb
 
-        cursor.close()
-        conn.commit()
-        conn.close()
 
-        logger.info('Finished setting up test database instance. It is avaiblable at: %s', cls.dbcreds.stringWithHiddenPassword())
-
-        cls.radb = RADatabase(cls.dbcreds)
-        cls.radb.connect()
+class RADBCommonTestMixin(PostgresTestMixin):
+    '''
+    A common test mixin class from which you can derive to get a freshly setup postgres testing instance with the latest RADB sql setup scripts applied.
+    '''
 
-        # set up a fresh copy of the RADB sql schema
-        cls._setup_database(cls.radb)
+    @classmethod
+    def setUpClass(cls):
+        super().setUpClass()
+        cls.radb = cls.db
 
     def setUp(self):
         # wipe all tables by truncating specification which cascades into the rest.
         logger.debug("setUp: Wiping radb tables for each unittest.")
-        self.radb.executeQuery("TRUNCATE TABLE resource_allocation.specification CASCADE;")
-        self.radb.commit()
+        self.db.executeQuery("TRUNCATE TABLE resource_allocation.specification CASCADE;")
+        self.db.executeQuery("TRUNCATE TABLE resource_allocation.resource_usage CASCADE;")
+        self.db.executeQuery("TRUNCATE TABLE resource_allocation.resource_usage_delta CASCADE;")
+        self.db.commit()
 
     @classmethod
-    def tearDownClass(cls):
-        cls.radb.disconnect()
-
-        db_log_file_name = os.path.join(cls.postgresql.base_dir, '%s.log' % cls.postgresql.name)
-        logger.info('Printing test-postgress-database server log: %s', db_log_file_name)
-        with open(db_log_file_name, 'r') as db_log_file:
-            for line in db_log_file.readlines():
-                print("  postgres log: %s" % line.strip(), file=sys.stderr)
-
-        logger.info('removing test RA database at %s', cls.dbcreds.stringWithHiddenPassword())
-        cls.postgresql.stop()
-        logger.info('test RA removed')
-
-    @staticmethod
-    def _setup_database(db: PostgresDatabaseConnection):
-        logger.info('applying RADB sql schema to %s', db)
-
-        # populate db tables
-        # These are applied in given order to set up test db
-        # Note: cannot use create_and_populate_database.sql since '\i' is not understood by cursor.execute()
-        sql_basepath = os.environ['LOFARROOT'] + "/share/radb/sql/"
-        sql_createdb_paths = [sql_basepath + "create_database.sql",
-                              sql_basepath + "/add_resource_allocation_statics.sql",
-                              sql_basepath + "/add_virtual_instrument.sql",
-                              sql_basepath + "/add_notifications.sql",
-                              sql_basepath + "/add_functions_and_triggers.sql"]
-
-        for sql_path in sql_createdb_paths:
-            logger.debug("setting up database. applying sql file: %s", sql_path)
-            with open(sql_path) as sql:
-                db.executeQuery(sql.read())
-                db.commit()
+    def create_test_db_instance(cls) -> RADBTestDatabaseInstance:
+        return RADBTestDatabaseInstance()
+
 
 class RADBCommonTest(RADBCommonTestMixin, unittest.TestCase):
     # database created?
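
Put together, a test built on the refactored mixin gets one fresh RADB server per test class (via PostgresTestMixin.setUpClass) plus wiped resource_allocation tables before every test method. A sketch of a derived test (hypothetical method name; executeQuery and FETCH_ALL as imported above):

    class ExampleRADBTest(RADBCommonTestMixin, unittest.TestCase):
        def test_specification_table_starts_empty(self):
            # setUp truncated the tables, so the query should return no rows
            rows = self.radb.executeQuery("SELECT * FROM resource_allocation.specification;", fetch=FETCH_ALL)
            self.assertEqual([], rows)
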