diff --git a/LTA/ltastorageoverview/test/db_performance_test.py b/LTA/ltastorageoverview/test/db_performance_test.py
index b603cec0881d9c8a71ac7ad05b510f81d737be1d..04b5f7f92831129c9bacd7da231dd1897350acf1 100755
--- a/LTA/ltastorageoverview/test/db_performance_test.py
+++ b/LTA/ltastorageoverview/test/db_performance_test.py
@@ -29,59 +29,79 @@ logger = logging.getLogger()
 def main():
     from optparse import OptionParser
     from lofar.common import dbcredentials
+    import testing.postgresql
+    import psycopg2
 
     # Check the invocation arguments
-    parser = OptionParser("%prog [options]", description='wipes the lta storageoverview database, and executes a performance test by inserting many files.')
+    parser = OptionParser("%prog [options]", description='execute a performance test by inserting many files on an empty test database.')
     parser.add_option('-V', '--verbose', dest='verbose', action='store_true', help='verbose logging')
-    parser.add_option_group(dbcredentials.options_group(parser))
-    parser.set_defaults(dbcredentials="LTASO")
     (options, args) = parser.parse_args()
 
     logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
                         level=logging.DEBUG if options.verbose else logging.INFO)
 
-    dbcreds = dbcredentials.parse_options(options)
-    logger.info("Using dbcreds: %s" % dbcreds.stringWithHiddenPassword())
+    # create a test webservice.db
+    logger.info(' creating test postgres server')
+    with testing.postgresql.Postgresql() as test_psql:
+        dsn = test_psql.dsn()
+        logger.info(' created test postgres server, dsn=%s', dsn)
 
-    base_date = datetime.utcnow()
+        dbcreds = dbcredentials.Credentials()
+        dbcreds.user = 'test_user'
+        dbcreds.password = 'test_password'
 
-    db = store.LTAStorageDb(dbcreds, options.verbose)
-    if raw_input('Are you sure you want to wipe the database? <y>/<n>:').lower() not in ['yes', 'y']:
-        return
+        with psycopg2.connect(**dsn) as conn:
+            cursor = conn.cursor()
+            #use same user/pass as stored in local webservice.dbcreds
+            query = "CREATE USER %s WITH SUPERUSER PASSWORD '%s'" % (dbcreds.user, dbcreds.password)
+            cursor.execute(query)
 
-    db.executeQuery('TRUNCATE lta.site CASCADE;', store.FETCH_NONE)
-    db.insertSite('sara', 'srm://srm.siteA.nl:8444')
-    rootdir_id = db.insertRootDirectory('sara', '/pnfs/grid.siteA.nl/data/lofar/ops')
-    projects_dir_id = db.insertSubDirectory(rootdir_id, '/pnfs/grid.siteA.nl/data/lofar/ops/projects')
+            create_script_path = os.path.normpath(os.path.join(os.environ['LOFARROOT'], 'share', 'ltaso', 'create_db_ltastorageoverview.sql'))
+            logger.info(' running ltaso create script create_script=%s', create_script_path)
+            with open(create_script_path, 'r') as script:
+                cursor.execute(script.read())
+            logger.info(' completed ltaso create script')
 
-    # junk_dir_id = db.insertSubDirectory(rootdir_id, '/pnfs/grid.siteA.nl/data/lofar/ops/misc')
-    # fileinfos = [('testfile%d' % (i,), i+100, base_date, junk_dir_id) for i in range(0, 43)]
-    # file_ids = db.insertFileInfos(fileinfos)
-    # total_num_files_inserted = len(file_ids)
-    total_num_files_inserted = 0
+        # copy the test postgres server settings into webservice.dbcreds
+        # we can use these webservice.dbcreds in each test method to connect to the testing ltaso database
+        dbcreds.host = dsn['host']
+        dbcreds.database = dsn['database']
+        dbcreds.port = dsn['port']
 
-    with open('db_perf.csv', 'w') as file:
-        for cycle_nr in range(1, 10):
-            for project_nr in range(1, 10):
-                # project_name = 'lc%d_%03d/%d' % (cycle_nr, project_nr, os.getpid())
-                project_name = 'lc%d_%03d' % (cycle_nr, project_nr)
-                projectdir_id = db.insertSubDirectory(projects_dir_id, '/pnfs/grid.siteA.nl/data/lofar/ops/projects/%s' % (project_name,))
+        logger.info('finished setting up test LTASO database')
 
-                obs_base_id = cycle_nr*100000+project_nr*1000
-                for obs_nr, obsId in enumerate(range(obs_base_id, obs_base_id+20)):
-                    obsName = 'L%s' % obsId
+        base_date = datetime.utcnow()
 
-                    obsdir_id = db.insertSubDirectory(projectdir_id, '/pnfs/grid.siteA.nl/data/lofar/ops/projects/%s/%s' % (project_name, obsName))
+        db = store.LTAStorageDb(dbcreds, options.verbose)
 
-                    fileinfos = [('%s_SB%3d' % (obsName, sbNr), 1000+sbNr+project_nr*cycle_nr, base_date + timedelta(days=10*cycle_nr+project_nr, minutes=obs_nr, seconds=sbNr), obsdir_id) for sbNr in range(0, 2)]
-                    now = datetime.utcnow()
-                    file_ids = db.insertFileInfos(fileinfos)
-                    total_num_files_inserted += len(file_ids)
-                    elapsed = totalSeconds(datetime.utcnow() - now)
-                    line = '%s,%s' % (total_num_files_inserted, elapsed)
-                    print line
-                    file.write(line + '\n')
+        db.insertSite('sara', 'srm://srm.siteA.nl:8444')
+        rootdir_id = db.insertRootDirectory('sara', '/pnfs/grid.siteA.nl/data/lofar/ops')
+        projects_dir_id = db.insertSubDirectory(rootdir_id, '/pnfs/grid.siteA.nl/data/lofar/ops/projects')
+
+        total_num_files_inserted = 0
+
+        with open('db_perf.csv', 'w') as file:
+            for cycle_nr in range(1, 10):
+                for project_nr in range(1, 10):
+                    # project_name = 'lc%d_%03d/%d' % (cycle_nr, project_nr, os.getpid())
+                    project_name = 'lc%d_%03d' % (cycle_nr, project_nr)
+                    projectdir_id = db.insertSubDirectory(projects_dir_id, '/pnfs/grid.siteA.nl/data/lofar/ops/projects/%s' % (project_name,))
+
+                    obs_base_id = cycle_nr*100000+project_nr*1000
+                    for obs_nr, obsId in enumerate(range(obs_base_id, obs_base_id+20)):
+                        obsName = 'L%s' % obsId
+
+                        obsdir_id = db.insertSubDirectory(projectdir_id, '/pnfs/grid.siteA.nl/data/lofar/ops/projects/%s/%s' % (project_name, obsName))
+
+                        fileinfos = [('%s_SB%3d' % (obsName, sbNr), 1000+sbNr+project_nr*cycle_nr, base_date + timedelta(days=10*cycle_nr+project_nr, minutes=obs_nr, seconds=sbNr), obsdir_id) for sbNr in range(0, 2)]
+                        now = datetime.utcnow()
+                        file_ids = db.insertFileInfos(fileinfos)
+                        total_num_files_inserted += len(file_ids)
+                        elapsed = totalSeconds(datetime.utcnow() - now)
+                        line = '%s,%s' % (total_num_files_inserted, elapsed)
+                        print line
+                        file.write(line + '\n')
 
 
 if __name__ == "__main__":
     main()
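Note (not part of the patch): below is a minimal sketch of the testing.postgresql pattern this change relies on, i.e. spinning up a throw-away PostgreSQL instance and connecting to it with psycopg2 so no pre-existing LTASO database can be wiped. It assumes the testing.postgresql and psycopg2 packages plus local PostgreSQL server binaries are available; the demo table and values are purely illustrative and do not appear in the patch.

import psycopg2
import testing.postgresql

# Spin up a throw-away PostgreSQL instance in a temporary directory;
# it is shut down and deleted again when the 'with' block exits.
with testing.postgresql.Postgresql() as test_psql:
    dsn = test_psql.dsn()  # dict with 'host', 'port', 'user', 'database'

    # Connect to the temporary server with psycopg2, as the patched test does.
    with psycopg2.connect(**dsn) as conn:
        cursor = conn.cursor()
        # Hypothetical schema and data, only to show the round trip works.
        cursor.execute("CREATE TABLE demo (id serial PRIMARY KEY, name text)")
        cursor.execute("INSERT INTO demo (name) VALUES (%s)", ('example',))
        cursor.execute("SELECT count(*) FROM demo")
        print(cursor.fetchone()[0])  # prints 1

# At this point the server and all its data are gone; nothing to clean up.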