diff --git a/CEP/LAPS/DBToQDeamon/src/DBToQDeamon.py b/CEP/LAPS/DBToQDeamon/src/DBToQDeamon.py
index 36f9bdb6825bcdc30607abed8381c2e454fdfdac..f608ab4b64db32fd0be50f775ad439cf1a840fdc 100755
--- a/CEP/LAPS/DBToQDeamon/src/DBToQDeamon.py
+++ b/CEP/LAPS/DBToQDeamon/src/DBToQDeamon.py
@@ -17,26 +17,30 @@
 # with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
 #
 # $Id$
-import os,sys,time,pg
+import sys
+import time
+import pg
 from optparse import OptionParser
 import LAPS.MsgBus
+import getpass
 
 
-def createParsetFile(treeID, nodeID, fileName):
+def create_parset_file(tree_id, node_id, filename):
     """
-    Create a parset file with name fileName from tree treeID starting at nodeID.
+    Create a parset file with name filename from tree tree_id starting at node_id.
     """
-    parset = otdb.query("select * from exportTree(%s, %s, %s)" % (1, treeID, nodeID)).getresult()
-    print "   Creating parset %s" % fileName
-    file = open(fileName, 'w');
+    parset = otdb.query("select * from exportTree(%s, %s, %s)" % (1, tree_id, node_id)).getresult()
+    print "   Creating parset %s" % filename
+    file = open(filename, 'w')
     file.write(parset[0][0])
     file.close()
 
+
 if __name__ == '__main__':
     """
     DBToQDeamon checks the LOFAR database every n seconds for new AutoPipeline trees.
     """
-    parser = OptionParser("Usage: %prog [options]" )
+    parser = OptionParser("Usage: %prog [options]")
     parser.add_option("-D", "--database",
                       dest="dbName",
                       type="string",
@@ -49,6 +53,18 @@ if __name__ == '__main__':
                       default="sasdb",
                       help="Hostname of OTDB database")
 
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default="5432",
+                      help="Port of OTDB database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of OTDB database")
+
     # parse arguments
     (options, args) = parser.parse_args()
 
@@ -60,9 +76,13 @@ if __name__ == '__main__':
 
     dbName = options.dbName
     dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+
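+    # ask for the database password interactively instead of hard-coding it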
+    dbPassword = getpass.getpass()
 
     # calling stored procedures only works from the pg module for some reason.
-    otdb = pg.connect(user="postgres", host=dbHost, dbname=dbName)
+    otdb = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
 
     # connect to messaging system
     msgbus = LAPS.MsgBus.Bus("LAPS.retrieved.parsets")
@@ -72,18 +92,18 @@ if __name__ == '__main__':
     for t in treeList:
         print t['treeid']
         topNodeID = otdb.query("select nodeid from getTopNode(%s)" % t['treeid']).getresult()[0][0]
-        parset = otdb.query("select * from exportTree(%s, %s, %s)" % (1, t['treeid'], topNodeID)).getresult()
-        ###print parset[0][0]
+        parset = otdb.query("select * from exportTree(%s, %s, %s)" %
+                            (1, t['treeid'], topNodeID)).getresult()
+        # print parset[0][0]
 
-	### send( message , subject )
+        # send( message , subject )
         while True:
-		# 1000 msg / sec ?
-		time.sleep(0.01) 
-		msgbus.send(parset[0][0],"Observation%d" %(t['treeid']))
+            # 1000 msg / sec ?
+            time.sleep(0.01)
+            msgbus.send(parset[0][0], "Observation%d" % (t['treeid']))
 
+        # set state to 'queued'
+        # otdb.query("select * from setTreeState(1, %s, 500, false)" % t['treeid'])
 
-        ### set state to 'queued'
-        ### otdb.query("select * from setTreeState(1, %s, 500, false)" % t['treeid'])
-    
     otdb.close()
     sys.exit(0)
diff --git a/MAC/Deployment/data/Coordinates/CoordMenu.py b/MAC/Deployment/data/Coordinates/CoordMenu.py
index 717b37f801163be634f1cee32b5a8a36a0052c63..5af6b84ab0f47f3116ce709ddd29dbede58faa7b 100755
--- a/MAC/Deployment/data/Coordinates/CoordMenu.py
+++ b/MAC/Deployment/data/Coordinates/CoordMenu.py
@@ -1,17 +1,15 @@
 #!/usr/bin/python
 # P.Donker ASTRON
+import sys
+import pg
+from subprocess import Popen
+import os
+import getpass
+from optparse import OptionParser
 
-import sys,pgdb,pg
-from subprocess import *
-import os,sys,time
-from database import *
+VERSION = '0.0.1'  # version of this script
+default_targetdate = '2009.5'
 
-# get info from database.py
-dbName=getDBname()
-dbHost=getDBhost()
-
-VERSION = '0.0.1' # version of this script
-default_targetdate='2009.5'
 
 def menu():
     print """
@@ -35,68 +33,81 @@ def menu():
     |_____________________________________|
     """
 
-def getInputWithDefault(prompt, defaultValue):
-    answer = defaultValue
-    answer = raw_input(prompt+" ["+str(defaultValue)+"]: ")
-    if (len(answer)==0): answer=defaultValue
+
+def get_input_with_default(prompt, default_value):
+    answer = default_value
+    answer = raw_input(prompt+" ["+str(default_value)+"]: ")
+    if (len(answer) == 0):
+        answer = default_value
     return answer
 
-def create_CDB():
+
+def create_cdb():
     print 'Creating new database'
     res = Popen('./create_CDB.sh').wait()
     print res
-    
-def create_CDB_objects():
+
+
+def create_cdb_objects():
     print 'Creating database objects'
     res = Popen('./create_CDB_objects.py').wait()
     print res
 
+
 def load_normal_vectors():
     print 'Loading normal vectors'
-    filename = getInputWithDefault("enter filename to load","data/normal_vectors.dat")
+    filename = get_input_with_default("enter filename to load", "data/normal_vectors.dat")
     if len(filename) == 0:
         print 'Error, No filename given'
         sys.exit()
     if not os.path.exists(filename):
         print "File does not exist"
         sys.exit()
-    res = Popen(['./load_normal_vectors.py',filename]).wait()
-    if (res != 0): sys.exit(1)
-    #time.sleep(3)
-    
+    res = Popen(['./load_normal_vectors.py', filename]).wait()
+    if (res != 0):
+        sys.exit(1)
+    # time.sleep(3)
+
+
 def load_rotation_matrices():
     print 'Loading rotation matrices'
-    filename = getInputWithDefault("enter filename to load","data/rotation_matrices.dat")
+    filename = get_input_with_default("enter filename to load", "data/rotation_matrices.dat")
     if len(filename) == 0:
         print 'Error, No filename given'
         sys.exit()
     if not os.path.exists(filename):
         print "File does not exist"
         sys.exit()
-    res = Popen(['./load_rotation_matrices.py',filename]).wait()
-    if (res != 0): sys.exit(1)
-    #time.sleep(3)
-    
+    res = Popen(['./load_rotation_matrices.py', filename]).wait()
+    if (res != 0):
+        sys.exit(1)
+    # time.sleep(3)
+
+
 def load_hba_rotations():
     print 'Loading hba field rotations'
-    filename = getInputWithDefault("enter filename to load","data/hba-rotations.csv")
+    filename = get_input_with_default("enter filename to load", "data/hba-rotations.csv")
     if len(filename) == 0:
         print 'Error, No filename given'
         sys.exit()
     if not os.path.exists(filename):
         print "File does not exist"
         sys.exit()
-    res = Popen(['./load_hba_rotations.py',filename]).wait()
-    if (res != 0): sys.exit(1)
-    #time.sleep(3)
+    res = Popen(['./load_hba_rotations.py', filename]).wait()
+    if (res != 0):
+        sys.exit(1)
+    # time.sleep(3)
+
 
 def calculate_hba_deltas():
     print 'calculating hba-deltas'
-    #time.sleep(3)
+    # time.sleep(3)
     res = Popen(['./calc_hba_deltas.py']).wait()
-    if (res != 0): sys.exit(1)
-    
-def load_all_ETRF():
+    if (res != 0):
+        sys.exit(1)
+
+
+def load_all_etrf():
     print 'loading all ETRF files from .//ETRF_FILES'
     os.chdir(os.curdir+'/ETRF_FILES')
     dirs = os.listdir(os.curdir)
@@ -105,107 +116,183 @@ def load_all_ETRF():
         files = os.listdir(os.curdir)
         for filename in files:
             if not os.path.exists(filename):
-                print "File ",filename,"does not exist"
+                print "File ", filename, "does not exist"
                 sys.exit()
-            res = Popen(['../../load_expected_pos.py',filename]).wait()
-            if (res != 0): sys.exit(1)
+            res = Popen(['../../load_expected_pos.py', filename]).wait()
+            if (res != 0):
+                sys.exit(1)
         os.chdir(os.pardir)
     os.chdir(os.pardir)
-    
+
+
 def load_measurement():
     print 'load one measurement file'
-    filename = getInputWithDefault("enter filename to load","")
+    filename = get_input_with_default("enter filename to load", "")
     if len(filename) == 0:
         print 'Error, No filename given'
         sys.exit()
     if not os.path.exists(filename):
-        print "File ",filename,"does not exist"
+        print "File ", filename, "does not exist"
         sys.exit()
-    res = Popen(['./load_measurementfile.py',filename]).wait()
-    if (res != 0): sys.exit(1)
+    res = Popen(['./load_measurementfile.py', filename]).wait()
+    if (res != 0):
+        sys.exit(1)
 
-def transform_all():
-    db = pg.connect(user="postgres", host=dbHost, dbname=dbName)
+
+def transform_all(db_host, db_port, db_name, db_user, db_password):
+    db = pg.connect(user=db_user, host=db_host, dbname=db_name, port=db_port, passwd=db_password)
     print 'Transform all ETRF coordinates to ITRF coordinates for given date'
-    target  = getInputWithDefault("Enter target_date",default_targetdate)
-    all_stations=db.query("select distinct o.stationname from object o inner join field_rotations r on r.id = o.id").getresult();
-    ref_stations=db.query("select distinct o.stationname from object o inner join reference_coord r on r.id = o.id").getresult();
-    
+    target = get_input_with_default("Enter target_date", default_targetdate)
+    sql = "select distinct o.stationname from object o inner join field_rotations r on r.id = o.id"
+    all_stations = db.query(sql).getresult()
+    sql = "select distinct o.stationname from object o inner join reference_coord r on r.id = o.id"
+    ref_stations = db.query(sql).getresult()
+
     for stationname in ref_stations:
         station = stationname[0]
-        if 0 != Popen(['./calc_coordinates.py',station,"LBA",target]).wait(): sys.exit(1)
-        if 0 != Popen(['./calc_coordinates.py',station,"CLBA",target]).wait(): sys.exit(1)
-        #if station[:1] == 'C': # core station
-        if 0 != Popen(['./calc_coordinates.py',station,"HBA0",target]).wait(): sys.exit(1)
-        if 0 != Popen(['./calc_coordinates.py',station,"CHBA0",target]).wait(): sys.exit(1)
-        if 0 != Popen(['./calc_coordinates.py',station,"HBA1",target]).wait(): sys.exit(1)
-        if 0 != Popen(['./calc_coordinates.py',station,"CHBA1",target]).wait(): sys.exit(1)
-        #else: #remote or international station
-        if 0 != Popen(['./calc_coordinates.py',station,"HBA",target]).wait(): sys.exit(1)
-        if 0 != Popen(['./calc_coordinates.py',station,"CHBA",target]).wait(): sys.exit(1)
-         
+        if 0 != Popen(['./calc_coordinates.py', station, "LBA", target]).wait():
+            sys.exit(1)
+        if 0 != Popen(['./calc_coordinates.py', station, "CLBA", target]).wait():
+            sys.exit(1)
+        # if station[:1] == 'C': # core station
+        if 0 != Popen(['./calc_coordinates.py', station, "HBA0", target]).wait():
+            sys.exit(1)
+        if 0 != Popen(['./calc_coordinates.py', station, "CHBA0", target]).wait():
+            sys.exit(1)
+        if 0 != Popen(['./calc_coordinates.py', station, "HBA1", target]).wait():
+            sys.exit(1)
+        if 0 != Popen(['./calc_coordinates.py', station, "CHBA1", target]).wait():
+            sys.exit(1)
+        # else: #remote or international station
+        if 0 != Popen(['./calc_coordinates.py', station, "HBA", target]).wait():
+            sys.exit(1)
+        if 0 != Popen(['./calc_coordinates.py', station, "CHBA", target]).wait():
+            sys.exit(1)
+
     db.close()
-    missing_stations=list(set(all_stations) - set(ref_stations))
+    missing_stations = list(set(all_stations) - set(ref_stations))
     for stationname in missing_stations:
         station = stationname[0]
-        print "Station with known HBA rotation but no ETRF: ",station
-    
-    
+        print "Station with known HBA rotation but no ETRF: ", station
+
+
 def transform_one():
     print 'Transform ETRF coordinates to ITRF coordinates for given station and date'
-    station = getInputWithDefault("Enter station       ","")
-    anttype = getInputWithDefault("Enter type (LBA|HBA|HBA0|HBA1|CLBA|CHBA0|CHBA1|CHBA)","")
-    target  = getInputWithDefault("Enter target_date   ",default_targetdate)
-    res = Popen(['./calc_coordinates.py',station,anttype,target]).wait()
-    if (res != 0): sys.exit(1)
-
-def make_all_conf_files():
-    db = pg.connect(user="postgres", host=dbHost, dbname=dbName)
+    station = get_input_with_default("Enter station       ", "")
+    anttype = get_input_with_default("Enter type (LBA|HBA|HBA0|HBA1|CLBA|CHBA0|CHBA1|CHBA)", "")
+    target = get_input_with_default("Enter target_date   ", default_targetdate)
+    res = Popen(['./calc_coordinates.py', station, anttype, target]).wait()
+    if (res != 0):
+        sys.exit(1)
+
+
+def make_all_conf_files(db_host, db_port, db_name, db_user, db_password):
+    db = pg.connect(user=db_user, host=db_host, dbname=db_name, port=db_port, passwd=db_password)
     print 'Make all AntennaField.conf and iHBADeltas.conf files for given date'
-    target  = getInputWithDefault("Enter target_date",default_targetdate)
-    for stationname in db.query("select distinct o.stationname from object o inner join reference_coord r on r.id = o.id").getresult():
+    target = get_input_with_default("Enter target_date", default_targetdate)
+    query = """select distinct o.stationname from
+    object o inner join reference_coord r on r.id = o.id"""
+    results = db.query(query).getresult()
+    for stationname in results:
         station = stationname[0]
-        res = Popen(['./make_conf_files.py',station,target]).wait()
-        if (res != 0): sys.exit(1)
-    res = Popen(['./make_all_station_file.py',target]).wait()
-    if (res != 0): sys.exit(1)
-    db.close()    
-    
+        res = Popen(['./make_conf_files.py', station, target]).wait()
+        if (res != 0):
+            sys.exit(1)
+    res = Popen(['./make_all_station_file.py', target]).wait()
+    if (res != 0):
+        sys.exit(1)
+    db.close()
+
+
 def make_one_conf_file():
     print 'Make one AntennaField.conf and iHBADeltas.conf file for given date'
-    station = getInputWithDefault("Enter station    ","")
-    target  = getInputWithDefault("Enter target_date",default_targetdate)
-    res = Popen(['./make_conf_files.py',station,target]).wait()
-    if (res != 0): sys.exit(1)
+    station = get_input_with_default("Enter station    ", "")
+    target = get_input_with_default("Enter target_date", default_targetdate)
+    res = Popen(['./make_conf_files.py', station, target]).wait()
+    if (res != 0):
+        sys.exit(1)
 
 
 if __name__ == "__main__":
+    parser = OptionParser("Usage: %prog")
+
+    parser.add_option("-D", "--database",
+                      dest="dbName",
+                      type="string",
+                      default="StationCoordinates",
+                      help="Name of StationCoordinates database to use")
+
+    parser.add_option("-H", "--host",
+                      dest="dbHost",
+                      type="string",
+                      default="sasdb.control.lofar",
+                      help="Hostname of StationCoordinates database")
+
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default="5432",
+                      help="Port of StationCoordinates database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+
+    # parse arguments
+
+    (options, args) = parser.parse_args()
+
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+
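+    # the password is requested lazily: only menu choices that access the database prompt for it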
+    dbPassword = None
+
     while(1):
         menu()
         sel = raw_input('Enter choice :')
-        if sel.upper() == 'Q': sys.exit(1)
-        if sel == '1': create_CDB()
-        if sel == '2': create_CDB_objects()
-        if sel == '3': load_normal_vectors()
-        if sel == '4': load_rotation_matrices()
-        if sel == '5': load_hba_rotations()
-        if sel == '6': calculate_hba_deltas()
-        if sel == '7': load_all_ETRF()
-        if sel == '8': load_measurement()
-        if sel == '9': transform_all()
-        if sel == '10': transform_one()
-        if sel == '11': make_all_conf_files()
-        if sel == '12': make_one_conf_file()
+        if sel.upper() == 'Q':
+            sys.exit(1)
+        if sel == '1':
+            create_cdb()
+        if sel == '2':
+            create_cdb_objects()
+        if sel == '3':
+            load_normal_vectors()
+        if sel == '4':
+            load_rotation_matrices()
+        if sel == '5':
+            load_hba_rotations()
+        if sel == '6':
+            calculate_hba_deltas()
+        if sel == '7':
+            load_all_etrf()
+        if sel == '8':
+            load_measurement()
+        if sel == '9':
+            if dbPassword is None:
+                dbPassword = getpass.getpass("Database password:")
+            transform_all(dbHost, dbPort, dbName, dbUser, dbPassword)
+        if sel == '10':
+            transform_one()
+        if sel == '11':
+            if dbPassword is None:
+                dbPassword = getpass.getpass("Database password:")
+            make_all_conf_files(dbHost, dbPort, dbName, dbUser, dbPassword)
+        if sel == '12':
+            make_one_conf_file()
         if sel == '0':
-            create_CDB()
-            create_CDB_objects()
+            if dbPassword is None:
+                dbPassword = getpass.getpass("Database password:")
+            create_cdb()
+            create_cdb_objects()
             load_normal_vectors()
             load_rotation_matrices()
             load_hba_rotations()
             calculate_hba_deltas()
-            load_all_ETRF()
-            transform_all()
-            make_all_conf_files()
-    
-
-    
+            load_all_etrf()
+            transform_all(dbHost, dbPort, dbName, dbUser, dbPassword)
+            make_all_conf_files(dbHost, dbPort, dbName, dbUser, dbPassword)
diff --git a/MAC/Deployment/data/Coordinates/ETRS89toITRS2005.py b/MAC/Deployment/data/Coordinates/ETRS89toITRS2005.py
index 73cbac0b4c6b2d0f54364e4a46ea891d98374486..2b1824632b9b668d86adf1a605b395c71f121044 100755
--- a/MAC/Deployment/data/Coordinates/ETRS89toITRS2005.py
+++ b/MAC/Deployment/data/Coordinates/ETRS89toITRS2005.py
@@ -1,14 +1,15 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
-#import sys,pgdb
+# coding: iso-8859-15
 import sys
 from copy import deepcopy
-from math import *
-#from numpy import *
-from numarray import *
+from math import pi, radians, cos, sin, sqrt
+from numpy import array
+import pgdb
+import getpass
+from optparse import OptionParser
 
 
-INTRO=""" 
+INTRO = """
 Conversion between ETRS89 and ITRS2000 coordinates based on
  Memo : Specifications for reference frame fixing in the analysis of a
         EUREF GPS campaign
@@ -19,15 +20,17 @@ Conversion between ETRS89 and ITRS2000 coordinates based on
  In this utility I use the translational coefficients obtained by method "A" in
  section 4 and the rotational coefficients in section 5, both for the 2000 (00)
  reference frame.
-""" 
+"""
+
 
 def print_help():
     print "Usage: calc_coordinates <stationname> <objecttype> date"
     print "    <objecttype>: LBA|HBA|marker"
     print "    <date>      : yyyy.yy e.g. 2008.75 for Oct 1st 2008"
 
-def subtract(a,b):
-    return [x-y for x,y in zip(a,b)]
+
+def subtract(a, b):
+    return [x-y for x, y in zip(a, b)]
 
 
 def rad_from_mas(mas):
@@ -38,69 +41,69 @@ def rad_from_mas(mas):
     return pi*mas/3.6e+6/180.0
 
 
-def solve(M,y):
+def solve(M, y):  # noqa: N803
     """
     solve Mx=y. The algorithm is Gauss-Jordan elimination
     without pivoting, which is allowed in this case as M is
-    dominated by the diagonal. 
+    dominated by the diagonal.
     """
-    dim  = len(y)
-    A    = deepcopy(M)
-    sol  = deepcopy(y)
+    dim = len(y)
+    A = deepcopy(M)  # noqa: N806
+    sol = deepcopy(y)
     if (len(A) != len(A[0])) or len(A[0]) != len(y):
         raise 'Incompatible dimensions'
     for row in range(dim):
-        scale     = 1/float(A[row][row])
-        A[row]    = [x*scale for x in A[row]]
-        sol[row]  = scale*float(sol[row])
+        scale = 1/float(A[row][row])
+        A[row] = [x*scale for x in A[row]]
+        sol[row] = scale*float(sol[row])
         for ix in range(dim):
             if ix != row:
-               factor     = float(A[ix][row])
-               A[ix]      = subtract( A[ix], [factor*float(x) for x in A[row]])
-               A[ix][row] = 0.0
-               sol[ix]     -= factor*float(sol[row])
+                factor = float(A[ix][row])
+                A[ix] = subtract(A[ix], [factor*float(x) for x in A[row]])
+                A[ix][row] = 0.0
+                sol[ix] -= factor*float(sol[row])
     return sol
 
 
-def convert(XEtrs, date_years):
+def convert(XEtrs, date_years):  # noqa: N803
     """
     Solve equation:
      /X\Etrs   /T0\  = [[  1     , -R2*dt,  R1*dt]  /X\Itrs2000
      |Y|     - |T1|     [  R2*dt ,  1    , -R0*dt]  |Y|
      \Z/       \T2/     [ -R1*dt , R0*dt ,  1]]     \Z/
     """
-    T00    = [0.054, 0.051, -0.048] # meters
-    Rdot00 = [rad_from_mas(mas) for mas in [0.081, 0.490, -0.792]] # mas
-    dt     = date_years-1989.0
-    Matrix = [[ 1           , -Rdot00[2]*dt, Rdot00[1]*dt],
-              [ Rdot00[2]*dt, 1            , -Rdot00[0]*dt],
-              [-Rdot00[1]*dt, Rdot00[0]*dt , 1]]
-    XShifted = subtract(XEtrs,T00)
+    T00 = [0.054, 0.051, -0.048]  # noqa: N806 (meters)
+    Rdot00 = [rad_from_mas(mas) for mas in [0.081, 0.490, -0.792]]  # noqa: N806 (mas)
+    dt = date_years-1989.0
+    Matrix = [[1, -Rdot00[2]*dt, Rdot00[1]*dt],  # noqa: N806
+              [Rdot00[2]*dt, 1, -Rdot00[0]*dt],
+              [-Rdot00[1]*dt, Rdot00[0]*dt, 1]]
+    XShifted = subtract(XEtrs, T00)  # noqa: N806
     return solve(Matrix, XShifted)
 
 
-def latlonhgt2XYZ(lat, lon, height):
+def latlonhgt2XYZ(lat, lon, height):  # noqa: N802
     """
     Convert the latitude,longitude,height arguments to a X,Y,Z triple
     The arguments must be in degrees and meters.
     """
     # wgs84 ellips constants
-    wgs84a  = 6378137.0
+    wgs84a = 6378137.0
     wgs84df = 298.25722356
-    wgs84b  = (1.0-(1.0/wgs84df))*wgs84a
-    e2      =  1.0-((wgs84b*wgs84b)/(wgs84a*wgs84a))
+    wgs84b = (1.0-(1.0/wgs84df))*wgs84a
+    e2 = 1.0-((wgs84b*wgs84b)/(wgs84a*wgs84a))
 
     latrad = radians(lat)
     lonrad = radians(lon)
-    N = wgs84a / sqrt(1.0-(e2 * pow(sin(latrad),2)))
-    X = (N+height) * cos(latrad) * cos(lonrad)
-    Y = (N+height) * cos(latrad) * sin(lonrad)
-    Z = (N*(1-e2) + height) * sin(latrad)
+    N = wgs84a / sqrt(1.0-(e2 * pow(sin(latrad), 2)))  # noqa: N806
+    X = (N+height) * cos(latrad) * cos(lonrad)  # noqa: N806
+    Y = (N+height) * cos(latrad) * sin(lonrad)  # noqa: N806
+    Z = (N*(1-e2) + height) * sin(latrad)  # noqa: N806
+
+    return (X, Y, Z)
 
-    return ( X, Y, Z )
-    
 
-def I89toI2005(XEtrs89, date_years):
+def I89toI2005(XEtrs89, date_years):  # noqa: N803, N802
     """
     Convert the given Etrs89 coordinates to I2005 coordinates for the given date
     """
@@ -112,40 +115,86 @@ def I89toI2005(XEtrs89, date_years):
     #  ITRF89       2.97  4.75 -7.39    5.85    0.00    0.00   -0.18   1988.0    6
     # (rates        0.00 -0.06 -0.14    0.01    0.00    0.00    0.02)
 
-    T2005to2000 = array([  0.1, -0.8 , -5.8  ]) # ITRS2005 to ITRS2000
-    T2000to1989 = array([ 2.97,  4.75, -7.39 ]) # ITRS2000 to ITRS89 = ETRS89
-    Tdot2005    = array([ -0.2,  0.1,  -1.8  ]) # shift per year for I2005
-    S2005to2000 = 0.4
-    S2000to1989 = 5.85
-    Sdot2005    = 0.08
-    R2005to2000 = array([ 0.0, 0.0,  0.0  ])
-    R2000to1989 = array([ 0.0, 0.0, -0.18 ])
-    Rdot2005    = array([ 0.0, 0.0,  0.0  ])
-
-    Tfixed = T2005to2000 + T2000to1989
-    Rfixed = R2005to2000 + R2000to1989
-    Sfixed = S2005to2000 + S2000to1989
-    Ttot   = (Tfixed + (Tdot2005 * (date_years - 2005.0))) / 100.0     # meters
-    Rtot   = rad_from_mas(Rfixed + (Rdot2005 * (date_years - 2005.0))) # rad
-    Stot   = (Sfixed + (Sdot2005 * (date_years - 2005.0))) / 1.0e9
+    T2005to2000 = array([0.1, -0.8, -5.8])  # noqa: N806 (ITRS2005 to ITRS2000)
+    T2000to1989 = array([2.97, 4.75, -7.39])  # noqa: N806 (ITRS2000 to ITRS89 = ETRS89)
+    Tdot2005 = array([-0.2, 0.1, -1.8])  # noqa: N806 (shift per year for I2005)
+    S2005to2000 = 0.4  # noqa: N806
+    S2000to1989 = 5.85  # noqa: N806
+    Sdot2005 = 0.08  # noqa: N806
+    R2005to2000 = array([0.0, 0.0, 0.0])  # noqa: N806
+    R2000to1989 = array([0.0, 0.0, -0.18])  # noqa: N806
+    Rdot2005 = array([0.0, 0.0, 0.0])  # noqa: N806
+
+    Tfixed = T2005to2000 + T2000to1989  # noqa: N806
+    Rfixed = R2005to2000 + R2000to1989  # noqa: N806
+    Sfixed = S2005to2000 + S2000to1989  # noqa: N806
+    Ttot = (Tfixed + (Tdot2005 * (date_years - 2005.0))) / 100.0  # noqa: N806 (meters)
+    Rtot = rad_from_mas(Rfixed + (Rdot2005 * (date_years - 2005.0)))  # noqa: N806 (rad)
+    Stot = (Sfixed + (Sdot2005 * (date_years - 2005.0))) / 1.0e9  # noqa: N806
     print "Ttot:", Ttot
     print "Rtot:", Rtot
     print "Stot:", Stot
 
-    Matrix = array([[        1,  Rtot[2], -Rtot[1]],
-                    [ -Rtot[2],        1,  Rtot[0]],
-                    [  Rtot[1], -Rtot[0],        1]])
+    Matrix = array([[1, Rtot[2], -Rtot[1]],  # noqa: N806
+                    [-Rtot[2], 1, Rtot[0]],
+                    [Rtot[1], -Rtot[0], 1]])
 
-    Xnow = Ttot + (1 + Stot) * Matrix * XEtrs89
+    Xnow = Ttot + (1 + Stot) * Matrix * XEtrs89  # noqa: N806
     return (Xnow[0][0], Xnow[1][1], Xnow[2][2])
 
+
 #
 # MAIN
 #
 if __name__ == '__main__':
-    if len(sys.argv) != 4:
-        print_help()
-        sys.exit(0)
+    parser = OptionParser("""Usage: %prog <stationname> <objecttype> date
+    <objecttype>: LBA|HBA|marker
+    <date>      : yyyy.yy e.g. 2008.75 for Oct 1st 2008""")
+
+    parser.add_option("-D", "--database",
+                      dest="dbName",
+                      type="string",
+                      default="coordtest",
+                      help="Name of StationCoordinates database to use")
+
+    parser.add_option("-H", "--host",
+                      dest="dbHost",
+                      type="string",
+                      default="dop50",
+                      help="Hostname of StationCoordinates database")
+
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default="5432",
+                      help="Port of StationCoordinates database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+
+    # parse arguments
+
+    (options, args) = parser.parse_args()
+
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+
+    if len(args) != 3:
+        parser.print_help()
+        sys.exit(1)
+
+    stationname = str(args[0])
+    objecttype = str(args[1])
+    date_years = float(args[2])
+
+    dbPassword = getpass.getpass()
+
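+    # pgdb expects the port to be appended to the host name as "host:port"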
+    host = "{}:{}".format(dbHost, dbPort)
 
     (X, Y, Z) = latlonhgt2XYZ(52.9129392, 6.8690294, 54.1)
     print X, Y, Z
@@ -153,20 +202,18 @@ if __name__ == '__main__':
     print Xn, Yn, Zn
     sys.exit(0)
 
-    date_years = float(sys.argv[3]) 
-    db = pgdb.connect(user="postgres", host="dop50", database="coordtest")
+    db = pgdb.connect(user=dbUser, host=host, database=dbName, password=dbPassword)
     cursor = db.cursor()
-    cursor.execute("select * from get_ref_objects(%s, %s)", (sys.argv[1],sys.argv[2]))
+    cursor.execute("select * from get_ref_objects(%s, %s)", (stationname, objecttype))
     while (1):
         record = cursor.fetchone()
-        if record == None:
+        if record is None:
             break
         XEtrs = [float(record[3]),
                  float(record[4]),
                  float(record[5])]
-             
+
         XItrs2000 = convert(XEtrs, date_years)
-        print record[2],'	',XItrs2000[0],'    ', XItrs2000[1],'    ', XItrs2000[2]
+        print record[2], '	', XItrs2000[0], '    ', XItrs2000[1], '    ', XItrs2000[2]
     db.close()
     sys.exit(1)
-
diff --git a/MAC/Deployment/data/Coordinates/calc_coordinates.py b/MAC/Deployment/data/Coordinates/calc_coordinates.py
index c4a8acdbeefba42f76c6695df17a89bb2ffd4436..f2e0d2c72fb70ecfc9374708cdff8e53f4847755 100755
--- a/MAC/Deployment/data/Coordinates/calc_coordinates.py
+++ b/MAC/Deployment/data/Coordinates/calc_coordinates.py
@@ -1,15 +1,13 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
-import sys,pgdb,pg
+# coding: iso-8859-15
+import sys
+import pgdb
+import pg
 from copy import deepcopy
-from math import *
-from database import *
+from optparse import OptionParser
+import getpass
 
-# get info from database.py
-dbName=getDBname()
-dbHost=getDBhost()
-
-INTRO=""" 
+INTRO = """
 Conversion between ETRS89 and ITRS2000 coordinates based on
  Memo : Specifications for reference frame fixing in the analysis of a
         EUREF GPS campaign
@@ -20,47 +18,38 @@ Conversion between ETRS89 and ITRS2000 coordinates based on
  In this utility I use the translational coefficients obtained by method "A" in
  section 4 and the rotational coefficients in section 5, both for the 2000 (00)
  reference frame.
-""" 
-
-db1 = pgdb.connect(user="postgres", host=dbHost, database=dbName)
-cursor = db1.cursor()
+"""
 
-# calling stored procedures only works from the pg module for some reason.
-db2 = pg.connect(user="postgres", host=dbHost, dbname=dbName)
 
-def print_help():
-    print "Usage: calc_coordinates <stationname> <objecttype> date"
-    print "    <objecttype>: LBA|HBA|HBA0|HBA1|marker"
-    print "    <date>      : yyyy.yy e.g. 2008.75 for Oct 1st 2008"
+def subtract(a, b):
+    return [x - y for x, y in zip(a, b)]
 
-def subtract(a,b):
-    return [x-y for x,y in zip(a,b)]
 
-def solve(M,y):
+def solve(m, y):
     """
     solve Mx=y. The algorithm is Gauss-Jordan elimination
     without pivoting, which is allowed in this case as M is
-    dominated by the diagonal. 
+    dominated by the diagonal.
     """
-    dim  = len(y)
-    A    = deepcopy(M)
-    sol  = deepcopy(y)
-    if (len(A) != len(A[0])) or len(A[0]) != len(y):
+    dim = len(y)
+    a = deepcopy(m)
+    sol = deepcopy(y)
+    if (len(a) != len(a[0])) or len(a[0]) != len(y):
         raise 'Incompatible dimensions'
     for row in range(dim):
-        scale     = 1./float(A[row][row])
-        A[row]    = [x*scale for x in A[row]]
-        sol[row]  = scale*float(sol[row])
+        scale = 1./float(a[row][row])
+        a[row] = [x*scale for x in a[row]]
+        sol[row] = scale*float(sol[row])
         for ix in range(dim):
             if ix != row:
-               factor     = float(A[ix][row])
-               A[ix]      = subtract( A[ix], [factor*float(x) for x in A[row]])
-               A[ix][row] = 0.0
-               sol[ix]   -= factor*float(sol[row])
+                factor = float(a[ix][row])
+                a[ix] = subtract(a[ix], [factor*float(x) for x in a[row]])
+                a[ix][row] = 0.0
+                sol[ix] -= factor*float(sol[row])
     return sol
 
 
-def convert(XEtrs, date_years, trans):
+def convert(xetrs, date_years, trans):
     """
     Solve equation:
      /X\Etrs   /T0\  = [[  1     , -R2*dt,  R1*dt]  /X\Itrs2000
@@ -74,62 +63,111 @@ def convert(XEtrs, date_years, trans):
     # mas          = trans[5:8]   = Rx,Ry,Rz
     # diagonal(sf) = trans[4] + 1 = sf
     #
-   
-    T00    = [float(t) for t in trans[1:4]] # meters
-    Rdot00 = [float(t) for t in trans[5:8]] # mas
-    #print "T00=[%e %e %e]    Rdot00=[%e %e %e]" %(T00[0],T00[1],T00[2],Rdot00[0],Rdot00[1],Rdot00[2])
-    
+
+    t00 = [float(t) for t in trans[1:4]]  # meters
+    rdot00 = [float(t) for t in trans[5:8]]  # mas
+    # print "T00=[%e %e %e]    Rdot00=[%e %e %e]" % (t00[0], t00[1], t00[2],
+    #                                                rdot00[0], rdot00[1], rdot00[2])
+
     dt = date_years - 1989.0
-    #print 'date_years=%f  dt=%f' %(date_years, dt)
+    # print 'date_years=%f  dt=%f' %(date_years, dt)
     sf = float(trans[4]) + 1.
-    #print 'sf=',sf
-    Matrix = [[ sf          , -Rdot00[2]*dt, Rdot00[1]*dt ],
-              [ Rdot00[2]*dt, sf           , -Rdot00[0]*dt],
-              [-Rdot00[1]*dt, Rdot00[0]*dt , sf           ]]
-    XShifted = subtract(XEtrs,T00)
-    #print "Matrix=",Matrix
-    return solve(Matrix, XShifted)
+    # print 'sf=',sf
+    matrix = [[sf, -rdot00[2]*dt, rdot00[1]*dt],
+              [rdot00[2]*dt, sf, -rdot00[0]*dt],
+              [-rdot00[1]*dt, rdot00[0]*dt, sf]]
+    xshifted = subtract(xetrs, t00)
+    # print "Matrix=", matrix
+    return solve(matrix, xshifted)
+
 
 #
 # MAIN
 #
 if __name__ == '__main__':
-    #print sys.argv
-    if len(sys.argv) != 4:
-        print_help()
+    parser = OptionParser("""Usage: %prog [options]  <stationname> <objecttype> date
+    <objecttype>: LBA|HBA|HBA0|HBA1|marker
+    <date>      : yyyy.yy e.g. 2008.75 for Oct 1st 2008""")
+
+    parser.add_option("-D", "--database",
+                      dest="dbName",
+                      type="string",
+                      default="StationCoordinates",
+                      help="Name of StationCoordinates database to use")
+
+    parser.add_option("-H", "--host",
+                      dest="dbHost",
+                      type="string",
+                      default="sasdb.control.lofar",
+                      help="Hostname of StationCoordinates database")
+
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default="5432",
+                      help="Port of StationCoordinates database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+
+    # parse arguments
+
+    (options, args) = parser.parse_args()
+
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+
+    # print sys.argv
+    if len(args) != 3:
+        parser.print_help()
         sys.exit(1)
-    
-    #trans=[]
-    
-    date_years = float(sys.argv[3]) 
-    
+
+    station_name = str(args[0]).upper()
+    object_type = str(args[1]).upper()
+    date_years = float(args[2])
+
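+    # ask for the password once; it is used for both the pgdb and pg connections below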
+    dbPassword = getpass.getpass()
+
+    host = "{}:{}".format(dbHost, dbPort)
+
+    db1 = pgdb.connect(user=dbUser, host=host, database=dbName, password=dbPassword)
+    cursor = db1.cursor()
+
+    # calling stored procedures only works from the pg module for some reason.
+    db2 = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
+
     cursor.execute("select * from get_transformation_info('ITRF2005')")
     trans = cursor.fetchone()
-    
-    #record = ['CS001','LBA','0','d',3828736.156, 443304.7520, 5064884.523]
-     
-    cursor.execute("select * from get_ref_objects(%s, %s)", (str(sys.argv[1]).upper(), str(sys.argv[2]).upper()))
-    
-    print "\n%s    %s    %8.3f" %(str(sys.argv[1]).upper(), str(sys.argv[2]).upper(),float(sys.argv[3]))
+
+    cursor.execute("select * from get_ref_objects(%s, %s)",
+                   (station_name, object_type))
+
+    print "\n%s    %s    %8.3f" % (station_name, object_type, date_years)
     while (1):
         record = cursor.fetchone()
-        if record == None:
+        if record is None:
             print 'record even = None'
             break
-        #print record
+        # print record
         XEtrs = [float(record[4]),
                  float(record[5]),
                  float(record[6])]
-        #print 'XEtrs=',XEtrs
+        # print 'XEtrs=',XEtrs
         XItrs2000 = convert(XEtrs, date_years, trans)
-        
+
         # write output to generated_coord ??
-        print "%s %d    %14.6f    %14.6f    %14.6f" %(str(record[1]), record[2], XItrs2000[0], XItrs2000[1],XItrs2000[2])
-        db2.query("select * from add_gen_coord('%s','%s',%s,%s,%s,%s,%s,'%s')" %\
-                 (record[0], record[1], record[2], XItrs2000[0], XItrs2000[1], XItrs2000[2], date_years, 'ITRF2005'))
-	#record = None
-    
+        print "%s %d    %14.6f    %14.6f    %14.6f" % (str(record[1]), record[2], XItrs2000[0],
+                                                       XItrs2000[1], XItrs2000[2])
+        db2.query("select * from add_gen_coord('%s', '%s', %s, %s, %s, %s, %s, '%s')" %
+                  (record[0], record[1], record[2], XItrs2000[0], XItrs2000[1], XItrs2000[2],
+                   date_years, 'ITRF2005'))
+        # record = None
+
     db1.close()
     db2.close()
     sys.exit(0)
-
diff --git a/MAC/Deployment/data/Coordinates/calc_hba_deltas.py b/MAC/Deployment/data/Coordinates/calc_hba_deltas.py
index e07ca2a58f4f1f88afacec62d88b38e526cd100f..f58d2607644117626125771719a86f4bf7601ea4 100755
--- a/MAC/Deployment/data/Coordinates/calc_hba_deltas.py
+++ b/MAC/Deployment/data/Coordinates/calc_hba_deltas.py
@@ -1,41 +1,32 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
-#import sys,pgdb,pg
-#from copy import deepcopy
-import sys,pgdb,pg
-from math import *
+# coding: iso-8859-15
+import sys
+import pgdb
+import pg
+from math import sin, cos
 import numpy as np
-import string
-from database import *
-
-# get info from database.py
-dbName=getDBname()
-dbHost=getDBhost()
-
-db1 = pgdb.connect(user="postgres", host=dbHost, database=dbName)
-cursor = db1.cursor()
-
-# calling stored procedures only works from the pg module for some reason.
-db2 = pg.connect(user="postgres", host=dbHost, dbname=dbName)
+from optparse import OptionParser
+import getpass
 
 
 ##
-def getRotation(station, anttype):
+def get_rotation(station, anttype, cursor):
     cursor.execute("select * from get_field_rotation(%s, %s)", (station, anttype))
     record = cursor.fetchone()
     if record != None:
         rotation = float(record[2])
         return(rotation)
-    print "Could not find field rotation for station",station,anttype
+    print "Could not find field rotation for station", station, anttype
     exit(1)
 
+
 ##
-def getRotationMatrix(station, anttype):
-    matrix = np.zeros((3,3))
+def get_rotation_matrix(station, anttype, cursor):
+    matrix = np.zeros((3, 3))
     cursor.execute("select * from get_rotation_matrix(%s, %s)", (station, anttype))
     record = cursor.fetchone()
     if record != None:
-        record = str(record[2]).replace('{','').replace('}','').split(',')
+        record = str(record[2]).replace('{', '').replace('}', '').split(',')
         print record
         cnt = 0
         for row in range(3):
@@ -44,25 +35,29 @@ def getRotationMatrix(station, anttype):
                 cnt += 1
     return(matrix)
 
+
 ##
-def getStations(anttype):
+def get_stations(anttype, cursor):
     stations = []
-    query = "SELECT o.stationname FROM object o INNER JOIN rotation_matrices r ON r.id = o.id WHERE o.type='%s'" %(anttype)
+    query = """SELECT o.stationname FROM object o
+               INNER JOIN rotation_matrices r ON r.id = o.id
+               WHERE o.type='%s'""" % (anttype)
     print query
     cursor.execute(query)
     stations = cursor.fetchall()
     print stations
     return(stations)
 
+
 ##
-def makeDBmatrix(matrix):
+def make_db_matrix(matrix):
     shape = np.shape(matrix)
     # make db matrix [16][3]
     dbmatrix = "ARRAY["
     for row in range(shape[0]):
         dbmatrix += "["
         for col in range(shape[1]):
-            dbmatrix += "%f" %(float(matrix[row][col]))
+            dbmatrix += "%f" % (float(matrix[row][col]))
             if (col + 1) < shape[1]:
                 dbmatrix += ","
         dbmatrix += "]"
@@ -73,38 +68,83 @@ def makeDBmatrix(matrix):
 
 
 ##
-def rotate_pqr(coord,rad=0):
-    matrix = np.array([[cos(rad) ,sin(rad),0],
-                       [-sin(rad),cos(rad),0],
-                       [0        ,0       ,1]])
-    return(np.inner(matrix,coord))
+def rotate_pqr(coord, rad=0):
+    matrix = np.array([[cos(rad), sin(rad), 0],
+                       [-sin(rad), cos(rad), 0],
+                       [0, 0, 1]])
+    return(np.inner(matrix, coord))
+
 
 ##
 def rotate_pqr2etrf(coord, matrix):
-    return(np.inner(matrix,coord))
+    return(np.inner(matrix, coord))
 
-    
-##    
+
+##
 if __name__ == "__main__":
-    ideltas = np.zeros((16,3))    
-    deltas = np.zeros((16,3))    
-    
+    parser = OptionParser("Usage: %prog [options]")
+    parser.add_option("-D", "--database",
+                      dest="dbName",
+                      type="string",
+                      default="StationCoordinates",
+                      help="Name of StationCoordinates database to use")
+
+    parser.add_option("-H", "--host",
+                      dest="dbHost",
+                      type="string",
+                      default="sasdb.control.lofar",
+                      help="Hostname of StationCoordinates database")
+
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default="5432",
+                      help="Port of StationCoordinates database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+
+    # parse arguments
+
+    (options, args) = parser.parse_args()
+
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+
+    dbPassword = getpass.getpass()
+
+    host = "{}:{}".format(dbHost, dbPort)
+
+    db1 = pgdb.connect(user=dbUser, host=host, database=dbName, password=dbPassword)
+    cursor = db1.cursor()
+
+    # calling stored procedures only works from the pg module for some reason.
+    db2 = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
+
+    ideltas = np.zeros((16, 3))
+    deltas = np.zeros((16, 3))
+
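+    # (p, q, r) offsets of the 16 elements within an HBA tile: a 4 x 4 grid with 1.25 m spacing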
     deltas_other = np.array([[-1.875,  1.875, 0.0],
                              [-0.625,  1.875, 0.0],
-                             [ 0.625,  1.875, 0.0],
-                             [ 1.875,  1.875, 0.0],
+                             [0.625,  1.875, 0.0],
+                             [1.875,  1.875, 0.0],
                              [-1.875,  0.625, 0.0],
                              [-0.625,  0.625, 0.0],
-                             [ 0.625,  0.625, 0.0],
-                             [ 1.875,  0.625, 0.0],
+                             [0.625,  0.625, 0.0],
+                             [1.875,  0.625, 0.0],
                              [-1.875, -0.625, 0.0],
                              [-0.625, -0.625, 0.0],
-                             [ 0.625, -0.625, 0.0],
-                             [ 1.875, -0.625, 0.0],
+                             [0.625, -0.625, 0.0],
+                             [1.875, -0.625, 0.0],
                              [-1.875, -1.875, 0.0],
                              [-0.625, -1.875, 0.0],
-                             [ 0.625, -1.875, 0.0],
-                             [ 1.875, -1.875, 0.0]], float)
+                             [0.625, -1.875, 0.0],
+                             [1.875, -1.875, 0.0]], float)
     
     deltas_de601 = np.array([[-1.875, -1.875, 0.0],
                              [-1.875, -0.625, 0.0],
@@ -114,34 +154,34 @@ if __name__ == "__main__":
                              [-0.625, -0.625, 0.0],
                              [-0.625,  0.625, 0.0],
                              [-0.625,  1.875, 0.0],
-                             [ 0.625, -1.875, 0.0],
-                             [ 0.625, -0.625, 0.0],
-                             [ 0.625,  0.625, 0.0],
-                             [ 0.625,  1.875, 0.0],
-                             [ 1.875, -1.875, 0.0],
-                             [ 1.875, -0.625, 0.0],
-                             [ 1.875,  0.625, 0.0],
-                             [ 1.875,  1.875, 0.0]], float)
-    
-    for anttype in ('HBA','HBA0','HBA1'):
+                             [0.625, -1.875, 0.0],
+                             [0.625, -0.625, 0.0],
+                             [0.625,  0.625, 0.0],
+                             [0.625,  1.875, 0.0],
+                             [1.875, -1.875, 0.0],
+                             [1.875, -0.625, 0.0],
+                             [1.875,  0.625, 0.0],
+                             [1.875,  1.875, 0.0]], float)
+
+    for anttype in ('HBA', 'HBA0', 'HBA1'):
         print anttype
-        for station in getStations(anttype):
+        for station in get_stations(anttype, cursor):
             print station[0]
-            
+
             # DE601 hba's have other placing 90deg ccw
             if station[0] == 'DE601':
                 deltas = deltas_de601
                 print deltas
             else:
                 deltas = deltas_other
-                
-            rad = getRotation(station,anttype)
-            matrix = getRotationMatrix(station,anttype)
+
+            rad = get_rotation(station, anttype, cursor)
+            matrix = get_rotation_matrix(station, anttype, cursor)
             inr = 0
             for d in deltas:
-                pqr = rotate_pqr(d,rad) 
-                etrf = rotate_pqr2etrf(pqr,matrix)
+                pqr = rotate_pqr(d, rad)
+                etrf = rotate_pqr2etrf(pqr, matrix)
                 ideltas[inr] = etrf
                 inr += 1
-            matrix = makeDBmatrix(ideltas)
-            db2.query("select * from add_hba_deltas('%s','%s',%s)" %(station[0], anttype, matrix))
+            matrix = make_db_matrix(ideltas)
+            db2.query("select * from add_hba_deltas('%s','%s',%s)" % (station[0], anttype, matrix))
diff --git a/MAC/Deployment/data/Coordinates/create_CDB_objects.py b/MAC/Deployment/data/Coordinates/create_CDB_objects.py
index 65472f890e38e6e50d7b13e7ff99a929b76073f5..e3f265edb36946b07ab9d3ba64f6f9ae4dfb2215 100755
--- a/MAC/Deployment/data/Coordinates/create_CDB_objects.py
+++ b/MAC/Deployment/data/Coordinates/create_CDB_objects.py
@@ -1,63 +1,101 @@
 #!/usr/bin/env python
-import re,sys,pg
-from database import *
+import re
+import pg
+from optparse import OptionParser
+import getpass
 
-# get info from database.py
-dbName=getDBname()
-dbHost=getDBhost()
 
 #
 # findStationInfo(stationName)
 #
-def findStationInfo(stationName):
+def find_station_info(station_name):
     """
     Return all basic station info (eg. nr RSPboards) from a station.
     """
-    pattern=re.compile("^"+stationName+"[ \t].*", re.IGNORECASE | re.MULTILINE)
+    pattern = re.compile("^"+station_name+"[ \t].*", re.IGNORECASE | re.MULTILINE)
     match = pattern.search(open("../StaticMetaData/StationInfo.dat").read())
     if not match:
-        raise "\nFatal error: "+stationName+" is not defined in file 'StationInfo.dat'"
+        raise "\nFatal error: "+station_name+" is not defined in file 'StationInfo.dat'"
     return match.group().split()
 
+
 #
 # getStationList
 #
-def getStationList():
+def get_station_list():
     """
     Returns a list containing all stationnames
     """
-    pattern=re.compile("^[A-Z]{2}[0-9]{3}[ \t].*", re.IGNORECASE | re.MULTILINE)
-    return [ station.split()[0] for station in pattern.findall(open("../StaticMetaData/StationInfo.dat").read())]
+    pattern = re.compile("^[A-Z]{2}[0-9]{3}[ \t].*", re.IGNORECASE | re.MULTILINE)
+    return [station.split()[0] for station in
+            pattern.findall(open("../StaticMetaData/StationInfo.dat").read())]
+
 
 #
 # MAIN
 #
 if __name__ == '__main__':
+    parser = OptionParser("Usage: %prog [options]")
+
+    parser.add_option("-D", "--database",
+                      dest="dbName",
+                      type="string",
+                      default="StationCoordinates",
+                      help="Name of StationCoordinates database to use")
+
+    parser.add_option("-H", "--host",
+                      dest="dbHost",
+                      type="string",
+                      default="sasdb.control.lofar",
+                      help="Hostname of StationCoordinates database")
+
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default="5432",
+                      help="Port of StationCoordinates database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+
+    # parse arguments
+
+    (options, args) = parser.parse_args()
+
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+
+    dbPassword = getpass.getpass()
+
     print "Connecting to database ", dbName
-    db = pg.connect(user="postgres", host=dbHost, dbname=dbName)
-    
-    pol = 2 # number of polarizations
-    for station in getStationList():
-        print findStationInfo(station)
-        if (len(findStationInfo(station)) < 13):
+    db = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
+
+    pol = 2  # number of polarizations
+    for station in get_station_list():
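+        # skip stations whose StationInfo.dat entry is incomplete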
+        print find_station_info(station)
+        if (len(find_station_info(station)) < 13):
             continue
-        (name, stationID, stnType, long, lat, height, nrRSP, nrTBB, nrLBA, nrHBA, nrPowecs, HBAsplit, LBAcal, Aartfaac ) = findStationInfo(station)
+        (name, stationID, stnType, long, lat, height, nrRSP, nrTBB,
+         nrLBA, nrHBA, nrPowecs, HBAsplit, LBAcal, Aartfaac) = find_station_info(station)
         if height[0] != '0':
             print "updating %s to the coordinate database " % station
             for lba in xrange(0, int(nrLBA)*2):
-                db.query("select * from add_object('%s', '%s', %d)" % ( name, "LBA", lba ))
-            db.query("select * from add_object('%s', '%s', %d)" % ( name, "CLBA", -1 ))
+                db.query("select * from add_object('%s', '%s', %d)" % (name, "LBA", lba))
+            db.query("select * from add_object('%s', '%s', %d)" % (name, "CLBA", -1))
             if HBAsplit == 'Yes':
                 for hba in xrange(0, int(nrHBA)):
-                    db.query("select * from add_object('%s', '%s', %d)" % ( name, "HBA0", hba ))
-                db.query("select * from add_object('%s', '%s', %d)" % ( name, "CHBA0", -1 ))
+                    db.query("select * from add_object('%s', '%s', %d)" % (name, "HBA0", hba))
+                db.query("select * from add_object('%s', '%s', %d)" % (name, "CHBA0", -1))
                 for hba in xrange(int(nrHBA), int(nrHBA)*2):
-                    db.query("select * from add_object('%s', '%s', %d)" % ( name, "HBA1", hba ))
-                db.query("select * from add_object('%s', '%s', %d)" % ( name, "CHBA1", -1 ))
+                    db.query("select * from add_object('%s', '%s', %d)" % (name, "HBA1", hba))
+                db.query("select * from add_object('%s', '%s', %d)" % (name, "CHBA1", -1))
             else:
                 for hba in xrange(0, int(nrHBA)*2):
-                    db.query("select * from add_object('%s', '%s', %d)" % ( name, "HBA", hba ))
-                db.query("select * from add_object('%s', '%s', %d)" % ( name, "CHBA", -1 ))
-            
-
+                    db.query("select * from add_object('%s', '%s', %d)" % (name, "HBA", hba))
+                db.query("select * from add_object('%s', '%s', %d)" % (name, "CHBA", -1))
 # ... to be continued
diff --git a/MAC/Deployment/data/Coordinates/db_test.py b/MAC/Deployment/data/Coordinates/db_test.py
index 2ba3f0f369953df9dcb0c5d69c840894f39e2b9c..4e1e9c48465a80377d8768dd9b749c2574955808 100755
--- a/MAC/Deployment/data/Coordinates/db_test.py
+++ b/MAC/Deployment/data/Coordinates/db_test.py
@@ -1,18 +1,52 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
-import re,sys,pgdb,pg
-import database
+# coding: iso-8859-15
+import pg
+import getpass
+from optparse import OptionParser
 
-aDataBase = database.getDBname()
-aHost = database.getDBhost()
 
 #
 # MAIN
 #
 if __name__ == '__main__':
+    parser = OptionParser("Usage: %prog [options]")
 
-    db = pg.DB(user="postgres", host=aHost, dbname=aDataBase)
-    print db.query("select * from reference_coord")
-    db.close()
+    parser.add_option("-D", "--database",
+                      dest="dbName",
+                      type="string",
+                      default="StationCoordinates",
+                      help="Name of StationCoordinates database to use")
+
+    parser.add_option("-H", "--host",
+                      dest="dbHost",
+                      type="string",
+                      default="sasdb.control.lofar",
+                      help="Hostname of StationCoordinates database")
+
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default="5432",
+                      help="Port of StationCoordinates database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+
+    # parse arguments
 
+    (options, args) = parser.parse_args()
 
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+
+    dbPassword = getpass.getpass()
+
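+    # simple connectivity test: dump the reference_coord table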
+    db = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
+
+    print db.query("select * from reference_coord")
+    db.close()
diff --git a/MAC/Deployment/data/Coordinates/load_expected_pos.py b/MAC/Deployment/data/Coordinates/load_expected_pos.py
index 1e5196e886b6755ba1dcad912b74cd43418b0f7d..bc5999f1b141a3557e266d6e6e7707af943e1e21 100755
--- a/MAC/Deployment/data/Coordinates/load_expected_pos.py
+++ b/MAC/Deployment/data/Coordinates/load_expected_pos.py
@@ -1,23 +1,23 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
-import re,sys,pgdb,pg
-from database import *
+# coding: iso-8859-15
+import re
+import sys
+import pgdb
+import pg
+from optparse import OptionParser
+import getpass
 
-# get info from database.py
-dbName=getDBname()
-dbHost=getDBhost()
 
 #
 # getCoordLines
 #
-def getCoordLines(filename):
+def get_coord_lines(filename):
     """
     Returns a list containing all lines with coordinates
     """
-    pattern=re.compile(r"^[HLC]{1}[0-9A-Z ]+,.*", re.IGNORECASE | re.MULTILINE)
-    #print pattern.findall(open(filename).read())
-    return [ line for line in pattern.findall(open(filename).read())]
-
+    pattern = re.compile(r"^[HLC]{1}[0-9A-Z ]+,.*", re.IGNORECASE | re.MULTILINE)
+    # print pattern.findall(open(filename).read())
+    return [line for line in pattern.findall(open(filename).read())]
 
 
 #
@@ -25,43 +25,82 @@ def getCoordLines(filename):
 #
 if __name__ == '__main__':
 
+    parser = OptionParser("Usage: %prog [options] datafile")
+    parser.add_option("-D", "--database",
+                      dest="dbName",
+                      type="string",
+                      default="StationCoordinates",
+                      help="Name of StationCoordinates database to use")
+
+    parser.add_option("-H", "--host",
+                      dest="dbHost",
+                      type="string",
+                      default="sasdb.control.lofar",
+                      help="Hostname of StationCoordinates database")
+
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default="5432",
+                      help="Port of StationCoordinates database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+
+    # parse arguments
+
+    (options, args) = parser.parse_args()
+
+    if len(args) != 1:
+        parser.print_help()
+        sys.exit(1)
+
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+    filename = args[0]
+
+    dbPassword = getpass.getpass()
+
     # check syntax of invocation
     # Expected syntax: load_measurement stationname objecttypes datafile
     #
-    if (len(sys.argv) != 2):
-        print "Syntax: %s datafile" % sys.argv[0]
-        sys.exit(1)
-    filename = str(sys.argv[1])
-    stationname = filename[ filename.find('/')+1 : filename.find('/')+1 + 5].upper()
+    stationname = filename[filename.find('/') + 1: filename.find('/') + 1 + 5].upper()
     objecttype = 'LBA, HBA'
-    refSys   = 'ETRS89'
+    refSys = 'ETRS89'
     refFrame = 'ETRF89'
-    method   = 'derived'
-    date     = '2010-01-01'
-    pers1    = 'Brentjens'
-    pers2    = 'Donker'
-    pers3    = ''
-    derived  = ''
-    absRef   = ''
-    comment  = 'expected coordinates, Brentjens'
+    method = 'derived'
+    date = '2010-01-01'
+    pers1 = 'Brentjens'
+    pers2 = 'Donker'
+    pers3 = ''
+    derived = ''
+    absRef = ''
+    comment = 'expected coordinates, Brentjens'
     # check some data against the database
     station = []
     station.append(stationname)
-    
-    db = pgdb.connect(user="postgres", host=dbHost, database=dbName)
+
+    host = "{}:{}".format(dbHost, dbPort)
+
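+    # pgdb takes the port as part of the "host:port" string; pg.connect() takes it separately.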
+    db = pgdb.connect(user=dbUser, host=host, database=dbName, password=dbPassword)
     cursor = db.cursor()
-    
+
     # check person2
-    cursor.execute("select name from personnel where name = '%s'" % pers2 )
+    cursor.execute("select name from personnel where name = '%s'" % pers2)
     if cursor.rowcount != 1:
         print "Person: '%s' is not in the personnel file, add it (Y/N)?" % pers2
         if raw_input().upper() == "Y":
-            insertcmd = db.cursor();
+            insertcmd = db.cursor()
             insertcmd.execute("insert into personnel values ('%s')" % pers2)
             db.commit()
         else:
-            sys.exit(1);
-    
+            sys.exit(1)
+
     # check stationname
     cursor.execute("select name from station")
     stations = cursor.fetchall()
@@ -84,37 +123,39 @@ if __name__ == '__main__':
     print 'absolute reference   : ', absRef
     print 'comment              : ', comment
 
-    #if raw_input('Continue processing this file (Y/N)?').upper() != "Y":
+    # if raw_input('Continue processing this file (Y/N)?').upper() != "Y":
     #   sys.exit(1)
-    
+
     print 'processing ',
     sys.stdout.flush()
     # calling stored procedures only works from the pg module for some reason.
-    db = pg.connect(user="postgres", host=dbHost, dbname=dbName)
+    db = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
+
     sX = sY = sZ = 0
-    pol = 2 # number of polarizations
-    for cline in getCoordLines(sys.argv[1]):
-        if stationname == 'CS002': print cline
-        (name,X,Y,Z,P,Q,R,rcuX,rcuY) = cline.strip().split(',')
+    pol = 2  # number of polarizations
+    for cline in get_coord_lines(filename):
+        if stationname == 'CS002':
+            print cline
+        (name, X, Y, Z, P, Q, R, rcuX, rcuY) = cline.strip().split(',')
         # set object type (LBA, HBA, HBA0 or HBA1)
         objecttype = name.strip()
         print objecttype,
-        
-        if objecttype == 'CLBA' or objecttype == 'CHBA0' or objecttype == 'CHBA1' or objecttype == 'CHBA':
-            number     = -1
+
+        if objecttype in ('CLBA', 'CHBA0', 'CHBA1', 'CHBA'):
+            number = -1
             # make sure the object exists
-            db.query("select * from add_object('%s','%s',%s)" % ( stationname, objecttype, number))
+            db.query("select * from add_object('%s','%s',%s)" % (stationname, objecttype, number))
             # add the coord.
-            db.query("select * from add_ref_coord('%s','%s',%s,%s,%s,%s,%s,%s,%s,'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" %\
-                    ( stationname, objecttype, number,\
-                      X, Y, Z, sX, sY, sZ,\
-                      refSys, refFrame, method, date,\
+            db.query("select * from add_ref_coord('%s','%s',%s,%s,%s,%s,%s,%s,%s,'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" %
+                     (stationname, objecttype, number,
+                      X, Y, Z, sX, sY, sZ,
+                      refSys, refFrame, method, date,
                       pers1, pers2, pers3, absRef, derived, comment))
-            continue # next line
-
+            continue  # next line
 
         antType = name[:1]
-        
+
         if antType == 'L':
             objecttype = 'LBA'
 
@@ -129,25 +170,26 @@ if __name__ == '__main__':
             else:                      # remote station or internation station one hba filed
                 objecttype = 'HBA'
         else:
-           print '??',name,
+            print '??', name,
         sys.stdout.flush()
-        
-        
+
         # add RCU X coordinates
         number = int(name[1:]) * pol
-        #print objecttype, number
+        # print objecttype, number
         # make sure the object exists
-        db.query("select * from add_object('%s','%s',%s)" % ( stationname, objecttype, number))
+        db.query("select * from add_object('%s','%s',%s)" % (stationname, objecttype, number))
         # add the coord.
-        db.query("select * from add_ref_coord('%s','%s',%s,%s,%s,%s,%s,%s,%s,'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" %\
-            ( stationname, objecttype, number, X, Y, Z, sX, sY, sZ, refSys, refFrame, method, date, pers1, pers2, pers3, absRef, derived, comment))
-        
+        db.query("select * from add_ref_coord('%s','%s',%s,%s,%s,%s,%s,%s,%s,'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" %
+                 (stationname, objecttype, number, X, Y, Z, sX, sY, sZ, refSys, refFrame, method,
+                  date, pers1, pers2, pers3, absRef, derived, comment))
+
         # add RCU Y coordinates
         number = (int(name[1:]) * pol) + 1
-        #print objecttype, number
+        # print objecttype, number
         # make sure the object exists
-        db.query("select * from add_object('%s','%s',%s)" % ( stationname, objecttype, number))
+        db.query("select * from add_object('%s','%s',%s)" % (stationname, objecttype, number))
         # add the coord.
-        db.query("select * from add_ref_coord('%s','%s',%s,%s,%s,%s,%s,%s,%s,'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" %\
-            ( stationname, objecttype, number, X, Y, Z, sX, sY, sZ, refSys, refFrame, method, date, pers1, pers2, pers3, absRef, derived, comment))
+        db.query("select * from add_ref_coord('%s','%s',%s,%s,%s,%s,%s,%s,%s,'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" %
+                 (stationname, objecttype, number, X, Y, Z, sX, sY, sZ, refSys, refFrame, method,
+                  date, pers1, pers2, pers3, absRef, derived, comment))
     print ' Done'
diff --git a/MAC/Deployment/data/Coordinates/load_hba_rotations.py b/MAC/Deployment/data/Coordinates/load_hba_rotations.py
index 16d6d9aaa9e4abf4d6a3a63f28140375f9608ba2..2b96c3d1f61d30eb22ddd384d4b83038fe6b177d 100755
--- a/MAC/Deployment/data/Coordinates/load_hba_rotations.py
+++ b/MAC/Deployment/data/Coordinates/load_hba_rotations.py
@@ -1,77 +1,119 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
-import re,sys,pgdb,pg
-from math import *
-from database import *
+# coding: iso-8859-15
+import sys
+import pgdb
+import pg
+from math import pi
+from optparse import OptionParser
+import getpass
 
-# get info from database.py
-dbName=getDBname()
-dbHost=getDBhost()
-
-db1 = pgdb.connect(user="postgres", host=dbHost, database=dbName)
-cursor = db1.cursor()
-
-# calling stored procedures only works from the pg module for some reason.
-db2 = pg.connect(user="postgres", host=dbHost, dbname=dbName)
 
 #
 # getRotationLines
 #
-def getRotationLines(filename):
+def get_rotation_lines(filename):
     """
     Returns a list containing all lines with rotations
     """
-    f = open(filename,'r')
+    f = open(filename, 'r')
     lines = f.readlines()
     f.close()
-    return [ line.strip().split(',') for line in lines[1:]]
+    return [line.strip().split(',') for line in lines[1:]]
+
 
 ##
-def getRotation(line):
+def get_rotation(line):
     hba0 = hba1 = None
     station = str(line[0]).upper()
     if line[1] != '':
         hba0 = (int(line[1])/360.) * 2. * pi
     if line[2] != '':
         hba1 = (int(line[2])/360.) * 2. * pi
-    return(station,hba0,hba1)
+    return (station, hba0, hba1)
+
 
 #
 # MAIN
 #
 if __name__ == '__main__':
+    parser = OptionParser("Usage: %prog [options] datafile")
+
+    parser.add_option("-D", "--database",
+                      dest="dbName",
+                      type="string",
+                      default="StationCoordinates",
+                      help="Name of StationCoordinates database to use")
+
+    parser.add_option("-H", "--host",
+                      dest="dbHost",
+                      type="string",
+                      default="sasdb.control.lofar",
+                      help="Hostname of StationCoordinates database")
+
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default=5432,
+                      help="Port of StationCoordinates database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+
+    # parse arguments
+
+    (options, args) = parser.parse_args()
 
-    # check syntax of invocation
-    # Expected syntax: load_measurement stationname objecttypes datafile
-    #
-    if (len(sys.argv) != 2):
-        print "Syntax: %s datafile" % sys.argv[0]
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+
+    # print sys.argv
+    if len(args) != 1:
+        parser.print_help()
         sys.exit(1)
-    filename = str(sys.argv[1])
-    
-    lines = getRotationLines(filename)
+
+    filename = str(args[0])
+
+    dbPassword = getpass.getpass()
+
+    host = "{}:{}".format(dbHost, dbPort)
+
+    db1 = pgdb.connect(user=dbUser, host=host, database=dbName, password=dbPassword)
+    cursor = db1.cursor()
+
+    # calling stored procedures only works from the pg module for some reason.
+    db2 = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
+
+    lines = get_rotation_lines(filename)
     for line in lines:
-        (stationname,rotation0,rotation1) = getRotation(line)
-                
+        (stationname, rotation0, rotation1) = get_rotation(line)
+
         # check stationname
         cursor.execute("select name from station")
         stations = cursor.fetchall()
-        
+
         station = []
         station.append(stationname)
         if station not in stations:
             print "station %s is not a legal stationame" % stationname
             sys.exit(1)
         try:
-            if rotation1 == None:
-                db2.query("select * from add_field_rotation('%s','HBA',%s)" %( stationname, rotation0))
-                print 'station %s  rotation=%f' %(stationname,rotation0)
-            if rotation0 != None and rotation1 != None:
-                db2.query("select * from add_field_rotation('%s','HBA0',%s)" %( stationname, rotation0))
-                db2.query("select * from add_field_rotation('%s','HBA1',%s)" %( stationname, rotation1))
-                print 'station %s  rotation0=%f  rotation1=%f' %(stationname,rotation0, rotation1)
+            if rotation1 is None:
+                db2.query("select * from add_field_rotation('%s','HBA',%s)" %
+                          (stationname, rotation0))
+                print 'station %s  rotation=%f' % (stationname, rotation0)
+            if rotation0 is not None and rotation1 is not None:
+                db2.query("select * from add_field_rotation('%s','HBA0',%s)" %
+                          (stationname, rotation0))
+                db2.query("select * from add_field_rotation('%s','HBA1',%s)" %
+                          (stationname, rotation1))
+                print 'station %s  rotation0=%f  rotation1=%f' % (stationname, rotation0, rotation1)
         except:
-            print 'WARN, station %s has no HBA types defined yet' %(stationname)
+            print 'WARN, station %s has no HBA types defined yet' % (stationname)
     print ' Done'
     db1.close()
     db2.close()
diff --git a/MAC/Deployment/data/Coordinates/load_measurementfile.py b/MAC/Deployment/data/Coordinates/load_measurementfile.py
index 1c5f4287113ccd3c9253a76f84c12d787841bdd0..0aedc27298028d1cd1ef54a33db9e3b5f1563e0e 100755
--- a/MAC/Deployment/data/Coordinates/load_measurementfile.py
+++ b/MAC/Deployment/data/Coordinates/load_measurementfile.py
@@ -1,63 +1,120 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
-import re,sys,pgdb,pg
-from database import *
+# coding: iso-8859-15
+import re
+import sys
+import pgdb
+import pg
+from optparse import OptionParser
+import getpass
 
-# get info from database.py
-dbName=getDBname()
-dbHost=getDBhost()
 
 #
 # getHeaderLines
 #
-def getHeaderLines(filename):
+def get_header_lines(filename):
     """
     Returns a list containing all lines that do NOT contains coordinate data.
     """
-    pattern=re.compile("^[a-zA-Z]+.*", re.IGNORECASE | re.MULTILINE)
+    pattern = re.compile("^[a-zA-Z]+.*", re.IGNORECASE | re.MULTILINE)
     answer = {}
     for line in pattern.findall(open(filename).read()):
         if line.count(';') == 1:
-            (key, value) = line.split(';') 
-            answer[key]=value
+            (key, value) = line.split(';')
+            answer[key] = value
     return answer
 
+
 #
 # getCoordLines
 #
-def getCoordLines(filename):
+def get_coord_lines(filename):
     """
     Returns a list containing all lines with coordinates
     """
-    pattern=re.compile("^[0-9]+;.*", re.IGNORECASE | re.MULTILINE)
-    return [ line for line in pattern.findall(open(filename).read())]
+    pattern = re.compile("^[0-9]+;.*", re.IGNORECASE | re.MULTILINE)
+    return [line for line in pattern.findall(open(filename).read())]
+
 
 #
 # MAIN
 #
-if __name__ == '__main__':    
+if __name__ == '__main__':
+    parser = OptionParser("Usage: %prog [options] datafile")
+
+    parser.add_option("-D", "--database",
+                      dest="dbName",
+                      type="string",
+                      default="StationCoordinates",
+                      help="Name of StationCoordinates database to use")
+
+    parser.add_option("-H", "--host",
+                      dest="dbHost",
+                      type="string",
+                      default="sasdb.control.lofar",
+                      help="Hostname of StationCoordinates database")
+
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      default=5432,
+                      default="5432",
+                      help="Port of StationCoordinates database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+
+    # parse arguments
+
+    (options, args) = parser.parse_args()
+
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
 
     # check syntax of invocation
     # Expected syntax: load_measurement stationname objecttypes datafile
     #
-    if (len(sys.argv) != 2):
-        print "Syntax: %s datafile" % sys.argv[0]
+    if (len(args) != 1):
+        parser.print_help()
         sys.exit(1)
 
+    dbPassword = getpass.getpass()
+
+    host = "{}:{}".format(dbHost, dbPort)
+
+    db = pgdb.connect(user=dbUser, host=host, database=dbName, password=dbPassword)
+    cursor = db.cursor()
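+    # Open the DB-API connection up front; the metadata checks below reuse this cursor.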
+
     # process metadata info
-    stationname = objecttype = refSys = refFrame = method = date = pers1 = pers2 = pers3 = absRef = derived = comment = ""
-    metadata = getHeaderLines(sys.argv[1])
-    if metadata.has_key("stationname"): stationname = metadata["stationname"]
-    if metadata.has_key("infotype"): objecttype = metadata["infotype"]
-    if metadata.has_key("ref_system"): refSys = metadata["ref_system"]
-    if metadata.has_key("ref_frame"):  refFrame = metadata["ref_frame"]
-    if metadata.has_key("method"): method = metadata["method"]
-    if metadata.has_key("measure_date"): date = metadata["measure_date"]
-    if metadata.has_key("person1"): pers1 = metadata["person1"]
-    if metadata.has_key("person2"): pers2 = metadata["person2"]
-    if metadata.has_key("person3"): pers3 = metadata["person3"]
-    if metadata.has_key("absolute_reference"): absRef = metadata["absolute_reference"]
-    if metadata.has_key("comment"): comment = metadata["comment"]
+    stationname = objecttype = refSys = refFrame = method = date = pers1 = pers2 = pers3 = ""
+    absRef = derived = comment = ""
+
+    metadata = get_header_lines(args[0])
+    if "stationname" in metadata:
+        stationname = metadata["stationname"]
+    if "infotype" in metadata:
+        objecttype = metadata["infotype"]
+    if "ref_system" in metadata:
+        refSys = metadata["ref_system"]
+    if "ref_frame" in metadata:
+        refFrame = metadata["ref_frame"]
+    if "method" in metadata:
+        method = metadata["method"]
+    if "measure_date" in metadata:
+        date = metadata["measure_date"]
+    if "person1" in metadata:
+        pers1 = metadata["person1"]
+    if "person2" in metadata:
+        pers2 = metadata["person2"]
+    if "person3" in metadata:
+        pers3 = metadata["person3"]
+    if "absolute_reference" in metadata:
+        absRef = metadata["absolute_reference"]
+    if "comment" in metadata:
+        comment = metadata["comment"]
 
     # check some data against the database
     station = []
@@ -65,50 +122,48 @@ if __name__ == '__main__':
     objtype = []
     objtype.append(objecttype)
 
-    db = pgdb.connect(user="postgres", host=dbHost, database=dbName)
-    cursor = db.cursor()
     # check stationname
     cursor.execute("select name from station")
     stations = cursor.fetchall()
     if station not in stations:
         print "station %s is not a legal stationame" % stationname
         sys.exit(1)
-    #check objecttype
+    # check objecttype
     cursor.execute("select * from object_type")
     objecttypes = cursor.fetchall()
     if objtype not in objecttypes:
         print "objecttype must be one of: ",  objecttypes
         sys.exit(1)
     # check person1
-    cursor.execute("select name from personnel where name = '%s'" % pers1 )
+    cursor.execute("select name from personnel where name = '%s'" % pers1)
     if cursor.rowcount != 1:
         print "Person: '%s' is not in the personnel file, add it (Y/N)?" % pers1
         if raw_input().upper() == "Y":
-            insertcmd = db.cursor();
+            insertcmd = db.cursor()
             insertcmd.execute("insert into personnel values ('%s')" % pers1)
             db.commit()
         else:
-            sys.exit(1);
+            sys.exit(1)
     # check person2
-    cursor.execute("select name from personnel where name = '%s'" % pers2 )
+    cursor.execute("select name from personnel where name = '%s'" % pers2)
     if cursor.rowcount != 1:
         print "Person: '%s' is not in the personnel file, add it (Y/N)?" % pers2
         if raw_input().upper() == "Y":
-            insertcmd = db.cursor();
+            insertcmd = db.cursor()
             insertcmd.execute("insert into personnel values ('%s')" % pers2)
             db.commit()
         else:
-            sys.exit(1);
+            sys.exit(1)
     # check person3
-    cursor.execute("select name from personnel where name = '%s'" % pers3 )
+    cursor.execute("select name from personnel where name = '%s'" % pers3)
     if cursor.rowcount != 1:
         print "Person: '%s' is not in the personnel file, add it (Y/N)?" % pers3
         if raw_input().upper() == "Y":
-            insertcmd = db.cursor();
+            insertcmd = db.cursor()
             insertcmd.execute("insert into personnel values ('%s')" % pers3)
             db.commit()
         else:
-            sys.exit(1);
+            sys.exit(1)
     db.close()
 
     # show metadata to user
@@ -125,14 +180,17 @@ if __name__ == '__main__':
     print 'comment              : ', comment
 
     if raw_input('Continue processing this file (Y/N)?').upper() != "Y":
-       sys.exit(1)
+        sys.exit(1)
 
     # calling stored procedures only works from the pg module for some reason.
-    db = pg.connect(user="postgres", host=dbHost, dbname=dbName)
-    for cline in getCoordLines(sys.argv[1]):
-        ( number, X, Y, Z, sX, sY, sZ ) = cline.split(';')
+    db = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
+    for cline in get_coord_lines(args[0]):
+        (number, X, Y, Z, sX, sY, sZ) = cline.split(';')
         print objecttype, number
         # make sure the object exists
-        db.query("select * from add_object('%s','%s',%s)" % ( stationname, objecttype, number))
+        db.query("select * from add_object('%s','%s',%s)" % (stationname, objecttype, number))
         # add the coord.
-        db.query("select * from add_ref_coord('%s','%s',%s,%s,%s,%s,%s,%s,%s,'%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" % ( stationname, objecttype, number, X, Y, Z, sX, sY, sZ, refSys, refFrame, method, date, pers1, pers2, pers3, absRef, derived, comment))
+        db.query("""select * from add_ref_coord('%s','%s',%s,%s,%s,%s,%s,%s,%s,'%s',
+        '%s','%s','%s','%s','%s','%s','%s','%s','%s')""" %
+                 (stationname, objecttype, number, X, Y, Z, sX, sY, sZ, refSys, refFrame, method,
+                  date, pers1, pers2, pers3, absRef, derived, comment))
diff --git a/MAC/Deployment/data/Coordinates/load_normal_vectors.py b/MAC/Deployment/data/Coordinates/load_normal_vectors.py
index 71626612e91573950ffd60b5d7bca2c1b7c7f422..2c7a4740445b66cbb78a88e770bbc69d16e5ae08 100755
--- a/MAC/Deployment/data/Coordinates/load_normal_vectors.py
+++ b/MAC/Deployment/data/Coordinates/load_normal_vectors.py
@@ -1,81 +1,112 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
-import re,sys,pgdb,pg
-import numpy as np
-from math import *
-from database import *
+# coding: iso-8859-15
+import sys
+import pgdb
+import pg
+import getpass
+from optparse import OptionParser
 
-# get info from database.py
-dbName=getDBname()
-dbHost=getDBhost()
-
-
-db1 = pgdb.connect(user="postgres", host=dbHost, database=dbName)
-cursor = db1.cursor()
-
-# calling stored procedures only works from the pg module for some reason.
-db2 = pg.connect(user="postgres", host=dbHost, dbname=dbName)
 
 #
 # getRotationLines
 #
-def getLines(filename):
+def get_lines(filename):
     """
     Returns a list containing all lines with normal vectors
     """
-    f = open(filename,'r')
+    f = open(filename, 'r')
     lines = f.readlines()
     f.close()
-    return [ line.strip().split(',') for line in lines[3:]]
+    return [line.strip().split(',') for line in lines[3:]]
 
 
 ##
-def getNormalVector(line):
-    #print line
+def get_normal_vector(line):
+    # print line
     station = str(line[0]).upper().strip()
     anttype = str(line[1]).upper().strip()
     # make db vector [3]
-    vector = "ARRAY[%f,%f,%f]" %\
-             (float(line[2]),float(line[3]),float(line[4]))
-        
-    return(station,anttype,vector)
+    vector = "ARRAY[%f,%f,%f]" % (float(line[2]), float(line[3]), float(line[4]))
+
+    return (station, anttype, vector)
 
 
 #
 # MAIN
 #
 if __name__ == '__main__':
+    parser = OptionParser("Usage: %prog [options] filename")
+
+    parser.add_option("-D", "--database",
+                      dest="dbName",
+                      type="string",
+                      default="StationCoordinates",
+                      help="Name of StationCoordinates database to use")
+
+    parser.add_option("-H", "--host",
+                      dest="dbHost",
+                      type="string",
+                      default="sasdb.control.lofar",
+                      help="Hostname of StationCoordinates database")
+
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default=5432,
+                      help="Port of StationCoordinates database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+
+    # parse arguments
+
+    (options, args) = parser.parse_args()
 
-    # check syntax of invocation
-    # Expected syntax: load_measurement stationname objecttypes datafile
-    #
-    if (len(sys.argv) != 2):
-        print "Syntax: %s datafile" % sys.argv[0]
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+
+    if len(args) != 1:
+        parser.print_help()
         sys.exit(1)
-    filename = str(sys.argv[1])
-    
-    #filename = 'rotation-matrices/normal_vectors.dat'
-    
+
+    filename = str(args[0])
+
+    dbPassword = getpass.getpass()
+
+    host = "{}:{}".format(dbHost, dbPort)
+
+    db1 = pgdb.connect(user=dbUser, host=host, database=dbName, password=dbPassword)
+    cursor = db1.cursor()
+
+    # calling stored procedures only works from the pg module for some reason.
+    db2 = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
+
     # check stationname
     cursor.execute("select name from station")
     stations = cursor.fetchall()
     print stations
-        
-    lines = getLines(filename)
+
+    lines = get_lines(filename)
     for line in lines:
-        (stationname,anttype,vector) = getNormalVector(line)
-        
+        (stationname, anttype, vector) = get_normal_vector(line)
+
         station = []
         station.append(stationname)
         if station not in stations:
             print "station %s is not a legal stationame" % stationname
             sys.exit(1)
         try:
-            db2.query("select * from add_normal_vector('%s','%s',%s)" %(stationname, anttype, vector))
-            print "%s    %s    %s" %(stationname,anttype,vector)
+            db2.query("select * from add_normal_vector('%s','%s',%s)" % (stationname,
+                                                                         anttype, vector))
+            print "%s    %s    %s" % (stationname, anttype, vector)
         except:
-            print 'ERR, station=%s has no types defined' %(stationname)
-        
+            print 'ERR, station=%s has no types defined' % (stationname)
+
     print ' Done'
     db1.close()
     db2.close()
diff --git a/MAC/Deployment/data/Coordinates/load_rotation_matrices.py b/MAC/Deployment/data/Coordinates/load_rotation_matrices.py
index a5c748c1df7c3c49a3b1e5ac0652f64b6f001139..a2c4cb32bb650ddb0fba7d58930d92cb4dbfa3ff 100755
--- a/MAC/Deployment/data/Coordinates/load_rotation_matrices.py
+++ b/MAC/Deployment/data/Coordinates/load_rotation_matrices.py
@@ -1,82 +1,122 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
-import re,sys,pgdb,pg
-import numpy as np
-from math import *
-from database import *
+# coding: iso-8859-15
+import sys
+import pgdb
+import pg
+from optparse import OptionParser
+import getpass
 
-# get info from database.py
-dbName=getDBname()
-dbHost=getDBhost()
-
-db1 = pgdb.connect(user="postgres", host=dbHost, database=dbName)
-cursor = db1.cursor()
-
-# calling stored procedures only works from the pg module for some reason.
-db2 = pg.connect(user="postgres", host=dbHost, dbname=dbName)
 
 #
 # getRotationLines
 #
-def getRotationLines(filename):
+def get_rotation_lines(filename):
     """
     Returns a list containing all lines with rotations
     """
-    f = open(filename,'r')
+    f = open(filename, 'r')
     lines = f.readlines()
     f.close()
-    return [ line.strip().split(',') for line in lines[3:]]
+    return [line.strip().split(',') for line in lines[3:]]
 
 
 ##
-def getRotationMatrix(line):
-    #print line
+def get_rotation_matrix(line):
+    # print line
     station = str(line[0]).upper().strip()
     anttype = str(line[1]).upper().strip()
     # make db matrix [3][3]
     matrix = "ARRAY[[%f,%f,%f],[%f,%f,%f],[%f,%f,%f]]" %\
-             (float(line[2]),float(line[3]),float(line[4]), \
-              float(line[5]),float(line[6]),float(line[7]), \
-              float(line[8]),float(line[9]),float(line[10]))
-        
-    return(station,anttype,matrix)
+             (float(line[2]), float(line[3]), float(line[4]),
+              float(line[5]), float(line[6]), float(line[7]),
+              float(line[8]), float(line[9]), float(line[10]))
+
+    return (station, anttype, matrix)
 
 
 #
 # MAIN
 #
 if __name__ == '__main__':
+    parser = OptionParser("Usage: %prog [options] datafile")
+
+    parser.add_option("-D", "--database",
+                      dest="dbName",
+                      type="string",
+                      default="StationCoordinates",
+                      help="Name of StationCoordinates database to use")
+
+    parser.add_option("-H", "--host",
+                      dest="dbHost",
+                      type="string",
+                      default="sasdb.control.lofar",
+                      help="Hostname of StationCoordinates database")
+
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      default=5432,
+                      default="5432",
+                      help="Port of StationCoordinates database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+
+    # parse arguments
+
+    (options, args) = parser.parse_args()
+
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+
+    # print sys.argv
+    if len(args) != 1:
+        parser.print_help()
+        sys.exit(1)
+
+    dbPassword = getpass.getpass()
+
+    host = "{}:{}".format(dbHost, dbPort)
+
+    db1 = pgdb.connect(user=dbUser, host=host, database=dbName, password=dbPassword)
+    cursor = db1.cursor()
+
+    # calling stored procedures only works from the pg module for some reason.
+    db2 = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
 
     # check syntax of invocation
     # Expected syntax: load_measurement stationname objecttypes datafile
     #
-    if (len(sys.argv) != 2):
-        print "Syntax: %s datafile" % sys.argv[0]
-        sys.exit(1)
-    filename = str(sys.argv[1])
+    filename = str(args[0])
+
+    # filename = 'rotation-matrices/rotation_matrices.dat'
 
-    #filename = 'rotation-matrices/rotation_matrices.dat'
-    
-    lines = getRotationLines(filename)
+    lines = get_rotation_lines(filename)
     for line in lines:
-        (stationname,anttype,matrix) = getRotationMatrix(line)
-        if stationname == 'CS001': print stationname,'  ',anttype,'  ',matrix[0]
+        (stationname, anttype, matrix) = get_rotation_matrix(line)
+        if stationname == 'CS001':
+            print stationname, '  ', anttype, '  ', matrix[0]
         # check stationname
         cursor.execute("select name from station")
         stations = cursor.fetchall()
-        
+
         station = []
         station.append(stationname)
         if station not in stations:
             print "station %s is not a legal stationame" % stationname
             sys.exit(1)
         try:
-            db2.query("select * from add_rotation_matrix('%s','%s',%s)" %(stationname, anttype, matrix))
-            
-            print stationname,'  ',anttype,'  ',matrix
+            db2.query("select * from add_rotation_matrix('%s','%s',%s)" %
+                      (stationname, anttype, matrix))
+
+            print stationname, '  ', anttype, '  ', matrix
         except:
-            print 'ERR, station=%s has no types defined' %(stationname)
-        
+            print 'ERR, station=%s has no types defined' % (stationname)
+
     print ' Done'
     db1.close()
     db2.close()
diff --git a/MAC/Deployment/data/Coordinates/make_all_station_file.py b/MAC/Deployment/data/Coordinates/make_all_station_file.py
index 9d0fb231848fb549cd16fb823ccca3a1329e0df6..d4aac0ea087a38c030fd0756805cc11fc12c1951 100755
--- a/MAC/Deployment/data/Coordinates/make_all_station_file.py
+++ b/MAC/Deployment/data/Coordinates/make_all_station_file.py
@@ -1,36 +1,20 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
+# coding: iso-8859-15
 #
 # Make AntennaField.conf and iHBADeltas.conf file for given station and date
 #
 
-import sys,pgdb, pg
-from datetime import *
-from copy import deepcopy
-from math import *
+import sys
+import pgdb
+import pg
 import numpy as np
-#import MLab as mlab
-from database import *
+import getpass
+from optparse import OptionParser
 
-# get info from database.py
-dbName=getDBname()
-dbHost=getDBhost()
 
-db1 = pgdb.connect(user="postgres", host=dbHost, database=dbName)
-cursor = db1.cursor()
-
-# calling stored procedures only works from the pg module for some reason.
-db2 = pg.connect(user="postgres", host=dbHost, dbname=dbName)
-
-
-##
-def print_help():
-    print "Usage: make_all_station_file  date"
-    print "    <date>      : yyyy.yy e.g. 2008.75 for Oct 1st 2008"
-
-##
-## write header to antennaField file
-##
+#
+# write header to antennaField file
+#
 def writeAntennaFieldHeader(frame):
     # add to All Station config file
     dataStr = ''
@@ -70,9 +54,9 @@ def writeNormalVector(station, anttype):
         print 'ERR, no normal-vector for %s, %s' %(station, anttype)
     return
 
-##
-## write rotation matrix
-##
+#
+# write rotation matrix
+#
 def writeRotationMatrix(station, anttype):
     try:
         cursor.execute("select * from get_rotation_matrix(%s, %s)", (station, anttype))
@@ -122,10 +106,56 @@ def writeAntennaField(station, anttype, aPos):
 ## MAIN
 ##
 if __name__ == '__main__':
-    
-    if len(sys.argv) != 2:
-        print_help()
+    parser = OptionParser("""Usage: %prog [options] data
+    <date>      : yyyy.yy e.g. 2008.75 for Oct 1st 2008""")
+    parser.add_option("-D", "--database",
+                      dest="dbName",
+                      type="string",
+                      default="StationCoordinates",
+                      help="Name of StationCoordinates database to use")
+
+    parser.add_option("-H", "--host",
+                      dest="dbHost",
+                      type="string",
+                      default="sasdb.control.lofar",
+                      help="Hostname of StationCoordinates database")
+
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default=5432,
+                      help="Port of StationCoordinates database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+
+    # parse arguments
+
+    (options, args) = parser.parse_args()
+
+    if len(args) != 1:
+        parser.print_help()
         sys.exit(1)
+
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+    date = args[0]
+
+    dbPassword = getpass.getpass()
+
+    host = "{}:{}".format(dbHost, dbPort)
+
+    db1 = pgdb.connect(user=dbUser, host=host, database=dbName, password=dbPassword)
+    cursor = db1.cursor()
+
+    # calling stored procedures only works from the pg module for some reason.
+    db2 = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
+
     first = True
     for stationname in db2.query("select distinct o.stationname from object o inner join reference_coord r on r.id = o.id").getresult():
         station = stationname[0]
diff --git a/MAC/Deployment/data/Coordinates/make_antenna_list.py b/MAC/Deployment/data/Coordinates/make_antenna_list.py
index 2b01dcf440140f73bb1644006d21f9119ae0e604..35f666aa124cfee4fa5e618e46fa16976a5e4b22 100755
--- a/MAC/Deployment/data/Coordinates/make_antenna_list.py
+++ b/MAC/Deployment/data/Coordinates/make_antenna_list.py
@@ -1,54 +1,102 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
-import re,sys,pgdb
-from copy import deepcopy
-from math import *
+# coding: iso-8859-15
+import re
+import sys
+import pgdb
+from optparse import OptionParser
+import getpass
 
 
-INTRO=""" 
+INTRO = """
 Created a file containing all antenna coordinates for the online software.
-""" 
+"""
 
-def print_help():
-    print "Usage: make_antenna_list [<stationname>]"
 
 #
 # findStationInfo(stationName)
 #
-def findStationInfo(stationName):
+def find_station_info(station_name):
     """
     Return all basic station info (eg. nr RSPboards) from a station.
     """
-    pattern=re.compile("^"+stationName+"[ \t].*", re.IGNORECASE | re.MULTILINE)
+    pattern = re.compile("^"+station_name+"[ \t].*", re.IGNORECASE | re.MULTILINE)
     match = pattern.search(open("../StaticMetaData/StationInfo.dat").read())
     if not match:
-        raise "\nFatal error: "+stationName+" is not defined in file 'StationInfo.dat'"
+        raise "\nFatal error: "+station_name+" is not defined in file 'StationInfo.dat'"
     return match.group().split()
 
+
 #
 # MAIN
 #
 if __name__ == '__main__':
-    if len(sys.argv) != 2:
-        print_help()
-        sys.exit(0)
+    parser = OptionParser("Usage: %prog [options] datafile")
+
+    parser.add_option("-D", "--database",
+                      dest="dbName",
+                      type="string",
+                      default="coordtest",
+                      help="Name of StationCoordinates database to use")
+
+    parser.add_option("-H", "--host",
+                      dest="dbHost",
+                      type="string",
+                      default="dop50",
+                      help="Hostname of StationCoordinates database")
+
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default=5432,
+                      help="Port of StationCoordinates database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+
+    # parse arguments
+
+    (options, args) = parser.parse_args()
 
-    (name, stationID, stnType, long, lat, height, nrRSP, nrTBB, nrLBA, nrHBA, HBAsplit, LBAcal ) = findStationInfo(sys.argv[1])
-    db = pgdb.connect(user="postgres", host="dop50", database="coordtest")
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+
+    if len(args) != 1:
+        parser.print_help()
+        sys.exit(1)
+
+    station_name = str(args[0])
+
+    dbPassword = getpass.getpass()
+
+    host = "{}:{}".format(dbHost, dbPort)
+
+    db = pgdb.connect(user=dbUser, host=host, database=dbName, password=dbPassword)
+    cursor = db.cursor()
+
+    (name, stationID, stnType, long, lat, height, nrRSP,
+     nrTBB, nrLBA, nrHBA, HBAsplit, LBAcal) = find_station_info(station_name)
     print "#Stn	ID	Type	RSP	RCU	Pol	Position					Orientation"
-    print "%s	%s	%s	%d	%d	-1	[%s,%s,%s]	[0,0,0]" % (name, stationID, "center", -1, -1, long, lat, height)
-    for infoType in [ 'marker', 'lba', 'hba' ]:
-        cursor = db.cursor()
-        cursor.execute("select * from get_ref_objects(%s, %s)", (sys.argv[1], infoType))
+    print "%s	%s	%s	%d	%d	-1	[%s,%s,%s]	[0,0,0]" % \
+        (name, stationID, "center", -1, -1, long, lat, height)
+    for infoType in ['marker', 'lba', 'hba']:
+        cursor.execute("select * from get_ref_objects(%s, %s)", (station_name, infoType))
         counter = 0
         while (1):
             record = cursor.fetchone()
-            if record == None:
+            if record is None:
                 break
-            RSPnr = int(record[2]%100/4)
-            print "%s	%s	%s%d	%d	%d	x	[%s,%s,%s]	[0,0,0]" % (name, stationID, infoType, int(record[2])%100, RSPnr, counter, record[3], record[4], record[5])
-            print "%s	%s	%s%d	%d	%d	y	[%s,%s,%s]	[0,0,0]" % (name, stationID, infoType, int(record[2])%100, RSPnr, counter+1, record[3], record[4], record[5])
+            RSPnr = int(record[2] % 100/4)
+            print "%s	%s	%s%d	%d	%d	x	[%s,%s,%s]	[0,0,0]" % \
+                (name, stationID, infoType, int(record[2]) % 100, RSPnr, counter,
+                 record[3], record[4], record[5])
+            print "%s	%s	%s%d	%d	%d	y	[%s,%s,%s]	[0,0,0]" % \
+                (name, stationID, infoType, int(record[2]) % 100, RSPnr, counter+1,
+                 record[3], record[4], record[5])
             counter = counter + 2
     db.close()
     sys.exit(1)
-
diff --git a/MAC/Deployment/data/Coordinates/make_conf_files.py b/MAC/Deployment/data/Coordinates/make_conf_files.py
index b045af350197ff1c34f332ba258c1f07e18d6178..50282ee0b8f413a0cf7ff27271b885b0cd3d2d8c 100755
--- a/MAC/Deployment/data/Coordinates/make_conf_files.py
+++ b/MAC/Deployment/data/Coordinates/make_conf_files.py
@@ -1,33 +1,18 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
+# coding: iso-8859-15
 #
 # Make AntennaField.conf and iHBADeltas.conf file for given station and date
 #
 
-import sys,pgdb, pg
+import sys
+import pgdb
+import pg
 from datetime import *
 from copy import deepcopy
 from math import *
 import numpy as np
-#import MLab as mlab
-from database import *
-
-# get info from database.py
-dbName=getDBname()
-dbHost=getDBhost()
-
-
-db1 = pgdb.connect(user="postgres", host=dbHost, database=dbName)
-cursor = db1.cursor()
-
-# calling stored procedures only works from the pg module for some reason.
-db2 = pg.connect(user="postgres", host=dbHost, dbname=dbName)
-
-
-##
-def print_help():
-    print "Usage: make_conf_files <stationname> date"
-    print "    <date>      : yyyy.yy e.g. 2008.75 for Oct 1st 2008"
+import getpass
+from optparse import OptionParser
 
 
 ##
@@ -193,15 +178,59 @@ def writeAntennaField(station, anttype, aPos):
 ## MAIN
 ##
 if __name__ == '__main__':
-    
-    if len(sys.argv) != 3:
-        print_help()
+    parser = OptionParser("""Usage: %prog [options] station data
+    <date>      : yyyy.yy e.g. 2008.75 for Oct 1st 2008""")
+    parser.add_option("-D", "--database",
+                      dest="dbName",
+                      type="string",
+                      default="StationCoordinates",
+                      help="Name of StationCoordinates database to use")
+
+    parser.add_option("-H", "--host",
+                      dest="dbHost",
+                      type="string",
+                      default="sasdb.control.lofar",
+                      help="Hostname of StationCoordinates database")
+
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default="5432",
+                      help="Port of StationCoordinates database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+
+    # parse arguments
+
+    (options, args) = parser.parse_args()
+
+    if len(args) != 2:
+        parser.print_help()
         sys.exit(1)
 
-    station = str(sys.argv[1]).upper()
-    date_years = float(sys.argv[2]) 
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+    station = args[0].upper()
+    date_years = float(args[1])
+
+    dbPassword = getpass.getpass()
+
+    host = "{}:{}".format(dbHost, dbPort)
+
+    db1 = pgdb.connect(user=dbUser, host=host, database=dbName, password=dbPassword)
+    cursor = db1.cursor()
+
+    # calling stored procedures only works from the pg module for some reason.
+    db2 = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
+
     frame = ''
-    
+
     # from database select all antennas for given station and target-date
     # The ''order by'' statement is needed to prevent mixup of even/odd pairs
     # as was seen on sas001 (Arno)
diff --git a/MAC/Tools/Antennas/dumpAntennaStates.py b/MAC/Tools/Antennas/dumpAntennaStates.py
index 736b7d69338506b5f66fd6a20ee47e6eefa79ebb..78e5f50e8cba246dbd3ef8b3d55972751682c4aa 100755
--- a/MAC/Tools/Antennas/dumpAntennaStates.py
+++ b/MAC/Tools/Antennas/dumpAntennaStates.py
@@ -1,6 +1,8 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
-import os,sys,time,pg
+# coding: iso-8859-15
+import sys
+import pg
+import getpass
 from optparse import OptionParser
 
 #
@@ -13,7 +15,7 @@ if __name__ == '__main__':
     that info in a station's WinCC database. Use putback_pvss.py for that.
     """
 
-    parser = OptionParser("Usage: %prog [options]" )
+    parser = OptionParser("Usage: %prog [options]")
     parser.add_option("-D", "--database",
                       dest="dbName",
                       type="string",
@@ -26,6 +28,18 @@ if __name__ == '__main__':
                       default="sasdb.control.lofar",
                       help="Hostname of OTDB database")
 
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default=5432,
+                      help="Port of OTDB database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of OTDB database")
+
     parser.add_option("-F", "--file",
                       dest="outfile",
                       type="string",
@@ -42,24 +56,29 @@ if __name__ == '__main__':
         parser.print_help()
         sys.exit(0)
 
-    dbName   = options.dbName
-    dbHost   = options.dbHost
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
     filename = options.outfile
 
+    dbPassword = getpass.getpass()
+
     # calling stored procedures only works from the pg module for some reason.
     print "Connecting...",
-    otdb = pg.connect(user="postgres", host=dbHost, dbname=dbName)
+    otdb = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
 
     print "\nQuerying database...",
-    HWstates = otdb.query("""select p.pvssname,k.value,k.time from pickvt k left join picparamref p on p.paramid=k.paramid 
-                             where pvssname like '%%RCU%%state' OR pvssname like '%%BA%%state' order by p.pvssname,k.time
-                          """).dictresult()
+    HWstates = otdb.query("select p.pvssname,k.value,k.time from pickvt k " +
+                          "left join picparamref p on p.paramid=k.paramid " +
+                          "where pvssname like '%%RCU%%state' OR pvssname like '%%BA%%state' " +
+                          "order by p.pvssname,k.time").dictresult()
     otdb.close()
 
     print "\nWriting file...",
-    file = open(filename, 'w');
+    file = open(filename, 'w')
     for rec in HWstates:
-      file.write("%s | %s | %s\n" % (rec['pvssname'], rec['value'], rec['time']))
+        file.write("%s | %s | %s\n" % (rec['pvssname'], rec['value'], rec['time']))
     file.close()
     print "\nDone"
 
diff --git a/SAS/OTDB/bin/copyTree.py b/SAS/OTDB/bin/copyTree.py
index c81deb2e352a5248dde6b8d4ee6f996d2ea8b5de..b9683e95b57a554de43f09fcb741b06692fefdaa 100755
--- a/SAS/OTDB/bin/copyTree.py
+++ b/SAS/OTDB/bin/copyTree.py
@@ -1,7 +1,9 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
-import os,sys,time,pg
+# coding: iso-8859-15
+import sys
+import pg
 from optparse import OptionParser
+import getpass
 
 
 #
@@ -214,17 +216,18 @@ def copyOrMapComponents(version):
             print "%s.%s (id=%d) => id=%d" % (comp['name'], par['name'], par['paramid'], newParID)
     print "Found %d parameters" % len(parIDmap)
 
+
 #
 # MAIN
 #
 if __name__ == '__main__':
     """
     copyTree copies 1 tree from 1 database to another. The tree may be a template or a VIC tree.
-    Ideally the components of the right version are already in the database but if they are not the components
-	are copied also. Idem with campaigns, users and units.
+    Ideally the components of the right version are already in the database, but if they are not,
+    the components are copied as well. The same applies to campaigns, users and units.
     """
 
-    parser = OptionParser("Usage: %prog [options]" )
+    parser = OptionParser("Usage: %prog [options]")
     parser.add_option("-D", "--sourcedatabase",
                       dest="fromDBname",
                       type="string",
@@ -237,7 +240,18 @@ if __name__ == '__main__':
                       default="localhost",
                       help="Hostname of source OTDB database")
 
-    
+    parser.add_option("-P", "--sourceport",
+                      dest="fromDBport",
+                      type="int",
+                      default=5432,
+                      help="Port of source OTDB database")
+
+    parser.add_option("-U", "--sourceuser",
+                      dest="fromDBuser",
+                      type="string",
+                      default="postgres",
+                      help="Username of source OTDB database")
+
     parser.add_option("-d", "--destdatabase",
                       dest="toDBname",
                       type="string",
@@ -250,6 +264,18 @@ if __name__ == '__main__':
                       default="localhost",
                       help="Hostname of destination OTDB database")
 
+    parser.add_option("-p", "--destport",
+                      dest="toDBport",
+                      type="int",
+                      default=5432,
+                      help="Port of destination OTDB database")
+
+    parser.add_option("-u", "--destuser",
+                      dest="toDBuser",
+                      type="string",
+                      default="postgres",
+                      help="Username of destination OTDB database")
+
     parser.add_option("-t", "--treeid",
                       dest="treeID",
                       type="int",
@@ -279,20 +305,31 @@ if __name__ == '__main__':
         sys.exit(1)
 
     # Fill variables used in remainder of code
-    fromDBhost=options.fromDBhost
-    fromDBname=options.fromDBname
-    toDBhost=options.toDBhost
-    toDBname=options.toDBname
-    treeID=options.treeID
+    fromDBhost = options.fromDBhost
+    fromDBname = options.fromDBname
+    fromDBuser = options.fromDBuser
+    fromDBport = options.fromDBport
+    toDBhost = options.toDBhost
+    toDBname = options.toDBname
+    toDBuser = options.toDBuser
+    toDBport = options.toDBport
+    treeID = options.treeID
+
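+    # The source and destination databases may need different credentials, so prompt for each.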
+    fromDBpassword = getpass.getpass("Source database password:")
+    toDBpassword = getpass.getpass("Destination database password:")
 
     # calling stored procedures only works from the pg module for some reason.
-    fromDB = pg.connect(user="postgres", host=fromDBhost, dbname=fromDBname)
-    print "Connected to source database", fromDBname, "on host ",fromDBhost 
-    toDB   = pg.connect(user="postgres", host=toDBhost, dbname=toDBname)
-    print "Connected to destination database", toDBname, "on host ",toDBhost
+    fromDB = pg.connect(user=fromDBuser, host=fromDBhost, dbname=fromDBname, port=fromDBport,
+                        passwd=fromDBpassword)
+    print "Connected to source database", fromDBname, "on host ", fromDBhost
+    toDB = pg.connect(user=toDBuser, host=toDBhost, dbname=toDBname, port=toDBport,
+                      passwd=toDBpassword)
+    print "Connected to destination database", toDBname, "on host ", toDBhost
 
     # Check for tree-existance in both databases.
-    fromDBtree = fromDB.query("select * from OTDBtree t INNER JOIN campaign c ON c.ID = t.campaign where treeID=%d" % treeID).dictresult()
+    fromDBtree = fromDB.query(
+        "select * from OTDBtree t INNER JOIN campaign c ON c.ID = t.campaign where treeID=%d" %
+        treeID).dictresult()
     toDBtree = toDB.query("select * from otdbtree where treeID=%d" % treeID).dictresult()
     if len(fromDBtree) == 0:
         print "Tree with treeID %d not found in database %s" % (treeID, fromDBname)
@@ -301,27 +338,27 @@ if __name__ == '__main__':
         print "Tree with treeID %d already exists in database %s" % (treeID, toDBname)
         # TODO: implement -f option to copy the tree under a different number.
         sys.exit(1)
-    if fromDBtree[0]['treetype'] == 10:	# PIC tree?
+    if fromDBtree[0]['treetype'] == 10:	 # PIC tree?
         print "PIC trees cannot be copied"
         sys.exit(1)
 
     # If copying a default template check that we don't create duplicates
-    templateName=''
+    templateName = ''
     if fromDBtree[0]['treetype'] == 20:
         templateName = fromDB.query("select name from otdbtree where treeID=%d" % treeID).getresult()[0][0]
         try:
-          toTemplateID = toDB.query("select treeid from OTDBtree where name='%s'" % templateName).getresult()[0][0]
-          print "The destination database has already a default-template with the name: %s" % templateName
-          sys.exit(1)
-        except IndexError:
-          pass
-        if fromDBtree[0]['processtype'] != '':
-          try:
-            toTemplateID = toDB.query("select treeid from OTDBtree where processtype='%s' and processsubtype='%s' and strategy='%s'" % (fromDBtree[0]['processtype'],fromDBtree[0]['processsubtype'],fromDBtree[0]['strategy'])).getresult()[0][0]
-            print "Copying the tree would result in duplicate processtype/processsubtype/strategy combination"
+            toTemplateID = toDB.query("select treeid from OTDBtree where name='%s'" % templateName).getresult()[0][0]
+            print "The destination database has already a default-template with the name: %s" % templateName
             sys.exit(1)
-          except IndexError, e:
+        except IndexError:
             pass
+        if fromDBtree[0]['processtype'] != '':
+            try:
+                toTemplateID = toDB.query("select treeid from OTDBtree where processtype='%s' and processsubtype='%s' and strategy='%s'" % (fromDBtree[0]['processtype'],fromDBtree[0]['processsubtype'],fromDBtree[0]['strategy'])).getresult()[0][0]
+                print "Copying the tree would result in duplicate processtype/processsubtype/strategy combination"
+                sys.exit(1)
+            except IndexError:
+                pass
         print "Safe to copy default template '%s' to the new database." % templateName
 
     # What's the version of this tree?
@@ -341,7 +378,7 @@ if __name__ == '__main__':
     copyOrMapComponents(version)
     # components are now in the new database for sure and the node and par ID's are in the map dicts.
 
-	# make sure the campaign exists also
+    # make sure the campaign exists also
     newCampaignID = checkCampaign(fromDBtree[0]['name'])
 
     # TODO: check user table (owner of tree must exist)
@@ -352,9 +389,9 @@ if __name__ == '__main__':
     # copy the trees metadata first
     copyTreeMetaData(treeID, newCampaignID, templateName)
 
-    if fromDBtree[0]['treetype'] == 20:	# template?
+    if fromDBtree[0]['treetype'] == 20:	 # template?
         copyTemplateTree(treeID)
-    else:	# type must be 30 (VIC tree)
+    else:  # type must be 30 (VIC tree)
         copyVICtree(treeID)
 
     copyStateHistory(treeID)
@@ -367,4 +404,3 @@ if __name__ == '__main__':
     toDB.close()
     fromDB.close()
     sys.exit(0)
-
diff --git a/SAS/OTDB/bin/makeDefaultTemplates.py b/SAS/OTDB/bin/makeDefaultTemplates.py
index d1eff2bdda531d5726c44de4e15304606be9f1a4..a597758cd09a60f1f4756cdcf7b3f4621b974af5 100755
--- a/SAS/OTDB/bin/makeDefaultTemplates.py
+++ b/SAS/OTDB/bin/makeDefaultTemplates.py
@@ -1,180 +1,220 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
-import os,sys,time,pg
+# coding: iso-8859-15
+import os
+import sys
+import time
+import pg
 from optparse import OptionParser
+import getpass
+
 
 #
 # addIndexedComponent(treeID, keyName, orgTreeID)
 #
-def addIndexedComponent(treeID, keyName, orgTreeID):
+def add_indexed_component(tree_id, key_name, org_tree_id):
     """
-    When parameter belongs to indexed node try to find parent unindexed component in the newtree
-    eg. keyName = ObsSW.Observation.Beam[5].angle1
+    When a parameter belongs to an indexed node, try to find the parent unindexed component in
+    the new tree, e.g. key_name = ObsSW.Observation.Beam[5].angle1
     """
-    parts = keyName.rsplit('.', 2)                              # [ObsSW.Observation , Beam[5], angle1 ]
+    parts = key_name.rsplit('.', 2)  # [ObsSW.Observation , Beam[5], angle1 ]
     if len(parts) == 3 and parts[1].endswith(']'):
-        nodeName = parts[0]+'.'+parts[1].rstrip('[]0123456789') # ObsSW.Observation.Beam
-        dupIndex = parts[1].rstrip(']').split('[')[1]           # 5
-        orgNodeID = otdb.query("select * from getVTitem(%s, '%s')" % (treeID, nodeName)).getresult()[0][0]
-        newNodeID = otdb.query("select * from dupVTnode(1, %s, %s, '%s')" % (treeID, orgNodeID, dupIndex))
+        node_name = parts[0]+'.'+parts[1].rstrip('[]0123456789')  # ObsSW.Observation.Beam
+        dup_index = parts[1].rstrip(']').split('[')[1]           # 5
+        org_node_id = otdb.query("select * from getVTitem(%s, '%s')" %
+                                 (tree_id, node_name)).getresult()[0][0]
+        new_node_id = otdb.query("select * from dupVTnode(1, %s, %s, '%s')" %
+                                 (tree_id, org_node_id, dup_index))
         print "   %s: %-75s added to the tree" % (treeID, parts[0]+'.'+parts[1])
         # copy nrInstances setting from base component from original tree
-        (instances, limits) = \
-              otdb.query("select instances,limits from getVTitem(%s, '%s')" % (orgTreeID, nodeName)).getresult()[0]
-        otdb.query("select * from updateVTnode(1, %s, %s, '%s', '%s')" % (treeID, orgNodeID, instances, limits))
-    return newNodeID
+        (instances, limits) = otdb.query("select instances,limits from getVTitem(%s, '%s')" %
+                                         (org_tree_id, node_name)).getresult()[0]
+        otdb.query("select * from updateVTnode(1, %s, %s, '%s', '%s')" %
+                   (tree_id, org_node_id, instances, limits))
+    return new_node_id
+
 
 #
 # removeElement(orgTree, newTree, key)
 #
-def removeElement(orgTmplID, newTmplID, key, always):
+def remove_element(org_tmpl_id, new_tmpl_id, key, always):
     """
-    Removes the given key from the new tree. If the remaining node is empty afterwards it is deleted also.
+    Removes the given key from the new tree. If the remaining node is empty afterwards, it is
+    also deleted.
     """
-    parentname = key.rsplit('.',1)[0]
-    oldparentid = otdb.query("select nodeid from getVTitem(%s, '%s')" % (orgTmplID, parentname)).getresult()[0][0]
-    if oldparentid == None:
-        # parent of parameter was removed from old template, safe to delete it in the new template too
-        nodeid = otdb.query("select nodeid from getVTitem(%s, '%s')" % (newTmplID, parentname)).getresult()[0][0]
-        if nodeid != None:
-            otdb.query ("select * from removeVTNode(1, %s, %s)" % (newTmplID, nodeid))
-            print "   %s: %-75s removed node deleted" % (newTmplID, parentname)
+    parentname = key.rsplit('.', 1)[0]
+    oldparentid = otdb.query("select nodeid from getVTitem(%s, '%s')" %
+                             (org_tmpl_id, parentname)).getresult()[0][0]
+    if oldparentid is None:
+        # parent of parameter was removed from old template, safe to delete it in the new template
+        # too
+        nodeid = otdb.query("select nodeid from getVTitem(%s, '%s')" %
+                            (new_tmpl_id, parentname)).getresult()[0][0]
+        if nodeid is not None:
+            otdb.query("select * from removeVTNode(1, %s, %s)" % (new_tmpl_id, nodeid))
+            print "   %s: %-75s removed node deleted" % (new_tmpl_id, parentname)
             # new parent may also be a 'dangling' node, try that.
-            removeElement(orgTmplID, newTmplID, parentname, False)
+            remove_element(org_tmpl_id, new_tmpl_id, parentname, False)
     else:
-        if not always: # coming from a recursive call?
+        if not always:  # coming from a recursive call?
             return
         # parent of parameter still exists in old template, remove parameter itself only
-        nodeid = otdb.query("select nodeid from getVTitem(%s, '%s')" % (newTmplID, key)).getresult()[0][0]
-        if nodeid != None:
+        nodeid = otdb.query("select nodeid from getVTitem(%s, '%s')" %
+                            (new_tmpl_id, key)).getresult()[0][0]
+        if nodeid is not None:
             # found item: delete it
-            otdb.query ("select * from removeVTleafNode(%s)" % nodeid)
-            print "   %s: %-75s parameter deleted" % (newTmplID, key)
-        
+            otdb.query("select * from removeVTleafNode(%s)" % nodeid)
+            print "   %s: %-75s parameter deleted" % (new_tmpl_id, key)
+
+
 #
 # createNewDefaultTemplate(orgTemplateID, newMasterTemplateID, orgTemplateInfo)
 #
-def createNewDefaultTemplate(orgTmplID, orgMasterTmplID, newMasterTmplID, orgTmplInfo):
+def create_new_default_template(org_tmpl_id, org_master_tmpl_id, new_master_tmpl_id, org_tmpl_info):
     """
     Create a new defaultTemplate based on the 'newMaster' information that has the changed values
     of the original default template.
     """
     # copy tree including description and template name
-    print "=> Reconstructing tree %s" % orgTmplID
-    newTmplID = otdb.query("select * from copyTree(1, %s)" % newMasterTmplID).getresult()[0][0]
-    print "   copy has ID: %s" % newTmplID
-    otdb.query("select * from setDescription(1, %s, '%s')" % (newTmplID, orgTmplInfo['description']))
-    otdb.query("select * from classify(1, %s, '%s')" % (newTmplID, orgTmplInfo['classification']))
+    print "=> Reconstructing tree %s" % org_tmpl_id
+    new_tmpl_id = otdb.query("select * from copyTree(1, %s)" % new_master_tmpl_id).getresult()[0][0]
+    print "   copy has ID: %s" % new_tmpl_id
+    otdb.query("select * from setDescription(1, %s, '%s')" %
+               (new_tmpl_id, org_tmpl_info['description']))
+    otdb.query("select * from classify(1, %s, '%s')" %
+               (new_tmpl_id, org_tmpl_info['classification']))
     # set the old default template state to obsolete (1200)
-    otdb.query("select * from settreestate(1, %s, '1200')" % (orgTmplID))
+    otdb.query("select * from settreestate(1, %s, '1200')" % (org_tmpl_id))
     # rename the old template with a '# ' before its original name
-    otdb.query("select * from assignTemplateName(1, %s, '#%-.31s')" % (orgTmplID, orgTmplInfo['treeName']))
-    otdb.query("select * from assignTemplateName(1, %s, '%s')" % (newTmplID, orgTmplInfo['treeName']))
-    otdb.query("select * from assignProcessType (1, %s, '#%-.19s', '#%-.49s', '#%-.29s')" % (orgTmplID, orgTmplInfo['processType'], orgTmplInfo['processSubtype'], orgTmplInfo['strategy']))
-    otdb.query("select * from assignProcessType (1, %s, '%s', '%s', '%s')" % (newTmplID, orgTmplInfo['processType'], orgTmplInfo['processSubtype'], orgTmplInfo['strategy']))
+    otdb.query("select * from assignTemplateName(1, %s, '#%-.31s')" %
+               (org_tmpl_id, org_tmpl_info['treeName']))
+    otdb.query("select * from assignTemplateName(1, %s, '%s')" %
+               (new_tmpl_id, org_tmpl_info['treeName']))
+    otdb.query("select * from assignProcessType (1, %s, '#%-.19s', '#%-.49s', '#%-.29s')" %
+               (org_tmpl_id, org_tmpl_info['processType'], org_tmpl_info['processSubtype'],
+                org_tmpl_info['strategy']))
+    otdb.query("select * from assignProcessType (1, %s, '%s', '%s', '%s')" %
+               (new_tmpl_id, org_tmpl_info['processType'], org_tmpl_info['processSubtype'],
+                org_tmpl_info['strategy']))
 
     # loop over all values that were changed in the old template
-    treeIdentification = "%s%d" % (orgTmplInfo['nodeName'], orgTmplInfo['version'])
-    for line in os.popen("comm -23 dfltTree%s MasterTree_%s" % (orgTmplID, treeIdentification)).read().splitlines():
-        (key, value) = line.split('=',1)
+    tree_identification = "%s%d" % (org_tmpl_info['nodeName'], org_tmpl_info['version'])
+    for line in os.popen("comm -23 dfltTree%s MasterTree_%s" %
+                         (org_tmpl_id, tree_identification)).read().splitlines():
+        (key, value) = line.split('=', 1)
         # search same item in the new template
-        # (nodeid, parentid, paramdefid, name, index, leaf, instances, limits, description) 
+        # (nodeid, parentid, paramdefid, name, index, leaf, instances, limits, description)
         (nodeid, instances, limits) = \
-              otdb.query("select nodeid,instances,limits from getVTitem(%s, '%s')" % (newTmplID, key)).getresult()[0]
+            otdb.query("select nodeid,instances,limits from getVTitem(%s, '%s')" %
+                       (new_tmpl_id, key)).getresult()[0]
 
         # if it doesn't exist, add it when it is a parameter from an indexed node
-        if nodeid == None:
+        if nodeid is None:
             try:
-                dummy = addIndexedComponent(newTmplID, key, orgTmplID)
+                add_indexed_component(new_tmpl_id, key, org_tmpl_id)
             except:
-                print "   %s: %-75s not in the new tree"  % (newTmplID, key)
+                print "   %s: %-75s not in the new tree" % (new_tmpl_id, key)
                 continue
             else:
                 # no exception: try again to get the parameter in the new template
                 (nodeid, instances, limits) = \
-                      otdb.query("select nodeid,instances,limits from getVTitem(%s, '%s')" % (newTmplID, key)).getresult()[0]
+                      otdb.query("select nodeid,instances,limits from getVTitem(%s, '%s')" %
+                                 (new_tmpl_id, key)).getresult()[0]
 
         # update value if needed
         if limits == value:
-            print "   %s: %-75s value is equal"  % (newTmplID, key)
+            print "   %s: %-75s value is equal" % (new_tmpl_id, key)
         else:
-	   (old_nodeid, old_comp_value) = otdb.query("select nodeid, limits from getVTitem(%s, '%s')" % (orgMasterTmplID, key)).getresult()[0]
-	   (new_nodeid, new_comp_value) = otdb.query("select nodeid, limits from getVTitem(%s, '%s')" % (newMasterTmplID, key)).getresult()[0]
-	   if old_comp_value == new_comp_value:
-	      # no change in definition, copy old (modified) value
-	      print "   %s: %-75s %s --> %s" % (newTmplID, key, limits, value)
-	      otdb.query("select * from updateVTnode(1, %s, %s, '%s', '%s')" % (newTmplID, nodeid, instances, value))
-	   else:
-	      # value in new component is different from value in old component: use new component value
-              print "   %s: %-75s %s --> %s" % (newTmplID, key, limits, new_comp_value)
-              otdb.query("select * from updateVTnode(1, %s, %s, '%s', '%s')" % (newTmplID, nodeid, instances, new_comp_value))
-
-	# get a list with the removed items
-	# -13 -> items uniq in Master --> removed in template OR different value
-	# -23 -> items uniq in template --> added to template OR different value
-	# comm -23 d1 d2 --> removed in template irt Mastertree.
-    command = """comm -13 dfltTree%s MasterTree_%s | cut -d'=' -f1 | sort >diff1 ; 
-                 comm -23 dfltTree%s MasterTree_%s | cut -d'=' -f1 | sort >diff2 ; 
+            (old_nodeid, old_comp_value) = \
+                otdb.query("select nodeid, limits from getVTitem(%s, '%s')" %
+                           (org_master_tmpl_id, key)).getresult()[0]
+            (new_nodeid, new_comp_value) = \
+                otdb.query("select nodeid, limits from getVTitem(%s, '%s')" %
+                           (new_master_tmpl_id, key)).getresult()[0]
+            if old_comp_value == new_comp_value:
+                # no change in definition, copy old (modified) value
+                print "   %s: %-75s %s --> %s" % (new_tmpl_id, key, limits, value)
+                otdb.query("select * from updateVTnode(1, %s, %s, '%s', '%s')" %
+                           (new_tmpl_id, nodeid, instances, value))
+            else:
+                # value in new component is different from value in old component:
+                # use new component value
+                print "   %s: %-75s %s --> %s" % (new_tmpl_id, key, limits, new_comp_value)
+                otdb.query("select * from updateVTnode(1, %s, %s, '%s', '%s')" %
+                           (new_tmpl_id, nodeid, instances, new_comp_value))
+
+    # get a list with the removed items
+    # -13 -> items unique in Master --> removed in template OR different value
+    # -23 -> items unique in template --> added to template OR different value
+    # comm -23 d1 d2 --> removed in template w.r.t. the master tree.
+    command = """comm -13 dfltTree%s MasterTree_%s | cut -d'=' -f1 | sort >diff1 ;
+                 comm -23 dfltTree%s MasterTree_%s | cut -d'=' -f1 | sort >diff2 ;
                  comm -23 diff1 diff2 ; rm diff1 diff2
-              """ % (orgTmplID, treeIdentification, orgTmplID, treeIdentification)
-    # loop over the list: when the NODE(=parent) of this parameter was removed in the ORIGINAL default template
-    # remove the NODE in the new template otherwise remove the parameter only
+              """ % (org_tmpl_id, tree_identification, org_tmpl_id, tree_identification)
+    # loop over the list: when the NODE (=parent) of this parameter was removed in the
+    # ORIGINAL default template, remove the NODE in the new template; otherwise remove
+    # the parameter only
     for key in os.popen(command).read().splitlines():
-        removeElement(orgTmplID, newTmplID, key, True)
+        remove_element(org_tmpl_id, new_tmpl_id, key, True)
 
-    # Almost ready... when adding Indexed components we might have added to many nodes, 
-    # that is: the use might have removed subtrees in the index componenttree.
-    # make an parset of the new created tree and delete the nodes(subtrees) that are obsolete
+    # Almost ready... when adding Indexed components we might have added too many nodes,
+    # that is: the user might have removed subtrees in the indexed component tree.
+    # make a parset of the newly created tree and delete the nodes (subtrees) that are obsolete
-    topNodeID = otdb.query("select nodeid from getTopNode(%s)" % newTmplID).getresult()[0][0]
-    createParsetFile(newTmplID, topNodeID, "newTree%s" % newTmplID)
-    command = """comm -13 newTree%s dfltTree%s | cut -d'=' -f1 | sort >diff1 ; 
-                 comm -23 newTree%s dfltTree%s | cut -d'=' -f1 | sort >diff2 ; 
+    top_node_id = otdb.query("select nodeid from getTopNode(%s)" % new_tmpl_id).getresult()[0][0]
+    create_parset_file(new_tmpl_id, top_node_id, "newTree%s" % new_tmpl_id)
+    command = """comm -13 newTree%s dfltTree%s | cut -d'=' -f1 | sort >diff1 ;
+                 comm -23 newTree%s dfltTree%s | cut -d'=' -f1 | sort >diff2 ;
                  comm -13 diff1 diff2 ; rm diff1 diff2
-              """ % (newTmplID, orgTmplID, newTmplID, orgTmplID)
+              """ % (new_tmpl_id, org_tmpl_id, new_tmpl_id, org_tmpl_id)
     # loop over the list of nodes that are in the newTree but not in the old tree.
     for key in os.popen(command).read().splitlines():
         print "Removing? ", key,
-        # if none indexed node exists in mastertree then it was removed by the user.
-        grepcmd = "grep `echo %s | sed 's/\[.*\]//g'` MasterTree_%s 1>/dev/null 2>/dev/null; echo $?" % (key, treeIdentification)
+        # if the non-indexed node exists in the master tree then it was removed by the user.
+        grepcmd = \
+            "grep `echo %s | sed 's/\[.*\]//g'` MasterTree_%s 1>/dev/null 2>/dev/null; echo $?" % \
+            (key, tree_identification)
         result = os.popen(grepcmd).read().splitlines()[0]
         if result == "0":
             print " Yes"
-            parentname = key.rsplit('.',1)[0]
-            nodeid = otdb.query("select nodeid from getVTitem(%s, '%s')" % (newTmplID, parentname)).getresult()[0][0]
-            if nodeid != None:
-                otdb.query ("select * from removeVTNode(1, %s, %s)" % (newTmplID, nodeid))
-                print "   %s: %-75s removed node deleted" % (newTmplID, parentname)
+            parentname = key.rsplit('.', 1)[0]
+            nodeid = otdb.query("select nodeid from getVTitem(%s, '%s')" %
+                                (new_tmpl_id, parentname)).getresult()[0][0]
+            if nodeid is not None:
+                otdb.query("select * from removeVTNode(1, %s, %s)" % (new_tmpl_id, nodeid))
+                print "   %s: %-75s removed node deleted" % (new_tmpl_id, parentname)
         else:
             print " No"
-	
-       
+
+
 #
-# createParsetFile(treeID, nodeID, fileName)
+# create_parset_file(tree_id, node_id, filename)
 #
-def createParsetFile(treeID, nodeID, fileName):
+def create_parset_file(tree_id, node_id, filename):
     """
-    Create a parset file with name fileName from tree treeID starting at nodeID.
+    Create a parset file with name filename from tree tree_id starting at node_id.
     """
-    parset = otdb.query("select * from exportTree(%s, %s, %s)" % (1, treeID, nodeID)).getresult()
-    print "   Creating parset %s" % fileName
-    file = open(fileName, 'w');
+    parset = otdb.query("select * from exportTree(%s, %s, %s)" % (1, tree_id, node_id)).getresult()
+    print "   Creating parset %s" % filename
+    file = open(filename, 'w')
     file.write(parset[0][0])
     file.close()
 
 
 #
-# makeMasterTemplateTreeAndParset(treeIdent, topNodeID) : templateID
+# make_master_template_tree_and_parset(tree_ident, top_node_id) : template_id
 #
-def makeMasterTemplateTreeAndParset(treeIdent, topNodeID):
+def make_master_template_tree_and_parset(tree_ident, top_node_id):
     """
     Create a template tree in OTDB and save its parset as a master template.
     """
-    templateID = otdb.query("select * from instanciateVTtree(1, %s, '4')" % topNodeID).getresult()[0][0]
-    otdb.query("select * from setDescription(1, %s, 'MasterTemplate %s')" % (templateID, treeIdent))
+    template_id = otdb.query("select * from instanciateVTtree(1, %s, '4')" %
+                             top_node_id).getresult()[0][0]
+    otdb.query("select * from setDescription(1, %s, 'MasterTemplate %s')" %
+               (template_id, tree_ident))
     # Create the corresponding parsetFile
-    nodeDefID = otdb.query("select * from getTopNode(%s)" % templateID).dictresult()[0]
-    createParsetFile(templateID, nodeDefID['nodeid'], "MasterTree_%s" % treeIdent)
-    return templateID
+    node_def_id = otdb.query("select * from getTopNode(%s)" % template_id).dictresult()[0]
+    create_parset_file(template_id, node_def_id['nodeid'], "MasterTree_%s" % tree_ident)
+    return template_id
+
 
 #
 # MAIN
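
Note on the indexed-key handling above: add_indexed_component() derives the unindexed node
name and the duplication index purely by string surgery on the parameter key. A standalone
sketch of that parsing, assuming the same key layout; split_indexed_key is an illustrative
helper, not something the script defines:

    def split_indexed_key(key):
        """Split a key such as 'ObsSW.Observation.Beam[5].angle1' into its parts.

        Returns (node_name, index, parameter) when the key contains an [n] index,
        or None otherwise, mirroring the rsplit/rstrip logic used above.
        """
        parts = key.rsplit('.', 2)            # ['ObsSW.Observation', 'Beam[5]', 'angle1']
        if len(parts) == 3 and parts[1].endswith(']'):
            node_name = parts[0] + '.' + parts[1].rstrip('[]0123456789')
            index = parts[1].rstrip(']').split('[')[1]
            return (node_name, index, parts[2])
        return None

    # split_indexed_key('ObsSW.Observation.Beam[5].angle1')
    # -> ('ObsSW.Observation.Beam', '5', 'angle1')
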
@@ -182,11 +222,11 @@ def makeMasterTemplateTreeAndParset(treeIdent, topNodeID):
 if __name__ == '__main__':
     """
     makeDefaultTemplates reconstructs ALL default templates in OTDB to match a new master-tree.
-    Each default templates is compared with the master tree it originates from and the difference are applied
-    to a copy of the new master tree.
+    Each default template is compared with the master tree it originates from and the differences
+    are applied to a copy of the new master tree.
     """
 
-    parser = OptionParser("Usage: %prog [options]" )
+    parser = OptionParser("Usage: %prog [options]")
     parser.add_option("-D", "--database",
                       dest="dbName",
                       type="string",
@@ -198,8 +238,18 @@ if __name__ == '__main__':
                       type="string",
                       default="sasdb",
                       help="Hostname of OTDB database")
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default="5432",
+                      help="Port of StationCoordinates database")
 
-    parser.add_option("-v","--version",
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+    parser.add_option("-v", "--version",
                       dest="newVersion",
                       type="int",
                       default=0,
@@ -220,50 +270,64 @@ if __name__ == '__main__':
         print
         parser.print_help()
         sys.exit(0)
-    
+
     dbName = options.dbName
     dbHost = options.dbHost
     newVersion = options.newVersion
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+
+    dbPassword = getpass.getpass()
 
     # calling stored procedures only works from the pg module for some reason.
-    otdb = pg.connect(user="postgres", host=dbHost, dbname=dbName)
+    otdb = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
 
     # Check if a component LOFAR of this version exists
-    versions = [v[0] for v in otdb.query("select version from getVCnodeList('LOFAR', 0, false)").getresult()]
+    versions = [v[0] for v in
+                otdb.query("select version from getVCnodeList('LOFAR', 0, false)").getresult()]
     versions.sort()
     if newVersion not in versions:
-        print "ERROR: There is no LOFAR component with version %s.\nAvailable versions: %s" % (newVersion, versions)
+        print "ERROR: There is no LOFAR component with version %s.\nAvailable versions: %s" % \
+            (newVersion, versions)
         sys.exit(1)
 
     # Give user escape possibility
-    print "About to create new default templates in database %s on host %s. Starting in 5 seconds..." % (dbName, dbHost)
+    print "About to create new default templates in database %s on host %s." % (dbName, dbHost)
+    print " Starting in 5 seconds..."
     time.sleep(5)
-    
+
     print "=> Collecting info about default templates..."
-    # built dictionary with componentID, nodeID, nodeName, version and treeName of the default templates like:
-    # {6171: (412, 2589, 'LOFAR', 40506, 'master template 4.5.6'), 
+    # build a dictionary with componentID, nodeID, nodeName, version and treeName of the
+    # default templates like:
+    # {6171: (412, 2589, 'LOFAR', 40506, 'master template 4.5.6'),
     #  6121: (203,  426, 'LOFAR', 40000, 'test template')}
     dfltTmplInfo = {}
     dfltTemplateIDs = otdb.query("select * from getDefaultTemplates()").dictresult()
     for dfltTemplate in dfltTemplateIDs:
-        state       = otdb.query("select state from getTreeInfo(%s, 'false')" % dfltTemplate['treeid']).getresult()[0][0]
-        if state != 1200 :
-            treeInfo  = otdb.query("select classification,description from getTreeInfo(%s, 'false')" % dfltTemplate['treeid']).getresult()[0]
-            nodeDefID = otdb.query("select * from getTopNode(%s)" % dfltTemplate['treeid']).dictresult()[0]
-            nodeInfo  = otdb.query("select * from getVICnodedef(%s)" % nodeDefID['paramdefid']).dictresult()
-            dfltTmplInfo[dfltTemplate['treeid']] = \
-                    {'componentID'    : nodeDefID['paramdefid'], \
-                     'nodeID'         : nodeDefID['nodeid'], \
-                     'nodeName'       : nodeDefID['name'], \
-                     'version'        : nodeInfo[0]['version'], \
-                     'treeName'       : dfltTemplate['name'], \
-                     'processType'    : dfltTemplate['processtype'], \
-                     'processSubtype' : dfltTemplate['processsubtype'], \
-                     'strategy'       : dfltTemplate['strategy'], \
-                     'classification' : treeInfo[0], \
-                     'description'    : treeInfo[1]}
+        state = otdb.query("select state from getTreeInfo(%s, 'false')" %
+                           dfltTemplate['treeid']).getresult()[0][0]
+        if state != 1200:
+            treeInfo = \
+                otdb.query("select classification,description from getTreeInfo(%s, 'false')" %
+                           dfltTemplate['treeid']).getresult()[0]
+            nodeDefID = otdb.query("select * from getTopNode(%s)" %
+                                   dfltTemplate['treeid']).dictresult()[0]
+            nodeInfo = otdb.query("select * from getVICnodedef(%s)" %
+                                  nodeDefID['paramdefid']).dictresult()
+            dfltTmplInfo[dfltTemplate['treeid']] = {
+                'componentID': nodeDefID['paramdefid'],
+                'nodeID': nodeDefID['nodeid'],
+                'nodeName': nodeDefID['name'],
+                'version': nodeInfo[0]['version'],
+                'treeName': dfltTemplate['name'],
+                'processType': dfltTemplate['processtype'],
+                'processSubtype': dfltTemplate['processsubtype'],
+                'strategy': dfltTemplate['strategy'],
+                'classification': treeInfo[0],
+                'description': treeInfo[1]}
             print "   DefaultTemplate %s starts at %s (version %d) : %s" % \
-                   (dfltTemplate['treeid'], nodeDefID['name'], nodeInfo[0]['version'], dfltTemplate['name'])
+                (dfltTemplate['treeid'], nodeDefID['name'], nodeInfo[0]['version'],
+                 dfltTemplate['name'])
 
     # Wrap all modifications in a transaction, to avoid leaving behind a broken database
     otdb.query("BEGIN")
@@ -271,15 +335,16 @@ if __name__ == '__main__':
     # make all obsolete default templates non-default
     print "=> Making all obsolete default templates non-default"
     for dfltTemplate in dfltTemplateIDs:
-        state       = otdb.query("select state from getTreeInfo(%s, 'false')" % dfltTemplate['treeid']).getresult()[0][0]
-        if state == 1200 :
+        state = otdb.query("select state from getTreeInfo(%s, 'false')" %
+                           dfltTemplate['treeid']).getresult()[0][0]
+        if state == 1200:
             print "    Moving obsolete DefaultTemplate ", dfltTemplate['treeid']
             otdb.query("select * from assignTemplateName(1, %s, NULL)" % (dfltTemplate['treeid'],))
 
-    # second step create temporarely parsetfiles from all DefaultTemplates
-    print "=> Creating temporarely parsetfiles from the DefaultTemplates..."
+    # second step: create temporary parset files from all DefaultTemplates
+    print "=> Creating temporary parset files from the DefaultTemplates..."
     for treeID in dfltTmplInfo:
-        createParsetFile(treeID, dfltTmplInfo[treeID]['nodeID'], "dfltTree%s" % treeID)
+        create_parset_file(treeID, dfltTmplInfo[treeID]['nodeID'], "dfltTree%s" % treeID)
 
     # create parsets from the masterTemplates (original template)
     # Note: Since multiple defaultTemplates can have the same Master template remember the
@@ -289,32 +354,35 @@ if __name__ == '__main__':
     oldMasterID = 0
     masterTmplInfo = {}
     for dfltTmpl in dfltTmplInfo.values():
-        treeIdentification = "%s%d" % (dfltTmpl['nodeName'], dfltTmpl['version'])
+        tree_identification = "%s%d" % (dfltTmpl['nodeName'], dfltTmpl['version'])
-        # if we didn't constructed it before do so now
+        # if we didn't construct it before, do so now
-        if not masterTmplInfo.has_key(treeIdentification):
-            masterTmplID = makeMasterTemplateTreeAndParset(treeIdentification, dfltTmpl['componentID'])
-            masterTmplInfo[treeIdentification] = masterTmplID
-            print "   Master template '%s' version %s = %s" % (dfltTmpl['nodeName'], dfltTmpl['version'], masterTmplID)
-	    oldMasterID = masterTmplID
+        if tree_identification not in masterTmplInfo:
+            masterTmplID = make_master_template_tree_and_parset(tree_identification,
+                                                                dfltTmpl['componentID'])
+            masterTmplInfo[tree_identification] = masterTmplID
+            print "   Master template '%s' version %s = %s" % \
+                (dfltTmpl['nodeName'], dfltTmpl['version'], masterTmplID)
+            oldMasterID = masterTmplID
             # when this master template is the destination master remember its ID
             if dfltTmpl['version'] == newVersion:
                 newMasterID = masterTmplID
 
     # did we create a template for the new tree-version already
     if newMasterID == 0:
-        topComponent = otdb.query("select nodeid from getVCnodelist('LOFAR', %d, false)" % newVersion).getresult()[0]
-        newMasterID  = makeMasterTemplateTreeAndParset("LOFAR%d" % newVersion, topComponent)
+        topComponent = otdb.query("select nodeid from getVCnodelist('LOFAR', %d, false)" %
+                                  newVersion).getresult()[0]
+        newMasterID = make_master_template_tree_and_parset("LOFAR%d" % newVersion, topComponent)
 
     if oldMasterID == 0:
-	print "  Could not find old master template ID. Stopping now"
-	otdb.close()
-	sys.exit(1)
+        print "  Could not find old master template ID. Stopping now"
+        otdb.close()
+        sys.exit(1)
 
     # for each old default template make a new template
     print "   TreeID of new master template = %s" % newMasterID
     print "=> Creating new default templates for version %d" % newVersion
     for treeID in dfltTmplInfo:
-        createNewDefaultTemplate(treeID, oldMasterID, newMasterID, dfltTmplInfo[treeID])
+        create_new_default_template(treeID, oldMasterID, newMasterID, dfltTmplInfo[treeID])
 
     # Write all changes to the database
     otdb.query("COMMIT")
diff --git a/SAS/OTDB/bin/repairTree.py b/SAS/OTDB/bin/repairTree.py
index 2aa4fa65dc8eb74be73506a8f8fc51cbe6922f99..7db355a7c9e4b474a773cb9c63dbd3e7a963e1e2 100755
--- a/SAS/OTDB/bin/repairTree.py
+++ b/SAS/OTDB/bin/repairTree.py
@@ -1,7 +1,9 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
-import os,sys,time,pg
-from database import *
+# coding: iso-8859-15
+import sys
+import pg
+from optparse import OptionParser
+import getpass
 
 
 #
@@ -11,58 +13,103 @@ if __name__ == '__main__':
     """
-    repairTree is a temporarely script that adds an 'identifications' field to a given tree
+    repairTree is a temporary script that adds an 'identifications' field to a given tree
     """
+    parser = OptionParser("Usage: %prog [options] MomID")
 
-    # check syntax of invocation
-    # Expected syntax: copyTree momID database
-    if (len(sys.argv) != 3):
-        print "Syntax: %s MoMID database" % sys.argv[0]
+    parser.add_option("-D", "--database",
+                      dest="dbName",
+                      type="string",
+                      default="LOFAR_4",
+                      help="Name of OTDB database to use")
+
+    parser.add_option("-H", "--host",
+                      dest="dbHost",
+                      type="string",
+                      default="sasdb.control.lofar",
+                      help="Hostname of OTDB database server")
+
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default="5432",
+                      help="Port of OTDB database server")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of OTDB database")
+
+    # parse arguments
+
+    (options, args) = parser.parse_args()
+
+    dbName = options.dbName
+    dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+
+    # print sys.argv
+    if len(args) != 1:
+        parser.print_help()
         sys.exit(1)
-    momID  = int(sys.argv[1])
-    DBname = sys.argv[2]
-    
+
+    momID = int(args[0])
+
+    dbPassword = getpass.getpass()
+
     # calling stored procedures only works from the pg module for some reason.
-    database = pg.connect(user="postgres", host="localhost", dbname=DBname)
-    print "Connected to database", DBname
+    database = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
+    print "Connected to database", dbName
 
-    # Check for tree-existance in both databases.
+    # Check that the tree exists in the database.
-    DBtree = database.query("select * from gettreelist(0::int2,3::int2,0,'','','') where momid=%d" % momID).dictresult()
+    DBtree = database.query("select * from gettreelist(0::int2,3::int2,0,'','','') where momid=%d" %
+                            momID).dictresult()
     if len(DBtree) == 0:
-        print "Tree with MoMId %d not found in database %s" % (momID, DBname)
+        print "Tree with MoMId %d not found in database %s" % (momID, dbName)
         sys.exit(1)
-    if DBtree[0]['type'] == 10:	# PIC tree?
+    if DBtree[0]['type'] == 10:  # PIC tree?
         print "PIC trees cannot be copied"
         sys.exit(1)
 
-    database.query("BEGIN");
+    database.query("BEGIN")
 
     # What's the version of this tree?
-    treeID    = DBtree[0]['treeid']
+    treeID = DBtree[0]['treeid']
     nodeDefID = database.query("select * from getTopNode(%d)" % treeID).dictresult()[0]
-    nodeInfo  = database.query("select * from getVICnodedef(%s)" % nodeDefID['paramdefid']).dictresult()[0]
-    version   = nodeInfo['version']
+    nodeInfo = database.query("select * from getVICnodedef(%s)" %
+                              nodeDefID['paramdefid']).dictresult()[0]
+    version = nodeInfo['version']
     print "Tree %d was built with components of version %d" % (treeID, version)
-    parentNodes = database.query("select * from VICnodedef where version=%d and name like 'Output_%%'" % version).dictresult()
+    parentNodes = database.query(
+        "select * from VICnodedef where version=%d and name like 'Output_%%'" %
+        version).dictresult()
     for node in parentNodes:
         print DBtree[0]['momid'], treeID, node['nodeid'], node['name'],
         paramid = 0
-        idnode = database.query("select * from vicparamdef where nodeid=%d and name='identifications'" % node['nodeid']).dictresult()
+        idnode = database.query(
+            "select * from vicparamdef where nodeid=%d and name='identifications'" %
+            node['nodeid']).dictresult()
         if len(idnode):
             paramid = idnode[0]['paramid']
             print "No need to insert the parameter, paramid=%d" % paramid
         else:
             print "Adding parameter to the component",
-            paramid = database.query("select * from savevicparamdef(1,%d,'identifications',212::int2,0::int2,10::int2,100::int2,true,'[]','identifications and topology of the output data products')" % node['nodeid']).getresult()[0]
-            print ", paramid=%d" % paramid;
+            paramid = database.query("select * from savevicparamdef(1,%d,'identifications',212::int2,0::int2,10::int2,100::int2,true,'[]','identifications and topology of the output data products')" %
+                                     node['nodeid']).getresult()[0]
+            print ", paramid=%d" % paramid
 
-        vicrecs = database.query("select * from vichierarchy where treeid=%d and paramrefid=%d" % (treeID, node['nodeid'])).dictresult()
+        vicrecs = database.query("select * from vichierarchy where treeid=%d and paramrefid=%d" %
+                                 (treeID, node['nodeid'])).dictresult()
         if len(vicrecs):
             print "parent node found in victree",
-            found = database.query("select * from vichierarchy where treeid=%d and parentid='%d' and name like '%%identifications'" % (treeID, vicrecs[0]['nodeid'])).dictresult()
+            found = database.query("select * from vichierarchy where treeid=%d and parentid='%d' and name like '%%identifications'" %
+                                   (treeID, vicrecs[0]['nodeid'])).dictresult()
             if len(found):
                 print ", parameter already added, id=%d" % found[0]['nodeid']
             else:
                 print ", parameter not in tree, adding it"
-                newid = database.query("insert into VIChierarchy(treeID, parentID, paramrefID, name, value) values (%d, %d, %d, '%s.identifications','[]')" % (treeID, vicrecs[0]['nodeid'], paramid, vicrecs[0]['name']))
+                newid = database.query("insert into VIChierarchy(treeID, parentID, paramrefID, name, value) values (%d, %d, %d, '%s.identifications','[]')" %
+                                       (treeID, vicrecs[0]['nodeid'], paramid, vicrecs[0]['name']))
         else:
             print "parent node NOT in victree, ready"
 
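
Note on the connection handling above: repairTree.py now follows the same pattern as the other
scripts in this change: parse -D/-H/-P/-U with OptionParser, prompt for the password with
getpass, and hand everything to pg.connect. A condensed sketch of that shared pattern;
connect_from_options is a hypothetical helper, not something these scripts define:

    import getpass
    import pg
    from optparse import OptionParser

    def connect_from_options():
        """Parse the common database options and open a PyGreSQL classic connection."""
        parser = OptionParser("Usage: %prog [options]")
        parser.add_option("-D", "--database", dest="dbName", default="LOFAR_4")
        parser.add_option("-H", "--host", dest="dbHost", default="sasdb")
        parser.add_option("-P", "--port", dest="dbPort", type="int", default=5432)
        parser.add_option("-U", "--user", dest="dbUser", default="postgres")
        (options, args) = parser.parse_args()
        password = getpass.getpass()  # prompted on the terminal, never echoed
        return pg.connect(user=options.dbUser, host=options.dbHost, dbname=options.dbName,
                          port=options.dbPort, passwd=password)
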
diff --git a/SAS/OTDB/bin/revertDefaultTemplates.py b/SAS/OTDB/bin/revertDefaultTemplates.py
index 52004f17a1d5ca5c3fafbff044a5ba4fd9d9816f..52b17610ca06c667d3321935e5e75f9d78459ffe 100755
--- a/SAS/OTDB/bin/revertDefaultTemplates.py
+++ b/SAS/OTDB/bin/revertDefaultTemplates.py
@@ -1,7 +1,11 @@
 #!/usr/bin/env python
-#coding: iso-8859-15
-import os,sys,time,pg
+# coding: iso-8859-15
+import sys
+import time
+import pg
 from optparse import OptionParser
+import getpass
+
 
 #
 # MAIN
@@ -10,11 +14,11 @@ if __name__ == '__main__':
     """
     revertDefaultTemplates reverts each default template in OTDB to a previous
     version, when the default template has a matching older one.
-    Two templates match when the templatename, the processType, the processSubtype and the Strategy values
-    only differ in a leading '#'
+    Two templates match when the template name, the processType, the processSubtype and the
+    Strategy values differ only by a leading '#'.
     """
 
-    parser = OptionParser("Usage: %prog [options]" )
+    parser = OptionParser("Usage: %prog [options]")
     parser.add_option("-D", "--database",
                       dest="dbName",
                       type="string",
@@ -27,6 +31,18 @@ if __name__ == '__main__':
                       default="sasdb",
                       help="Hostname of OTDB database")
 
+    parser.add_option("-P", "--port",
+                      dest="dbPort",
+                      type="int",
+                      default="5432",
+                      help="Port of StationCoordinates database")
+
+    parser.add_option("-U", "--user",
+                      dest="dbUser",
+                      type="string",
+                      default="postgres",
+                      help="Username of StationCoordinates database")
+
     # parse arguments
 
     (options, args) = parser.parse_args()
@@ -39,62 +55,74 @@ if __name__ == '__main__':
 
     dbName = options.dbName
     dbHost = options.dbHost
+    dbPort = options.dbPort
+    dbUser = options.dbUser
+
+    dbPassword = getpass.getpass()
 
     # calling stored procedures only works from the pg module for some reason.
-    otdb = pg.connect(user="postgres", host=dbHost, dbname=dbName)
+    otdb = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
 
     # Give user escape possibility
-    print "About to REVERT the default templates in database %s on host %s. Starting in 5 seconds..." % (dbName, dbHost)
+    print "About to REVERT the default templates in database %s on host %s." % (dbName, dbHost)
+    print "Starting in 5 seconds..."
     time.sleep(5)
 
     # Wrap all modifications in a transaction, to avoid leaving behind a broken database
     otdb.query("BEGIN")
-    
+
     print "=> Collecting info about default templates..."
-    # built dictionary with componentID, nodeID, nodeName, version and treeName of the default templates like:
-    # {6171: (412, 2589, 'LOFAR', 40506, 'master template 4.5.6'), 
+    # build a dictionary with componentID, nodeID, nodeName, version and treeName of the
+    # default templates like:
+    # {6171: (412, 2589, 'LOFAR', 40506, 'master template 4.5.6'),
     #  6121: (203,  426, 'LOFAR', 40000, 'test template')}
     oldTrees = {}
     newTrees = {}
     dfltTmplInfo = {}
     dfltTemplateIDs = otdb.query("select * from getDefaultTemplates()").dictresult()
     for dfltTemplate in dfltTemplateIDs:
-        state     = otdb.query("select state from getTreeInfo(%s, 'false')" % dfltTemplate['treeid']).getresult()[0][0]
+        state = otdb.query("select state from getTreeInfo(%s, 'false')" %
+                           dfltTemplate['treeid']).getresult()[0][0]
         if state == 1200:
-            oldTrees[dfltTemplate['name']] = {  \
-                     'processType'    : dfltTemplate['processtype'], \
-                     'processSubtype' : dfltTemplate['processsubtype'], \
-                     'strategy'       : dfltTemplate['strategy'], \
-                     'treeID'         : dfltTemplate['treeid'] }
+            oldTrees[dfltTemplate['name']] = {
+                     'processType': dfltTemplate['processtype'],
+                     'processSubtype': dfltTemplate['processsubtype'],
+                     'strategy': dfltTemplate['strategy'],
+                     'treeID': dfltTemplate['treeid']}
         else:
-            newTrees[dfltTemplate['name']] = {  \
-                     'processType'    : dfltTemplate['processtype'], \
-                     'processSubtype' : dfltTemplate['processsubtype'], \
-                     'strategy'       : dfltTemplate['strategy'], \
-                     'treeID'         : dfltTemplate['treeid']}
-       
+            newTrees[dfltTemplate['name']] = {
+                     'processType': dfltTemplate['processtype'],
+                     'processSubtype': dfltTemplate['processsubtype'],
+                     'strategy': dfltTemplate['strategy'],
+                     'treeID': dfltTemplate['treeid']}
+
-    # for each old default template make a new template
+    # for each new default template that has a matching old one, revert to the old one
     for treeName in newTrees:
         if '#'+treeName in oldTrees:
             oTreeName = '#'+treeName
-            if oldTrees[oTreeName]['processType']    == '#'+newTrees[treeName]['processType'] and \
-               oldTrees[oTreeName]['processSubtype'] == '#'+newTrees[treeName]['processSubtype'] and \
-               oldTrees[oTreeName]['strategy']       == '#'+newTrees[treeName]['strategy']:
-               print newTrees[treeName]['treeID'],": ",treeName, newTrees[treeName]['processSubtype'], " <==> ", \
-                     oldTrees[oTreeName]['treeID'],": ",oTreeName, oldTrees[oTreeName]['processSubtype']
+            if (oldTrees[oTreeName]['processType'] == '#'+newTrees[treeName]['processType'] and
+                oldTrees[oTreeName]['processSubtype'] ==
+                '#'+newTrees[treeName]['processSubtype'] and
+                oldTrees[oTreeName]['strategy'] == '#'+newTrees[treeName]['strategy']):
+                print newTrees[treeName]['treeID'], ": ", treeName, \
+                    newTrees[treeName]['processSubtype'], " <==> ", \
+                    oldTrees[oTreeName]['treeID'], ": ", oTreeName, \
+                    oldTrees[oTreeName]['processSubtype']
 
         # delete new tree
-        #print ("select * from deleteTree(1, %s)" % newTrees[treeName]['treeID'])
+        # print ("select * from deleteTree(1, %s)" % newTrees[treeName]['treeID'])
         otdb.query("select * from deleteTree(1, %s)" % newTrees[treeName]['treeID'])
-        # set the old default template state to described (1200)
+        # set the old default template state to described (100)
         oldTreeID = oldTrees[oTreeName]['treeID']
-        #print ("select * from settreestate(1, %s, '100')" % (oldTreeID))
+        # print ("select * from settreestate(1, %s, '100')" % (oldTreeID))
         otdb.query("select * from settreestate(1, %s, '100')" % (oldTreeID))
-        # rename the old template with a '# ' before its original name
+        # restore the original template name by stripping the leading '#'
-        #print ("select * from assignTemplateName(1, %s, '%s')" % (oldTreeID, oTreeName[1:]))
+        # print ("select * from assignTemplateName(1, %s, '%s')" % (oldTreeID, oTreeName[1:]))
         otdb.query("select * from assignTemplateName(1, %s, '%s')" % (oldTreeID, oTreeName[1:]))
-        #print ("select * from assignProcessType (1, %s, '%s', '%s', '%s')" % (oldTreeID, oldTrees[oTreeName]['processType'][1:], oldTrees[oTreeName]['processSubtype'][1:], oldTrees[oTreeName]['strategy'][1:]))
-        otdb.query("select * from assignProcessType (1, %s, '%s', '%s', '%s')" % (oldTreeID, oldTrees[oTreeName]['processType'][1:], oldTrees[oTreeName]['processSubtype'][1:], oldTrees[oTreeName]['strategy'][1:]))
+        # print ("select * from assignProcessType (1, %s, '%s', '%s', '%s')" % (oldTreeID, oldTrees[oTreeName]['processType'][1:], oldTrees[oTreeName]['processSubtype'][1:], oldTrees[oTreeName]['strategy'][1:]))
+        otdb.query("select * from assignProcessType (1, %s, '%s', '%s', '%s')" %
+                   (oldTreeID, oldTrees[oTreeName]['processType'][1:],
+                    oldTrees[oTreeName]['processSubtype'][1:], oldTrees[oTreeName]['strategy'][1:]))
 
     # Write all changes to the database
     otdb.query("COMMIT")
diff --git a/SAS/OTDB/src/OTDBconnection.cc b/SAS/OTDB/src/OTDBconnection.cc
index 7be8ec340074a8ef5758168cf7cf4badc0af954f..c36d79fb689ec548b28bcac13e5925081d0378bf 100644
--- a/SAS/OTDB/src/OTDBconnection.cc
+++ b/SAS/OTDB/src/OTDBconnection.cc
@@ -99,7 +99,8 @@ bool OTDBconnection::connect()
 	// is implemented in the SP's we will call.
 	string	connectString("host=" + itsHost + " port=" + itsPort +
 						  " dbname=" + itsDatabase +
-						  " user=postgres");
+						  " user=" + itsUser +
+                          " password=" + itsPassword);
 
 	// try to make the connection to the database
 	itsConnection = new connection(connectString);
@@ -111,8 +112,7 @@ bool OTDBconnection::connect()
 	uint32		authToken;
 	try {
 		work 	xAction(*itsConnection, "authenticate");
-		result  res = xAction.exec("SELECT OTDBlogin('" + itsUser +
-									"','" + itsPassword + "')");
+		result  res = xAction.exec("SELECT OTDBlogin('paulus','boskabouter')");
 		res[0][0].to(authToken);
 
 		if (authToken == 0) {
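
Note on the connection string above: OTDBconnection.cc assembles a libpq keyword/value string by
plain concatenation, which is fine as long as the user name and password contain no spaces or
quotes. A sketch of libpq-style value quoting, written in Python to match the rest of this
change; the conninfo helper is illustrative and not part of the C++ class:

    def conninfo(**params):
        """Build a libpq keyword/value connection string, quoting values as libpq expects."""
        parts = []
        for key, value in sorted(params.items()):
            value = str(value)
            if value == "" or any(c in value for c in " '\\"):
                # wrap in single quotes and escape backslashes and single quotes
                value = "'" + value.replace("\\", "\\\\").replace("'", "\\'") + "'"
            parts.append("%s=%s" % (key, value))
        return " ".join(parts)

    # conninfo(host="sasdb", port=5432, dbname="LOFAR_4", user="paulus", password="secret word")
    # -> "dbname=LOFAR_4 host=sasdb password='secret word' port=5432 user=paulus"
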