diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 717a5f86b3d62945959cb3cfa012c5e5816a13c8..189de0cb494d27bdbe0aba1d59b25a6c75bb78ca 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,64 +1,194 @@
 stages:
   - prepare
   - build
-  - dockerize
   - unit_test
+  - integration_test
+  - dockerize
   - deploy
-#  - integration_test
 
 #
 # PREPARE STAGE
 #
 
-prepare_RAServices:
+prepare_ci_sas_docker_image:
   stage: prepare
   script:
-    - docker build -t ci_raservices -f SubSystems/RAServices/Dockerfile .
+    - docker build -t ci_base -f  Docker/lofar-ci/Dockerfile_ci_base .
+    - docker build -t ci_sas -f  Docker/lofar-ci/Dockerfile_ci_sas .
+
+prepare_ci_lta_docker_image:
+  stage: prepare
+  script:
+    - docker build -t ci_base -f  Docker/lofar-ci/Dockerfile_ci_base .
+    - docker build -t ci_lta -f  Docker/lofar-ci/Dockerfile_ci_lta .
+
+#TODO: make proper MAC docker image with WinCC (rpm packages from mcu001)
+#prepare_ci_mac_docker_image:
+#  stage: prepare
+#  script:
+#    - docker build -t ci_base -f  Docker/lofar-ci/Dockerfile_ci_base .
+#    - docker build -t ci_mac -f  Docker/lofar-ci/Dockerfile_ci_mac .
 
 #
 # BUILD STAGE
 #
 
+build_TMSS:
+  stage: build
+  image: ci_sas:latest
+  script:
+    - PACKAGE=TMSS
+    - echo "Building $PACKAGE..."
+    - mkdir -p build/gnucxx11_opt
+    - cd build/gnucxx11_opt
+    - cmake -DBUILD_PACKAGES=$PACKAGE -DCASACORE_ROOT_DIR=/opt/casacore/ -DCASAREST_ROOT_DIR=/opt/casarest/ -DUSE_LOG4CPLUS=false ../..
+    - make -j 8
+    - make install
+  dependencies:
+    - prepare_ci_sas_docker_image
+  artifacts:
+    expire_in: 6 hours
+    paths:
+      - build/gnucxx11_opt
+
 build_RAServices:
   stage: build
-  image: ci_raservices:latest
+  image: ci_sas:latest
   script:
     - PACKAGE=RAServices
     - echo "Building $PACKAGE..."
-    - mkdir install
     - mkdir -p build/gnucxx11_opt
     - cd build/gnucxx11_opt
-    - cmake -DBUILD_PACKAGES=$PACKAGE -DCASACORE_ROOT_DIR=/opt/casacore/ -DCASAREST_ROOT_DIR=/opt/casarest/ -DCMAKE_INSTALL_PREFIX=/opt/lofar -DUSE_LOG4CPLUS=false ../..
-    - make -j 6
-    - make DESTDIR=../../install install
-    - cd ../../install/opt/lofar
-    - tar --ignore-failed-read --exclude=include -czf ../../RAServices_$CI_COMMIT_SHORT_SHA.ztar *
+    - cmake -DBUILD_PACKAGES=$PACKAGE -DCASACORE_ROOT_DIR=/opt/casacore/ -DCASAREST_ROOT_DIR=/opt/casarest/ -DUSE_LOG4CPLUS=false ../..
+    - make -j 8
+    - make install
   dependencies:
-    - prepare_RAServices
+    - prepare_ci_sas_docker_image
   artifacts:
     expire_in: 6 hours
     paths:
       - build/gnucxx11_opt
-      - install/*.ztar
 
-build_TMSS:
+build_LTAIngest:
   stage: build
-  image: ci_raservices:latest
+  image: ci_lta:latest
   script:
-    - PACKAGE=TMSS
+    - PACKAGE=LTAIngest
     - echo "Building $PACKAGE..."
     - mkdir -p build/gnucxx11_opt
     - cd build/gnucxx11_opt
-    - cmake -DBUILD_PACKAGES=$PACKAGE -DCASACORE_ROOT_DIR=/opt/casacore/ -DCASAREST_ROOT_DIR=/opt/casarest/ -DUSE_LOG4CPLUS=false ../..
-    - make -j 6
+    - cmake -DBUILD_PACKAGES=$PACKAGE -DUSE_LOG4CPLUS=false ../..
+    - make -j 8
     - make install
   dependencies:
-    - prepare_RAServices
+    - prepare_ci_lta_docker_image
   artifacts:
     expire_in: 6 hours
     paths:
       - build/gnucxx11_opt
 
+# TODO: enable when prepare_ci_mac_docker_image is fixed
+#build_MCU_MAC:
+#  stage: build
+#  image: ci_mac:latest
+#  script:
+#    - PACKAGE=MCU_MAC
+#    - echo "Building $PACKAGE..."
+#    - mkdir -p build/gnucxx11_opt
+#    - cd build/gnucxx11_opt
+#    - cmake -DBUILD_PACKAGES=$PACKAGE -DUSE_LOG4CPLUS=false ../..
+#    - make -j 8
+#    - make install
+#  dependencies:
+#    - prepare_ci_mac_docker_image
+#  artifacts:
+#    expire_in: 6 hours
+#    paths:
+#      - build/gnucxx11_opt
+
+#
+# UNIT TEST STAGE
+#
+
+unit_test_TMSS:
+  stage: unit_test
+  image: ci_sas:latest
+  script:
+    - PACKAGE=TMSS
+    - echo "Testing $PACKAGE..."
+    - cd build/gnucxx11_opt
+    - SKIP_INTEGRATION_TESTS=true ctest
+  dependencies:
+    - build_TMSS
+  artifacts:
+    name: unit-test-report
+    when: always
+    paths:
+      - build/gnucxx11_opt/Testing/Temporary/LastTest.log
+
+unit_test_RAServices:
+  stage: unit_test
+  image: ci_sas:latest
+  script:
+    - PACKAGE=RAServices
+    - echo "Testing $PACKAGE..."
+    - cd build/gnucxx11_opt
+    - SKIP_INTEGRATION_TESTS=true ctest
+  services:
+    - rabbitmq:latest
+  variables:
+    RABBITMQ_DEFAULT_USER: guest
+    RABBITMQ_DEFAULT_PASS: guest
+    LOFAR_DEFAULT_BROKER: 'rabbitmq' # override default 'localhost' which does not work for CI service rabbitmq.
+  dependencies:
+    - build_RAServices
+  artifacts:
+    name: unit-test-report
+    when: always
+    paths:
+      - build/gnucxx11_opt/Testing/Temporary/LastTest.log
+
+unit_test_LTAIngest:
+  stage: unit_test
+  image: ci_lta:latest
+  script:
+    - PACKAGE=LTAIngest
+    - echo "Testing $PACKAGE..."
+    - cd build/gnucxx11_opt
+    - SKIP_INTEGRATION_TESTS=true ctest
+  services:
+    - rabbitmq:latest
+  variables:
+    RABBITMQ_DEFAULT_USER: guest
+    RABBITMQ_DEFAULT_PASS: guest
+    LOFAR_DEFAULT_BROKER: 'rabbitmq' # override default 'localhost' which does not work for CI service rabbitmq.
+  dependencies:
+    - build_LTAIngest
+  artifacts:
+    name: unit-test-report
+    when: always
+    paths:
+      - build/gnucxx11_opt/Testing/Temporary/LastTest.log
+
+# TODO: enable when build_MCU_MAC is fixed
+#unit_test_MCU_MAC:
+#  stage: unit_test
+#  image: ci_mac:latest
+#  script:
+#    - PACKAGE=MCU_MAC
+#    - echo "Testing $PACKAGE..."
+#    - cd build/gnucxx11_opt
+#    - SKIP_INTEGRATION_TESTS=true ctest
+#  dependencies:
+#    - build_MCU_MAC
+#  artifacts:
+#    name: unit-test-report
+#    when: always
+#    paths:
+#      - build/gnucxx11_opt/Testing/Temporary/LastTest.log
+
+
+
 #
 # DOCKERIZE
 #
@@ -80,51 +210,85 @@ dockerize_TMSS:
     - docker logout $CI_NEXUS_REGISTRY  
   dependencies:
     - build_TMSS
+    - unit_test_TMSS
+    - integration_test_TMSS
+
 
 #
-# UNIT TEST STAGE
+# INTEGRATION TEST STAGE
 #
 
-unit_test_RAServices:
-  stage: unit_test
-  image: ci_raservices:latest
+integration_test_TMSS:
+  stage: integration_test
+  image: ci_sas:latest
+  script:
+    - PACKAGE=TMSS
+    - echo "Integration Testing $PACKAGE..."
+    - cd build/gnucxx11_opt
+    - SKIP_INTEGRATION_TESTS=false SKIP_UNIT_TESTS=true ctest
+  services:
+    - rabbitmq:latest
+  variables:
+    RABBITMQ_DEFAULT_USER: guest
+    RABBITMQ_DEFAULT_PASS: guest
+    LOFAR_DEFAULT_BROKER: 'rabbitmq' # override default 'localhost' which does not work for CI service rabbitmq.
+  dependencies:
+    - build_TMSS
+  artifacts:
+    name: integration-test-report
+    when: always
+    paths:
+      - build/gnucxx11_opt/Testing/Temporary/LastTest.log
+
+integration_test_RAServices:
+  stage: integration_test
+  image: ci_sas:latest
+  services:
+    - rabbitmq:latest
+  variables:
+    RABBITMQ_DEFAULT_USER: guest
+    RABBITMQ_DEFAULT_PASS: guest
+    LOFAR_DEFAULT_BROKER: 'rabbitmq' # override default 'localhost' which does not work for CI service rabbitmq.
   script:
     - PACKAGE=RAServices
-    - echo "Testing $PACKAGE..."
+    - echo "Integration Testing $PACKAGE..."
     - cd build/gnucxx11_opt
-    - SKIP_INTEGRATION_TESTS=true ctest
+    - SKIP_INTEGRATION_TESTS=false SKIP_UNIT_TESTS=true ctest
   dependencies:
     - build_RAServices
   artifacts:
-    name: unit-test-report
+    name: integration-test-report
     when: always
     paths:
       - build/gnucxx11_opt/Testing/Temporary/LastTest.log
 
+integration_test_LTAIngest:
+  stage: integration_test
+  image: ci_lta:latest
+  script:
+    - PACKAGE=LTAIngest
+    - echo "Integration Testing $PACKAGE..."
+    - cd build/gnucxx11_opt
+    - SKIP_INTEGRATION_TESTS=false SKIP_UNIT_TESTS=true ctest
+  services:
+    - rabbitmq:latest
+  variables:
+    RABBITMQ_DEFAULT_USER: guest
+    RABBITMQ_DEFAULT_PASS: guest
+    LOFAR_DEFAULT_BROKER: 'rabbitmq' # override default 'localhost' which does not work for CI service rabbitmq.
+  dependencies:
+    - build_LTAIngest
+  artifacts:
+    name: integration-test-report
+    when: always
+    paths:
+      - build/gnucxx11_opt/Testing/Temporary/LastTest.log
+
+
 #
-# INTEGRATION TEST STAGE
+# DEPLOY STAGE
 #
 
-# integration_test_RAServices:
-#   stage: integration_test
-#   image: ci_raservices:latest
-#   services:
-#     - rabbitmq:latest
-#   variables:
-#     LOFAR_DEFAULT_BROKER: "rabbitmq"
-#   script:
-#     - PACKAGE=RAServices
-#     - echo "Testing $PACKAGE..."
-#     - cd build/gnucxx11_opt
-#     - SKIP_UNIT_TESTS=true ctest
-#   dependencies:
-#     - build_RAServices
-#   artifacts:
-#     name: integration-test-report
-#     when: always
-#     paths:
-#       - build/gnucxx11_opt/Testing/Temporary/LastTest.log
-
 deploy-tmss-test:
   stage: deploy
   before_script:
@@ -145,7 +309,7 @@ deploy-tmss-test:
     - ssh lofarsys@scu199.control.lofar "docker tag ${CI_NEXUS_REGISTRY}/tmss_django:$CI_COMMIT_SHORT_SHA ${CI_NEXUS_REGISTRY}/tmss_django:latest"
     - ssh lofarsys@scu199.control.lofar "docker-compose -f docker-compose-scu199.yml up -d"
   dependencies:
-    - build_TMSS
+    - integration_test_TMSS
   when: manual
 
 deploy-tmss-ua:
@@ -168,7 +332,7 @@ deploy-tmss-ua:
     - ssh lofarsys@tmss-ua.control.lofar "docker tag ${CI_NEXUS_REGISTRY}/tmss_django:$CI_COMMIT_SHORT_SHA ${CI_NEXUS_REGISTRY}/tmss_django:latest"
     - ssh lofarsys@tmss-ua.control.lofar "docker-compose -f docker-compose-ua.yml up -d"
   dependencies:
-    - build_TMSS
+    - integration_test_TMSS
   when: manual
   only:
     - "master"
diff --git a/CMake/NPMInstall.cmake b/CMake/NPMInstall.cmake
index 7c28230f0d6e4fcba76ebed2904a150d9b88273b..de0738bf0a18d5948715757c3beb2f031b7dc172 100644
--- a/CMake/NPMInstall.cmake
+++ b/CMake/NPMInstall.cmake
@@ -177,7 +177,7 @@ function(npm_install NPM_PACKAGE_SPECIFICATION)
 
     add_custom_command(
     TARGET packing_javascript_files_${PACKAGE_NAME}
-    COMMAND npm run build
+    COMMAND CI=false npm run build
     DEPENDS "${INSTALLED_SOURCE_FILES}" "${INSTALLED_PUBLIC_FILES}"
     WORKING_DIRECTORY "${NPM_BINARY_DIR}"
     COMMENT "Packing javascript files for ${PACKAGE_NAME} into ${NPM_BINARY_DIR}/build for deployment")
diff --git a/Docker/lofar-ci/Dockerfile_ci_base b/Docker/lofar-ci/Dockerfile_ci_base
index 90c2c4e624ad7b29e974f3c53ca2b54c98f99a96..132ff1719a894e2b3d1068dce3f064b1df146610 100644
--- a/Docker/lofar-ci/Dockerfile_ci_base
+++ b/Docker/lofar-ci/Dockerfile_ci_base
@@ -3,10 +3,13 @@
 #
 # base
 #
-FROM centos:7
+FROM centos:centos7.6.1810 
 
 RUN yum -y groupinstall 'Development Tools' && \
     yum -y install epel-release && \
-    yum -y install cmake log4cplus-devel python3 python3-devel python3-pip
+    yum -y install cmake gcc git log4cplus-devel python3 python3-devel python3-pip which wget curl atop
 
+RUN pip3 install kombu requests coverage python-qpid-proton
+
+RUN adduser lofarsys
 
diff --git a/Docker/lofar-ci/Dockerfile_ci_lta b/Docker/lofar-ci/Dockerfile_ci_lta
index 1efc46400bf6d0a6c722c088bda7a8a687353047..e51f33db6cd031e8a37087df8b4c2eebf3a17f9e 100644
--- a/Docker/lofar-ci/Dockerfile_ci_lta
+++ b/Docker/lofar-ci/Dockerfile_ci_lta
@@ -5,6 +5,16 @@
 #
 FROM ci_base:latest
 
-RUN echo "Installing packages for LTA..." && \
-    yum -y install postgresql-devel && \
-    pip3 install kombu requests pysimplesoap mysql-connector psycopg2 flask
+RUN echo "Installing packages for LTA..."
+
+# see https://www.postgresql.org/download/linux/redhat/ on how to install postgresql-server > 9.2 on centos7
+RUN yum erase -y postgresql postgresql-server postgresql-devel && \
+    yum install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-7-x86_64/pgdg-redhat-repo-latest.noarch.rpm  && \
+    yum install -y postgresql96 postgresql96-server postgresql96-devel && \
+    cd /bin && ln -s /usr/pgsql-9.6/bin/initdb && ln -s /usr/pgsql-9.6/bin/postgres
+ENV PATH /usr/pgsql-9.6/bin:$PATH
+
+RUN pip3 install kombu requests pysimplesoap mysql-connector flask lxml jsonschema psycopg2 testing.postgresql
+
+RUN adduser ingest
+USER ingest
\ No newline at end of file
diff --git a/Docker/lofar-ci/Dockerfile_ci_mac b/Docker/lofar-ci/Dockerfile_ci_mac
index 697fbbdc32aabf8264f8cd76cbcf09eda5a9d522..1c9338822a3de0049213d2d3189bde09bc8ddf11 100644
--- a/Docker/lofar-ci/Dockerfile_ci_mac
+++ b/Docker/lofar-ci/Dockerfile_ci_mac
@@ -5,7 +5,7 @@
 #
 FROM ci_base:latest
 
-RUN echo "Installing packages for LCU..." && \
+RUN echo "Installing packages for MAC..." && \
     yum -y install readline-devel boost-python36-devel hdf5-devel blas-devel lapack-devel cfitsio-devel wcslib-devel autogen postgresql-devel cmake3 libpqxx-devel qpid-cpp-server qpid-cpp-client-devel qpid-tools unittest-cpp-devel && \
     pip3 install psycopg2 testing.postgresql lxml mock numpy kombu requests python-dateutil fabric
 
@@ -14,14 +14,14 @@ RUN echo "Installing Casacore..." && \
     mkdir /casacore/build/ && \
     cd /casacore/build/ && \
     cmake -DCMAKE_INSTALL_PREFIX=/opt/casacore -DBUILD_PYTHON3=ON -DBUILD_PYTHON=OFF -DPYTHON_EXECUTABLE=/usr/bin/python3 -DUSE_OPENMP=ON -DUSE_FFTW3=TRUE -DUSE_HDF5=ON -DCMAKE_BUILD_TYPE=Release .. && \
-    make && \
+    make -j 8 && \
     make install
 
 RUN echo "Installing Blitz++" && \
-    cd /
+    cd / && \
     git clone --depth 1 https://github.com/blitzpp/blitz.git && \
     mkdir -p /blitz/build && \
     cd /blitz/build && \
-    cmake3 --prefix=/opt/blitz/ .. && \
-    make lib && \
+    cmake --prefix=/opt/blitz/ .. && \
+    make -j 8 lib && \
     make install
\ No newline at end of file
diff --git a/Docker/lofar-ci/Dockerfile_ci_sas b/Docker/lofar-ci/Dockerfile_ci_sas
index f4b70027c4b5d250480d75a7bb69d60a19850f76..35632ec04f065843eec71d62ea63853b5c4d85f1 100644
--- a/Docker/lofar-ci/Dockerfile_ci_sas
+++ b/Docker/lofar-ci/Dockerfile_ci_sas
@@ -6,5 +6,20 @@
 FROM ci_base:latest
 
 RUN echo "Installing packages for SAS..." && \
-    yum -y install postgresql-devel openldap-devel readline-devel qpid-cpp-server qpid-cpp-client-devel qpid-tools libpqxx-devel java-devel qt-devel autogen boost-python36-devel && \
-    pip3 install kombu psycopg2 requests lxml xmljson pygcn python-dateutil django djangorestframework djangorestframework-xml django-auth-ldap mysql-connector testing.mysqld testing.postgresql
+    yum install -y log4cplus log4cplus-devel python3 python3-libs python3-devel boost readline-devel boost-devel binutils-devel boost-python36 boost-python36-devel gettext which openldap-devel npm nodejs git java-11-openjdk python-twisted-core
+    
+# see https://www.postgresql.org/download/linux/redhat/ on how to install postgresql-server > 9.2 on centos7 
+RUN yum erase -y postgresql postgresql-server postgresql-devel && \
+    yum install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-7-x86_64/pgdg-redhat-repo-latest.noarch.rpm  && \
+    yum install -y postgresql96 postgresql96-server postgresql96-devel  && \
+    cd /bin && ln -s /usr/pgsql-9.6/bin/initdb && ln -s /usr/pgsql-9.6/bin/postgres
+ENV PATH /usr/pgsql-9.6/bin:$PATH 
+
+RUN pip3 install cython kombu lxml requests pygcn xmljson mysql-connector-python python-dateutil django djangorestframework djangorestframework-xml ldap==1.0.2 flask fabric coverage python-qpid-proton PyGreSQL numpy h5py psycopg2 testing.postgresql Flask-Testing scipy Markdown django-filter python-ldap python-ldap-test ldap3 django-jsonforms django-json-widget django-jsoneditor drf-yasg flex swagger-spec-validator django-auth-ldap mozilla-django-oidc jsonschema comet
+
+RUN npm install -g npx && \
+    npm install -g n && \
+    n stable && \
+    npm install -g serve
+
+USER lofarsys
\ No newline at end of file
diff --git a/LCS/Messaging/python/messaging/config.py b/LCS/Messaging/python/messaging/config.py
index 46bac97344646b23ff3ab117095c28de21cff396..417a0ea4c5063a932be2535b7c2eac6e405177a0 100644
--- a/LCS/Messaging/python/messaging/config.py
+++ b/LCS/Messaging/python/messaging/config.py
@@ -19,8 +19,8 @@ DEFAULT_BROKER = "scu001.control.lofar" if isProductionEnvironment() else \
 if 'LOFAR_DEFAULT_BROKER' in os.environ.keys():
     DEFAULT_BROKER = os.environ.get('LOFAR_DEFAULT_BROKER')
 
-DEFAULT_USER = "guest"
-DEFAULT_PASSWORD = "guest"
+DEFAULT_USER = os.environ.get('RABBITMQ_DEFAULT_USER', 'guest')
+DEFAULT_PASSWORD = os.environ.get('RABBITMQ_DEFAULT_PASS', 'guest')
 
 if isProductionEnvironment() or isTestEnvironment():
     # import the user and password from RabbitMQ 'db'credentials
@@ -35,12 +35,15 @@ if isProductionEnvironment() or isTestEnvironment():
 
 # dynamically determine port where RabbitMQ server runs by trying to connect
 DEFAULT_PORT = -1
+
+def broker_url(hostname: str=DEFAULT_BROKER, port: int=DEFAULT_PORT, userid: str=DEFAULT_USER, password :str=DEFAULT_PASSWORD) -> str:
+    return 'amqp://%s:%s@%s:%d//' % (userid, password, hostname, port)
+
 for port in [5672, 5675]:
     try:
         logger.debug("trying to connect to broker: hostname=%s port=%s userid=%s password=***",
                      DEFAULT_BROKER, port, DEFAULT_USER)
-        with kombu.Connection(hostname=DEFAULT_BROKER, port=port, userid=DEFAULT_USER, password=DEFAULT_PASSWORD,
-                              max_retries=0, connect_timeout=1) as connection:
+        with kombu.Connection(broker_url(port=port), max_retries=0, connect_timeout=1, ) as connection:
             connection.connect()
             DEFAULT_PORT = port
             logger.info("detected rabbitmq broker to which we can connect with hostname=%s port=%s userid=%s password=***",
diff --git a/LCS/Messaging/python/messaging/messagebus.py b/LCS/Messaging/python/messaging/messagebus.py
index 143006659b46b4874e9fc1bf90f039a2e571e4b6..de4479ed8eb9a730cf1f652c4b127aee26d69625 100644
--- a/LCS/Messaging/python/messaging/messagebus.py
+++ b/LCS/Messaging/python/messaging/messagebus.py
@@ -204,7 +204,7 @@ logger = logging.getLogger(__name__)
 from lofar.messaging.exceptions import *
 from lofar.messaging import adaptNameToEnvironment
 from lofar.messaging.messages import *
-from lofar.messaging.config import DEFAULT_BROKER, DEFAULT_BUSNAME, DEFAULT_PORT, DEFAULT_USER, DEFAULT_PASSWORD
+from lofar.messaging.config import DEFAULT_BROKER, DEFAULT_BUSNAME, DEFAULT_PORT, DEFAULT_USER, DEFAULT_PASSWORD, broker_url
 from lofar.common.threading_utils import TimeoutLock
 from lofar.common.util import program_name
 from lofar.common.util import is_empty_function
@@ -222,7 +222,7 @@ def can_connect_to_broker(broker: str=DEFAULT_BROKER, port: int=DEFAULT_PORT) ->
     try:
         logger.debug("trying to connect to broker: hostname=%s port=%s userid=%s password=***",
                      broker, port, DEFAULT_USER)
-        with kombu.Connection(hostname=broker, port=port, userid=DEFAULT_USER, password=DEFAULT_PASSWORD,
+        with kombu.Connection(broker_url(hostname=broker, port=port, userid=DEFAULT_USER, password=DEFAULT_PASSWORD),
                               max_retries=0, connect_timeout=1) as connection:
             connection.connect()
             logger.debug("can connect to broker with hostname=%s port=%s userid=%s password=***",
@@ -244,7 +244,7 @@ def create_exchange(name: str, durable: bool=True, broker: str=DEFAULT_BROKER, l
     :return True if created, False if not-created (because it already exists)
     """
     try:
-        with kombu.Connection(hostname=broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD) as connection:
+        with kombu.Connection(broker_url(hostname=broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD)) as connection:
             exchange = kombu.Exchange(name, durable=durable, type='topic')
             try:
                 exchange.declare(channel=connection.default_channel, passive=True)
@@ -266,7 +266,7 @@ def delete_exchange(name: str, broker: str=DEFAULT_BROKER, log_level=logging.DEB
     :return True if deleted, False if not-deleted (because it does not exist)
     """
     try:
-        with kombu.Connection(hostname=broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD) as connection:
+        with kombu.Connection(broker_url(hostname=broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD)) as connection:
             exchange = kombu.Exchange(name, channel=connection)
             try:
                 exchange.declare(channel=connection.default_channel, passive=True)
@@ -286,7 +286,7 @@ def exchange_exists(name: str, broker: str=DEFAULT_BROKER) -> bool:
     :return True if it exists, False if not.
     """
     try:
-        with kombu.Connection(hostname=broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD) as connection:
+        with kombu.Connection(broker_url(hostname=broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD)) as connection:
             exchange = kombu.Exchange(name, channel=connection)
             try:
                 exchange.declare(channel=connection.default_channel, passive=True)
@@ -309,7 +309,7 @@ def create_queue(name: str, durable: bool=True, broker: str=DEFAULT_BROKER, log_
     :return True if created, False if not-created (because it already exists)
     """
     try:
-        with kombu.Connection(hostname=broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD) as connection:
+        with kombu.Connection(broker_url(hostname=broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD)) as connection:
             queue = kombu.Queue(name,
                                 durable=durable,
                                 auto_delete=auto_delete,
@@ -335,7 +335,7 @@ def delete_queue(name: str, broker: str=DEFAULT_BROKER, log_level=logging.DEBUG)
     :return True if deleted, False if not-deleted (because it does not exist)
     """
     try:
-        with kombu.Connection(hostname=broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD) as connection:
+        with kombu.Connection(broker_url(hostname=broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD)) as connection:
             queue = kombu.Queue(name, no_declare=True, channel=connection)
             try:
                 queue.queue_declare(channel=connection.default_channel, passive=True)
@@ -355,7 +355,7 @@ def queue_exists(name: str, broker: str=DEFAULT_BROKER) -> bool:
     :return True if it exists, False if not.
     """
     try:
-        with kombu.Connection(hostname=broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD) as connection:
+        with kombu.Connection(broker_url(hostname=broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD)) as connection:
             queue = kombu.Queue(name, no_declare=True, channel=connection)
             try:
                 queue.queue_declare(channel=connection.default_channel, passive=True)
@@ -389,7 +389,7 @@ def create_binding(exchange: str, queue: str, routing_key: str='#', durable: boo
     :param log_level: optional logging level (to add/reduce spamming)
     """
     try:
-        with kombu.Connection(hostname=broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD) as connection:
+        with kombu.Connection(broker_url(hostname=broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD)) as connection:
             kombu_exchange = kombu.Exchange(exchange, durable=durable, type='topic', no_declare=True)
             kombu_queue = kombu.Queue(queue, exchange=kombu_exchange, routing_key=routing_key, durable=durable, no_declare=True)
             if not kombu_queue.is_bound:
@@ -483,7 +483,7 @@ class _AbstractBus:
                     return
 
                 logger.debug("[%s] Connecting to broker: %s", self.__class__.__name__, self.broker)
-                self._connection = kombu.Connection(hostname=self.broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD)
+                self._connection = kombu.Connection(broker_url(hostname=self.broker, port=DEFAULT_PORT, userid=DEFAULT_USER, password=DEFAULT_PASSWORD))
                 self._connection.connect()
                 logger.debug("[%s] Connected to broker: %s (%s)", self.__class__.__name__, self.broker, self.connection_name)
 
diff --git a/LCS/Messaging/python/messaging/test/t_messagebus.py b/LCS/Messaging/python/messaging/test/t_messagebus.py
index e255e9d8844334478ca03ddb45d5823e5acc09d0..bd9b6697ac813911e9d13017ea89616fbaee463c 100644
--- a/LCS/Messaging/python/messaging/test/t_messagebus.py
+++ b/LCS/Messaging/python/messaging/test/t_messagebus.py
@@ -374,7 +374,7 @@ class FromBusInitFailed(unittest.TestCase):
         Connecting to broker on wrong port must raise MessageBusError
         """
         with self.assertRaisesRegex(MessageBusError, ".*failed to resolve broker hostname"):
-            with FromBus("fake" + self.test_queue.address, broker="localhost:4"):
+            with FromBus("fake" + self.test_queue.address, broker="fdjsafhdjlahflaieoruieow"):
                 pass
 
 
@@ -445,7 +445,7 @@ class ToBusInitFailed(unittest.TestCase):
         Connecting to broker on wrong port must raise MessageBusError
         """
         with self.assertRaisesRegex(MessageBusError, ".*failed to resolve broker hostname"):
-            with ToBus(self.test_exchange.address, broker="localhost:4"):
+            with ToBus(self.test_exchange.address, broker="fhjlahfowuefohwaueif"):
                 pass
 
 
@@ -886,8 +886,17 @@ class ReconnectOnConnectionLossTests(unittest.TestCase):
         self.tmp_exchange.close()
         self.assertFalse(exchange_exists(tmp_exchange_address))
 
+    def _can_connect_to_rabbitmq_admin_site(self, hostname: str):
+        try:
+            url = 'http://%s:15672/api' % (hostname,)
+            return requests.get(url, auth=(DEFAULT_USER, DEFAULT_PASSWORD)).status_code in [200, 202]
+        except requests.ConnectionError:
+            return False
 
     def _close_connection_of_bus_on_broker(self, bus: _AbstractBus):
+        if not self._can_connect_to_rabbitmq_admin_site(bus.broker):
+            raise unittest.SkipTest("Cannot connect to RabbitMQ admin server to close connection %s" % (bus.connection_name))
+
         # use the http REST API using request to forcefully close the connection on the broker-side
         url = "http://%s:15672/api/connections/%s" % (bus.broker, bus.connection_name)
 
diff --git a/LCS/PyCommon/CMakeLists.txt b/LCS/PyCommon/CMakeLists.txt
index 990e2bcac04e8cb956e07b6af669d4a474fb6d43..4f82276090fd9bbcdc898ee21dc203390a238baa 100644
--- a/LCS/PyCommon/CMakeLists.txt
+++ b/LCS/PyCommon/CMakeLists.txt
@@ -6,6 +6,7 @@ lofar_find_package(Python 3.4 REQUIRED)
 include(PythonInstall)
 
 include(FindPythonModule)
+find_python_module(jsonschema)
 find_python_module(psycopg2)
 
 set(_py_files
diff --git a/LCS/PyCommon/test/CMakeLists.txt b/LCS/PyCommon/test/CMakeLists.txt
index 57ade8b7a0b8a757cdf6f5a8ce4775f2aec99747..bf1bfce981f17ca4553ce3fba4329c4d350298d9 100644
--- a/LCS/PyCommon/test/CMakeLists.txt
+++ b/LCS/PyCommon/test/CMakeLists.txt
@@ -27,7 +27,14 @@ IF(BUILD_TESTING)
     lofar_add_test(t_methodtrigger)
     lofar_add_test(t_util)
     lofar_add_test(t_test_utils)
-    lofar_add_test(t_json_utils)
     lofar_add_test(t_cep4_utils)
-    lofar_add_test(t_postgres)
+
+    IF(PYTHON_JSONSCHEMA)
+        lofar_add_test(t_json_utils)
+    ENDIF()
+
+    IF(PYTHON_PSYCOPG2 AND PYTHON_TESTING.POSTGRESQL)
+        lofar_add_test(t_postgres)
+    ENDIF()
+
 ENDIF()
\ No newline at end of file
diff --git a/QA/QA_Common/test/CMakeLists.txt b/QA/QA_Common/test/CMakeLists.txt
index 805b871beb4b8d2d05ee2172e6e3029bb915b496..e7de2d7e597a0f6ad83400dd57787ffdaf1fee05 100644
--- a/QA/QA_Common/test/CMakeLists.txt
+++ b/QA/QA_Common/test/CMakeLists.txt
@@ -19,5 +19,6 @@
 include(LofarCTest)
 
 lofar_add_test(t_hdf5_io)
+set_tests_properties(t_hdf5_io PROPERTIES TIMEOUT 300)
 
 
diff --git a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jCampaign.h b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jCampaign.h
index 60f79b70f58d7f60c54400d4a511b2320b0a014b..fb47bab411d4da7902aaf420267f9fc7695e8e68 100644
--- a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jCampaign.h
+++ b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jCampaign.h
@@ -1,61 +1,61 @@
-/* DO NOT EDIT THIS FILE - it is machine generated */
-#include <jni.h>
-/* Header for class nl_astron_lofar_sas_otb_jotdb3_jCampaign */
-
-#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jCampaign
-#define _Included_nl_astron_lofar_sas_otb_jotdb3_jCampaign
-#ifdef __cplusplus
-extern "C" {
-#endif
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jCampaign
- * Method:    initCampaign
- * Signature: ()V
- */
-JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jCampaign_initCampaign
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jCampaign
- * Method:    getCampaign
- * Signature: (Ljava/lang/String;)Lnl/astron/lofar/sas/otb/jotdb3/jCampaignInfo;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jCampaign_getCampaign__Ljava_lang_String_2
-  (JNIEnv *, jobject, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jCampaign
- * Method:    getCampaign
- * Signature: (I)Lnl/astron/lofar/sas/otb/jotdb3/jCampaignInfo;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jCampaign_getCampaign__I
-  (JNIEnv *, jobject, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jCampaign
- * Method:    getCampaignList
- * Signature: ()Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jCampaign_getCampaignList
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jCampaign
- * Method:    saveCampaign
- * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jCampaignInfo;)I
- */
-JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jCampaign_saveCampaign
-  (JNIEnv *, jobject, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jCampaign
- * Method:    errorMsg
- * Signature: ()Ljava/lang/String;
- */
-JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jCampaign_errorMsg
-  (JNIEnv *, jobject);
-
-#ifdef __cplusplus
-}
-#endif
-#endif
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class nl_astron_lofar_sas_otb_jotdb3_jCampaign */
+
+#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jCampaign
+#define _Included_nl_astron_lofar_sas_otb_jotdb3_jCampaign
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jCampaign
+ * Method:    initCampaign
+ * Signature: ()V
+ */
+JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jCampaign_initCampaign
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jCampaign
+ * Method:    getCampaign
+ * Signature: (Ljava/lang/String;)Lnl/astron/lofar/sas/otb/jotdb3/jCampaignInfo;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jCampaign_getCampaign__Ljava_lang_String_2
+  (JNIEnv *, jobject, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jCampaign
+ * Method:    getCampaign
+ * Signature: (I)Lnl/astron/lofar/sas/otb/jotdb3/jCampaignInfo;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jCampaign_getCampaign__I
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jCampaign
+ * Method:    getCampaignList
+ * Signature: ()Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jCampaign_getCampaignList
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jCampaign
+ * Method:    saveCampaign
+ * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jCampaignInfo;)I
+ */
+JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jCampaign_saveCampaign
+  (JNIEnv *, jobject, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jCampaign
+ * Method:    errorMsg
+ * Signature: ()Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jCampaign_errorMsg
+  (JNIEnv *, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jClassifConv.h b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jClassifConv.h
index e22c8386d645b06e090aca808b4f4b9db433cb7e..3780bdceec636d0f0d931d1ae7f562a2e14e9cde 100644
--- a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jClassifConv.h
+++ b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jClassifConv.h
@@ -1,61 +1,61 @@
-/* DO NOT EDIT THIS FILE - it is machine generated */
-#include <jni.h>
-/* Header for class nl_astron_lofar_sas_otb_jotdb3_jClassifConv */
-
-#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jClassifConv
-#define _Included_nl_astron_lofar_sas_otb_jotdb3_jClassifConv
-#ifdef __cplusplus
-extern "C" {
-#endif
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jClassifConv
- * Method:    initClassifConv
- * Signature: ()V
- */
-JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jClassifConv_initClassifConv
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jClassifConv
- * Method:    get
- * Signature: (Ljava/lang/String;)S
- */
-JNIEXPORT jshort JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jClassifConv_get__Ljava_lang_String_2
-  (JNIEnv *, jobject, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jClassifConv
- * Method:    get
- * Signature: (S)Ljava/lang/String;
- */
-JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jClassifConv_get__S
-  (JNIEnv *, jobject, jshort);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jClassifConv
- * Method:    getTypes
- * Signature: ()Ljava/util/HashMap;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jClassifConv_getTypes
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jClassifConv
- * Method:    top
- * Signature: ()V
- */
-JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jClassifConv_top
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jClassifConv
- * Method:    next
- * Signature: ()Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jClassifConv_next
-  (JNIEnv *, jobject);
-
-#ifdef __cplusplus
-}
-#endif
-#endif
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class nl_astron_lofar_sas_otb_jotdb3_jClassifConv */
+
+#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jClassifConv
+#define _Included_nl_astron_lofar_sas_otb_jotdb3_jClassifConv
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jClassifConv
+ * Method:    initClassifConv
+ * Signature: ()V
+ */
+JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jClassifConv_initClassifConv
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jClassifConv
+ * Method:    get
+ * Signature: (Ljava/lang/String;)S
+ */
+JNIEXPORT jshort JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jClassifConv_get__Ljava_lang_String_2
+  (JNIEnv *, jobject, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jClassifConv
+ * Method:    get
+ * Signature: (S)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jClassifConv_get__S
+  (JNIEnv *, jobject, jshort);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jClassifConv
+ * Method:    getTypes
+ * Signature: ()Ljava/util/HashMap;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jClassifConv_getTypes
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jClassifConv
+ * Method:    top
+ * Signature: ()V
+ */
+JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jClassifConv_top
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jClassifConv
+ * Method:    next
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jClassifConv_next
+  (JNIEnv *, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jInitCPPLogger.h b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jInitCPPLogger.h
index 3ca5af5f737e165dc899d9f70dfff42c037e9cca..14d554bf175a27fb95495234391bf0b6764b50c0 100644
--- a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jInitCPPLogger.h
+++ b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jInitCPPLogger.h
@@ -1,21 +1,21 @@
-/* DO NOT EDIT THIS FILE - it is machine generated */
-#include <jni.h>
-/* Header for class nl_astron_lofar_sas_otb_jotdb3_jInitCPPLogger */
-
-#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jInitCPPLogger
-#define _Included_nl_astron_lofar_sas_otb_jotdb3_jInitCPPLogger
-#ifdef __cplusplus
-extern "C" {
-#endif
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jInitCPPLogger
- * Method:    initLogger
- * Signature: (Ljava/lang/String;)V
- */
-JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jInitCPPLogger_initLogger
-  (JNIEnv *, jobject, jstring);
-
-#ifdef __cplusplus
-}
-#endif
-#endif
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class nl_astron_lofar_sas_otb_jotdb3_jInitCPPLogger */
+
+#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jInitCPPLogger
+#define _Included_nl_astron_lofar_sas_otb_jotdb3_jInitCPPLogger
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jInitCPPLogger
+ * Method:    initLogger
+ * Signature: (Ljava/lang/String;)V
+ */
+JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jInitCPPLogger_initLogger
+  (JNIEnv *, jobject, jstring);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv.h b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv.h
index 12bbda58a2a44f699157f4e83b12d8b3c8004725..e8fc1fc5c34b62a3dba3d6bad14bad4ad68ef820 100644
--- a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv.h
+++ b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv.h
@@ -1,61 +1,61 @@
-/* DO NOT EDIT THIS FILE - it is machine generated */
-#include <jni.h>
-/* Header for class nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv */
-
-#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
-#define _Included_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
-#ifdef __cplusplus
-extern "C" {
-#endif
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
- * Method:    initParamTypeConv
- * Signature: ()V
- */
-JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv_initParamTypeConv
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
- * Method:    get
- * Signature: (Ljava/lang/String;)S
- */
-JNIEXPORT jshort JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv_get__Ljava_lang_String_2
-  (JNIEnv *, jobject, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
- * Method:    get
- * Signature: (S)Ljava/lang/String;
- */
-JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv_get__S
-  (JNIEnv *, jobject, jshort);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
- * Method:    getTypes
- * Signature: ()Ljava/util/HashMap;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv_getTypes
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
- * Method:    top
- * Signature: ()V
- */
-JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv_top
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
- * Method:    next
- * Signature: ()Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv_next
-  (JNIEnv *, jobject);
-
-#ifdef __cplusplus
-}
-#endif
-#endif
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv */
+
+#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
+#define _Included_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
+ * Method:    initParamTypeConv
+ * Signature: ()V
+ */
+JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv_initParamTypeConv
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
+ * Method:    get
+ * Signature: (Ljava/lang/String;)S
+ */
+JNIEXPORT jshort JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv_get__Ljava_lang_String_2
+  (JNIEnv *, jobject, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
+ * Method:    get
+ * Signature: (S)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv_get__S
+  (JNIEnv *, jobject, jshort);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
+ * Method:    getTypes
+ * Signature: ()Ljava/util/HashMap;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv_getTypes
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
+ * Method:    top
+ * Signature: ()V
+ */
+JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv_top
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv
+ * Method:    next
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jParamTypeConv_next
+  (JNIEnv *, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance.h b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance.h
index 1dc68a093c08ac4ea303660d03a766e1ed0bb18a..d0d6ca9c597055928b6008dc582772476f03dd31 100644
--- a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance.h
+++ b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance.h
@@ -1,421 +1,421 @@
-/* DO NOT EDIT THIS FILE - it is machine generated */
-#include <jni.h>
-/* Header for class nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance */
-
-#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
-#define _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
-#ifdef __cplusplus
-extern "C" {
-#endif
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    initTreeMaintenance
- * Signature: ()V
- */
-JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_initTreeMaintenance
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    loadMasterFile
- * Signature: (Ljava/lang/String;)I
- */
-JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_loadMasterFile
-  (JNIEnv *, jobject, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    loadComponentFile
- * Signature: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)I
- */
-JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_loadComponentFile__Ljava_lang_String_2Ljava_lang_String_2Ljava_lang_String_2
-  (JNIEnv *, jobject, jstring, jstring, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    loadComponentFile
- * Signature: (Ljava/lang/String;Ljava/lang/String;)I
- */
-JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_loadComponentFile__Ljava_lang_String_2Ljava_lang_String_2
-  (JNIEnv *, jobject, jstring, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    loadComponentFile
- * Signature: (Ljava/lang/String;)I
- */
-JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_loadComponentFile__Ljava_lang_String_2
-  (JNIEnv *, jobject, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    getComponentList
- * Signature: (Ljava/lang/String;Z)Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getComponentList__Ljava_lang_String_2Z
-  (JNIEnv *, jobject, jstring, jboolean);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    getComponentList
- * Signature: (Ljava/lang/String;)Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getComponentList__Ljava_lang_String_2
-  (JNIEnv *, jobject, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    getComponentList
- * Signature: ()Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getComponentList__
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    getComponentNode
- * Signature: (I)Lnl/astron/lofar/sas/otb/jotdb3/jVICnodeDef;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getComponentNode
-  (JNIEnv *, jobject, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    getComponentParams
- * Signature: (I)Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getComponentParams
-  (JNIEnv *, jobject, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    saveComponentNode
- * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jVICnodeDef;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_saveComponentNode
-  (JNIEnv *, jobject, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    isTopComponent
- * Signature: (I)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_isTopComponent
-  (JNIEnv *, jobject, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    deleteComponentNode
- * Signature: (I)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_deleteComponentNode
-  (JNIEnv *, jobject, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    getFullComponentName
- * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jVICnodeDef;)Ljava/lang/String;
- */
-JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getFullComponentName
-  (JNIEnv *, jobject, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    buildTemplateTree
- * Signature: (IS)I
- */
-JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_buildTemplateTree
-  (JNIEnv *, jobject, jint, jshort);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    newTemplateTree
- * Signature: ()I
- */
-JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_newTemplateTree
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    copyTemplateTree
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_copyTemplateTree
-  (JNIEnv *, jobject, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    assignTemplateName
- * Signature: (ILjava/lang/String;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_assignTemplateName
-  (JNIEnv *, jobject, jint, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    assignProcessType
- * Signature: (ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_assignProcessType
-  (JNIEnv *, jobject, jint, jstring, jstring, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    getNode
- * Signature: (II)Lnl/astron/lofar/sas/otb/jotdb3/jOTDBnode;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getNode
-  (JNIEnv *, jobject, jint, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    getParam
- * Signature: (II)Lnl/astron/lofar/sas/otb/jotdb3/jOTDBparam;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getParam__II
-  (JNIEnv *, jobject, jint, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    getParam
- * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jOTDBnode;)Lnl/astron/lofar/sas/otb/jotdb3/jOTDBparam;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getParam__Lnl_astron_lofar_sas_otb_jotdb3_jOTDBnode_2
-  (JNIEnv *, jobject, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    saveParam
- * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jOTDBparam;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_saveParam
-  (JNIEnv *, jobject, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    getItemList
- * Signature: (III)Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getItemList__III
-  (JNIEnv *, jobject, jint, jint, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    getItemList
- * Signature: (ILjava/lang/String;)Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getItemList__ILjava_lang_String_2
-  (JNIEnv *, jobject, jint, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    getItemList
- * Signature: (ILjava/lang/String;Z)Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getItemList__ILjava_lang_String_2Z
-  (JNIEnv *, jobject, jint, jstring, jboolean);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    dupNode
- * Signature: (IIS)I
- */
-JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_dupNode
-  (JNIEnv *, jobject, jint, jint, jshort);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    addComponent
- * Signature: (IIILjava/lang/String;)I
- */
-JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_addComponent__IIILjava_lang_String_2
-  (JNIEnv *, jobject, jint, jint, jint, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    addComponent
- * Signature: (III)I
- */
-JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_addComponent__III
-  (JNIEnv *, jobject, jint, jint, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    saveNode
- * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jOTDBnode;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_saveNode
-  (JNIEnv *, jobject, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    saveNodeList
- * Signature: (Ljava/util/Vector;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_saveNodeList
-  (JNIEnv *, jobject, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    deleteNode
- * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jOTDBnode;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_deleteNode
-  (JNIEnv *, jobject, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    deleteNodeList
- * Signature: (Ljava/util/Vector;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_deleteNodeList
-  (JNIEnv *, jobject, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    checkTreeConstraints
- * Signature: (II)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_checkTreeConstraints__II
-  (JNIEnv *, jobject, jint, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    checkTreeConstraints
- * Signature: (I)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_checkTreeConstraints__I
-  (JNIEnv *, jobject, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    instanciateTree
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_instanciateTree
-  (JNIEnv *, jobject, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    pruneTree
- * Signature: (IS)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_pruneTree
-  (JNIEnv *, jobject, jint, jshort);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    exportTree
- * Signature: (IILjava/lang/String;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_exportTree
-  (JNIEnv *, jobject, jint, jint, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    exportResultTree
- * Signature: (IILjava/lang/String;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_exportResultTree
-  (JNIEnv *, jobject, jint, jint, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    exportMetadata
- * Signature: (ILjava/lang/String;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_exportMetadata__ILjava_lang_String_2
-  (JNIEnv *, jobject, jint, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    exportMetadata
- * Signature: (ILjava/lang/String;Z)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_exportMetadata__ILjava_lang_String_2Z
-  (JNIEnv *, jobject, jint, jstring, jboolean);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    deleteTree
- * Signature: (I)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_deleteTree
-  (JNIEnv *, jobject, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    getTopNode
- * Signature: (I)Lnl/astron/lofar/sas/otb/jotdb3/jOTDBnode;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getTopNode
-  (JNIEnv *, jobject, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    setMomInfo
- * Signature: (IIILjava/lang/String;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_setMomInfo
-  (JNIEnv *, jobject, jint, jint, jint, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    setClassification
- * Signature: (IS)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_setClassification
-  (JNIEnv *, jobject, jint, jshort);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    setTreeState
- * Signature: (IS)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_setTreeState__IS
-  (JNIEnv *, jobject, jint, jshort);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    setTreeState
- * Signature: (ISZ)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_setTreeState__ISZ
-  (JNIEnv *, jobject, jint, jshort, jboolean);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    setDescription
- * Signature: (ILjava/lang/String;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_setDescription
-  (JNIEnv *, jobject, jint, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    setSchedule
- * Signature: (ILjava/lang/String;Ljava/lang/String;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_setSchedule__ILjava_lang_String_2Ljava_lang_String_2
-  (JNIEnv *, jobject, jint, jstring, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    setSchedule
- * Signature: (ILjava/lang/String;Ljava/lang/String;Z)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_setSchedule__ILjava_lang_String_2Ljava_lang_String_2Z
-  (JNIEnv *, jobject, jint, jstring, jstring, jboolean);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
- * Method:    errorMsg
- * Signature: ()Ljava/lang/String;
- */
-JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_errorMsg
-  (JNIEnv *, jobject);
-
-#ifdef __cplusplus
-}
-#endif
-#endif
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance */
+
+#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+#define _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    initTreeMaintenance
+ * Signature: ()V
+ */
+JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_initTreeMaintenance
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    loadMasterFile
+ * Signature: (Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_loadMasterFile
+  (JNIEnv *, jobject, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    loadComponentFile
+ * Signature: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_loadComponentFile__Ljava_lang_String_2Ljava_lang_String_2Ljava_lang_String_2
+  (JNIEnv *, jobject, jstring, jstring, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    loadComponentFile
+ * Signature: (Ljava/lang/String;Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_loadComponentFile__Ljava_lang_String_2Ljava_lang_String_2
+  (JNIEnv *, jobject, jstring, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    loadComponentFile
+ * Signature: (Ljava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_loadComponentFile__Ljava_lang_String_2
+  (JNIEnv *, jobject, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    getComponentList
+ * Signature: (Ljava/lang/String;Z)Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getComponentList__Ljava_lang_String_2Z
+  (JNIEnv *, jobject, jstring, jboolean);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    getComponentList
+ * Signature: (Ljava/lang/String;)Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getComponentList__Ljava_lang_String_2
+  (JNIEnv *, jobject, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    getComponentList
+ * Signature: ()Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getComponentList__
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    getComponentNode
+ * Signature: (I)Lnl/astron/lofar/sas/otb/jotdb3/jVICnodeDef;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getComponentNode
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    getComponentParams
+ * Signature: (I)Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getComponentParams
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    saveComponentNode
+ * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jVICnodeDef;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_saveComponentNode
+  (JNIEnv *, jobject, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    isTopComponent
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_isTopComponent
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    deleteComponentNode
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_deleteComponentNode
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    getFullComponentName
+ * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jVICnodeDef;)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getFullComponentName
+  (JNIEnv *, jobject, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    buildTemplateTree
+ * Signature: (IS)I
+ */
+JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_buildTemplateTree
+  (JNIEnv *, jobject, jint, jshort);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    newTemplateTree
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_newTemplateTree
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    copyTemplateTree
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_copyTemplateTree
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    assignTemplateName
+ * Signature: (ILjava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_assignTemplateName
+  (JNIEnv *, jobject, jint, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    assignProcessType
+ * Signature: (ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_assignProcessType
+  (JNIEnv *, jobject, jint, jstring, jstring, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    getNode
+ * Signature: (II)Lnl/astron/lofar/sas/otb/jotdb3/jOTDBnode;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getNode
+  (JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    getParam
+ * Signature: (II)Lnl/astron/lofar/sas/otb/jotdb3/jOTDBparam;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getParam__II
+  (JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    getParam
+ * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jOTDBnode;)Lnl/astron/lofar/sas/otb/jotdb3/jOTDBparam;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getParam__Lnl_astron_lofar_sas_otb_jotdb3_jOTDBnode_2
+  (JNIEnv *, jobject, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    saveParam
+ * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jOTDBparam;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_saveParam
+  (JNIEnv *, jobject, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    getItemList
+ * Signature: (III)Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getItemList__III
+  (JNIEnv *, jobject, jint, jint, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    getItemList
+ * Signature: (ILjava/lang/String;)Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getItemList__ILjava_lang_String_2
+  (JNIEnv *, jobject, jint, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    getItemList
+ * Signature: (ILjava/lang/String;Z)Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getItemList__ILjava_lang_String_2Z
+  (JNIEnv *, jobject, jint, jstring, jboolean);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    dupNode
+ * Signature: (IIS)I
+ */
+JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_dupNode
+  (JNIEnv *, jobject, jint, jint, jshort);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    addComponent
+ * Signature: (IIILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_addComponent__IIILjava_lang_String_2
+  (JNIEnv *, jobject, jint, jint, jint, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    addComponent
+ * Signature: (III)I
+ */
+JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_addComponent__III
+  (JNIEnv *, jobject, jint, jint, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    saveNode
+ * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jOTDBnode;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_saveNode
+  (JNIEnv *, jobject, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    saveNodeList
+ * Signature: (Ljava/util/Vector;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_saveNodeList
+  (JNIEnv *, jobject, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    deleteNode
+ * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jOTDBnode;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_deleteNode
+  (JNIEnv *, jobject, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    deleteNodeList
+ * Signature: (Ljava/util/Vector;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_deleteNodeList
+  (JNIEnv *, jobject, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    checkTreeConstraints
+ * Signature: (II)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_checkTreeConstraints__II
+  (JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    checkTreeConstraints
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_checkTreeConstraints__I
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    instanciateTree
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_instanciateTree
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    pruneTree
+ * Signature: (IS)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_pruneTree
+  (JNIEnv *, jobject, jint, jshort);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    exportTree
+ * Signature: (IILjava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_exportTree
+  (JNIEnv *, jobject, jint, jint, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    exportResultTree
+ * Signature: (IILjava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_exportResultTree
+  (JNIEnv *, jobject, jint, jint, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    exportMetadata
+ * Signature: (ILjava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_exportMetadata__ILjava_lang_String_2
+  (JNIEnv *, jobject, jint, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    exportMetadata
+ * Signature: (ILjava/lang/String;Z)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_exportMetadata__ILjava_lang_String_2Z
+  (JNIEnv *, jobject, jint, jstring, jboolean);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    deleteTree
+ * Signature: (I)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_deleteTree
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    getTopNode
+ * Signature: (I)Lnl/astron/lofar/sas/otb/jotdb3/jOTDBnode;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_getTopNode
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    setMomInfo
+ * Signature: (IIILjava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_setMomInfo
+  (JNIEnv *, jobject, jint, jint, jint, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    setClassification
+ * Signature: (IS)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_setClassification
+  (JNIEnv *, jobject, jint, jshort);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    setTreeState
+ * Signature: (IS)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_setTreeState__IS
+  (JNIEnv *, jobject, jint, jshort);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    setTreeState
+ * Signature: (ISZ)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_setTreeState__ISZ
+  (JNIEnv *, jobject, jint, jshort, jboolean);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    setDescription
+ * Signature: (ILjava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_setDescription
+  (JNIEnv *, jobject, jint, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    setSchedule
+ * Signature: (ILjava/lang/String;Ljava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_setSchedule__ILjava_lang_String_2Ljava_lang_String_2
+  (JNIEnv *, jobject, jint, jstring, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    setSchedule
+ * Signature: (ILjava/lang/String;Ljava/lang/String;Z)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_setSchedule__ILjava_lang_String_2Ljava_lang_String_2Z
+  (JNIEnv *, jobject, jint, jstring, jstring, jboolean);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance
+ * Method:    errorMsg
+ * Signature: ()Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeMaintenance_errorMsg
+  (JNIEnv *, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv.h b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv.h
index 2ecc0a424175107300360e61d7f4d56ec0b6c85e..e5f4a904c8ab73798f386c50cd58a740d3ddd932 100644
--- a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv.h
+++ b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv.h
@@ -1,61 +1,61 @@
-/* DO NOT EDIT THIS FILE - it is machine generated */
-#include <jni.h>
-/* Header for class nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv */
-
-#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
-#define _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
-#ifdef __cplusplus
-extern "C" {
-#endif
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
- * Method:    initTreeStateConv
- * Signature: ()V
- */
-JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv_initTreeStateConv
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
- * Method:    get
- * Signature: (Ljava/lang/String;)S
- */
-JNIEXPORT jshort JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv_get__Ljava_lang_String_2
-  (JNIEnv *, jobject, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
- * Method:    get
- * Signature: (S)Ljava/lang/String;
- */
-JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv_get__S
-  (JNIEnv *, jobject, jshort);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
- * Method:    getTypes
- * Signature: ()Ljava/util/HashMap;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv_getTypes
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
- * Method:    top
- * Signature: ()V
- */
-JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv_top
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
- * Method:    next
- * Signature: ()Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv_next
-  (JNIEnv *, jobject);
-
-#ifdef __cplusplus
-}
-#endif
-#endif
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv */
+
+#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
+#define _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
+ * Method:    initTreeStateConv
+ * Signature: ()V
+ */
+JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv_initTreeStateConv
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
+ * Method:    get
+ * Signature: (Ljava/lang/String;)S
+ */
+JNIEXPORT jshort JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv_get__Ljava_lang_String_2
+  (JNIEnv *, jobject, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
+ * Method:    get
+ * Signature: (S)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv_get__S
+  (JNIEnv *, jobject, jshort);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
+ * Method:    getTypes
+ * Signature: ()Ljava/util/HashMap;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv_getTypes
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
+ * Method:    top
+ * Signature: ()V
+ */
+JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv_top
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv
+ * Method:    next
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeStateConv_next
+  (JNIEnv *, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv.h b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv.h
index febd45b64cc35fe8cff287aa40d1d085866b7fe7..eaff8365b496132a378b016a1b47a9a84b5ce325 100644
--- a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv.h
+++ b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv.h
@@ -1,61 +1,61 @@
-/* DO NOT EDIT THIS FILE - it is machine generated */
-#include <jni.h>
-/* Header for class nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv */
-
-#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
-#define _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
-#ifdef __cplusplus
-extern "C" {
-#endif
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
- * Method:    initTreeTypeConv
- * Signature: ()V
- */
-JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv_initTreeTypeConv
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
- * Method:    get
- * Signature: (Ljava/lang/String;)S
- */
-JNIEXPORT jshort JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv_get__Ljava_lang_String_2
-  (JNIEnv *, jobject, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
- * Method:    get
- * Signature: (S)Ljava/lang/String;
- */
-JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv_get__S
-  (JNIEnv *, jobject, jshort);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
- * Method:    getTypes
- * Signature: ()Ljava/util/HashMap;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv_getTypes
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
- * Method:    top
- * Signature: ()V
- */
-JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv_top
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
- * Method:    next
- * Signature: ()Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv_next
-  (JNIEnv *, jobject);
-
-#ifdef __cplusplus
-}
-#endif
-#endif
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv */
+
+#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
+#define _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
+ * Method:    initTreeTypeConv
+ * Signature: ()V
+ */
+JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv_initTreeTypeConv
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
+ * Method:    get
+ * Signature: (Ljava/lang/String;)S
+ */
+JNIEXPORT jshort JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv_get__Ljava_lang_String_2
+  (JNIEnv *, jobject, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
+ * Method:    get
+ * Signature: (S)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv_get__S
+  (JNIEnv *, jobject, jshort);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
+ * Method:    getTypes
+ * Signature: ()Ljava/util/HashMap;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv_getTypes
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
+ * Method:    top
+ * Signature: ()V
+ */
+JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv_top
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv
+ * Method:    next
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeTypeConv_next
+  (JNIEnv *, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeValue.h b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeValue.h
index 8f9b631c48c845985527f85d6f92d2e423cf88dc..d985bc65ff9730ec157cd73e0f4ecef7dc7bf92e 100644
--- a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeValue.h
+++ b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jTreeValue.h
@@ -1,109 +1,109 @@
-/* DO NOT EDIT THIS FILE - it is machine generated */
-#include <jni.h>
-/* Header for class nl_astron_lofar_sas_otb_jotdb3_jTreeValue */
-
-#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeValue
-#define _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeValue
-#ifdef __cplusplus
-extern "C" {
-#endif
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
- * Method:    addKVT
- * Signature: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_addKVT__Ljava_lang_String_2Ljava_lang_String_2Ljava_lang_String_2
-  (JNIEnv *, jobject, jstring, jstring, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
- * Method:    addKVT
- * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jOTDBvalue;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_addKVT__Lnl_astron_lofar_sas_otb_jotdb3_jOTDBvalue_2
-  (JNIEnv *, jobject, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
- * Method:    addKVTlist
- * Signature: (Ljava/util/Vector;)Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_addKVTlist
-  (JNIEnv *, jobject, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
- * Method:    getBrokenHardware
- * Signature: (Ljava/lang/String;Ljava/lang/String;)Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_getBrokenHardware__Ljava_lang_String_2Ljava_lang_String_2
-  (JNIEnv *, jobject, jstring, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
- * Method:    getBrokenHardware
- * Signature: (Ljava/lang/String;)Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_getBrokenHardware__Ljava_lang_String_2
-  (JNIEnv *, jobject, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
- * Method:    getBrokenHardware
- * Signature: ()Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_getBrokenHardware__
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
- * Method:    searchInPeriod
- * Signature: (IILjava/lang/String;Ljava/lang/String;Z)Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_searchInPeriod__IILjava_lang_String_2Ljava_lang_String_2Z
-  (JNIEnv *, jobject, jint, jint, jstring, jstring, jboolean);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
- * Method:    searchInPeriod
- * Signature: (IILjava/lang/String;Ljava/lang/String;)Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_searchInPeriod__IILjava_lang_String_2Ljava_lang_String_2
-  (JNIEnv *, jobject, jint, jint, jstring, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
- * Method:    searchInPeriod
- * Signature: (IILjava/lang/String;)Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_searchInPeriod__IILjava_lang_String_2
-  (JNIEnv *, jobject, jint, jint, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
- * Method:    searchInPeriod
- * Signature: (II)Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_searchInPeriod__II
-  (JNIEnv *, jobject, jint, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
- * Method:    getSchedulableItems
- * Signature: (I)Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_getSchedulableItems__I
-  (JNIEnv *, jobject, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
- * Method:    getSchedulableItems
- * Signature: ()Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_getSchedulableItems__
-  (JNIEnv *, jobject);
-
-#ifdef __cplusplus
-}
-#endif
-#endif
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class nl_astron_lofar_sas_otb_jotdb3_jTreeValue */
+
+#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeValue
+#define _Included_nl_astron_lofar_sas_otb_jotdb3_jTreeValue
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
+ * Method:    addKVT
+ * Signature: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_addKVT__Ljava_lang_String_2Ljava_lang_String_2Ljava_lang_String_2
+  (JNIEnv *, jobject, jstring, jstring, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
+ * Method:    addKVT
+ * Signature: (Lnl/astron/lofar/sas/otb/jotdb3/jOTDBvalue;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_addKVT__Lnl_astron_lofar_sas_otb_jotdb3_jOTDBvalue_2
+  (JNIEnv *, jobject, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
+ * Method:    addKVTlist
+ * Signature: (Ljava/util/Vector;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_addKVTlist
+  (JNIEnv *, jobject, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
+ * Method:    getBrokenHardware
+ * Signature: (Ljava/lang/String;Ljava/lang/String;)Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_getBrokenHardware__Ljava_lang_String_2Ljava_lang_String_2
+  (JNIEnv *, jobject, jstring, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
+ * Method:    getBrokenHardware
+ * Signature: (Ljava/lang/String;)Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_getBrokenHardware__Ljava_lang_String_2
+  (JNIEnv *, jobject, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
+ * Method:    getBrokenHardware
+ * Signature: ()Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_getBrokenHardware__
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
+ * Method:    searchInPeriod
+ * Signature: (IILjava/lang/String;Ljava/lang/String;Z)Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_searchInPeriod__IILjava_lang_String_2Ljava_lang_String_2Z
+  (JNIEnv *, jobject, jint, jint, jstring, jstring, jboolean);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
+ * Method:    searchInPeriod
+ * Signature: (IILjava/lang/String;Ljava/lang/String;)Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_searchInPeriod__IILjava_lang_String_2Ljava_lang_String_2
+  (JNIEnv *, jobject, jint, jint, jstring, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
+ * Method:    searchInPeriod
+ * Signature: (IILjava/lang/String;)Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_searchInPeriod__IILjava_lang_String_2
+  (JNIEnv *, jobject, jint, jint, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
+ * Method:    searchInPeriod
+ * Signature: (II)Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_searchInPeriod__II
+  (JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
+ * Method:    getSchedulableItems
+ * Signature: (I)Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_getSchedulableItems__I
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jTreeValue
+ * Method:    getSchedulableItems
+ * Signature: ()Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jTreeValue_getSchedulableItems__
+  (JNIEnv *, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jUnitConv.h b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jUnitConv.h
index 7bf1eaa4d4de00543e9c67b8ba26af1493569a0d..7aa6724caebd83ce95261df220016bd3e120e06a 100644
--- a/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jUnitConv.h
+++ b/SAS/OTB/jOTDB3/include/jOTDB3/nl_astron_lofar_sas_otb_jotdb3_jUnitConv.h
@@ -1,61 +1,61 @@
-/* DO NOT EDIT THIS FILE - it is machine generated */
-#include <jni.h>
-/* Header for class nl_astron_lofar_sas_otb_jotdb3_jUnitConv */
-
-#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jUnitConv
-#define _Included_nl_astron_lofar_sas_otb_jotdb3_jUnitConv
-#ifdef __cplusplus
-extern "C" {
-#endif
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jUnitConv
- * Method:    initUnitConv
- * Signature: ()V
- */
-JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jUnitConv_initUnitConv
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jUnitConv
- * Method:    get
- * Signature: (Ljava/lang/String;)S
- */
-JNIEXPORT jshort JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jUnitConv_get__Ljava_lang_String_2
-  (JNIEnv *, jobject, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jUnitConv
- * Method:    get
- * Signature: (S)Ljava/lang/String;
- */
-JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jUnitConv_get__S
-  (JNIEnv *, jobject, jshort);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jUnitConv
- * Method:    getTypes
- * Signature: ()Ljava/util/HashMap;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jUnitConv_getTypes
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jUnitConv
- * Method:    top
- * Signature: ()V
- */
-JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jUnitConv_top
-  (JNIEnv *, jobject);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jotdb3_jUnitConv
- * Method:    next
- * Signature: ()Z
- */
-JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jUnitConv_next
-  (JNIEnv *, jobject);
-
-#ifdef __cplusplus
-}
-#endif
-#endif
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class nl_astron_lofar_sas_otb_jotdb3_jUnitConv */
+
+#ifndef _Included_nl_astron_lofar_sas_otb_jotdb3_jUnitConv
+#define _Included_nl_astron_lofar_sas_otb_jotdb3_jUnitConv
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jUnitConv
+ * Method:    initUnitConv
+ * Signature: ()V
+ */
+JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jUnitConv_initUnitConv
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jUnitConv
+ * Method:    get
+ * Signature: (Ljava/lang/String;)S
+ */
+JNIEXPORT jshort JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jUnitConv_get__Ljava_lang_String_2
+  (JNIEnv *, jobject, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jUnitConv
+ * Method:    get
+ * Signature: (S)Ljava/lang/String;
+ */
+JNIEXPORT jstring JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jUnitConv_get__S
+  (JNIEnv *, jobject, jshort);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jUnitConv
+ * Method:    getTypes
+ * Signature: ()Ljava/util/HashMap;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jUnitConv_getTypes
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jUnitConv
+ * Method:    top
+ * Signature: ()V
+ */
+JNIEXPORT void JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jUnitConv_top
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jotdb3_jUnitConv
+ * Method:    next
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_nl_astron_lofar_sas_otb_jotdb3_jUnitConv_next
+  (JNIEnv *, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/SAS/OTB/jParmFacade/include/jParmFacade/nl_astron_lofar_sas_otb_jparmfacade_jParmFacade.h b/SAS/OTB/jParmFacade/include/jParmFacade/nl_astron_lofar_sas_otb_jparmfacade_jParmFacade.h
index 5809eec90233829e34666a2b907e6a9c32820a20..414cfffb472c5f3bcee2cce4caea4288a0ae983a 100644
--- a/SAS/OTB/jParmFacade/include/jParmFacade/nl_astron_lofar_sas_otb_jparmfacade_jParmFacade.h
+++ b/SAS/OTB/jParmFacade/include/jParmFacade/nl_astron_lofar_sas_otb_jparmfacade_jParmFacade.h
@@ -1,45 +1,45 @@
-/* DO NOT EDIT THIS FILE - it is machine generated */
-#include <jni.h>
-/* Header for class nl_astron_lofar_sas_otb_jparmfacade_jParmFacade */
-
-#ifndef _Included_nl_astron_lofar_sas_otb_jparmfacade_jParmFacade
-#define _Included_nl_astron_lofar_sas_otb_jparmfacade_jParmFacade
-#ifdef __cplusplus
-extern "C" {
-#endif
-/*
- * Class:     nl_astron_lofar_sas_otb_jparmfacade_jParmFacade
- * Method:    getRange
- * Signature: (Ljava/lang/String;)Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jparmfacade_jParmFacade_getRange
-  (JNIEnv *, jobject, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jparmfacade_jParmFacade
- * Method:    getNames
- * Signature: (Ljava/lang/String;)Ljava/util/Vector;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jparmfacade_jParmFacade_getNames
-  (JNIEnv *, jobject, jstring);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jparmfacade_jParmFacade
- * Method:    getValues
- * Signature: (Ljava/lang/String;DDIDDI)Ljava/util/HashMap;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jparmfacade_jParmFacade_getValues
-  (JNIEnv *, jobject, jstring, jdouble, jdouble, jint, jdouble, jdouble, jint);
-
-/*
- * Class:     nl_astron_lofar_sas_otb_jparmfacade_jParmFacade
- * Method:    getHistory
- * Signature: (Ljava/lang/String;DDDDDD)Ljava/util/HashMap;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jparmfacade_jParmFacade_getHistory
-  (JNIEnv *, jobject, jstring, jdouble, jdouble, jdouble, jdouble, jdouble, jdouble);
-
-#ifdef __cplusplus
-}
-#endif
-#endif
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class nl_astron_lofar_sas_otb_jparmfacade_jParmFacade */
+
+#ifndef _Included_nl_astron_lofar_sas_otb_jparmfacade_jParmFacade
+#define _Included_nl_astron_lofar_sas_otb_jparmfacade_jParmFacade
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     nl_astron_lofar_sas_otb_jparmfacade_jParmFacade
+ * Method:    getRange
+ * Signature: (Ljava/lang/String;)Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jparmfacade_jParmFacade_getRange
+  (JNIEnv *, jobject, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jparmfacade_jParmFacade
+ * Method:    getNames
+ * Signature: (Ljava/lang/String;)Ljava/util/Vector;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jparmfacade_jParmFacade_getNames
+  (JNIEnv *, jobject, jstring);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jparmfacade_jParmFacade
+ * Method:    getValues
+ * Signature: (Ljava/lang/String;DDIDDI)Ljava/util/HashMap;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jparmfacade_jParmFacade_getValues
+  (JNIEnv *, jobject, jstring, jdouble, jdouble, jint, jdouble, jdouble, jint);
+
+/*
+ * Class:     nl_astron_lofar_sas_otb_jparmfacade_jParmFacade
+ * Method:    getHistory
+ * Signature: (Ljava/lang/String;DDDDDD)Ljava/util/HashMap;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_sas_otb_jparmfacade_jParmFacade_getHistory
+  (JNIEnv *, jobject, jstring, jdouble, jdouble, jdouble, jdouble, jdouble, jdouble);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/SAS/OTB/jParmFacade/src/nl_astron_lofar_otb_jparmfacade_jParmFacade.cc b/SAS/OTB/jParmFacade/src/nl_astron_lofar_otb_jparmfacade_jParmFacade.cc
index 2856cc85c5e216d781e02d35d59f3998fe5511b7..bbadd7991814d0739fdda77d89ca670a888ea3c8 100644
--- a/SAS/OTB/jParmFacade/src/nl_astron_lofar_otb_jparmfacade_jParmFacade.cc
+++ b/SAS/OTB/jParmFacade/src/nl_astron_lofar_otb_jparmfacade_jParmFacade.cc
@@ -1,308 +1,308 @@
-//#  nl_astron_lofar_java_cep_jparmfacade_jParmFacade.cc: Manages the 
-//#              connection with the parameter database.
-//#
-//#  Copyright (C) 2005-2007
-//#  ASTRON (Netherlands Foundation for Research in Astronomy)
-//#  P.O.Box 2, 7990 AA Dwingeloo, The Netherlands, softwaresupport@astron.nl
-//#
-//#  This program is free software; you can redistribute it and/or modify
-//#  it under the terms of the GNU General Public License as published by
-//#  the Free Software Foundation; either version 2 of the License, or
-//#  (at your option) any later version.
-//#
-//#  This program is distributed in the hope that it will be useful,
-//#  but WITHOUT ANY WARRANTY; without even the implied warranty of
-//#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-//#  GNU General Public License for more details.
-//#
-//#  You should have received a copy of the GNU General Public License
-//#  along with this program; if not, write to the Free Software
-//#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
-//#
-
-//# Always #include <lofar_config.h> first!
-#include <lofar_config.h>
-
-#include <jni.h>
-#include <jParmFacade/nl_astron_lofar_java_cep_jparmfacade_jParmFacade.h>
-#include <jParmFacade/nl_astron_lofar_java_cep_jparmfacade_jCommon.h>
-#include <ParmFacade/ParmFacade.h>
-#include <iostream>
-#include <string>
-
-using namespace LOFAR::ParmDB;
-using namespace std;
-
-ParmFacade* theirPF;
-
-
-/*
- * Class:     nl_astron_lofar_java_cep_jparmfacade_jParmFacade
- * Method:    getRange
- * Signature: (Ljava/lang/String;)Ljava/util/ArrayList;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_java_cep_jparmfacade_jParmFacade_getRange  (JNIEnv *env, jobject obj, jstring parmNamePattern) {
-
-  jboolean isCopy;
-  jobject rangeArrayList;
-
-  // create the connection with the ParmDB
-  setParmDBConnection(env,obj);
-
-  const char* pattern = env->GetStringUTFChars (parmNamePattern, &isCopy);
-  vector<double> rangeList;
-  try {
-    rangeList = theirPF->getRange(pattern);
-
-    env->ReleaseStringUTFChars (parmNamePattern, pattern);
-
-    vector<double>::iterator rangeIterator;
-    
-    // Construct java Vector
-    jclass class_ArrayList = env->FindClass("java/util/ArrayList");
-    jmethodID mid_ArrayList_cons = env->GetMethodID(class_ArrayList, "<init>", "()V");
-    rangeArrayList = env->NewObject(class_ArrayList, mid_ArrayList_cons);
-    jmethodID mid_ArrayList_add = env->GetMethodID(class_ArrayList, "add", "(Ljava/lang/Object;)Z");
-    
-    // Double
-    jobject jDouble;
-    jclass class_Double = env->FindClass ("java/lang/Double");
-    jmethodID mid_Double_cons = env->GetMethodID (class_Double, "<init>", "(D)V");
-    
-    for (rangeIterator = rangeList.begin(); rangeIterator != rangeList.end(); rangeIterator++) {
-      jDouble = env->NewObject (class_Double, mid_Double_cons, *rangeIterator);
-      
-      env->CallObjectMethod(rangeArrayList, mid_ArrayList_add, jDouble);
-    }
-  } catch (exception &ex) {
-    string aStr= (string)ex.what();
-    cout << "Exception during getRange("<< pattern << "): "<< ex.what() << endl;
-    env->ThrowNew(env->FindClass("java/lang/Exception"),aStr.c_str());
-  }
-  
-
-  return rangeArrayList;
-}
-
-
-/*
- * Class:     nl_astron_lofar_java_cep_jParmFacade_jparmfacade
- * Method:    getNames
- * Signature: (Ljava/lang/String;)Ljava/util/ArrayList;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_java_cep_jparmfacade_jParmFacade_getNames  (JNIEnv *env, jobject obj, jstring parmNamePattern) {
-
-  jboolean isCopy;
-  jobject nameArrayList;
-
-  // create the connection with the ParmDB
-  setParmDBConnection(env,obj);
-
-  const char* pattern = env->GetStringUTFChars (parmNamePattern, &isCopy);
-  vector<string> nameList;
-  try {
-    nameList = theirPF->getNames(pattern);
-
-    env->ReleaseStringUTFChars (parmNamePattern, pattern);
-
-    vector<string>::iterator nameIterator;
-
-    // Construct java ArrayList
-    jclass class_ArrayList = env->FindClass("java/util/ArrayList");
-    jmethodID mid_ArrayList_cons = env->GetMethodID(class_ArrayList, "<init>", "()V");
-    nameArrayList = env->NewObject(class_ArrayList, mid_ArrayList_cons);
-    jmethodID mid_ArrayList_add = env->GetMethodID(class_ArrayList, "add", "(Ljava/lang/Object;)Z");
-
-    jstring jstr;
-    for (nameIterator = nameList.begin(); nameIterator != nameList.end(); nameIterator++) {
-      jstr = env->NewStringUTF (((string)*nameIterator).c_str());
-      env->CallObjectMethod(nameArrayList, mid_ArrayList_add, jstr);
-    }
-  } catch (exception &ex) {
-    string aStr= (string)ex.what();
-    cout << "Exception during getNames("<< pattern << "): "<< ex.what() << endl;
-    env->ThrowNew(env->FindClass("java/lang/Exception"),aStr.c_str());
-  }
-
-  return nameArrayList;
-}
-
-/*
- * Class:     nl_astron_lofar_java_cep_jparmfacade_jParmFacade
- * Method:    getValues
- * Signature: (Ljava/lang/String;DDIDDI)Ljava/util/HashMap;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_java_cep_jparmfacade_jParmFacade_getValues (JNIEnv *env, jobject obj, jstring parmNamePattern, jdouble startx, jdouble endx, jint nx, jdouble starty, jdouble endy, jint ny) {
-
-  jboolean isCopy;
-  jobject result;
-
-  // create the connection with the ParmDB
-  setParmDBConnection(env,obj);
-
-
-  const char* pattern = env->GetStringUTFChars (parmNamePattern, &isCopy);
-  map<string,vector<double> > valMap;
-  try {
-    valMap = theirPF->getValues(pattern,startx,endx,nx,starty,endy,ny);
-    env->ReleaseStringUTFChars (parmNamePattern, pattern);
-
-    // Construct java Map
-    jclass mapClass, doubleClass, ArrayListClass;
-    jmethodID mapInit, mapPut, ArrayListAdd, doubleInit, ArrayListInit;
-    
-    mapClass = env->FindClass("java/util/HashMap");
-    mapInit = env->GetMethodID(mapClass, "<init>", "()V");
-    result = env->NewObject(mapClass, mapInit);
-    mapPut= env->GetMethodID(mapClass, "put", "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;");
-    
-    // Construct java Double
-    jobject jDouble;
-    doubleClass = env->FindClass ("java/lang/Double");
-    doubleInit = env->GetMethodID (doubleClass, "<init>", "(D)V");
-    
-    
-    // Loop through map and convert to HashMap
-    
-    for (map<string,vector<double> >::const_iterator valIter=valMap.begin();
-         valIter != valMap.end();
-         valIter++) {
-      
-      // Construct java ArrayList
-      jobject valArrayList;
-      ArrayListClass = env->FindClass("java/util/ArrayList");
-      ArrayListInit = env->GetMethodID(ArrayListClass, "<init>", "()V");
-      valArrayList = env->NewObject(ArrayListClass, ArrayListInit);
-      ArrayListAdd = env->GetMethodID(ArrayListClass, "add", "(Ljava/lang/Object;)Z");
-      
-      
-      for (vector<double>::const_iterator iter=valIter->second.begin();
-	   iter != valIter->second.end();
-	   iter++) {
-	
-        jDouble = env->NewObject (doubleClass, doubleInit, *iter);
-	
-	env->CallObjectMethod(valArrayList, ArrayListAdd, jDouble);
-      }
-      
-      
-      env->CallObjectMethod(result, mapPut, env->NewStringUTF(valIter->first.c_str()),valArrayList);
-      
-    }
-  } catch (exception &ex) {
-    cout << "Exception during getValues("<< pattern << "," << startx << ","
-	 << endx << "," << nx << "," << starty << "," << endy << "," << ny 
-	 << ") : "<< ex.what() << endl;
-    string aStr= (string)ex.what();
-    jclass newExcCls = env->FindClass("java/lang/Exception");
-    if (newExcCls == 0) { 
-      cout << "Unable to find the new exception class, give up." << endl;
-      //      env->ReleaseStringUTFChars (parmNamePattern, pattern);
-      return result;
-    }
-
-    env->ThrowNew(newExcCls,aStr.c_str());
-  }
-  
-  return result;
-}
-
-/*
- * Class:     nl_astron_lofar_java_cep_jparmfacade_jParmFacade
- * Method:    getHistory
- * Signature: (Ljava/lang/String;DDIDDI)Ljava/util/HashMap;
- */
-JNIEXPORT jobject JNICALL Java_nl_astron_lofar_java_cep_jparmfacade_jParmFacade_getHistory (JNIEnv *env, jobject obj, jstring parmNamePattern, jdouble startx, jdouble endx, jdouble starty, jdouble endy, jdouble startSolveTime, jdouble endSolveTime) {
-
-  jboolean isCopy;
-  jobject result;
-
-  // create the connection with the ParmDB
-  setParmDBConnection(env,obj);
-
-
-  const char* pattern = env->GetStringUTFChars (parmNamePattern, &isCopy);
-  map<string,vector<double> > valMap;
-  try {
-    valMap = theirPF->getHistory(pattern,startx,endx,starty,endy,startSolveTime,endSolveTime);
-    env->ReleaseStringUTFChars (parmNamePattern, pattern);
-
-    // Construct java Map
-    jclass mapClass, doubleClass, ArrayListClass;
-    jmethodID mapInit, mapPut, ArrayListAdd, doubleInit, ArrayListInit;
-    
-    mapClass = env->FindClass("java/util/HashMap");
-    mapInit = env->GetMethodID(mapClass, "<init>", "()V");
-    result = env->NewObject(mapClass, mapInit);
-    mapPut= env->GetMethodID(mapClass, "put", "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;");
-    
-    // Construct java Double
-    jobject jDouble;
-    doubleClass = env->FindClass ("java/lang/Double");
-    doubleInit = env->GetMethodID (doubleClass, "<init>", "(D)V");
-    
-    
-    // Loop through map and convert to HashMap
-    
-    for (map<string,vector<double> >::const_iterator valIter=valMap.begin();
-         valIter != valMap.end();
-         valIter++) {
-      
-      // Construct java ArrayList
-      jobject valArrayList
-      ArrayListClass = env->FindClass("java/util/ArrayList");
-      ArrayListInit = env->GetMethodID(ArrayListClass, "<init>", "()V");
-      valArrayList = env->NewObject(ArrayListClass, ArrayListInit);
-      ArrayListAdd = env->GetMethodID(ArrayListClass, "add", "(Ljava/lang/Object;)Z");
-      
-      
-      for (vector<double>::const_iterator iter=valIter->second.begin();
-	   iter != valIter->second.end();
-	   iter++) {
-	
-        jDouble = env->NewObject (doubleClass, doubleInit, *iter);
-	
-	env->CallObjectMethod(valArrayList, ArrayListAdd, jDouble);
-      }
-      
-      
-      env->CallObjectMethod(result, mapPut, env->NewStringUTF(valIter->first.c_str()),valArrayList);
-      
-    }
-  } catch (exception &ex) {
-    cout << "Exception during getHistory("<< pattern << "," << startx << ","
-	 << endx << "," << starty << "," << endy << "," << startSolveTime << ","
-         << endSolveTime << ") : "<< ex.what() << endl;
-    string aStr= (string)ex.what();
-    jclass newExcCls = env->FindClass("java/lang/Exception");
-    if (newExcCls == 0) { 
-      cout << "Unable to find the new exception class, give up." << endl;
-      //      env->ReleaseStringUTFChars (parmNamePattern, pattern);
-      return result;
-    }
-
-    env->ThrowNew(newExcCls,aStr.c_str());
-  }
-  
-  return result;
-}
-
-
-void  setParmDBConnection(JNIEnv *env, jobject callerObject) {
-
-  // get the  callerclass
-  jclass jPF=env->GetObjectClass(callerObject);
-
-  // get the methodID
-  jfieldID id_PFID = env->GetFieldID (jPF, "itsParmFacadeDB","Ljava/lang/String;");
-
-  // get the value
-  jstring nstr = (jstring)env->GetObjectField (callerObject, id_PFID);
-
-  const char* n = env->GetStringUTFChars (nstr, 0);
-  const string name (n);
-  // create the connection with the c++ ParmFacade
-  cout << "Connect to :" << name << endl;
-  theirPF=new ParmFacade(name);
-  env->ReleaseStringUTFChars (nstr, n);
-}
+//#  nl_astron_lofar_java_cep_jparmfacade_jParmFacade.cc: Manages the 
+//#              connection with the parameter database.
+//#
+//#  Copyright (C) 2005-2007
+//#  ASTRON (Netherlands Foundation for Research in Astronomy)
+//#  P.O.Box 2, 7990 AA Dwingeloo, The Netherlands, softwaresupport@astron.nl
+//#
+//#  This program is free software; you can redistribute it and/or modify
+//#  it under the terms of the GNU General Public License as published by
+//#  the Free Software Foundation; either version 2 of the License, or
+//#  (at your option) any later version.
+//#
+//#  This program is distributed in the hope that it will be useful,
+//#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+//#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+//#  GNU General Public License for more details.
+//#
+//#  You should have received a copy of the GNU General Public License
+//#  along with this program; if not, write to the Free Software
+//#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
+//#
+
+//# Always #include <lofar_config.h> first!
+#include <lofar_config.h>
+
+#include <jni.h>
+#include <jParmFacade/nl_astron_lofar_java_cep_jparmfacade_jParmFacade.h>
+#include <jParmFacade/nl_astron_lofar_java_cep_jparmfacade_jCommon.h>
+#include <ParmFacade/ParmFacade.h>
+#include <iostream>
+#include <string>
+
+using namespace LOFAR::ParmDB;
+using namespace std;
+
+ParmFacade* theirPF;
+
+
+/*
+ * Class:     nl_astron_lofar_java_cep_jparmfacade_jParmFacade
+ * Method:    getRange
+ * Signature: (Ljava/lang/String;)Ljava/util/ArrayList;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_java_cep_jparmfacade_jParmFacade_getRange  (JNIEnv *env, jobject obj, jstring parmNamePattern) {
+
+  jboolean isCopy;
+  jobject rangeArrayList;
+
+  // create the connection with the ParmDB
+  setParmDBConnection(env,obj);
+
+  const char* pattern = env->GetStringUTFChars (parmNamePattern, &isCopy);
+  vector<double> rangeList;
+  try {
+    rangeList = theirPF->getRange(pattern);
+
+    env->ReleaseStringUTFChars (parmNamePattern, pattern);
+
+    vector<double>::iterator rangeIterator;
+    
+    // Construct java ArrayList
+    jclass class_ArrayList = env->FindClass("java/util/ArrayList");
+    jmethodID mid_ArrayList_cons = env->GetMethodID(class_ArrayList, "<init>", "()V");
+    rangeArrayList = env->NewObject(class_ArrayList, mid_ArrayList_cons);
+    jmethodID mid_ArrayList_add = env->GetMethodID(class_ArrayList, "add", "(Ljava/lang/Object;)Z");
+    
+    // Double
+    jobject jDouble;
+    jclass class_Double = env->FindClass ("java/lang/Double");
+    jmethodID mid_Double_cons = env->GetMethodID (class_Double, "<init>", "(D)V");
+    
+    for (rangeIterator = rangeList.begin(); rangeIterator != rangeList.end(); rangeIterator++) {
+      jDouble = env->NewObject (class_Double, mid_Double_cons, *rangeIterator);
+      
+      env->CallObjectMethod(rangeArrayList, mid_ArrayList_add, jDouble);
+    }
+  } catch (exception &ex) {
+    string aStr= (string)ex.what();
+    cout << "Exception during getRange("<< pattern << "): "<< ex.what() << endl;
+    env->ThrowNew(env->FindClass("java/lang/Exception"),aStr.c_str());
+  }
+  
+
+  return rangeArrayList;
+}
+
+
+/*
+ * Class:     nl_astron_lofar_java_cep_jparmfacade_jParmFacade
+ * Method:    getNames
+ * Signature: (Ljava/lang/String;)Ljava/util/ArrayList;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_java_cep_jparmfacade_jParmFacade_getNames  (JNIEnv *env, jobject obj, jstring parmNamePattern) {
+
+  jboolean isCopy;
+  jobject nameArrayList;
+
+  // create the connection with the ParmDB
+  setParmDBConnection(env,obj);
+
+  const char* pattern = env->GetStringUTFChars (parmNamePattern, &isCopy);
+  vector<string> nameList;
+  try {
+    nameList = theirPF->getNames(pattern);
+
+    env->ReleaseStringUTFChars (parmNamePattern, pattern);
+
+    vector<string>::iterator nameIterator;
+
+    // Construct java ArrayList
+    jclass class_ArrayList = env->FindClass("java/util/ArrayList");
+    jmethodID mid_ArrayList_cons = env->GetMethodID(class_ArrayList, "<init>", "()V");
+    nameArrayList = env->NewObject(class_ArrayList, mid_ArrayList_cons);
+    jmethodID mid_ArrayList_add = env->GetMethodID(class_ArrayList, "add", "(Ljava/lang/Object;)Z");
+
+    jstring jstr;
+    for (nameIterator = nameList.begin(); nameIterator != nameList.end(); nameIterator++) {
+      jstr = env->NewStringUTF (((string)*nameIterator).c_str());
+      env->CallObjectMethod(nameArrayList, mid_ArrayList_add, jstr);
+    }
+  } catch (exception &ex) {
+    string aStr= (string)ex.what();
+    cout << "Exception during getNames("<< pattern << "): "<< ex.what() << endl;
+    env->ThrowNew(env->FindClass("java/lang/Exception"),aStr.c_str());
+  }
+
+  return nameArrayList;
+}
+
+/*
+ * Class:     nl_astron_lofar_java_cep_jparmfacade_jParmFacade
+ * Method:    getValues
+ * Signature: (Ljava/lang/String;DDIDDI)Ljava/util/HashMap;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_java_cep_jparmfacade_jParmFacade_getValues (JNIEnv *env, jobject obj, jstring parmNamePattern, jdouble startx, jdouble endx, jint nx, jdouble starty, jdouble endy, jint ny) {
+
+  jboolean isCopy;
+  jobject result;
+
+  // create the connection with the ParmDB
+  setParmDBConnection(env,obj);
+
+
+  const char* pattern = env->GetStringUTFChars (parmNamePattern, &isCopy);
+  map<string,vector<double> > valMap;
+  try {
+    valMap = theirPF->getValues(pattern,startx,endx,nx,starty,endy,ny);
+    env->ReleaseStringUTFChars (parmNamePattern, pattern);
+
+    // Construct java Map
+    jclass mapClass, doubleClass, ArrayListClass;
+    jmethodID mapInit, mapPut, ArrayListAdd, doubleInit, ArrayListInit;
+    
+    mapClass = env->FindClass("java/util/HashMap");
+    mapInit = env->GetMethodID(mapClass, "<init>", "()V");
+    result = env->NewObject(mapClass, mapInit);
+    mapPut= env->GetMethodID(mapClass, "put", "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;");
+    
+    // Construct java Double
+    jobject jDouble;
+    doubleClass = env->FindClass ("java/lang/Double");
+    doubleInit = env->GetMethodID (doubleClass, "<init>", "(D)V");
+    
+    
+    // Loop through map and convert to HashMap
+    
+    for (map<string,vector<double> >::const_iterator valIter=valMap.begin();
+         valIter != valMap.end();
+         valIter++) {
+      
+      // Construct java ArrayList
+      jobject valArrayList;
+      ArrayListClass = env->FindClass("java/util/ArrayList");
+      ArrayListInit = env->GetMethodID(ArrayListClass, "<init>", "()V");
+      valArrayList = env->NewObject(ArrayListClass, ArrayListInit);
+      ArrayListAdd = env->GetMethodID(ArrayListClass, "add", "(Ljava/lang/Object;)Z");
+      
+      
+      for (vector<double>::const_iterator iter=valIter->second.begin();
+	   iter != valIter->second.end();
+	   iter++) {
+	
+        jDouble = env->NewObject (doubleClass, doubleInit, *iter);
+	
+	env->CallObjectMethod(valArrayList, ArrayListAdd, jDouble);
+      }
+      
+      
+      env->CallObjectMethod(result, mapPut, env->NewStringUTF(valIter->first.c_str()),valArrayList);
+      
+    }
+  } catch (exception &ex) {
+    cout << "Exception during getValues("<< pattern << "," << startx << ","
+	 << endx << "," << nx << "," << starty << "," << endy << "," << ny 
+	 << ") : "<< ex.what() << endl;
+    string aStr= (string)ex.what();
+    jclass newExcCls = env->FindClass("java/lang/Exception");
+    if (newExcCls == 0) { 
+      cout << "Unable to find the new exception class, give up." << endl;
+      //      env->ReleaseStringUTFChars (parmNamePattern, pattern);
+      return result;
+    }
+
+    env->ThrowNew(newExcCls,aStr.c_str());
+  }
+  
+  return result;
+}
+
+/*
+ * Class:     nl_astron_lofar_java_cep_jparmfacade_jParmFacade
+ * Method:    getHistory
+ * Signature: (Ljava/lang/String;DDDDDD)Ljava/util/HashMap;
+ */
+JNIEXPORT jobject JNICALL Java_nl_astron_lofar_java_cep_jparmfacade_jParmFacade_getHistory (JNIEnv *env, jobject obj, jstring parmNamePattern, jdouble startx, jdouble endx, jdouble starty, jdouble endy, jdouble startSolveTime, jdouble endSolveTime) {
+
+  jboolean isCopy;
+  jobject result;
+
+  // create the connection with the ParmDB
+  setParmDBConnection(env,obj);
+
+
+  const char* pattern = env->GetStringUTFChars (parmNamePattern, &isCopy);
+  map<string,vector<double> > valMap;
+  try {
+    valMap = theirPF->getHistory(pattern,startx,endx,starty,endy,startSolveTime,endSolveTime);
+    env->ReleaseStringUTFChars (parmNamePattern, pattern);
+
+    // Construct java Map
+    jclass mapClass, doubleClass, ArrayListClass;
+    jmethodID mapInit, mapPut, ArrayListAdd, doubleInit, ArrayListInit;
+    
+    mapClass = env->FindClass("java/util/HashMap");
+    mapInit = env->GetMethodID(mapClass, "<init>", "()V");
+    result = env->NewObject(mapClass, mapInit);
+    mapPut= env->GetMethodID(mapClass, "put", "(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;");
+    
+    // Construct java Double
+    jobject jDouble;
+    doubleClass = env->FindClass ("java/lang/Double");
+    doubleInit = env->GetMethodID (doubleClass, "<init>", "(D)V");
+    
+    
+    // Loop through map and convert to HashMap
+    
+    for (map<string,vector<double> >::const_iterator valIter=valMap.begin();
+         valIter != valMap.end();
+         valIter++) {
+      
+      // Construct java ArrayList
+      jobject valArrayList;
+      ArrayListClass = env->FindClass("java/util/ArrayList");
+      ArrayListInit = env->GetMethodID(ArrayListClass, "<init>", "()V");
+      valArrayList = env->NewObject(ArrayListClass, ArrayListInit);
+      ArrayListAdd = env->GetMethodID(ArrayListClass, "add", "(Ljava/lang/Object;)Z");
+      
+      
+      for (vector<double>::const_iterator iter=valIter->second.begin();
+	   iter != valIter->second.end();
+	   iter++) {
+	
+        jDouble = env->NewObject (doubleClass, doubleInit, *iter);
+	
+	env->CallObjectMethod(valArrayList, ArrayListAdd, jDouble);
+      }
+      
+      
+      env->CallObjectMethod(result, mapPut, env->NewStringUTF(valIter->first.c_str()),valArrayList);
+      
+    }
+  } catch (exception &ex) {
+    cout << "Exception during getHistory("<< pattern << "," << startx << ","
+	 << endx << "," << starty << "," << endy << "," << startSolveTime << ","
+         << endSolveTime << ") : "<< ex.what() << endl;
+    string aStr= (string)ex.what();
+    jclass newExcCls = env->FindClass("java/lang/Exception");
+    if (newExcCls == 0) { 
+      cout << "Unable to find the new exception class, give up." << endl;
+      //      env->ReleaseStringUTFChars (parmNamePattern, pattern);
+      return result;
+    }
+
+    env->ThrowNew(newExcCls,aStr.c_str());
+  }
+  
+  return result;
+}
+
+
+void  setParmDBConnection(JNIEnv *env, jobject callerObject) {
+
+  // get the  callerclass
+  jclass jPF=env->GetObjectClass(callerObject);
+
+  // get the methodID
+  jfieldID id_PFID = env->GetFieldID (jPF, "itsParmFacadeDB","Ljava/lang/String;");
+
+  // get the value
+  jstring nstr = (jstring)env->GetObjectField (callerObject, id_PFID);
+
+  const char* n = env->GetStringUTFChars (nstr, 0);
+  const string name (n);
+  // create the connection with the c++ ParmFacade
+  cout << "Connect to :" << name << endl;
+  theirPF=new ParmFacade(name);
+  env->ReleaseStringUTFChars (nstr, n);
+}
diff --git a/SAS/TMSS/docker/tmss-testenv/tmss_testenv_Dockerfile b/SAS/TMSS/docker/tmss-testenv/tmss_testenv_Dockerfile
index 9df0c266349de01397167786a72dbc6515b15d1c..51319605fd0a2d14e5ff3e6e7a5849f23768b2a2 100644
--- a/SAS/TMSS/docker/tmss-testenv/tmss_testenv_Dockerfile
+++ b/SAS/TMSS/docker/tmss-testenv/tmss_testenv_Dockerfile
@@ -21,7 +21,7 @@ ENV PATH=$PATH:/usr/pgsql-9.4/bin/
 RUN echo "Checking out code base" && \
     git clone https://git.astron.nl/ro/lofar.git && \
     cd lofar && \
-    git checkout TMSS-138 && \
+    git checkout TMSS-146 && \
     . CMake/gen_LofarPackageList_cmake.sh && \
     PACKAGE=TMSS  && \
     VARIANT=gnucxx11_opt  && \
diff --git a/SAS/TMSS/frontend/frontend_poc/CMakeLists.txt b/SAS/TMSS/frontend/frontend_poc/CMakeLists.txt
index 1ae41c4cd29cef701b63346ec279750fda9a81c9..a7b29692312dd1bd73eb102ea0f6da8cf90cc6f1 100644
--- a/SAS/TMSS/frontend/frontend_poc/CMakeLists.txt
+++ b/SAS/TMSS/frontend/frontend_poc/CMakeLists.txt
@@ -1,4 +1,4 @@
-message(WARNING "disabled npm_install, because it currently fails to build. FIX THIS, or remove it.")
+#message(WARNING "disabled npm_install, because it currently fails to build. FIX THIS, or remove it.")
 
-#include(NPMInstall)
-#npm_install(package.json PUBLIC public SOURCE src DESTINATION share/www)
+include(NPMInstall)
+npm_install(package.json PUBLIC public SOURCE src DESTINATION share/www)
diff --git a/SAS/TMSS/frontend/frontend_poc/package.json b/SAS/TMSS/frontend/frontend_poc/package.json
index 18e6f9f550eb33fc02e6fd2d8e5e59b32039c240..66f613f597b47d4b42cbd2edd41540523cef7e65 100644
--- a/SAS/TMSS/frontend/frontend_poc/package.json
+++ b/SAS/TMSS/frontend/frontend_poc/package.json
@@ -4,12 +4,16 @@
   "private": true,
   "dependencies": {
     "bootstrap": "^4.3.1",
+    "core-js": "^3.6.4",
     "jquery": "^3.4.1",
     "popper.js": "^1.16.0",
     "react": "^16.11.0",
     "react-dom": "^16.11.0",
+    "react-jsonschema-form": "^1.8.1",
+    "react-jsonschema-form-bs4": "^1.7.1",
     "react-router-dom": "^5.1.2",
-    "react-scripts": "3.2.0"
+    "react-scripts": "3.2.0",
+    "typescript": "^3.7.5"
   },
   "scripts": {
     "start": "react-scripts start",
diff --git a/SAS/TMSS/frontend/frontend_poc/public/index.html b/SAS/TMSS/frontend/frontend_poc/public/index.html
index fd48159b584133cc32d4e8f2b58a32bb114b2152..bddfd4f241f7b630b7e93f26ab3620774bf5c2cc 100644
--- a/SAS/TMSS/frontend/frontend_poc/public/index.html
+++ b/SAS/TMSS/frontend/frontend_poc/public/index.html
@@ -10,6 +10,7 @@
       content="Web site created using create-react-app"
     />
     <link rel="apple-touch-icon" href="logo192.png" />
+    <link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.6.3/css/all.css"> <!-- for BS4 -->
     <!--
       manifest.json provides metadata used when your web app is installed on a
       user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
diff --git a/SAS/TMSS/frontend/frontend_poc/src/App.css b/SAS/TMSS/frontend/frontend_poc/src/App.css
index afc3885715f4a69457fdfccdb9aa4220c30ec1f5..9ecb272f6298180d506a00ba79ddc587e154b9fe 100644
--- a/SAS/TMSS/frontend/frontend_poc/src/App.css
+++ b/SAS/TMSS/frontend/frontend_poc/src/App.css
@@ -20,3 +20,8 @@
 .App-link {
   color: #09d3ac;
 }
+
+.jsonform {
+    border-left: 3px solid #007bff;
+    padding-left: 10pt;
+}
\ No newline at end of file
diff --git a/SAS/TMSS/frontend/frontend_poc/src/CycleList.js b/SAS/TMSS/frontend/frontend_poc/src/CycleList.js
index 3db6f67ad075d26801a54ca3747f6af2d3859e11..f643715d44925a87b388980e0cd07d3ecbd5dd59 100644
--- a/SAS/TMSS/frontend/frontend_poc/src/CycleList.js
+++ b/SAS/TMSS/frontend/frontend_poc/src/CycleList.js
@@ -5,7 +5,7 @@ import 'bootstrap/dist/js/bootstrap.js';
 // Procedures
 
 var headers = new Headers();
-headers.append('Authorization', 'Basic ' + btoa('paulus:pauluspass'));
+headers.append('Authorization', 'Basic ' + btoa('test:test'));
 headers.append('Content-Type', 'application/json');
 
 var api_url = '/api/'
diff --git a/SAS/TMSS/frontend/frontend_poc/src/ProjectList.js b/SAS/TMSS/frontend/frontend_poc/src/ProjectList.js
index 62f0e3e601ec393957a4e4ffe09a905fd39d6cc8..05516e7e454f3dce9a42af5db702aa186bee1ef1 100644
--- a/SAS/TMSS/frontend/frontend_poc/src/ProjectList.js
+++ b/SAS/TMSS/frontend/frontend_poc/src/ProjectList.js
@@ -6,7 +6,7 @@ import 'bootstrap/dist/js/bootstrap.js';
 // Procedures
 
 var headers = new Headers();
-headers.append('Authorization', 'Basic ' + btoa('paulus:pauluspass'));
+headers.append('Authorization', 'Basic ' + btoa('test:test'));
 headers.append('Content-Type', 'application/json');
 
 var api_url = '/api/'
diff --git a/SAS/TMSS/frontend/frontend_poc/src/UC1.js b/SAS/TMSS/frontend/frontend_poc/src/UC1.js
new file mode 100644
index 0000000000000000000000000000000000000000..f01fd526d11fef0c2f68cfaf6425da904eb1bb7f
--- /dev/null
+++ b/SAS/TMSS/frontend/frontend_poc/src/UC1.js
@@ -0,0 +1,415 @@
+// React
+import React, {Component} from 'react';
+import ReactDOM from 'react-dom';
+
+// Bootstrap
+import 'bootstrap/dist/css/bootstrap.css';
+import 'bootstrap/dist/js/bootstrap.js';
+
+// JSON editor
+// ...Bootstrap v3 is used by default, but for various reasons, we want v4:
+import Form from 'react-jsonschema-form-bs4'; // todo: use main line "react-jsonschema-form" once it supports Bootstrap v4
+// ...only supports latest out of the box, older schemas need to be passed to Form via this:
+const additionalMetaSchemas = require("ajv/lib/refs/json-schema-draft-06.json");
+
+
+// todo: The BasicAuth should only be used for testing, remove once we serve from same webserver as api
+// todo: add a check that we have an active session with the API, redirect offer login if not
+var headers = new Headers();
+headers.append('Authorization', 'Basic ' + btoa('test:test'));
+headers.append('Content-Type', 'application/json');
+
+var api_url = 'http://localhost:8008/api/';
+//var api_url = '/api/'  // todo: use this again, once we serve this from same webserver as api
+
+
+// Procedures   // todo: revise and put these somewhere so they can be shared by entire frontend
+
+function tmssGetList(url, component){
+   if(!url.startsWith('http')){
+        url = api_url+url;
+   }
+   fetch(url, {headers: headers})
+     .then(response => response.json())
+     .then(response => {// React cannot handle deep states, so we have to stringify nested objects before setState()
+                        response.results.forEach(result => {result.specifications_doc = JSON.stringify(result.specifications_doc);});
+                        component.setState({items:response.results});})
+     .catch(err => alert(err))
+   }
+
+function tmssGet(url, component){
+   if(!url.startsWith('http')){
+        url = api_url+url;
+   }
+   fetch(url, {headers: headers})
+     .then(response => response.json())
+     .then(response => {component.setState(response);
+                        component.updateSchema();
+                        })
+     .catch(err => alert(err))
+   }
+
+function tmssGetReferenceList(url, state_name, component){
+   if(!url.startsWith('http')){
+        url = api_url+url;
+   }
+   fetch(url, {headers: headers})
+                .then(response => {return response.json();})
+                .then(response => {
+                    var references = response.results.map((reference) => {return reference.url});
+                    component.setState({
+                        [state_name]: references
+                    });
+                 })}
+
+function tmssPost(url, data, component){
+   if(!url.startsWith('http')){
+        url = api_url+url;
+   }
+   fetch(url, {headers: headers, method: 'POST', body: data})
+     .then(ReactDOM.render(<TaskDraftList />, document.getElementById('root')))
+     .catch(err => alert(err))
+   }
+
+function tmssPut(url, data, component){
+   if(!url.startsWith('http')){
+        url = api_url+url;
+   }
+   fetch(url, {headers: headers, method: 'PUT', body: data})
+     .then(ReactDOM.render(<TaskDraftList />, document.getElementById('root')))
+     .catch(err => alert(err))
+   }
+
+function tmssPatch(url, data, component){
+   if(!url.startsWith('http')){
+        url = api_url+url;
+   }
+   fetch(url, {headers: headers, method: 'PATCH', body: data})
+     .then(ReactDOM.render(<TaskDraftList />, document.getElementById('root')))
+     .catch(err => alert(err))
+   }
+
+function tmssDelete(url){
+   if(!url.startsWith('http')){
+        url = api_url+url;
+   }
+   fetch(url, {headers: headers, method: 'DELETE'})
+     .catch(err => alert(err))
+   }
+
+
+// Components
+
+const Tag = props => (
+   <span className={props.tag === 'test' ? 'badge badge-primary' : 'badge badge-secondary'}>{props.tag}</span>
+)
+
+const TaskDraft = props => (
+    <tr>
+	<td>{props.task_draft.tags.map(
+        function(currentTag, i){
+            return <Tag tag={currentTag} key={i} />
+        }
+        )}</td>
+    <td>{props.task_draft.name}</td>
+    <td>{props.task_draft.description}</td>
+    <td>{props.task_draft.scheduling_unit_draft}</td>
+    <td>{props.task_draft.specifications_template}</td>
+    <td>{props.task_draft.specifications_doc}</td>
+    <td>
+        <button className="btn btn-primary" onClick={() => ReactDOM.render(<EditTaskDraft id={props.task_draft.url}/>, document.getElementById('root'))}>Edit</button>
+        <button className="btn btn-danger"onClick={() => {
+                tmssDelete(props.task_draft.url);
+                    props.deleteItem(props.index);
+                }}>Delete</button>
+    </td>
+    </tr>
+)
+
+// Main Components
+
+class EditTaskDraft extends Component {
+
+    constructor(props) {
+        super(props);
+
+        this.onChangeName = this.onChangeName.bind(this);
+        this.onChangeDescription = this.onChangeDescription.bind(this);
+        this.onChangeSpecificationsDoc = this.onChangeSpecificationsDoc.bind(this);
+        this.onChangeCopies = this.onChangeCopies.bind(this);
+        this.onChangeCopyReason = this.onChangeCopyReason.bind(this);
+        this.onChangeSchedulingUnitDraft = this.onChangeSchedulingUnitDraft.bind(this);
+        this.onChangeSpecificationsTemplate = this.onChangeSpecificationsTemplate.bind(this);
+        this.onSubmit = this.onSubmit.bind(this);
+        this.updateSchema = this.updateSchema.bind(this);
+
+        this.state = {
+            isnew: props.isnew,
+            id: props.id,
+            schema: {},
+            templates: [],
+            drafts: [],
+            // default values for new item (state gets overwritten by componentDidMount for non-new):
+            name: "my_name",
+            tags: ["test"],
+            description: 'my_description',
+            specifications_doc: {
+                  "stations": [
+                    {
+                      "group": "ALL",
+                      "min_stations": 1
+                    }
+                  ],
+                  "antenna_set": "HBA_DUAL",
+                  "filter": "HBA_110_190",
+                  "analog_pointing": {
+                    "direction_type": "J2000",
+                    "angle1": 42,
+                    "angle2": 42
+                  },
+                  "beams": [
+                    {
+                      "name": "calibrator",
+                      "digital_pointing": {
+                        "direction_type": "J2000",
+                        "angle1": 24,
+                        "angle2": 24
+                      },
+                      "subbands": [
+                        1,
+                        2,
+                        3
+                      ]
+                    }
+                  ]
+                  },
+            copies: null,
+            copy_reason: null,
+            scheduling_unit_draft: api_url + "scheduling_unit_draft/1/",
+            specifications_template: api_url + "task_template/1/",
+            related_task_blueprint: [],
+            produced_by: [],
+            consumed_by: []
+        }
+    }
+
+    componentDidMount() {
+        if(!this.state.isnew){
+            // update state with db info of the represented entity
+    	    tmssGet(this.state.id, this);
+        }
+        //update list  entities for UI elements
+    	tmssGetReferenceList('task_template/', 'templates', this);
+    	tmssGetReferenceList('scheduling_unit_draft/', 'drafts', this);
+    }
+
+    updateSchema(){
+        fetch(this.state.specifications_template, {headers: headers})
+            .then(response => response.json())
+            .then(response => {
+                this.setState({
+                    schema: response.schema
+                    });
+                })
+    }
+
+    onChangeName(e) {
+        this.setState({
+            name: e.target.value
+        });
+    }
+
+    onChangeDescription(e) {
+        e.preventDefault()
+        this.setState({
+            description: e.target.value
+        });
+    }
+
+    onChangeSpecificationsDoc(e) {
+        this.setState({
+            specifications_doc: e.formData
+        });
+    }
+
+    onChangeCopies(e) {
+        this.setState({
+            copies: e.target.value
+        });
+    }
+
+    onChangeCopyReason(e) {
+        this.setState({
+            copy_reason: e.target.value
+        });
+    }
+
+    onChangeSchedulingUnitDraft(e) {
+        this.setState({
+            scheduling_unit_draft: e.target.value
+        });
+    }
+
+    onChangeSpecificationsTemplate(e) {
+        this.setState({
+            specifications_template: e.target.value
+        });
+        this.updateSchema();
+    }
+
+    onSubmit(e) {
+        e.preventDefault();
+        const data = {
+            name: this.state.name,
+            tags: this.state.tags,
+            description: this.state.description,
+            specifications_doc: this.state.specifications_doc,
+            copies: this.state.copies,
+            copy_reason: this.state.copy_reason,
+            scheduling_unit_draft: this.state.scheduling_unit_draft,
+            specifications_template: this.state.specifications_template,
+            related_task_blueprint: this.state.related_task_blueprint,
+            produced_by: this.state.produced_by,
+            consumed_by: this.state.consumed_by
+        };
+        if(this.state.isnew){
+            tmssPost('task_draft/', JSON.stringify(data), this);
+        }else{
+            tmssPatch(this.props.id, JSON.stringify(data), this);
+        }
+    }
+
+    render() {
+        return (
+            <div>
+                <h3 align="center">Edit Task Draft</h3>
+                <form onSubmit={this.onSubmit}>
+                    <div className="form-group">
+                        <label>Name: </label>
+                        <input  type="text"
+                                className="form-control"
+                                value={this.state.name}
+                                onChange={this.onChangeName}
+                                />
+                    </div>
+                    <div className="form-group">
+                        <label>Description: </label>
+                        <input  type="text"
+                                className="form-control"
+                                value={this.state.description}
+                                onChange={this.onChangeDescription}
+                                />
+                    </div>
+                    <div className="form-group">
+                        <label>Copies: </label>
+                        <input
+                                type="text"
+                                className="form-control"
+                                value={this.state.copies}
+                                onChange={this.onChangeCopies}
+                                />
+                    </div>
+                    <div className="form-group">
+                        <label>CopyReason: </label>
+                        <input
+                                type="text"
+                                className="form-control"
+                                value={this.state.copy_reason}
+                                onChange={this.onChangeCopyReason}
+                                />
+                    </div>
+                    <div className="form-group">
+                        <label>SchedulingUnitDraft: </label>
+                        <select
+                                className="form-control"
+                                value={this.state.scheduling_unit_draft}
+                                onChange={this.onChangeSchedulingUnitDraft}
+                                >
+                                {this.state.drafts.map((opt) => {return <option key={opt} value={opt}>{opt}</option>;})}
+                        </select>
+                    </div>
+                    <div className="form-group">
+                        <label>SpecificationsTemplate: </label>
+                        <select
+                                className="form-control"
+                                value={this.state.specifications_template}
+                                onChange={this.onChangeSpecificationsTemplate}
+                                >
+                                {this.state.templates.map((opt) => {return <option key={opt} value={opt}>{opt}</option>;})}
+                        </select>
+                    </div>
+                    <div className="form-group">
+                        <label>SpecificationsDoc: </label>
+                          <Form className="jsonform"
+                            schema={this.state.schema}
+                            additionalMetaSchemas={[additionalMetaSchemas]}
+                            liveValidate={true}
+                            show_opt_in={true}
+                            formData={this.state.specifications_doc}
+                            onChange={this.onChangeSpecificationsDoc} />
+                    </div>
+                    <br />
+
+                    {/*
+                    We get a submit button from the JSONEditor, so we don't need it here.
+                    todo: It would be cleaner to use our own and hide away the JSONeditor one...
+                    <div className="form-group">
+                            <input type="submit" value="Submit" className="btn btn-primary" />
+                    </div>
+                    */}
+                </form>
+            </div>
+        )
+    }
+}
+
+class TaskDraftList extends Component {
+
+    constructor(props) {
+        super(props);
+        this.state = {items: []};
+    }
+
+    componentDidMount(){
+        tmssGetList('task_draft/', this);
+    }
+
+    deleteItem = (index) => {
+        var itms = this.state.items;
+        itms.splice(index, 1);
+        this.setState( {items: itms} );
+    }
+
+    task_drafts() {
+        return this.state.items.map(
+		(currentTaskDraft, i) => <TaskDraft task_draft={currentTaskDraft} key={i} index={i} deleteItem={this.deleteItem} />
+	);
+    }
+
+    render() {
+        return (
+            <div>
+                <h3>Task Draft List <button className="btn btn-primary" onClick={() => ReactDOM.render(<EditTaskDraft id={'dummy_id'} isnew={true} />, document.getElementById('root'))}>Create New</button></h3>
+                <table className="table table-striped table-bordered table-hover">
+                    <thead>
+                        <tr>
+                            <th>Tags</th>
+                            <th>Name</th>
+                            <th>Description</th>
+                            <th>SchedulingUnitDraft</th>
+                            <th>SpecificationsTemplate</th>
+			                <th>SpecificationsDoc</th>
+                            <th>Actions</th>
+                        </tr>
+                    </thead>
+                    <tbody>
+                        { this.task_drafts() }
+                    </tbody>
+                </table>
+            </div>
+        )
+    }
+}
+
+export default TaskDraftList
+
diff --git a/SAS/TMSS/frontend/frontend_poc/src/index.js b/SAS/TMSS/frontend/frontend_poc/src/index.js
index a809b448c60bb2bb463fd0cd447cd90fa3df484f..33cee0742276e8f49c03c939d54f674dbcf15313 100644
--- a/SAS/TMSS/frontend/frontend_poc/src/index.js
+++ b/SAS/TMSS/frontend/frontend_poc/src/index.js
@@ -3,6 +3,7 @@ import ReactDOM from 'react-dom';
 //<import './index.css';
 import CycleList from './CycleList';
 import ProjectList from './ProjectList';
+import UC1 from './UC1';
 import App from './App';
 import * as serviceWorker from './serviceWorker';
 
@@ -14,6 +15,7 @@ const routing = (
       <Route exact path="/frontend/" component={App} />
       <Route path="/frontend/cycle" component={CycleList} />
       <Route path="/frontend/project" component={ProjectList} />
+      <Route path="/frontend/uc1" component={UC1} />
     </div>
   </Router>
 )
diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
index 7e06297f42bb8e1ff4b2dc75b272a558ec3ac8a9..c463f9fceb55b63dbec8e0f6d700b853211d78f0 100644
--- a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
@@ -157,10 +157,11 @@ class SubtaskSerializerJSONeditorOnline(RelationalHyperlinkedModelSerializer):
         import json
 
         try:
-            schema = self.instance.specifications_template.schema
-            self.fields['specifications_doc'] = serializers.JSONField(
-                style={'template': 'josdejong_jsoneditor_widget.html',
-                       'schema': json.dumps(schema)})
+            if isinstance(self.instance, models.Subtask):
+                schema = self.instance.specifications_template.schema
+                self.fields['specifications_doc'] = serializers.JSONField(
+                    style={'template': 'josdejong_jsoneditor_widget.html',
+                           'schema': json.dumps(schema)})
         except Exception as e:
             # todo: Shall we use one of the default templates for the init?
             logger.exception('Could not determine schema, hence no fancy JSON form. Expected for list view.')
diff --git a/SAS/TMSS/test/CMakeLists.txt b/SAS/TMSS/test/CMakeLists.txt
index 80a4fc73d59f80cc899d0e0692a42ab626a200ab..980f937556504a4b8ad1ca13cc28dfab705fdf9f 100644
--- a/SAS/TMSS/test/CMakeLists.txt
+++ b/SAS/TMSS/test/CMakeLists.txt
@@ -28,5 +28,6 @@ if(BUILD_TESTING)
     lofar_add_test(t_tmssapp_specification_permissions)
     lofar_add_test(t_tmss_session_auth)
 
-    # set_tests_properties(t_tmssapp_scheduling_functional PROPERTIES TIMEOUT 300)
+    set_tests_properties(t_tmssapp_scheduling_functional PROPERTIES TIMEOUT 300)
+    set_tests_properties(t_tmssapp_specification_functional PROPERTIES TIMEOUT 300)
 endif()
diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_functional.py b/SAS/TMSS/test/t_tmssapp_scheduling_functional.py
index ba1681a9c1983c8af828c608eb1b7b24fb25f2db..1c46c09ee95846377399d1b63c8f1c45e11e4bc1 100755
--- a/SAS/TMSS/test/t_tmssapp_scheduling_functional.py
+++ b/SAS/TMSS/test/t_tmssapp_scheduling_functional.py
@@ -39,13 +39,12 @@ logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=loggin
 from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import *
 from lofar.sas.tmss.test.tmss_test_data_django_models import *
 from lofar.sas.tmss.tmss.tmssapp import models
+from lofar.common.datetimeutils import formatDatetime
 
 # import and setup test data creator
 from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
 test_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH)
 
-DJANGO_TIMEFORMAT = "%Y-%m-%dT%H:%M:%S"
-
 
 class SubtaskTemplateTestCase(unittest.TestCase):
     def test_subtask_template_list_apiformat(self):
@@ -251,6 +250,13 @@ class DefaultSubtaskTemplatesTestCase(unittest.TestCase):
 
 
 class SubtaskTestCase(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.cluster_url = test_data_creator.post_data_and_get_url(test_data_creator.Cluster(), '/cluster/')
+        cls.task_blueprint_data = test_data_creator.TaskBlueprint()
+        cls.task_blueprint_url = test_data_creator.post_data_and_get_url(cls.task_blueprint_data, '/task_blueprint/')
+        cls.specifications_template_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskTemplate(), '/subtask_template/')
+
     def test_subtask_list_apiformat(self):
         r = requests.get(BASE_URL + '/subtask/?format=api', auth=AUTH)
         self.assertEqual(r.status_code, 200)
@@ -260,7 +266,7 @@ class SubtaskTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/subtask/1234321/', 404, {})
 
     def test_subtask_POST_and_GET(self):
-        st_test_data = test_data_creator.Subtask()
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
@@ -271,13 +277,13 @@ class SubtaskTestCase(unittest.TestCase):
         self.assertGreaterEqual(int(subtask_id), minimium_subtaskid)
 
     def test_subtask_PUT_invalid_raises_error(self):
-        st_test_data = test_data_creator.Subtask()
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
 
         PUT_and_assert_expected_response(self, BASE_URL + '/subtask/9876789876/', st_test_data, 404, {})
 
     def test_subtask_PUT(self):
-        st_test_data = test_data_creator.Subtask()
-        st_test_data2 = test_data_creator.Subtask()
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
+        st_test_data2 = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
@@ -289,7 +295,7 @@ class SubtaskTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, st_test_data2)
 
     def test_subtask_PATCH(self):
-        st_test_data = test_data_creator.Subtask()
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
@@ -305,7 +311,7 @@ class SubtaskTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_subtask_DELETE(self):
-        st_test_data = test_data_creator.Subtask()
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)
@@ -316,7 +322,7 @@ class SubtaskTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)
 
     def test_subtask_PROTECT_behavior_on_state_choice_deleted(self):
-        st_test_data = test_data_creator.Subtask()
+        st_test_data = test_data_creator.Subtask(cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url, specifications_template_url=self.specifications_template_url)
 
         # create dependency that is safe to delete (enums are not populated / re-established between tests)
         state_data = {'value': 'kickme'}
@@ -337,9 +343,12 @@ class SubtaskTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, state_url, 200, state_data)
 
     def test_subtask_SET_NULL_behavior_on_task_blueprint_deleted(self):
-        tbp_test_data = test_data_creator.TaskBlueprint()
+        # make new task_blueprint_url instance, but reuse related data for speed
+        tbp_test_data = test_data_creator.TaskBlueprint(draft_url=self.task_blueprint_data['draft'],
+                                                        template_url=self.task_blueprint_data['specifications_template'],
+                                                        scheduling_unit_blueprint_url=self.task_blueprint_data['scheduling_unit_blueprint'])
         task_blueprint_url = test_data_creator.post_data_and_get_url(tbp_test_data, '/task_blueprint/')
-        st_test_data = test_data_creator.Subtask(task_blueprint_url=task_blueprint_url)
+        st_test_data = test_data_creator.Subtask(task_blueprint_url=task_blueprint_url, cluster_url=self.cluster_url, specifications_template_url=self.specifications_template_url)
 
         # POST new item and verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)['url']
@@ -356,7 +365,7 @@ class SubtaskTestCase(unittest.TestCase):
     def test_subtask_PROTECT_behavior_on_template_deleted(self):
         stt_test_data = test_data_creator.SubtaskTemplate()
         specifications_template_url = test_data_creator.post_data_and_get_url(stt_test_data, '/subtask_template/')
-        st_test_data = test_data_creator.Subtask(specifications_template_url=specifications_template_url)
+        st_test_data = test_data_creator.Subtask(specifications_template_url=specifications_template_url, cluster_url=self.cluster_url, task_blueprint_url=self.task_blueprint_url)
 
         # POST new item and verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask/', st_test_data, 201, st_test_data)['url']
@@ -371,6 +380,12 @@ class SubtaskTestCase(unittest.TestCase):
 
 
 class DataproductTestCase(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.specifications_template_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskTemplate(), '/dataproduct_specifications_template/')
+        cls.subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(), '/subtask_output/')
+        cls.dataproduct_feedback_template_url = test_data_creator.post_data_and_get_url(test_data_creator.DataproductFeedbackTemplate(), '/dataproduct_feedback_template/')
+
     def test_dataproduct_list_apiformat(self):
         r = requests.get(BASE_URL + '/dataproduct/?format=api', auth=AUTH)
         self.assertEqual(r.status_code, 200)
@@ -380,7 +395,7 @@ class DataproductTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/dataproduct/1234321/', 404, {})
 
     def test_dataproduct_POST_and_GET(self):
-        dp_test_data = test_data_creator.Dataproduct()
+        dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.specifications_template_url, subtask_output_url=self.subtask_output_url, dataproduct_feedback_template_url=self.dataproduct_feedback_template_url)
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data)
@@ -388,13 +403,13 @@ class DataproductTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, dp_test_data)
 
     def test_dataproduct_PUT_invalid_raises_error(self):
-        dp_test_data = test_data_creator.Dataproduct()
+        dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.specifications_template_url, subtask_output_url=self.subtask_output_url, dataproduct_feedback_template_url=self.dataproduct_feedback_template_url)
 
         PUT_and_assert_expected_response(self, BASE_URL + '/dataproduct/9876789876/', dp_test_data, 404, {})
 
     def test_dataproduct_PUT(self):
-        dp_test_data = test_data_creator.Dataproduct()
-        dp_test_data2 = test_data_creator.Dataproduct()
+        dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.specifications_template_url, subtask_output_url=self.subtask_output_url, dataproduct_feedback_template_url=self.dataproduct_feedback_template_url)
+        dp_test_data2 = test_data_creator.Dataproduct(specifications_template_url=self.specifications_template_url, subtask_output_url=self.subtask_output_url, dataproduct_feedback_template_url=self.dataproduct_feedback_template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data)
@@ -406,7 +421,7 @@ class DataproductTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, dp_test_data2)
 
     def test_dataproduct_PATCH(self):
-        dp_test_data = test_data_creator.Dataproduct()
+        dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.specifications_template_url, subtask_output_url=self.subtask_output_url, dataproduct_feedback_template_url=self.dataproduct_feedback_template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data)
@@ -423,7 +438,7 @@ class DataproductTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_dataproduct_DELETE(self):
-        dp_test_data = test_data_creator.Dataproduct()
+        dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.specifications_template_url, subtask_output_url=self.subtask_output_url, dataproduct_feedback_template_url=self.dataproduct_feedback_template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data)
@@ -434,7 +449,7 @@ class DataproductTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)
 
     def test_dataproduct_PROTECT_behavior_on_dataformat_deleted(self):
-        dp_test_data = test_data_creator.Dataproduct()
+        dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.specifications_template_url, subtask_output_url=self.subtask_output_url, dataproduct_feedback_template_url=self.dataproduct_feedback_template_url)
 
         # create dependency that is safe to delete (enums are not populated / re-established between tests)
         dataformat_data = {'value': 'kickme'}
@@ -456,7 +471,7 @@ class DataproductTestCase(unittest.TestCase):
 
     def test_dataproduct_CASCADE_behavior_on_specifications_template_deleted(self):
         specifications_template_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskTemplate(), '/dataproduct_specifications_template/')
-        dp_test_data = test_data_creator.Dataproduct(specifications_template_url=specifications_template_url)
+        dp_test_data = test_data_creator.Dataproduct(specifications_template_url=specifications_template_url, subtask_output_url=self.subtask_output_url, dataproduct_feedback_template_url=self.dataproduct_feedback_template_url)
 
         # POST new item, verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct/', dp_test_data, 201, dp_test_data)['url']
@@ -578,6 +593,18 @@ class SubtaskConnectorTestCase(unittest.TestCase):
 
 
 class SubtaskInputTestCase(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.subtask_data = test_data_creator.Subtask()
+        cls.subtask_url = test_data_creator.post_data_and_get_url(cls.subtask_data, '/subtask/')
+        cls.task_relation_blueprint_data = test_data_creator.TaskRelationBlueprint()
+        cls.task_relation_blueprint_url = test_data_creator.post_data_and_get_url(cls.task_relation_blueprint_data, '/task_relation_blueprint/')
+        cls.dataproduct_urls = [test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(), '/dataproduct/'), test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(), '/dataproduct/')]
+        cls.subtask_connector_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskConnector(), '/subtask_connector/')
+        cls.subtask_output_data = test_data_creator.SubtaskOutput()
+        cls.subtask_output_url = test_data_creator.post_data_and_get_url(cls.subtask_output_data, '/subtask_output/')
+        cls.subtask_input_selection_template_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskInputSelectionTemplate(), '/subtask_input_selection_template/')
+
     def test_subtask_input_list_apiformat(self):
         r = requests.get(BASE_URL + '/subtask_input/?format=api', auth=AUTH)
         self.assertEqual(r.status_code, 200)
@@ -587,7 +614,7 @@ class SubtaskInputTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/subtask_input/1234321/', 404, {})
 
     def test_subtask_input_POST_and_GET(self):
-        sti_test_data = test_data_creator.SubtaskInput()
+        sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url)
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)
@@ -595,12 +622,12 @@ class SubtaskInputTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, sti_test_data)
 
     def test_subtask_input_PUT_invalid_raises_error(self):
-        sti_test_data = test_data_creator.SubtaskInput()
+        sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url)
 
         PUT_and_assert_expected_response(self, BASE_URL + '/subtask_input/9876789876/', sti_test_data, 404, {})
 
     def test_subtask_input_PUT(self):
-        sti_test_data = test_data_creator.SubtaskInput()
+        sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)
@@ -608,19 +635,23 @@ class SubtaskInputTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, sti_test_data)
 
         # PUT new values, verify
-        sti_test_data2 = test_data_creator.SubtaskInput()
+        sti_test_data2 = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url)
         PUT_and_assert_expected_response(self, url, sti_test_data2, 200, sti_test_data2)
         GET_and_assert_expected_response(self, url, 200, sti_test_data2)
 
     def test_subtask_input_PATCH(self):
-        sti_test_data = test_data_creator.SubtaskInput()
+        sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)
         url = r_dict['url']
         GET_and_assert_expected_response(self, url, 200, sti_test_data)
 
-        subtask_url = test_data_creator.post_data_and_get_url(test_data_creator.Subtask(), '/subtask/')
+        # create a new subtask instance (fresh URL), but reuse its related data for speed
+        subtask_url = test_data_creator.post_data_and_get_url(test_data_creator.Subtask(cluster_url=self.subtask_data['cluster'],
+                                                                                        task_blueprint_url=self.subtask_data['task_blueprint'],
+                                                                                        specifications_template_url=self.subtask_data['specifications_template'],
+                                                                                        specifications_doc=self.subtask_data['specifications_doc']), '/subtask/')
         test_patch = {"subtask": subtask_url,
                       "tags": ['FANCYTAG'],
                       }
@@ -632,7 +663,7 @@ class SubtaskInputTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_subtask_input_DELETE(self):
-        sti_test_data = test_data_creator.SubtaskInput()
+        sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)
@@ -643,8 +674,12 @@ class SubtaskInputTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)
 
     def test_subtask_input_CASCADE_behavior_on_subtask_deleted(self):
-        subtask_url = test_data_creator.post_data_and_get_url(test_data_creator.Subtask(), '/subtask/')
-        sti_test_data = test_data_creator.SubtaskInput(subtask_url=subtask_url)
+        # create a new subtask instance (fresh URL), but reuse its related data for speed
+        subtask_url = test_data_creator.post_data_and_get_url(test_data_creator.Subtask(cluster_url=self.subtask_data['cluster'],
+                                                                                        task_blueprint_url=self.subtask_data['task_blueprint'],
+                                                                                        specifications_template_url=self.subtask_data['specifications_template'],
+                                                                                        specifications_doc=self.subtask_data['specifications_doc']), '/subtask/')
+        sti_test_data = test_data_creator.SubtaskInput(subtask_url=subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url)
 
         # POST new item, verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url']
@@ -658,7 +693,7 @@ class SubtaskInputTestCase(unittest.TestCase):
 
     def test_subtask_input_SET_NULL_behavior_on_connector_deleted(self):
         subtask_connector_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskConnector(), '/subtask_connector/')
-        sti_test_data = test_data_creator.SubtaskInput(subtask_connector_url=subtask_connector_url)
+        sti_test_data = test_data_creator.SubtaskInput(subtask_connector_url=subtask_connector_url, subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url)
 
         # POST new item, verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url']
@@ -673,8 +708,11 @@ class SubtaskInputTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_subtask_input_SET_NULL_behavior_on_task_relation_blueprint_deleted(self):
-        task_relation_blueprint_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskRelationBlueprint(), '/task_relation_blueprint/')
-        sti_test_data = test_data_creator.SubtaskInput(task_relation_blueprint_url=task_relation_blueprint_url)
+        # make new task_relation_blueprint instance, but reuse related data for speed
+        task_relation_blueprint_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskRelationBlueprint(draft_url=self.task_relation_blueprint_data['draft'], template_url=self.task_relation_blueprint_data['selection_template'],
+                                                                                                                      input_url=self.task_relation_blueprint_data['input'], output_url=self.task_relation_blueprint_data['output'],
+                                                                                                                      consumer_url=self.task_relation_blueprint_data['consumer'], producer_url=self.task_relation_blueprint_data['producer']), '/task_relation_blueprint/')
+        sti_test_data = test_data_creator.SubtaskInput(task_relation_blueprint_url=task_relation_blueprint_url, subtask_url=self.subtask_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url)
 
         # POST new item, verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url']
@@ -689,8 +727,9 @@ class SubtaskInputTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_subtask_input_PROTECT_behavior_on_producer_deleted(self):
-        subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(), '/subtask_output/')
-        sti_test_data = test_data_creator.SubtaskInput(subtask_output_url=subtask_output_url)
+        # create a new subtask_output instance (fresh URL), but reuse its related data for speed
+        subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=self.subtask_output_data['subtask'], subtask_connector_url=self.subtask_output_data['connector']), '/subtask_output/')
+        sti_test_data = test_data_creator.SubtaskInput(subtask_output_url=subtask_output_url, subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url)
 
         # POST with dependency
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url']
@@ -705,7 +744,12 @@ class SubtaskInputTestCase(unittest.TestCase):
 
     def test_subtask_input_PROTECT_behavior_on_selection_template_deleted(self):
         subtask_input_selection_template_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskInputSelectionTemplate(), '/subtask_input_selection_template/')
-        sti_test_data = test_data_creator.SubtaskInput(subtask_input_selection_template_url=subtask_input_selection_template_url)
+        sti_test_data = test_data_creator.SubtaskInput(subtask_input_selection_template_url=subtask_input_selection_template_url,
+                                                       subtask_url=self.subtask_url,
+                                                       task_relation_blueprint_url=self.task_relation_blueprint_url,
+                                                       dataproduct_urls=self.dataproduct_urls,
+                                                       subtask_connector_url=self.subtask_connector_url,
+                                                       subtask_output_url=self.subtask_output_url)
 
         # POST with dependency
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url']
@@ -720,6 +764,13 @@ class SubtaskInputTestCase(unittest.TestCase):
 
 
 class SubtaskOutputTestCase(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.subtask_data = test_data_creator.Subtask()
+        cls.subtask_url = test_data_creator.post_data_and_get_url(cls.subtask_data, '/subtask/')
+        cls.subtask_connector_data = test_data_creator.SubtaskConnector()
+        cls.subtask_connector_url = test_data_creator.post_data_and_get_url(cls.subtask_connector_data, '/subtask_connector/')
+
     def test_subtask_output_list_apiformat(self):
         r = requests.get(BASE_URL + '/subtask_output/?format=api', auth=AUTH)
         self.assertEqual(r.status_code, 200)
@@ -729,7 +780,7 @@ class SubtaskOutputTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/subtask_output/1234321/', 404, {})
 
     def test_subtask_output_POST_and_GET(self):
-        sto_test_data = test_data_creator.SubtaskOutput()
+        sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url)
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201,
@@ -738,12 +789,12 @@ class SubtaskOutputTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, sto_test_data)
 
     def test_subtask_output_PUT_invalid_raises_error(self):
-        sto_test_data = test_data_creator.SubtaskOutput()
+        sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url)
         PUT_and_assert_expected_response(self, BASE_URL + '/subtask_output/9876789876/', sto_test_data, 404, {})
 
     def test_subtask_output_PUT(self):
-        sto_test_data = test_data_creator.SubtaskOutput()
-        sto_test_data2 = test_data_creator.SubtaskOutput()
+        sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url)
+        sto_test_data2 = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201,sto_test_data)
@@ -755,8 +806,8 @@ class SubtaskOutputTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, sto_test_data2)
 
     def test_subtask_output_PATCH(self):
-        sto_test_data = test_data_creator.SubtaskOutput()
-        sto_test_data2 = test_data_creator.SubtaskOutput()
+        sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url)
+        sto_test_data2 = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201,
@@ -774,7 +825,7 @@ class SubtaskOutputTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_subtask_output_DELETE(self):
-        sto_test_data = test_data_creator.SubtaskOutput()
+        sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201,
@@ -786,9 +837,9 @@ class SubtaskOutputTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)
 
     def test_subtask_output_CASCADE_behavior_on_subtask_deleted(self):
-        st_test_data = test_data_creator.Subtask()
-        subtask_url = test_data_creator.post_data_and_get_url(st_test_data, '/subtask/')
-        sto_test_data = test_data_creator.SubtaskOutput(subtask_url=subtask_url)
+        # create a new subtask instance (fresh URL), but reuse its related data for speed
+        subtask_url = test_data_creator.post_data_and_get_url(self.subtask_data, '/subtask/')
+        sto_test_data = test_data_creator.SubtaskOutput(subtask_url=subtask_url, subtask_connector_url=self.subtask_connector_url)
 
         # POST new item, verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, sto_test_data)['url']
@@ -801,7 +852,7 @@ class SubtaskOutputTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 404, {})
 
     def test_subtask_output_SET_NULL_behavior_on_connector_deleted(self):
-        sto_test_data = test_data_creator.SubtaskOutput()
+        sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url)
 
         # POST new item, verify
         url = \
@@ -905,6 +956,13 @@ class AntennaSetTestCase(unittest.TestCase):
 
 
 class DataproductTransformTestCase(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.input_dataproduct_data = test_data_creator.Dataproduct()
+        cls.input_dataproduct_url = test_data_creator.post_data_and_get_url(cls.input_dataproduct_data, '/dataproduct/')
+        cls.output_dataproduct_data = test_data_creator.Dataproduct()
+        cls.output_dataproduct_url = test_data_creator.post_data_and_get_url(cls.output_dataproduct_data, '/dataproduct/')
+
     def test_dataproduct_transform_list_apiformat(self):
         r = requests.get(BASE_URL + '/dataproduct_transform/?format=api', auth=AUTH)
         self.assertEqual(r.status_code, 200)
@@ -914,7 +972,7 @@ class DataproductTransformTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/1234321/', 404, {})
 
     def test_dataproduct_transform_POST_and_GET(self):
-        dpt_test_data = test_data_creator.DataproductTransform()
+        dpt_test_data = test_data_creator.DataproductTransform(input_dataproduct_url=self.input_dataproduct_url, output_dataproduct_url=self.output_dataproduct_url)
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data)
@@ -922,13 +980,13 @@ class DataproductTransformTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, dpt_test_data)
 
     def test_dataproduct_transform_PUT_invalid_raises_error(self):
-        dpt_test_data = test_data_creator.DataproductTransform()
+        dpt_test_data = test_data_creator.DataproductTransform(input_dataproduct_url=self.input_dataproduct_url, output_dataproduct_url=self.output_dataproduct_url)
 
         PUT_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/9876789876/', dpt_test_data, 404, {})
 
     def test_dataproduct_transform_PUT(self):
-        dpt_test_data = test_data_creator.DataproductTransform()
-        dpt_test_data2 = test_data_creator.DataproductTransform()
+        dpt_test_data = test_data_creator.DataproductTransform(input_dataproduct_url=self.input_dataproduct_url, output_dataproduct_url=self.output_dataproduct_url)
+        dpt_test_data2 = test_data_creator.DataproductTransform(input_dataproduct_url=self.input_dataproduct_url, output_dataproduct_url=self.output_dataproduct_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data)
@@ -940,14 +998,18 @@ class DataproductTransformTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, dpt_test_data2)
 
     def test_dataproduct_transform_PATCH(self):
-        dpt_test_data = test_data_creator.DataproductTransform()
+        dpt_test_data = test_data_creator.DataproductTransform(input_dataproduct_url=self.input_dataproduct_url, output_dataproduct_url=self.output_dataproduct_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data)
         url = r_dict['url']
         GET_and_assert_expected_response(self, url, 200, dpt_test_data)
 
-        output_dataproduct_url = test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(), '/dataproduct/')
+        # create a new output dataproduct instance (fresh URL), but reuse its related data for speed
+        output_dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.output_dataproduct_data['specifications_template'],
+                                                           subtask_output_url=self.output_dataproduct_data['producer'],
+                                                           dataproduct_feedback_template_url=self.output_dataproduct_data['feedback_template'])
+        output_dataproduct_url = test_data_creator.post_data_and_get_url(output_dp_test_data, '/dataproduct/')
 
         test_patch = {"output": output_dataproduct_url,
                       "identity": False }
@@ -959,7 +1021,7 @@ class DataproductTransformTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_dataproduct_transform_DELETE(self):
-        dpt_test_data = test_data_creator.DataproductTransform()
+        dpt_test_data = test_data_creator.DataproductTransform(input_dataproduct_url=self.input_dataproduct_url, output_dataproduct_url=self.output_dataproduct_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data)
@@ -970,9 +1032,12 @@ class DataproductTransformTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)
 
     def test_dataproduct_transform_PROTECT_behavior_on_input_deleted(self):
-        input_dp_test_data = test_data_creator.Dataproduct()
+        # create a new input dataproduct instance (fresh URL), but reuse its related data for speed
+        input_dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.input_dataproduct_data['specifications_template'],
+                                                           subtask_output_url=self.input_dataproduct_data['producer'],
+                                                           dataproduct_feedback_template_url=self.input_dataproduct_data['feedback_template'])
         input_dataproduct_url = test_data_creator.post_data_and_get_url(input_dp_test_data, '/dataproduct/')
-        dpt_test_data = test_data_creator.DataproductTransform(input_dataproduct_url=input_dataproduct_url)
+        dpt_test_data = test_data_creator.DataproductTransform(input_dataproduct_url=input_dataproduct_url, output_dataproduct_url=self.output_dataproduct_url)
 
         # POST new item and verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data)['url']
@@ -986,9 +1051,12 @@ class DataproductTransformTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, input_dataproduct_url, 200, input_dp_test_data)
 
     def test_dataproduct_transform_PROTECT_behavior_on_output_deleted(self):
-        output_dp_test_data = test_data_creator.Dataproduct()
+        # create a new output dataproduct instance (fresh URL), but reuse its related data for speed
+        output_dp_test_data = test_data_creator.Dataproduct(specifications_template_url=self.output_dataproduct_data['specifications_template'],
+                                                           subtask_output_url=self.output_dataproduct_data['producer'],
+                                                           dataproduct_feedback_template_url=self.output_dataproduct_data['feedback_template'])
         output_dataproduct_url = test_data_creator.post_data_and_get_url(output_dp_test_data, '/dataproduct/')
-        dpt_test_data = test_data_creator.DataproductTransform(output_dataproduct_url=output_dataproduct_url)
+        dpt_test_data = test_data_creator.DataproductTransform(output_dataproduct_url=output_dataproduct_url, input_dataproduct_url=self.input_dataproduct_url)
 
         # POST new item and verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_transform/', dpt_test_data, 201, dpt_test_data)['url']
@@ -1152,6 +1220,10 @@ class ClusterTestCase(unittest.TestCase):
 
 
 class DataproductHashTestCase(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.dataproduct_url = test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(), '/dataproduct/')
+
     def test_dataproduct_hash_list_apiformat(self):
         r = requests.get(BASE_URL + '/dataproduct_hash/?format=api', auth=AUTH)
         self.assertEqual(r.status_code, 200)
@@ -1161,7 +1233,7 @@ class DataproductHashTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/1234321/', 404, {})
 
     def test_dataproduct_hash_POST_and_GET(self):
-        dph_test_data = test_data_creator.DataproductHash()
+        dph_test_data = test_data_creator.DataproductHash(dataproduct_url=self.dataproduct_url)
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data,
@@ -1170,14 +1242,14 @@ class DataproductHashTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, dph_test_data)
 
     def test_dataproduct_hash_PUT_invalid_raises_error(self):
-        dph_test_data = test_data_creator.DataproductHash()
+        dph_test_data = test_data_creator.DataproductHash(dataproduct_url=self.dataproduct_url)
 
         PUT_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/9876789876/', dph_test_data,
                                          404, {})
 
     def test_dataproduct_hash_PUT(self):
-        dph_test_data = test_data_creator.DataproductHash(hash="the one")
-        dph_test_data2 = test_data_creator.DataproductHash(hash="the other")
+        dph_test_data = test_data_creator.DataproductHash(hash="the one", dataproduct_url=self.dataproduct_url)
+        dph_test_data2 = test_data_creator.DataproductHash(hash="the other", dataproduct_url=self.dataproduct_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data,
@@ -1190,7 +1262,7 @@ class DataproductHashTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, dph_test_data2)
 
     def test_dataproduct_hash_PATCH(self):
-        dph_test_data = test_data_creator.DataproductHash()
+        dph_test_data = test_data_creator.DataproductHash(dataproduct_url=self.dataproduct_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data,
@@ -1208,7 +1280,7 @@ class DataproductHashTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_dataproduct_hash_DELETE(self):
-        dph_test_data = test_data_creator.DataproductHash()
+        dph_test_data = test_data_creator.DataproductHash(dataproduct_url=self.dataproduct_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data,
@@ -1220,7 +1292,7 @@ class DataproductHashTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)
 
     def test_dataproduct_hash_PROTECT_behavior_on_dataproduct_deleted(self):
-        dph_test_data = test_data_creator.DataproductHash()
+        dph_test_data = test_data_creator.DataproductHash(dataproduct_url=self.dataproduct_url)
 
         # POST new item and verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data, 201,
@@ -1235,7 +1307,7 @@ class DataproductHashTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, dph_test_data['dataproduct'], 200, {})
 
     def test_dataproduct_hash_PROTECT_behavior_on_algorithm_deleted(self):
-        dph_test_data = test_data_creator.DataproductHash()
+        dph_test_data = test_data_creator.DataproductHash(dataproduct_url=self.dataproduct_url)
 
         # POST new item and verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_hash/', dph_test_data, 201,
@@ -1251,6 +1323,10 @@ class DataproductHashTestCase(unittest.TestCase):
 
 
 class DataproductArchiveInfoTestCase(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.dataproduct_url = test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(), '/dataproduct/')
+
     def test_dataproduct_archive_info_list_apiformat(self):
         r = requests.get(BASE_URL + '/dataproduct_archive_info/?format=api', auth=AUTH)
         self.assertEqual(r.status_code, 200)
@@ -1260,7 +1336,7 @@ class DataproductArchiveInfoTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/1234321/', 404, {})
 
     def test_dataproduct_archive_info_POST_and_GET(self):
-        dpai_test_data = test_data_creator.DataproductArchiveInfo()
+        dpai_test_data = test_data_creator.DataproductArchiveInfo(dataproduct_url=self.dataproduct_url)
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/', dpai_test_data,
@@ -1269,14 +1345,14 @@ class DataproductArchiveInfoTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, dpai_test_data)
 
     def test_dataproduct_archive_info_PUT_invalid_raises_error(self):
-        dpai_test_data = test_data_creator.DataproductArchiveInfo()
+        dpai_test_data = test_data_creator.DataproductArchiveInfo(dataproduct_url=self.dataproduct_url)
 
         PUT_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/9876789876/', dpai_test_data,
                                          404, {})
 
     def test_dataproduct_archive_info_PUT(self):
-        dpai_test_data = test_data_creator.DataproductArchiveInfo()
-        dpai_test_data2 = test_data_creator.DataproductArchiveInfo()
+        dpai_test_data = test_data_creator.DataproductArchiveInfo(dataproduct_url=self.dataproduct_url)
+        dpai_test_data2 = test_data_creator.DataproductArchiveInfo(dataproduct_url=self.dataproduct_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/', dpai_test_data,
@@ -1289,7 +1365,7 @@ class DataproductArchiveInfoTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, dpai_test_data2)
 
     def test_dataproduct_archive_info_PATCH(self):
-        dpai_test_data = test_data_creator.DataproductArchiveInfo()
+        dpai_test_data = test_data_creator.DataproductArchiveInfo(dataproduct_url=self.dataproduct_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/', dpai_test_data,
@@ -1306,7 +1382,7 @@ class DataproductArchiveInfoTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_dataproduct_archive_info_DELETE(self):
-        dpai_test_data = test_data_creator.DataproductArchiveInfo()
+        dpai_test_data = test_data_creator.DataproductArchiveInfo(dataproduct_url=self.dataproduct_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/', dpai_test_data,
@@ -1318,7 +1394,7 @@ class DataproductArchiveInfoTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)
 
     def test_dataproduct_archive_info_PROTECT_behavior_on_dataproduct_deleted(self):
-        dpai_test_data = test_data_creator.DataproductArchiveInfo()
+        dpai_test_data = test_data_creator.DataproductArchiveInfo(dataproduct_url=self.dataproduct_url)
 
         # POST new item and verify
         url = POST_and_assert_expected_response(self, BASE_URL + '/dataproduct_archive_info/', dpai_test_data, 201,
@@ -1343,6 +1419,7 @@ class SubtaskQuery(unittest.TestCase):
     - query stop time and cluster
     - query with incorrect input
     """
+    #TODO: add proper indexes on start and stop time
 
     def check_response_OK_and_result_count(self, response, expected_count):
         """
@@ -1376,8 +1453,8 @@ class SubtaskQuery(unittest.TestCase):
         for day_idx in range(0, total_number):
             start_time = datetime.now() + timedelta(hours=2, days=day_idx)
             stop_time = datetime.now() + timedelta(hours=4, days=day_idx)
-            subtask_data = Subtask_test_data(start_time=start_time.strftime(DJANGO_TIMEFORMAT),
-                                             stop_time=stop_time.strftime(DJANGO_TIMEFORMAT),
+            subtask_data = Subtask_test_data(start_time=formatDatetime(start_time),
+                                             stop_time=formatDatetime(stop_time),
                                              cluster_object=cluster_object)
             models.Subtask.objects.create(**subtask_data)
 
@@ -1420,8 +1497,7 @@ class SubtaskQuery(unittest.TestCase):
             start_time = datetime.now()
             stop_time = start_time + timedelta(days=period_length_in_days)
             expected_count = period_length_in_days
-            logger.info("Check query in a period (%s until %s) for %s",
-                        (start_time.strftime(DJANGO_TIMEFORMAT), stop_time.strftime(DJANGO_TIMEFORMAT), cluster_name))
+            logger.info("Check query in a period (%s until %s) for %s", formatDatetime(start_time), formatDatetime(stop_time), cluster_name)
             response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s&cluster__name=%s' %
                                     (start_time, stop_time, cluster_name), auth=AUTH)
             self.check_response_OK_and_result_count(response, expected_count)
@@ -1435,7 +1511,7 @@ class SubtaskQuery(unittest.TestCase):
                 self.check_response_OK_and_result_count(response, 1)
 
         logger.info("Check query in a period (%s until %s) for clusterNotExist" %
-                    (start_time.strftime(DJANGO_TIMEFORMAT), stop_time.strftime(DJANGO_TIMEFORMAT)))
+                    (formatDatetime(start_time), formatDatetime(stop_time)))
         response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s&cluster__name=%s' %
                                 (start_time, stop_time, "clusterNotExist"), auth=AUTH)
         self.check_response_OK_and_result_count(response, 0)
@@ -1450,7 +1526,7 @@ class SubtaskQuery(unittest.TestCase):
         start_time = datetime.now()
         stop_time = start_time + timedelta(days=period_length_in_days)
         logger.info("Check query in a period (%s until %s)" %
-                    (start_time.strftime(DJANGO_TIMEFORMAT), stop_time.strftime(DJANGO_TIMEFORMAT)))
+                    (formatDatetime(start_time), formatDatetime(stop_time)))
         response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s' %
                                 (start_time, stop_time), auth=AUTH)
         self.check_response_OK_and_result_count(response, expected_count)
@@ -1476,7 +1552,7 @@ class SubtaskQuery(unittest.TestCase):
             start_time = datetime.now()
             expected_count = period_length_in_days
             logger.info("Check query greater than start_time (%s) for %s " %
-                        (start_time.strftime(DJANGO_TIMEFORMAT), cluster_name))
+                        (formatDatetime(start_time), cluster_name))
             response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&cluster__name=%s' %
                                     (start_time, cluster_name), auth=AUTH)
             self.check_response_OK_and_result_count(response, expected_count)
@@ -1498,7 +1574,7 @@ class SubtaskQuery(unittest.TestCase):
         for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items():
             stop_time = datetime.now() + timedelta(days=period_length_in_days)
             logger.info("Check query less than stop_time (%s) for %s " %
-                        (stop_time.strftime(DJANGO_TIMEFORMAT), cluster_name))
+                        (formatDatetime(stop_time), cluster_name))
             response = requests.get(BASE_URL + '/subtask/?stop_time__lt=%s&cluster__name=%s' %
                                     (stop_time, cluster_name), auth=AUTH)
             self.check_response_OK_and_result_count(response, period_length_in_days)
@@ -1533,7 +1609,7 @@ class SubtaskQuery(unittest.TestCase):
         stop_time = datetime.now()
         start_time = stop_time + timedelta(days=period_length_in_days)
         logger.info("Check 'wrong' query in a period (%s until %s)" %
-                    (start_time.strftime(DJANGO_TIMEFORMAT), stop_time.strftime(DJANGO_TIMEFORMAT)))
+                    (formatDatetime(start_time), formatDatetime(stop_time)))
         response = requests.get(BASE_URL + '/subtask/?start_time__gt=%s&stop_time__lt=%s' %
                                 (start_time, stop_time), auth=AUTH)
         self.check_response_OK_and_result_count(response, 0)
@@ -1541,7 +1617,7 @@ class SubtaskQuery(unittest.TestCase):
         start_time = datetime.now()
         stop_time = start_time + timedelta(days=period_length_in_days)
         logger.info("Check 'wrong' query in a period (%s until %s)" %
-                    (start_time.strftime(DJANGO_TIMEFORMAT), stop_time.strftime(DJANGO_TIMEFORMAT)))
+                    (formatDatetime(start_time), formatDatetime(stop_time)))
         response = requests.get(BASE_URL + '/subtask/?start_time__lt=%s&stop_time__gt=%s' %
                                 (start_time, stop_time), auth=AUTH)
         self.check_response_OK_and_result_count(response, 0)
diff --git a/SAS/TMSS/test/t_tmssapp_specification_functional.py b/SAS/TMSS/test/t_tmssapp_specification_functional.py
index 2aba43f467afcbf8f133184e618112a0a7258a5e..fb3deef98e6e7f768d10ec60207e6453c257c5e4 100755
--- a/SAS/TMSS/test/t_tmssapp_specification_functional.py
+++ b/SAS/TMSS/test/t_tmssapp_specification_functional.py
@@ -277,6 +277,10 @@ class WorkRelationSelectionTemplateTestCase(unittest.TestCase):
 
 
 class TaskConnectorsTestCase(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.input_of_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/')
+        cls.output_of_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/')
 
     def test_task_connectors_list_apiformat(self):
         r = requests.get(BASE_URL + '/task_connectors/?format=api', auth=AUTH)
@@ -287,7 +291,7 @@ class TaskConnectorsTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/task_connectors/1234321/', 404, {})
 
     def test_task_connectors_POST_and_GET(self):
-        tc_test_data = test_data_creator.TaskConnectors()
+        tc_test_data = test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url)
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data)
         url = r_dict['url']
@@ -296,7 +300,7 @@ class TaskConnectorsTestCase(unittest.TestCase):
     def test_task_connectors_POST_invalid_role_raises_error(self):
 
         # POST a new item with invalid choice
-        test_data_invalid_role = dict(test_data_creator.TaskConnectors())
+        test_data_invalid_role = dict(test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url))
         test_data_invalid_role['role'] = BASE_URL + '/role/forbidden/'
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_invalid_role, 400, {})
         self.assertTrue('Invalid hyperlink' in str(r_dict['role']))
@@ -304,7 +308,7 @@ class TaskConnectorsTestCase(unittest.TestCase):
     def test_task_connectors_POST_invalid_datatype_raises_error(self):
 
         # POST a new item with invalid choice
-        test_data_invalid = dict(test_data_creator.TaskConnectors())
+        test_data_invalid = dict(test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url))
         test_data_invalid['datatype'] = BASE_URL + '/datatype/forbidden/'
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_invalid, 400, {})
         self.assertTrue('Invalid hyperlink' in str(r_dict['datatype']))
@@ -312,7 +316,7 @@ class TaskConnectorsTestCase(unittest.TestCase):
     def test_task_connectors_POST_invalid_dataformats_raises_error(self):
 
         # POST a new item with invalid choice
-        test_data_invalid = dict(test_data_creator.TaskConnectors())
+        test_data_invalid = dict(test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url))
         test_data_invalid['dataformats'] = [BASE_URL + '/dataformat/forbidden/']
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_invalid, 400, {})
         self.assertTrue('Invalid hyperlink' in str(r_dict['dataformats']))
@@ -320,7 +324,7 @@ class TaskConnectorsTestCase(unittest.TestCase):
     def test_task_connectors_POST_nonexistant_input_of_raises_error(self):
 
         # POST a new item with wrong reference
-        test_data_invalid = dict(test_data_creator.TaskConnectors())
+        test_data_invalid = dict(test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url))
         test_data_invalid['input_of'] = BASE_URL + "/task_template/6353748/"
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_invalid, 400, {})
         self.assertTrue('Invalid hyperlink' in str(r_dict['input_of']))
@@ -328,7 +332,7 @@ class TaskConnectorsTestCase(unittest.TestCase):
     def test_task_connectors_POST_nonexistant_output_of_raises_error(self):
 
         # POST a new item with wrong reference
-        test_data_invalid = dict(test_data_creator.TaskConnectors())
+        test_data_invalid = dict(test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url))
         test_data_invalid['output_of'] = BASE_URL + "/task_template/6353748/"
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_invalid, 400, {})
         self.assertTrue('Invalid hyperlink' in str(r_dict['output_of']))
@@ -340,16 +344,16 @@ class TaskConnectorsTestCase(unittest.TestCase):
         url = r_dict['url']
 
         # POST a new item with correct reference
-        test_data_valid = dict(test_data_creator.TaskConnectors())
+        test_data_valid = dict(test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url))
         test_data_valid['output_of'] = url
         POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', test_data_valid, 201, test_data_valid)
 
     def test_task_connectors_PUT_nonexistant_raises_error(self):
-        PUT_and_assert_expected_response(self, BASE_URL + '/task_connectors/9876789876/', test_data_creator.TaskConnectors(), 404, {})
+        PUT_and_assert_expected_response(self, BASE_URL + '/task_connectors/9876789876/', test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url), 404, {})
 
     def test_task_connectors_PUT(self):
-        tc_test_data1 = test_data_creator.TaskConnectors(role="correlator")
-        tc_test_data2 = test_data_creator.TaskConnectors(role="beamformer")
+        tc_test_data1 = test_data_creator.TaskConnectors(role="correlator", input_of_url=self.input_of_url, output_of_url=self.output_of_url)
+        tc_test_data2 = test_data_creator.TaskConnectors(role="beamformer", input_of_url=self.input_of_url, output_of_url=self.output_of_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data1, 201, tc_test_data1)
@@ -361,7 +365,7 @@ class TaskConnectorsTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, tc_test_data2)
 
     def test_task_connectors_PATCH(self):
-        tc_test_data = test_data_creator.TaskConnectors()
+        tc_test_data = test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data)
@@ -379,7 +383,7 @@ class TaskConnectorsTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_task_connectors_DELETE(self):
-        tc_test_data = test_data_creator.TaskConnectors()
+        tc_test_data = test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=self.output_of_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data)
@@ -391,7 +395,8 @@ class TaskConnectorsTestCase(unittest.TestCase):
 
 
     def test_task_relation_blueprint_CASCADE_behavior_on_inputs_template_deleted(self):
-        tc_test_data = test_data_creator.TaskConnectors()
+        input_of_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/')
+        tc_test_data = test_data_creator.TaskConnectors(input_of_url=input_of_url, output_of_url=self.output_of_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data)['url']
@@ -400,14 +405,15 @@ class TaskConnectorsTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, tc_test_data)
 
         # DELETE dependency
-        DELETE_and_assert_gone(self, tc_test_data['input_of'])
+        DELETE_and_assert_gone(self, input_of_url)
 
         # assert
         GET_and_assert_expected_response(self, url, 404, {})
 
 
     def test_task_relation_blueprint_CASCADE_behavior_on_outputs_template_deleted(self):
-        tc_test_data = test_data_creator.TaskConnectors()
+        output_of_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/')
+        tc_test_data = test_data_creator.TaskConnectors(input_of_url=self.input_of_url, output_of_url=output_of_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_connectors/', tc_test_data, 201, tc_test_data)['url']
@@ -416,7 +422,7 @@ class TaskConnectorsTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, tc_test_data)
 
         # DELETE dependency
-        DELETE_and_assert_gone(self, tc_test_data['output_of'])
+        DELETE_and_assert_gone(self, output_of_url)
 
         # assert
         GET_and_assert_expected_response(self, url, 404, {})
@@ -787,6 +793,11 @@ class SchedulingSetTestCase(unittest.TestCase):
 
 
 class SchedulingUnitDraftTestCase(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.scheduling_set_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingSet(), '/scheduling_set/')
+        cls.template_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitTemplate(), '/scheduling_unit_template/')
+
     def test_scheduling_unit_draft_list_apiformat(self):
         r = requests.get(BASE_URL + '/scheduling_unit_draft/?format=api', auth=AUTH)
         self.assertEqual(r.status_code, 200)
@@ -796,7 +807,7 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/1234321/', 404, {})
 
     def test_scheduling_unit_draft_POST_and_GET(self):
-        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft()
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(scheduling_set_url=self.scheduling_set_url, template_url=self.template_url)
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)
@@ -804,25 +815,25 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, schedulingunitdraft_test_data)
 
     def test_scheduling_unit_draft_PUT_invalid_raises_error(self):
-        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft()
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(scheduling_set_url=self.scheduling_set_url, template_url=self.template_url)
         PUT_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/9876789876/', schedulingunitdraft_test_data, 404, {})
 
     def test_scheduling_unit_draft_PUT(self):
-        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft()
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(scheduling_set_url=self.scheduling_set_url, template_url=self.template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)
         url = r_dict['url']
         GET_and_assert_expected_response(self, url, 200, schedulingunitdraft_test_data)
 
-        schedulingunitdraft_test_data2 = test_data_creator.SchedulingUnitDraft("my_scheduling_unit_draft2")
+        schedulingunitdraft_test_data2 = test_data_creator.SchedulingUnitDraft("my_scheduling_unit_draft2", scheduling_set_url=self.scheduling_set_url, template_url=self.template_url)
 
         # PUT new values, verify
         PUT_and_assert_expected_response(self, url, schedulingunitdraft_test_data2, 200, schedulingunitdraft_test_data2)
         GET_and_assert_expected_response(self, url, 200, schedulingunitdraft_test_data2)
 
     def test_scheduling_unit_draft_PATCH(self):
-        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft()
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(scheduling_set_url=self.scheduling_set_url, template_url=self.template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)
@@ -839,7 +850,7 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_scheduling_unit_draft_DELETE(self):
-        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft()
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(scheduling_set_url=self.scheduling_set_url, template_url=self.template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)
@@ -851,7 +862,7 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
 
     def test_scheduling_unit_draft_CASCADE_behavior_on_scheduling_unit_template_deleted(self):
         template_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitTemplate(), '/scheduling_unit_template/')
-        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(template_url=template_url)
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(template_url=template_url, scheduling_set_url=self.scheduling_set_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/',  schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)['url']
@@ -867,7 +878,7 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
 
     def test_scheduling_unit_draft_CASCADE_behavior_on_scheduling_set_deleted(self):
         scheduling_set_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingSet(), '/scheduling_set/')
-        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(scheduling_set_url=scheduling_set_url)
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(scheduling_set_url=scheduling_set_url, template_url=self.template_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/',  schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)['url']
@@ -882,7 +893,7 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 404, {})
 
     def test_scheduling_unit_draft_SET_NULL_behavior_on_copies_deleted(self):
-        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft()
+        schedulingunitdraft_test_data = test_data_creator.SchedulingUnitDraft(scheduling_set_url=self.scheduling_set_url, template_url=self.template_url)
 
         # POST new item with dependency
         copy_url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_draft/', schedulingunitdraft_test_data, 201, schedulingunitdraft_test_data)['url']
@@ -902,6 +913,11 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
 
 
 class TaskDraftTestCase(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.scheduling_unit_draft_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitDraft(), '/scheduling_unit_draft/')
+        cls.template_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/')
+
     def test_task_draft_list_apiformat(self):
         r = requests.get(BASE_URL + '/task_draft/?format=api', auth=AUTH)
         self.assertEqual(r.status_code, 200)
@@ -911,7 +927,7 @@ class TaskDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/task_draft/1234321/', 404, {})
 
     def test_task_draft_POST_and_GET(self):
-        taskdraft_test_data = test_data_creator.TaskDraft()
+        taskdraft_test_data = test_data_creator.TaskDraft(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url)
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data, 201, taskdraft_test_data)
@@ -919,12 +935,12 @@ class TaskDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, taskdraft_test_data)
 
     def test_task_draft_PUT_invalid_raises_error(self):
-        taskdraft_test_data = test_data_creator.TaskDraft()
+        taskdraft_test_data = test_data_creator.TaskDraft(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url)
         PUT_and_assert_expected_response(self, BASE_URL + '/task_draft/9876789876/', taskdraft_test_data, 404, {})
 
     def test_task_draft_PUT(self):
-        taskdraft_test_data1 = test_data_creator.TaskDraft(name="the one")
-        taskdraft_test_data2 = test_data_creator.TaskDraft(name="the other")
+        taskdraft_test_data1 = test_data_creator.TaskDraft(name="the one", scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url)
+        taskdraft_test_data2 = test_data_creator.TaskDraft(name="the other", scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data1, 201, taskdraft_test_data1)
@@ -936,7 +952,7 @@ class TaskDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, taskdraft_test_data2)
 
     def test_task_draft_PATCH(self):
-        taskdraft_test_data = test_data_creator.TaskDraft()
+        taskdraft_test_data = test_data_creator.TaskDraft(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data, 201, taskdraft_test_data)
@@ -953,7 +969,7 @@ class TaskDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_task_draft_DELETE(self):
-        taskdraft_test_data = test_data_creator.TaskDraft()
+        taskdraft_test_data = test_data_creator.TaskDraft(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data, 201, taskdraft_test_data)
@@ -965,7 +981,7 @@ class TaskDraftTestCase(unittest.TestCase):
 
     def test_task_draft_CASCADE_behavior_on_task_template_deleted(self):
         template_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/')
-        taskdraft_test_data = test_data_creator.TaskDraft(name="task draft 2", template_url=template_url)
+        taskdraft_test_data = test_data_creator.TaskDraft(name="task draft 2", template_url=template_url, scheduling_unit_draft_url=self.scheduling_unit_draft_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/',  taskdraft_test_data, 201, taskdraft_test_data)['url']
@@ -981,7 +997,7 @@ class TaskDraftTestCase(unittest.TestCase):
 
     def test_task_draft_CASCADE_behavior_on_scheduling_unit_draft_deleted(self):
         scheduling_unit_draft_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitDraft(), '/scheduling_unit_draft/')
-        taskdraft_test_data = test_data_creator.TaskDraft(name="task draft 2", scheduling_unit_draft_url=scheduling_unit_draft_url)
+        taskdraft_test_data = test_data_creator.TaskDraft(name="task draft 2", scheduling_unit_draft_url=scheduling_unit_draft_url, template_url=self.template_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/',  taskdraft_test_data, 201, taskdraft_test_data)['url']
@@ -996,8 +1012,8 @@ class TaskDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 404, {})
 
     def test_task_draft_SET_NULL_behavior_on_copies_deleted(self):
-        taskdraft_test_data1 = test_data_creator.TaskDraft(name="the one")
-        taskdraft_test_data2 = test_data_creator.TaskDraft(name="the other")
+        taskdraft_test_data1 = test_data_creator.TaskDraft(name="the one", scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url)
+        taskdraft_test_data2 = test_data_creator.TaskDraft(name="the other", scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url)
 
         # POST new item with dependency
         copy_url = POST_and_assert_expected_response(self, BASE_URL + '/task_draft/', taskdraft_test_data2, 201, taskdraft_test_data2)['url']
@@ -1017,6 +1033,14 @@ class TaskDraftTestCase(unittest.TestCase):
 
 
 class TaskRelationDraftTestCase(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.producer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/')
+        cls.consumer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/')
+        cls.template_url = test_data_creator.post_data_and_get_url(test_data_creator.WorkRelationSelectionTemplate(), '/work_relation_selection_template/')
+        cls.input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/')
+        cls.output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/')
+
     def test_task_relation_draft_list_apiformat(self):
         r = requests.get(BASE_URL + '/task_relation_draft/?format=api', auth=AUTH)
         self.assertEqual(r.status_code, 200)
@@ -1026,7 +1050,7 @@ class TaskRelationDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/1234321/', 404, {})
 
     def test_task_relation_draft_POST_and_GET(self):
-        trd_test_data = test_data_creator.TaskRelationDraft()
+        trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url)
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data, 201, trd_test_data)
@@ -1034,12 +1058,12 @@ class TaskRelationDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, trd_test_data)
 
     def test_task_relation_draft_PUT_invalid_raises_error(self):
-        trd_test_data = test_data_creator.TaskRelationDraft()
+        trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url)
         PUT_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/9876789876/', trd_test_data, 404, {})
 
     def test_task_relation_draft_PUT(self):
-        trd_test_data1 = test_data_creator.TaskRelationDraft()
-        trd_test_data2 = test_data_creator.TaskRelationDraft()
+        trd_test_data1 = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url)
+        trd_test_data2 = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data1, 201, trd_test_data1)
@@ -1051,7 +1075,7 @@ class TaskRelationDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, trd_test_data2)
 
     def test_task_relation_draft_PATCH(self):
-        trd_test_data = test_data_creator.TaskRelationDraft()
+        trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data, 201, trd_test_data)
@@ -1067,7 +1091,7 @@ class TaskRelationDraftTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_task_relation_draft_DELETE(self):
-        trd_test_data = test_data_creator.TaskRelationDraft()
+        trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data, 201, trd_test_data)
@@ -1079,7 +1103,7 @@ class TaskRelationDraftTestCase(unittest.TestCase):
 
     def test_task_relation_draft_CASCADE_behavior_on_work_relation_selection_template_deleted(self):
         template_url = test_data_creator.post_data_and_get_url(test_data_creator.WorkRelationSelectionTemplate(), '/work_relation_selection_template/')
-        trd_test_data = test_data_creator.TaskRelationDraft(template_url=template_url)
+        trd_test_data = test_data_creator.TaskRelationDraft(template_url=template_url, producer_url=self.producer_url, consumer_url=self.consumer_url, input_url=self.input_url, output_url=self.output_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/',  trd_test_data, 201, trd_test_data)['url']
@@ -1095,7 +1119,7 @@ class TaskRelationDraftTestCase(unittest.TestCase):
 
     def test_task_relation_draft_CASCADE_behavior_on_producer_deleted(self):
         producer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/')
-        trd_test_data = test_data_creator.TaskRelationDraft(producer_url=producer_url)
+        trd_test_data = test_data_creator.TaskRelationDraft(producer_url=producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/',
@@ -1113,7 +1137,7 @@ class TaskRelationDraftTestCase(unittest.TestCase):
 
     def test_task_relation_draft_CASCADE_behavior_on_consumer_deleted(self):
         consumer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/')
-        trd_test_data = test_data_creator.TaskRelationDraft(consumer_url=consumer_url)
+        trd_test_data = test_data_creator.TaskRelationDraft(consumer_url=consumer_url, producer_url=self.producer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url)
 
         # POST new item with dependency
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/',
@@ -1131,7 +1155,7 @@ class TaskRelationDraftTestCase(unittest.TestCase):
 
     def test_task_relation_draft_CASCADE_behavior_on_input_deleted(self):
         input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/')
-        trd_test_data = test_data_creator.TaskRelationDraft(input_url=input_url)
+        trd_test_data = test_data_creator.TaskRelationDraft(input_url=input_url, producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, output_url=self.output_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/',
@@ -1149,7 +1173,7 @@ class TaskRelationDraftTestCase(unittest.TestCase):
 
     def test_task_relation_draft_CASCADE_behavior_on_output_deleted(self):
         output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/')
-        trd_test_data = test_data_creator.TaskRelationDraft(output_url=output_url)
+        trd_test_data = test_data_creator.TaskRelationDraft(output_url=output_url, producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url)
 
         # POST new item with dependency
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/',
@@ -1166,6 +1190,11 @@ class TaskRelationDraftTestCase(unittest.TestCase):
 
 
 class SchedulingUnitBlueprintTestCase(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.scheduling_unit_draft_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitDraft(), '/scheduling_unit_draft/')
+        cls.template_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitTemplate(), '/scheduling_unit_template/')
+
     def test_scheduling_unit_blueprint_list_apiformat(self):
         r = requests.get(BASE_URL + '/scheduling_unit_blueprint/?format=api', auth=AUTH)
         self.assertEqual(r.status_code, 200)
@@ -1175,7 +1204,7 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/1234321/', 404, {})
 
     def test_scheduling_unit_blueprint_POST_and_GET(self):
-        sub_test_data = test_data_creator.SchedulingUnitBlueprint()
+        sub_test_data = test_data_creator.SchedulingUnitBlueprint(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url)
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data, 201, sub_test_data)
@@ -1183,12 +1212,12 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, sub_test_data)
 
     def test_scheduling_unit_blueprint_PUT_invalid_raises_error(self):
-        sub_test_data = test_data_creator.SchedulingUnitBlueprint()
+        sub_test_data = test_data_creator.SchedulingUnitBlueprint(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url)
         PUT_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/9876789876/', sub_test_data, 404, {})
 
     def test_scheduling_unit_blueprint_PUT(self):
-        sub_test_data1 = test_data_creator.SchedulingUnitBlueprint(name="the one")
-        sub_test_data2 = test_data_creator.SchedulingUnitBlueprint(name="the other")
+        sub_test_data1 = test_data_creator.SchedulingUnitBlueprint(name="the one", scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url)
+        sub_test_data2 = test_data_creator.SchedulingUnitBlueprint(name="the other", scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data1, 201, sub_test_data1)
@@ -1200,7 +1229,7 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, sub_test_data2)
 
     def test_scheduling_unit_blueprint_PATCH(self):
-        sub_test_data = test_data_creator.SchedulingUnitBlueprint()
+        sub_test_data = test_data_creator.SchedulingUnitBlueprint(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data, 201, sub_test_data)
@@ -1217,7 +1246,7 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_scheduling_unit_blueprint_DELETE(self):
-        sub_test_data = test_data_creator.SchedulingUnitBlueprint()
+        sub_test_data = test_data_creator.SchedulingUnitBlueprint(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=self.template_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data, 201, sub_test_data)
@@ -1228,7 +1257,8 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)
 
     def test_scheduling_unit_blueprint_CASCADE_behavior_on_scheduling_unit_template_deleted(self):
-        sub_test_data = test_data_creator.SchedulingUnitBlueprint()
+        template_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitTemplate(), '/scheduling_unit_template/')
+        sub_test_data = test_data_creator.SchedulingUnitBlueprint(scheduling_unit_draft_url=self.scheduling_unit_draft_url, template_url=template_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/',  sub_test_data, 201, sub_test_data)['url']
@@ -1237,13 +1267,14 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, sub_test_data)
 
         # DELETE dependency
-        DELETE_and_assert_gone(self, sub_test_data['requirements_template'])
+        DELETE_and_assert_gone(self, template_url)
 
         # assert
         GET_and_assert_expected_response(self, url, 404, {})
 
     def test_scheduling_unit_blueprint_CASCADE_behavior_on_scheduling_unit_draft_deleted(self):
-        sub_test_data = test_data_creator.SchedulingUnitBlueprint()
+        scheduling_unit_draft_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitDraft(), '/scheduling_unit_draft/')
+        sub_test_data = test_data_creator.SchedulingUnitBlueprint(scheduling_unit_draft_url=scheduling_unit_draft_url, template_url=self.template_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_blueprint/', sub_test_data, 201, sub_test_data)['url']
@@ -1252,12 +1283,18 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, sub_test_data)
 
         # DELETE dependency
-        DELETE_and_assert_gone(self, sub_test_data['draft'])
+        DELETE_and_assert_gone(self, scheduling_unit_draft_url)
 
         # assert
         GET_and_assert_expected_response(self, url, 404, {})
 
 class TaskBlueprintTestCase(unittest.TestCase):
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.draft_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/')
+        cls.template_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/')
+        cls.scheduling_unit_blueprint_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitBlueprint(), '/scheduling_unit_blueprint/')
+
     def test_task_blueprint_list_apiformat(self):
         r = requests.get(BASE_URL + '/task_blueprint/?format=api', auth=AUTH)
         self.assertEqual(r.status_code, 200)
@@ -1267,7 +1304,7 @@ class TaskBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/task_blueprint/1234321/', 404, {})
 
     def test_task_blueprint_POST_and_GET(self):
-        tb_test_data = test_data_creator.TaskBlueprint()
+        tb_test_data = test_data_creator.TaskBlueprint(draft_url=self.draft_url, template_url=self.template_url, scheduling_unit_blueprint_url=self.scheduling_unit_blueprint_url)
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data, 201, tb_test_data)
@@ -1275,12 +1312,12 @@ class TaskBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, tb_test_data)
 
     def test_task_blueprint_PUT_invalid_raises_error(self):
-        tb_test_data = test_data_creator.TaskBlueprint()
+        tb_test_data = test_data_creator.TaskBlueprint(draft_url=self.draft_url, template_url=self.template_url, scheduling_unit_blueprint_url=self.scheduling_unit_blueprint_url)
         PUT_and_assert_expected_response(self, BASE_URL + '/task_blueprint/9876789876/', tb_test_data, 404, {})
 
     def test_task_blueprint_PUT(self):
-        tb_test_data1 = test_data_creator.TaskBlueprint(name="the one")
-        tb_test_data2 = test_data_creator.TaskBlueprint(name="the other")
+        tb_test_data1 = test_data_creator.TaskBlueprint(name="the one", draft_url=self.draft_url, template_url=self.template_url, scheduling_unit_blueprint_url=self.scheduling_unit_blueprint_url)
+        tb_test_data2 = test_data_creator.TaskBlueprint(name="the other", draft_url=self.draft_url, template_url=self.template_url, scheduling_unit_blueprint_url=self.scheduling_unit_blueprint_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data1, 201, tb_test_data1)
@@ -1292,7 +1329,7 @@ class TaskBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, tb_test_data2)
 
     def test_task_blueprint_PATCH(self):
-        tb_test_data = test_data_creator.TaskBlueprint()
+        tb_test_data = test_data_creator.TaskBlueprint(draft_url=self.draft_url, template_url=self.template_url, scheduling_unit_blueprint_url=self.scheduling_unit_blueprint_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data, 201, tb_test_data)
@@ -1309,7 +1346,7 @@ class TaskBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_task_blueprint_DELETE(self):
-        tb_test_data = test_data_creator.TaskBlueprint()
+        tb_test_data = test_data_creator.TaskBlueprint(draft_url=self.draft_url, template_url=self.template_url, scheduling_unit_blueprint_url=self.scheduling_unit_blueprint_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/', tb_test_data, 201, tb_test_data)
@@ -1320,7 +1357,7 @@ class TaskBlueprintTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)
 
     def test_task_blueprint_prevents_missing_specification_template(self):
-        tb_test_data = test_data_creator.TaskBlueprint()
+        tb_test_data = test_data_creator.TaskBlueprint(draft_url=self.draft_url, template_url=self.template_url, scheduling_unit_blueprint_url=self.scheduling_unit_blueprint_url)
 
         # test data
         test_data = dict(tb_test_data)
@@ -1331,7 +1368,7 @@ class TaskBlueprintTestCase(unittest.TestCase):
         self.assertTrue('This field may not be null' in str(r_dict['specifications_template']))
 
     def test_task_blueprint_prevents_missing_draft(self):
-        tb_test_data = test_data_creator.TaskBlueprint()
+        tb_test_data = test_data_creator.TaskBlueprint(draft_url=self.draft_url, template_url=self.template_url, scheduling_unit_blueprint_url=self.scheduling_unit_blueprint_url)
 
         # test data
         test_data = dict(tb_test_data)
@@ -1342,7 +1379,7 @@ class TaskBlueprintTestCase(unittest.TestCase):
         self.assertTrue('This field may not be null' in str(r_dict['draft']))
 
     def test_task_blueprint_prevents_missing_scheduling_unit_blueprint(self):
-        tb_test_data = test_data_creator.TaskBlueprint()
+        tb_test_data = test_data_creator.TaskBlueprint(draft_url=self.draft_url, template_url=self.template_url, scheduling_unit_blueprint_url=self.scheduling_unit_blueprint_url)
 
         # test data
         test_data = dict(tb_test_data)
@@ -1353,7 +1390,8 @@ class TaskBlueprintTestCase(unittest.TestCase):
         self.assertTrue('This field may not be null' in str(r_dict['scheduling_unit_blueprint']))
 
     def test_task_blueprint_CASCADE_behavior_on_task_template_deleted(self):
-        tb_test_data = test_data_creator.TaskBlueprint()
+        template_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/')
+        tb_test_data = test_data_creator.TaskBlueprint(draft_url=self.draft_url, template_url=template_url, scheduling_unit_blueprint_url=self.scheduling_unit_blueprint_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/',  tb_test_data, 201, tb_test_data)['url']
@@ -1362,13 +1400,14 @@ class TaskBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, tb_test_data)
 
         # DELETE dependency
-        DELETE_and_assert_gone(self, tb_test_data['specifications_template'])
+        DELETE_and_assert_gone(self, template_url)
 
         # assert
         GET_and_assert_expected_response(self, url, 404, {})
 
     def test_task_blueprint_CASCADE_behavior_on_task_draft_deleted(self):
-        tb_test_data = test_data_creator.TaskBlueprint()
+        draft_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/')
+        tb_test_data = test_data_creator.TaskBlueprint(draft_url=draft_url, template_url=self.template_url, scheduling_unit_blueprint_url=self.scheduling_unit_blueprint_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/',  tb_test_data, 201, tb_test_data)['url']
@@ -1377,13 +1416,14 @@ class TaskBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, tb_test_data)
 
         # DELETE dependency
-        DELETE_and_assert_gone(self, tb_test_data['draft'])
+        DELETE_and_assert_gone(self, draft_url)
 
         # assert
         GET_and_assert_expected_response(self, url, 404, {})
 
     def test_task_blueprint_CASCADE_behavior_on_scheduling_unit_blueprint_deleted(self):
-        tb_test_data = test_data_creator.TaskBlueprint()
+        scheduling_unit_blueprint_url = test_data_creator.post_data_and_get_url(test_data_creator.SchedulingUnitBlueprint(), '/scheduling_unit_blueprint/')
+        tb_test_data = test_data_creator.TaskBlueprint(draft_url=self.draft_url, template_url=self.template_url, scheduling_unit_blueprint_url=scheduling_unit_blueprint_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_blueprint/',  tb_test_data, 201, tb_test_data)['url']
@@ -1392,13 +1432,23 @@ class TaskBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, tb_test_data)
 
         # DELETE dependency
-        DELETE_and_assert_gone(self, tb_test_data['scheduling_unit_blueprint'])
+        DELETE_and_assert_gone(self, scheduling_unit_blueprint_url)
 
         # assert
         GET_and_assert_expected_response(self, url, 404, {})
 
 
 class TaskRelationBlueprintTestCase(unittest.TestCase):
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.draft_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskRelationDraft(), '/task_relation_draft/')
+        cls.producer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/')
+        cls.consumer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/')
+        cls.template_url = test_data_creator.post_data_and_get_url(test_data_creator.WorkRelationSelectionTemplate(), '/work_relation_selection_template/')
+        cls.input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/')
+        cls.output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/')
+
     def test_task_relation_blueprint_list_apiformat(self):
         r = requests.get(BASE_URL + '/task_relation_blueprint/?format=api', auth=AUTH)
         self.assertEqual(r.status_code, 200)
@@ -1408,7 +1458,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/1234321/', 404, {})
 
     def test_task_relation_blueprint_POST_and_GET(self):
-        trb_test_data = test_data_creator.TaskRelationBlueprint()
+        trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url)
 
         # POST and GET a new item and assert correctness
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data, 201, trb_test_data)
@@ -1416,12 +1466,12 @@ class TaskRelationBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, trb_test_data)
 
     def test_task_relation_blueprint_PUT_invalid_raises_error(self):
-        trb_test_data = test_data_creator.TaskRelationBlueprint()
+        trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url)
         PUT_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/9876789876/', trb_test_data, 404, {})
 
     def test_task_relation_blueprint_PUT(self):
-        trb_test_data1 = test_data_creator.TaskRelationBlueprint()
-        trb_test_data2 = test_data_creator.TaskRelationBlueprint()
+        trb_test_data1 = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url)
+        trb_test_data2 = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data1, 201, trb_test_data1)
@@ -1433,7 +1483,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, trb_test_data2)
 
     def test_task_relation_blueprint_PATCH(self):
-        trb_test_data = test_data_creator.TaskRelationBlueprint()
+        trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data, 201, trb_test_data)
@@ -1449,7 +1499,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase):
         GET_and_assert_expected_response(self, url, 200, expected_data)
 
     def test_task_relation_blueprint_DELETE(self):
-        trb_test_data = test_data_creator.TaskRelationBlueprint()
+        trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url)
 
         # POST new item, verify
         r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data, 201, trb_test_data)
@@ -1460,7 +1510,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase):
         DELETE_and_assert_gone(self, url)
 
     def test_task_relation_blueprint_prevents_missing_selection_template(self):
-        trb_test_data = test_data_creator.TaskRelationBlueprint()
+        trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url)
 
         # test data
         test_data = dict(trb_test_data)
@@ -1471,7 +1521,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase):
         self.assertTrue('This field may not be null' in str(r_dict['selection_template']))
 
     def test_task_relation_blueprint_prevents_missing_draft(self):
-        trb_test_data = test_data_creator.TaskRelationBlueprint()
+        trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url)
 
         # test data
         test_data = dict(trb_test_data)
@@ -1482,7 +1532,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase):
         self.assertTrue('This field may not be null' in str(r_dict['draft']))
 
     def test_task_relation_blueprint_prevents_missing_producer(self):
-        trb_test_data = test_data_creator.TaskRelationBlueprint()
+        trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url)
 
         # test data
         test_data = dict(trb_test_data)
@@ -1493,7 +1543,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase):
         self.assertTrue('This field may not be null' in str(r_dict['producer']))
 
     def test_task_relation_blueprint_prevents_missing_consumer(self):
-        trb_test_data = test_data_creator.TaskRelationBlueprint()
+        trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url)
 
         # test data
         test_data = dict(trb_test_data)
@@ -1504,7 +1554,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase):
         self.assertTrue('This field may not be null' in str(r_dict['consumer']))
 
     def test_task_relation_blueprint_prevents_missing_input(self):
-        trb_test_data = test_data_creator.TaskRelationBlueprint()
+        trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url)
 
         # test data
         test_data = dict(trb_test_data)
@@ -1515,7 +1565,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase):
         self.assertTrue('This field may not be null' in str(r_dict['input']))
 
     def test_task_relation_blueprint_prevents_missing_output(self):
-        trb_test_data = test_data_creator.TaskRelationBlueprint()
+        trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url)
 
         # test data
         test_data = dict(trb_test_data)
@@ -1527,7 +1577,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase):
 
     def test_task_relation_blueprint_CASCADE_behavior_on_work_relation_selection_template_deleted(self):
         template_url = test_data_creator.post_data_and_get_url(test_data_creator.WorkRelationSelectionTemplate(), '/work_relation_selection_template/')
-        trb_test_data = test_data_creator.TaskRelationBlueprint(template_url=template_url)
+        trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/',  trb_test_data, 201, trb_test_data)['url']
@@ -1543,7 +1593,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase):
 
     def test_task_relation_blueprint_CASCADE_behavior_on_producer_deleted(self):
         producer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/')
-        trb_test_data = test_data_creator.TaskRelationBlueprint(producer_url=producer_url)
+        trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=producer_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/',
@@ -1561,7 +1611,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase):
 
     def test_task_relation_blueprint_CASCADE_behavior_on_consumer_deleted(self):
         consumer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/')
-        trb_test_data = test_data_creator.TaskRelationBlueprint(consumer_url=consumer_url)
+        trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=consumer_url, producer_url=self.producer_url)
 
         # POST new item with dependency
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/',
@@ -1579,7 +1629,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase):
 
     def test_task_relation_blueprint_CASCADE_behavior_on_input_deleted(self):
         input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/')
-        trb_test_data = test_data_creator.TaskRelationBlueprint(input_url=input_url)
+        trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url)
 
         # POST new item
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/',
@@ -1597,7 +1647,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase):
 
     def test_task_relation_blueprint_CASCADE_behavior_on_output_deleted(self):
         output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectors(), '/task_connectors/')
-        trb_test_data = test_data_creator.TaskRelationBlueprint(output_url=output_url)
+        trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=output_url, consumer_url=self.consumer_url, producer_url=self.producer_url)
 
         # POST new item with dependency
         url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/',
diff --git a/SAS/TMSS/test/tmss_test_data_rest.py b/SAS/TMSS/test/tmss_test_data_rest.py
index 8c1a62ae360f77f89a83725709ce0931ec122a48..60cb443aa880c4a6802688b008deb621d6c1d2ee 100644
--- a/SAS/TMSS/test/tmss_test_data_rest.py
+++ b/SAS/TMSS/test/tmss_test_data_rest.py
@@ -169,7 +169,7 @@ class TMSSRESTTestDataCreator():
                 'consumed_by': []}
     
     
-    def TaskRelationDraft(self, name="myTaskRelationDraft", producer_url=None, consumer_url=None, template_url=None, input_url=None, output_url=None):
+    def TaskRelationDraft(self, producer_url=None, consumer_url=None, template_url=None, input_url=None, output_url=None):
         if producer_url is None:
             producer_url = self.post_data_and_get_url(self.TaskDraft(), '/task_draft/')
     
diff --git a/SAS/XML_generator/test/test_regression.in_data/txt/20150713_4C17.31.txt b/SAS/XML_generator/test/test_regression.in_data/txt/20150713_4C17.31.txt
index 3b5dbd6f1fd9e19fe415ef8b81295faafd777cb3..2676df1936e87853dbea19ca364bf9b2d301dd75 100644
--- a/SAS/XML_generator/test/test_regression.in_data/txt/20150713_4C17.31.txt
+++ b/SAS/XML_generator/test/test_regression.in_data/txt/20150713_4C17.31.txt
@@ -1,88 +1,88 @@
-################################################################################
-## Parameters for project: LC4_010
-################################################################################
-
-
-################################################################################
-## PARAMETER FILE SETUP 
-projectName=LC4_035
-mainFolderName=4C17.31-20150713
-mainFolderDescription=4C17.31-Moon HBA 05:07:59.5;+17:23:40.0
-
-BLOCK
-
-split_targets = F # T/F
-calibration = none # internal / external / none
-create_calibrator_observations = F # create cal-observations before target (T/F ignored if calibration is none)
-create_target_cal_beam = F # create cal-beam in target observation (T/F ignored if calibration is none)
-processing=Preprocessing # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
-imagingPipeline=none # can be one of MSSS, standard, none
-repeat=1 # the number of repeats of this block
-
-packageName=4C17.31-20150713 # name of package folder containing observation/pipelines
-packageDescription=4C17.31-Moon HBA 05:07:59.5;+17:23:40.0
-packageTag= # optional tag to be prepended before every obs/pipeline name/description (max 8 chars)
-
-antennaMode=HBA Dual
-clock=200 MHz
-instrumentFilter=110-190 MHz
-numberOfBitsPerSample=8 # Change to 16 if requested
-integrationTime=1 # Script identified wrong input in Northstar: 18000.0
-channelsPerSubband=64
-stationList=all
-tbbPiggybackAllowed=T
-aartfaacPiggybackAllowed=T
-
-###### Which data types should be produced: ######
-
-correlatedData=T
-coherentStokesData=F
-incoherentStokesData=F
-flysEye=F
-coherentDedisperseChannels=False
-
-###### Coherent Stokes parameters ######
-#subbandsPerFileCS=
-#numberCollapsedChannelsCS=
-#stokesDownsamplingStepsCS=
-#whichCS=
-###### Coherent Stokes parameters ######
-#subbandsPerFileIS=
-#numberCollapsedChannelsIS=
-#stokesDownsamplingStepsIS=
-#whichIS=
-
-flaggingStrategy=HBAdefault
-calibratorDuration_s=0 # duration of calibration observations in seconds
-targetDuration_s=10800 # duration of target observations in seconds
-
-
-# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
-# un-comment the startTimeUTC to have the observation start times generated
-startTimeUTC=2015-07-13 06:10:00
-# timeStep's in seconds
-timeStep1=60
-timeStep2=60
-
-
-
-###### Pipeline settings ######
-## GENERAL INFO
-## Processing mode: Pre processing only
-
-
-targetBeams=  ## MAKE SURE TO SELECT THIS PROPERLY
-05:07:59.5;+17:23:40.0;4c17.31;76..394;319;;;T;10800
-Demix=4;1;64;10
-
-05:34:32.0;+22:00:52;3C144;76,80,84,88,92,96,100,104,108,112,116,120,124,128,132,136,140,144,148,152,156,160,164,168,172,176,180,184,188,192,196,200,204,208,212,216,220,224,228,232,236,240,244,248,252,256,260,264,268,272,276,280,284,288,292,296,300,304,308,312,316,320,324,328,332,336,340,344,348,352,356,360,364,368,372,376,380,384,388,392,394;81;;;T;10800
-Demix=4;1;64;10
-
-
-05:10:02.37;+18:00:41.58;J0510+1800;76,80,84,88,92,96,100,104,108,112,116,120,124,128,132,136,140,144,148,152,156,160,164,168,172,176,180,184,188,192,196,200,204,208,212,216,220,224,228,232,236,240,244,248,252,256,260,264,268,272,276,280,284,288,292,296,300,304,308,312,316,320,324,328,332,336,340,344,348,352,356,360,364,368,372,376,380,384,388,392,394;81;;;T;10800
-Demix=4;1;64;10
-
-
-# BBS: Add parameters if BBS needed
-# Pulsar: Add parameters if pulsar pipeline needed
-
+################################################################################
+## Parameters for project: LC4_010
+################################################################################
+
+
+################################################################################
+## PARAMETER FILE SETUP 
+projectName=LC4_035
+mainFolderName=4C17.31-20150713
+mainFolderDescription=4C17.31-Moon HBA 05:07:59.5;+17:23:40.0
+
+BLOCK
+
+split_targets = F # T/F
+calibration = none # internal / external / none
+create_calibrator_observations = F # create cal-observations before target (T/F ignored if calibration is none)
+create_target_cal_beam = F # create cal-beam in target observation (T/F ignored if calibration is none)
+processing=Preprocessing # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
+imagingPipeline=none # can be one of MSSS, standard, none
+repeat=1 # the number of repeats of this block
+
+packageName=4C17.31-20150713 # name of package folder containing observation/pipelines
+packageDescription=4C17.31-Moon HBA 05:07:59.5;+17:23:40.0
+packageTag= # optional tag to be prepended before every obs/pipeline name/description (max 8 chars)
+
+antennaMode=HBA Dual
+clock=200 MHz
+instrumentFilter=110-190 MHz
+numberOfBitsPerSample=8 # Change to 16 if requested
+integrationTime=1 # Script identified wrong input in Northstar: 18000.0
+channelsPerSubband=64
+stationList=all
+tbbPiggybackAllowed=T
+aartfaacPiggybackAllowed=T
+
+###### Which data types should be produced: ######
+
+correlatedData=T
+coherentStokesData=F
+incoherentStokesData=F
+flysEye=F
+coherentDedisperseChannels=False
+
+###### Coherent Stokes parameters ######
+#subbandsPerFileCS=
+#numberCollapsedChannelsCS=
+#stokesDownsamplingStepsCS=
+#whichCS=
+###### Coherent Stokes parameters ######
+#subbandsPerFileIS=
+#numberCollapsedChannelsIS=
+#stokesDownsamplingStepsIS=
+#whichIS=
+
+flaggingStrategy=HBAdefault
+calibratorDuration_s=0 # duration of calibration observations in seconds
+targetDuration_s=10800 # duration of target observations in seconds
+
+
+# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
+# un-comment the startTimeUTC to have the observation start times generated
+startTimeUTC=2015-07-13 06:10:00
+# timeStep's in seconds
+timeStep1=60
+timeStep2=60
+
+
+
+###### Pipeline settings ######
+## GENERAL INFO
+## Processing mode: Pre processing only
+
+
+targetBeams=  ## MAKE SURE TO SELECT THIS PROPERLY
+05:07:59.5;+17:23:40.0;4c17.31;76..394;319;;;T;10800
+Demix=4;1;64;10
+
+05:34:32.0;+22:00:52;3C144;76,80,84,88,92,96,100,104,108,112,116,120,124,128,132,136,140,144,148,152,156,160,164,168,172,176,180,184,188,192,196,200,204,208,212,216,220,224,228,232,236,240,244,248,252,256,260,264,268,272,276,280,284,288,292,296,300,304,308,312,316,320,324,328,332,336,340,344,348,352,356,360,364,368,372,376,380,384,388,392,394;81;;;T;10800
+Demix=4;1;64;10
+
+
+05:10:02.37;+18:00:41.58;J0510+1800;76,80,84,88,92,96,100,104,108,112,116,120,124,128,132,136,140,144,148,152,156,160,164,168,172,176,180,184,188,192,196,200,204,208,212,216,220,224,228,232,236,240,244,248,252,256,260,264,268,272,276,280,284,288,292,296,300,304,308,312,316,320,324,328,332,336,340,344,348,352,356,360,364,368,372,376,380,384,388,392,394;81;;;T;10800
+Demix=4;1;64;10
+
+
+# BBS: Add parameters if BBS needed
+# Pulsar: Add parameters if pulsar pipeline needed
+
diff --git a/SAS/XML_generator/test/test_regression.in_data/txt/20150731_G46_run1_HBA.txt b/SAS/XML_generator/test/test_regression.in_data/txt/20150731_G46_run1_HBA.txt
index 833e5569f2de0128ea7070e6e9453c4f26cdec94..6f51c7c88ae3db786130946265edb163f35b1e16 100644
--- a/SAS/XML_generator/test/test_regression.in_data/txt/20150731_G46_run1_HBA.txt
+++ b/SAS/XML_generator/test/test_regression.in_data/txt/20150731_G46_run1_HBA.txt
@@ -1,220 +1,220 @@
-################################################################################
-## Parameters for project: LC4_010
-################################################################################
-
-
-################################################################################
-## RUN 03: 2 targets -- duration:  30600s (  8.50h)
-## G46_lba, 19:15:36.34,+11:36:33.0
-## 3C380, 18:29:31.72,+48:44:47.0
- 
-## PARAMETER FILE SETUP 
-projectName=LC4_010
-mainFolderName=20150731_G46_run1_HBA
-mainFolderDescription=20150731_G46_run1_HBA
-
-BLOCK
-
-split_targets = F # T/F
-calibration = none # internal / external / none
-create_calibrator_observations = F # create cal-observations before target (T/F ignored if calibration is none)
-create_target_cal_beam = F # create cal-beam in target observation (T/F ignored if calibration is none)
-processing=Preprocessing # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
-imagingPipeline=none # can be one of MSSS, standard, none
-repeat=1 # the number of repeats of this block
-
-packageName=3C380 # name of package folder containing observation/pipelines
-packageDescription=3C380
-packageTag= # optional tag to be prepended before every obs/pipeline name/description (max 8 chars)
-
-antennaMode=HBA Dual Inner
-clock=200 MHz
-instrumentFilter=110-190 MHz
-numberOfBitsPerSample=8 # Change to 16 if requested
-integrationTime=2.0
-channelsPerSubband=256
-stationList=nl
-tbbPiggybackAllowed=T
-aartfaacPiggybackAllowed=T
-
-###### Which data types should be produced: ######
-
-correlatedData=T
-coherentStokesData=F
-incoherentStokesData=F
-flysEye=F
-coherentDedisperseChannels=False
-
-###### Coherent Stokes parameters ######
-#subbandsPerFileCS=
-#numberCollapsedChannelsCS=
-#stokesDownsamplingStepsCS=
-#whichCS=
-###### Coherent Stokes parameters ######
-#subbandsPerFileIS=
-#numberCollapsedChannelsIS=
-#stokesDownsamplingStepsIS=
-#whichIS=
-
-flaggingStrategy=HBAdefault
-calibratorDuration_s=0 # duration of calibration observations in seconds
-targetDuration_s=600 # duration of target observations in seconds
-
-
-# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
-# un-comment the startTimeUTC to have the observation start times generated
-startTimeUTC=2015-07-31 20:00:00
-# timeStep's in seconds
-timeStep1=60
-timeStep2=60
-
-
-
-targetBeams=  ## MAKE SURE TO SELECT THIS PROPERLY
-
-18:29:31.72;+48:44:47.0;3C380RR;54,58,62,67,71,76,80,81,85,90,94,95,99,104,109,114,119,124,129,134,140,145,150,156,161,166,167,172,178,183,184,189,195,201,207,213,219,225,231,238,244,250,257,263,264,270,277,284,290,291,297,298,304,305,312,319,326,333,334,341,349,356,364,372,380,388,396,404,412,413,421,429,430,438,447,455,456;77;;;T;600
-Demix=1;1;64;10;;
-
-
-18:29:31.72;+48:44:47.0;3C380Con;120,125,126,127,128,130,131,132,133,135,136,137,138,139,141,142,143,144,146,147,148,149,151,152,153,154,155,157,158,159,160,162,163,164,165,168,169,170,171,173,174,175,176,177,179,180,181,182,185,186,187,188,190,191,192,193,194,196,197,198,199,200,202,203,204,205,206,208,209,210,211,212,214,215,216,217,218,220,221,222,223,224,226,227,228,229,230,232,233,234,235,236,237,239,240,241,242,243,245,246,247,248,249,251,252,253,254,255,256,258,259,260,261,262,265,266,267,268,269,271,272,273,274,275,276,278,279,280,281,282,283,285,286,287,288,289,292,293,294,295,296,299,300,301,302,303,306,307,308,309,310,311,313,314,315,316,317,318,320,321,322,323,324,325,327,328,332;167;;;T;600
-Demix=64;1;64;10;;
-
-BLOCK
-
-
-
-split_targets = F # T/F
-calibration = none # internal / external / none
-create_calibrator_observations = F # create cal-observations before target (T/F ignored if calibration is none)
-create_target_cal_beam = F # create cal-beam in target observation (T/F ignored if calibration is none)
-processing=Preprocessing # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
-imagingPipeline=none # can be one of MSSS, standard, none
-repeat=1 # the number of repeats of this block
-
-packageName=20150731_G46_run1_HBA # name of package folder containing observation/pipelines
-packageDescription=20150731_G46_run1_HBA
-packageTag= # optional tag to be prepended before every obs/pipeline name/description (max 8 chars)
-
-antennaMode=HBA Dual Inner
-clock=200 MHz
-instrumentFilter=110-190 MHz
-numberOfBitsPerSample=8 # Change to 16 if requested
-integrationTime=2.0
-channelsPerSubband=256
-stationList=nl
-tbbPiggybackAllowed=T
-aartfaacPiggybackAllowed=T
-
-###### Which data types should be produced: ######
-
-correlatedData=T
-coherentStokesData=F
-incoherentStokesData=F
-flysEye=F
-coherentDedisperseChannels=False
-
-###### Coherent Stokes parameters ######
-#subbandsPerFileCS=
-#numberCollapsedChannelsCS=
-#stokesDownsamplingStepsCS=
-#whichCS=
-###### Coherent Stokes parameters ######
-#subbandsPerFileIS=
-#numberCollapsedChannelsIS=
-#stokesDownsamplingStepsIS=
-#whichIS=
-
-flaggingStrategy=HBAdefault
-calibratorDuration_s=600 # duration of calibration observations in seconds
-targetDuration_s=15000 # duration of target observations in seconds
-
-
-# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
-# un-comment the startTimeUTC to have the observation start times generated
-startTimeUTC=2015-07-31 20:11:00
-# timeStep's in seconds
-timeStep1=60
-timeStep2=60
-
-
-
-targetBeams=  ## MAKE SURE TO SELECT THIS PROPERLY
-
-19:15:36.34;+11:36:33.0;G46_hba_RRL;54,58,62,67,71,76,80,81,85,90,94,95,99,104,109,114,119,124,129,134,140,145,150,156,161,166,167,172,178,183,184,189,195,201,207,213,219,225,231,238,244,250,257,263,264,270,277,284,290,291,297,298,304,305,312,319,326,333,334,341,349,356,364,372,380,388,396,404,412,413,421,429,430,438,447,455,456;77;;;T;15000
-
-Demix=1;1;64;10;;

-
-
-19:15:36.34;+11:36:33.0;G46_hba_CNT;120,125,126,127,128,130,131,132,133,135,136,137,138,139,141,142,143,144,146,147,148,149,151,152,153,154,155,157,158,159,160,162,163,164,165,168,169,170,171,173,174,175,176,177,179,180,181,182,185,186,187,188,190,191,192,193,194,196,197,198,199,200,202,203,204,205,206,208,209,210,211,212,214,215,216,217,218,220,221,222,223,224,226,227,228,229,230,232,233,234,235,236,237,239,240,241,242,243,245,246,247,248,249,251,252,253,254,255,256,258,259,260,261,262,265,266,267,268,269,271,272,273,274,275,276,278,279,280,281,282,283,285,286,287,288,289,292,293,294,295,296,299,300,301,302,303,306,307,308,309,310,311,313,314,315,316,317,318,320,321,322,323,324,325,327,328,332;167;;;T;15000
-Demix=64;1;64;10;;

-
-
-18:38:25.46;+17:12:12.4;phase_ref;54,58,62,67,71,76,80,81,85,90,94,95,99,104,109,114,119,120,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,332,333,334,341,349,356,364,372,380,388,396,404,412,413,421,429,430,438,447,455,456;244;;;T;15000
-
-Demix=64;1;64;10;;

-
-BLOCK
-
-split_targets = F # T/F
-calibration = none # internal / external / none
-create_calibrator_observations = F # create cal-observations before target (T/F ignored if calibration is none)
-create_target_cal_beam = F # create cal-beam in target observation (T/F ignored if calibration is none)
-processing=Preprocessing # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
-imagingPipeline=none # can be one of MSSS, standard, none
-repeat=1 # the number of repeats of this block
-
-packageName=3C380 # name of package folder containing observation/pipelines
-packageDescription=3C380
-packageTag= # optional tag to be prepended before every obs/pipeline name/description (max 8 chars)
-
-antennaMode=HBA Dual Inner
-clock=200 MHz
-instrumentFilter=110-190 MHz
-numberOfBitsPerSample=8 # Change to 16 if requested
-integrationTime=2.0
-channelsPerSubband=256
-stationList=nl
-tbbPiggybackAllowed=T
-aartfaacPiggybackAllowed=T
-
-###### Which data types should be produced: ######
-
-correlatedData=T
-coherentStokesData=F
-incoherentStokesData=F
-flysEye=F
-coherentDedisperseChannels=False
-
-###### Coherent Stokes parameters ######
-#subbandsPerFileCS=
-#numberCollapsedChannelsCS=
-#stokesDownsamplingStepsCS=
-#whichCS=
-###### Coherent Stokes parameters ######
-#subbandsPerFileIS=
-#numberCollapsedChannelsIS=
-#stokesDownsamplingStepsIS=
-#whichIS=
-
-flaggingStrategy=HBAdefault
-calibratorDuration_s=0 # duration of calibration observations in seconds
-targetDuration_s=600 # duration of target observations in seconds
-
-
-# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
-# un-comment the startTimeUTC to have the observation start times generated
-startTimeUTC=2015-08-01 00:21:00
-# timeStep's in seconds
-timeStep1=60
-timeStep2=60
-
-
-
-targetBeams=  ## MAKE SURE TO SELECT THIS PROPERLY
-
-18:29:31.72;+48:44:47.0;3C380RR;54,58,62,67,71,76,80,81,85,90,94,95,99,104,109,114,119,124,129,134,140,145,150,156,161,166,167,172,178,183,184,189,195,201,207,213,219,225,231,238,244,250,257,263,264,270,277,284,290,291,297,298,304,305,312,319,326,333,334,341,349,356,364,372,380,388,396,404,412,413,421,429,430,438,447,455,456;77;;;T;600
-Demix=1;1;64;10;;
-
-
-18:29:31.72;+48:44:47.0;3C380Con;120,125,126,127,128,130,131,132,133,135,136,137,138,139,141,142,143,144,146,147,148,149,151,152,153,154,155,157,158,159,160,162,163,164,165,168,169,170,171,173,174,175,176,177,179,180,181,182,185,186,187,188,190,191,192,193,194,196,197,198,199,200,202,203,204,205,206,208,209,210,211,212,214,215,216,217,218,220,221,222,223,224,226,227,228,229,230,232,233,234,235,236,237,239,240,241,242,243,245,246,247,248,249,251,252,253,254,255,256,258,259,260,261,262,265,266,267,268,269,271,272,273,274,275,276,278,279,280,281,282,283,285,286,287,288,289,292,293,294,295,296,299,300,301,302,303,306,307,308,309,310,311,313,314,315,316,317,318,320,321,322,323,324,325,327,328,332;167;;;T;600
-Demix=64;1;64;10;;
+################################################################################
+## Parameters for project: LC4_010
+################################################################################
+
+
+################################################################################
+## RUN 03: 2 targets -- duration:  30600s (  8.50h)
+## G46_lba, 19:15:36.34,+11:36:33.0
+## 3C380, 18:29:31.72,+48:44:47.0
+ 
+## PARAMETER FILE SETUP 
+projectName=LC4_010
+mainFolderName=20150731_G46_run1_HBA
+mainFolderDescription=20150731_G46_run1_HBA
+
+BLOCK
+
+split_targets = F # T/F
+calibration = none # internal / external / none
+create_calibrator_observations = F # create cal-observations before target (T/F ignored if calibration is none)
+create_target_cal_beam = F # create cal-beam in target observation (T/F ignored if calibration is none)
+processing=Preprocessing # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
+imagingPipeline=none # can be one of MSSS, standard, none
+repeat=1 # the number of repeats of this block
+
+packageName=3C380 # name of package folder containing observation/pipelines
+packageDescription=3C380
+packageTag= # optional tag to be prepended before every obs/pipeline name/description (max 8 chars)
+
+antennaMode=HBA Dual Inner
+clock=200 MHz
+instrumentFilter=110-190 MHz
+numberOfBitsPerSample=8 # Change to 16 if requested
+integrationTime=2.0
+channelsPerSubband=256
+stationList=nl
+tbbPiggybackAllowed=T
+aartfaacPiggybackAllowed=T
+
+###### Which data types should be produced: ######
+
+correlatedData=T
+coherentStokesData=F
+incoherentStokesData=F
+flysEye=F
+coherentDedisperseChannels=False
+
+###### Coherent Stokes parameters ######
+#subbandsPerFileCS=
+#numberCollapsedChannelsCS=
+#stokesDownsamplingStepsCS=
+#whichCS=
+###### Coherent Stokes parameters ######
+#subbandsPerFileIS=
+#numberCollapsedChannelsIS=
+#stokesDownsamplingStepsIS=
+#whichIS=
+
+flaggingStrategy=HBAdefault
+calibratorDuration_s=0 # duration of calibration observations in seconds
+targetDuration_s=600 # duration of target observations in seconds
+
+
+# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
+# un-comment the startTimeUTC to have the observation start times generated
+startTimeUTC=2015-07-31 20:00:00
+# timeStep's in seconds
+timeStep1=60
+timeStep2=60
+
+
+
+targetBeams=  ## MAKE SURE TO SELECT THIS PROPERLY
+
+18:29:31.72;+48:44:47.0;3C380RR;54,58,62,67,71,76,80,81,85,90,94,95,99,104,109,114,119,124,129,134,140,145,150,156,161,166,167,172,178,183,184,189,195,201,207,213,219,225,231,238,244,250,257,263,264,270,277,284,290,291,297,298,304,305,312,319,326,333,334,341,349,356,364,372,380,388,396,404,412,413,421,429,430,438,447,455,456;77;;;T;600
+Demix=1;1;64;10;;
+
+
+18:29:31.72;+48:44:47.0;3C380Con;120,125,126,127,128,130,131,132,133,135,136,137,138,139,141,142,143,144,146,147,148,149,151,152,153,154,155,157,158,159,160,162,163,164,165,168,169,170,171,173,174,175,176,177,179,180,181,182,185,186,187,188,190,191,192,193,194,196,197,198,199,200,202,203,204,205,206,208,209,210,211,212,214,215,216,217,218,220,221,222,223,224,226,227,228,229,230,232,233,234,235,236,237,239,240,241,242,243,245,246,247,248,249,251,252,253,254,255,256,258,259,260,261,262,265,266,267,268,269,271,272,273,274,275,276,278,279,280,281,282,283,285,286,287,288,289,292,293,294,295,296,299,300,301,302,303,306,307,308,309,310,311,313,314,315,316,317,318,320,321,322,323,324,325,327,328,332;167;;;T;600
+Demix=64;1;64;10;;
+
+BLOCK
+
+
+
+split_targets = F # T/F
+calibration = none # internal / external / none
+create_calibrator_observations = F # create cal-observations before target (T/F ignored if calibration is none)
+create_target_cal_beam = F # create cal-beam in target observation (T/F ignored if calibration is none)
+processing=Preprocessing # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
+imagingPipeline=none # can be one of MSSS, standard, none
+repeat=1 # the number of repeats of this block
+
+packageName=20150731_G46_run1_HBA # name of package folder containing observation/pipelines
+packageDescription=20150731_G46_run1_HBA
+packageTag= # optional tag to be prepended before every obs/pipeline name/description (max 8 chars)
+
+antennaMode=HBA Dual Inner
+clock=200 MHz
+instrumentFilter=110-190 MHz
+numberOfBitsPerSample=8 # Change to 16 if requested
+integrationTime=2.0
+channelsPerSubband=256
+stationList=nl
+tbbPiggybackAllowed=T
+aartfaacPiggybackAllowed=T
+
+###### Which data types should be produced: ######
+
+correlatedData=T
+coherentStokesData=F
+incoherentStokesData=F
+flysEye=F
+coherentDedisperseChannels=False
+
+###### Coherent Stokes parameters ######
+#subbandsPerFileCS=
+#numberCollapsedChannelsCS=
+#stokesDownsamplingStepsCS=
+#whichCS=
+###### Coherent Stokes parameters ######
+#subbandsPerFileIS=
+#numberCollapsedChannelsIS=
+#stokesDownsamplingStepsIS=
+#whichIS=
+
+flaggingStrategy=HBAdefault
+calibratorDuration_s=600 # duration of calibration observations in seconds
+targetDuration_s=15000 # duration of target observations in seconds
+
+
+# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
+# un-comment the startTimeUTC to have the observation start times generated
+startTimeUTC=2015-07-31 20:11:00
+# timeStep's in seconds
+timeStep1=60
+timeStep2=60
+
+
+
+targetBeams=  ## MAKE SURE TO SELECT THIS PROPERLY
+
+19:15:36.34;+11:36:33.0;G46_hba_RRL;54,58,62,67,71,76,80,81,85,90,94,95,99,104,109,114,119,124,129,134,140,145,150,156,161,166,167,172,178,183,184,189,195,201,207,213,219,225,231,238,244,250,257,263,264,270,277,284,290,291,297,298,304,305,312,319,326,333,334,341,349,356,364,372,380,388,396,404,412,413,421,429,430,438,447,455,456;77;;;T;15000
+
+Demix=1;1;64;10;;
+
+
+19:15:36.34;+11:36:33.0;G46_hba_CNT;120,125,126,127,128,130,131,132,133,135,136,137,138,139,141,142,143,144,146,147,148,149,151,152,153,154,155,157,158,159,160,162,163,164,165,168,169,170,171,173,174,175,176,177,179,180,181,182,185,186,187,188,190,191,192,193,194,196,197,198,199,200,202,203,204,205,206,208,209,210,211,212,214,215,216,217,218,220,221,222,223,224,226,227,228,229,230,232,233,234,235,236,237,239,240,241,242,243,245,246,247,248,249,251,252,253,254,255,256,258,259,260,261,262,265,266,267,268,269,271,272,273,274,275,276,278,279,280,281,282,283,285,286,287,288,289,292,293,294,295,296,299,300,301,302,303,306,307,308,309,310,311,313,314,315,316,317,318,320,321,322,323,324,325,327,328,332;167;;;T;15000
+Demix=64;1;64;10;;
+
+
+18:38:25.46;+17:12:12.4;phase_ref;54,58,62,67,71,76,80,81,85,90,94,95,99,104,109,114,119,120,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,313,314,315,316,317,318,319,320,321,322,323,324,325,326,327,328,332,333,334,341,349,356,364,372,380,388,396,404,412,413,421,429,430,438,447,455,456;244;;;T;15000
+
+Demix=64;1;64;10;;
+
+BLOCK
+
+split_targets = F # T/F
+calibration = none # internal / external / none
+create_calibrator_observations = F # create cal-observations before target (T/F ignored if calibration is none)
+create_target_cal_beam = F # create cal-beam in target observation (T/F ignored if calibration is none)
+processing=Preprocessing # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
+imagingPipeline=none # can be one of MSSS, standard, none
+repeat=1 # the number of repeats of this block
+
+packageName=3C380 # name of package folder containing observation/pipelines
+packageDescription=3C380
+packageTag= # optional tag to be prepended before every obs/pipeline name/description (max 8 chars)
+
+antennaMode=HBA Dual Inner
+clock=200 MHz
+instrumentFilter=110-190 MHz
+numberOfBitsPerSample=8 # Change to 16 if requested
+integrationTime=2.0
+channelsPerSubband=256
+stationList=nl
+tbbPiggybackAllowed=T
+aartfaacPiggybackAllowed=T
+
+###### Which data types should be produced: ######
+
+correlatedData=T
+coherentStokesData=F
+incoherentStokesData=F
+flysEye=F
+coherentDedisperseChannels=False
+
+###### Coherent Stokes parameters ######
+#subbandsPerFileCS=
+#numberCollapsedChannelsCS=
+#stokesDownsamplingStepsCS=
+#whichCS=
+###### Coherent Stokes parameters ######
+#subbandsPerFileIS=
+#numberCollapsedChannelsIS=
+#stokesDownsamplingStepsIS=
+#whichIS=
+
+flaggingStrategy=HBAdefault
+calibratorDuration_s=0 # duration of calibration observations in seconds
+targetDuration_s=600 # duration of target observations in seconds
+
+
+# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
+# un-comment the startTimeUTC to have the observation start times generated
+startTimeUTC=2015-08-01 00:21:00
+# timeStep's in seconds
+timeStep1=60
+timeStep2=60
+
+
+
+targetBeams=  ## MAKE SURE TO SELECT THIS PROPERLY
+
+18:29:31.72;+48:44:47.0;3C380RR;54,58,62,67,71,76,80,81,85,90,94,95,99,104,109,114,119,124,129,134,140,145,150,156,161,166,167,172,178,183,184,189,195,201,207,213,219,225,231,238,244,250,257,263,264,270,277,284,290,291,297,298,304,305,312,319,326,333,334,341,349,356,364,372,380,388,396,404,412,413,421,429,430,438,447,455,456;77;;;T;600
+Demix=1;1;64;10;;
+
+
+18:29:31.72;+48:44:47.0;3C380Con;120,125,126,127,128,130,131,132,133,135,136,137,138,139,141,142,143,144,146,147,148,149,151,152,153,154,155,157,158,159,160,162,163,164,165,168,169,170,171,173,174,175,176,177,179,180,181,182,185,186,187,188,190,191,192,193,194,196,197,198,199,200,202,203,204,205,206,208,209,210,211,212,214,215,216,217,218,220,221,222,223,224,226,227,228,229,230,232,233,234,235,236,237,239,240,241,242,243,245,246,247,248,249,251,252,253,254,255,256,258,259,260,261,262,265,266,267,268,269,271,272,273,274,275,276,278,279,280,281,282,283,285,286,287,288,289,292,293,294,295,296,299,300,301,302,303,306,307,308,309,310,311,313,314,315,316,317,318,320,321,322,323,324,325,327,328,332;167;;;T;600
+Demix=64;1;64;10;;
diff --git a/SAS/XML_generator/test/test_regression.in_data/txt/B1834620_HBA_LB_20150817.txt b/SAS/XML_generator/test/test_regression.in_data/txt/B1834620_HBA_LB_20150817.txt
index 40714ed24ad61ab4a656970dd283cb5efe13b06c..5ae0d31e518d1b3f941500ee3f3a077d30a22f3e 100644
--- a/SAS/XML_generator/test/test_regression.in_data/txt/B1834620_HBA_LB_20150817.txt
+++ b/SAS/XML_generator/test/test_regression.in_data/txt/B1834620_HBA_LB_20150817.txt
@@ -1,93 +1,93 @@
-
-projectName=Commissioning2015
-mainFolderName=B1834+620_phase_shift3
-mainFolderDescription=B1834+620_phase_shift HBA LB
-
-BLOCK
-
-split_targets = F                       # true:create a separate target observation for every target (beam) line or false:combine them in a multi-beam observation
-calibration = external                  # internal / external / none
-create_calibrator_observations = T      # create calibration observations before target observations? (ignored if calibration = none)
-create_target_cal_beam = F              # create a calibration beam in the target observation(s)? (ignored if calibration = none)
-processing= LongBaseline                 # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
-imagingPipeline=none                    # can be one of MSSS, standard, none
-repeat=2                                # the number of repeats of this block
-
-packageName=20150907_B1834+620              # the name that will be given to the package folder that contains this block's observation and pipelines
-packageDescription=B1834+620_phase_shift HBA LB
-packageTag=                             # optional tag that will be prepended before every observation and pipeline name/description (Max 8 characters).
-
-antennaMode=HBA Dual Inner
-clock=200 MHz
-instrumentFilter=110-190 MHz
-numberOfBitsPerSample=8
-integrationTime=1.0
-channelsPerSubband=64
-stationList=all                        # comma-separated list of station names and/or the following aliasses: core, superterp, remote, international, all, nl
-tbbPiggybackAllowed=T
-aartfaacPiggybackAllowed=F
-subbandsPerSubbandGroup=16
-subbandGroupsPerMS=1
-###### Which data types should be produced: ######
-correlatedData=T
-coherentStokesData=F
-incoherentStokesData=F
-flysEye=F
-coherentDedisperseChannels=False
-###### Coherent Stokes parameters ######
-subbandsPerFileCS=512
-numberCollapsedChannelsCS=16
-stokesDownsamplingStepsCS=128
-whichCS=IQUV
-###### Incoherent Stokes parameters ######
-#subbandsPerFileIS=16
-#numberCollapsedChannelsIS=4
-#stokesDownsamplingStepsIS=12
-#whichIS=IQUV
-flaggingStrategy=HBAdefault             # flagging strategy used for AOflagger
-calibratorDuration_s=600                # duration of calibration observations in seconds
-targetDuration_s=3600                    # duration of target observations in seconds
-
-###### Globals are used when a target/calibration line does not have its own specific parameter specified for that property ######
-#Global_TAB=
-#c;05:34:51.94;+22:00:52.2              # a coherent tab with absolute coordinates 05:34:51.94, +22:00:52.2
-#Global_TABrings=1;0.1                   # nr of tabrings and tabring size
-#Global_Pulsar=B0329+54;T;T;DSPSR EXTRA OPTIONS;PREPDATA;5.1;-2;F;F;F;F;F;2BF2FITS;4;101;RFI FIND EXTRA OPTIONS;PREPFOLD EXTRA;PREPSUBBAND Extra; 0.6;T;T
-#Global_Demix=16;4;64;10;CasA,CygA;
-Global_Subbands=76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384;240
-
-# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
-# un-comment the startTimeUTC to have the observation start times generated
-startTimeUTC=2015-09-07 18:45:00
-# timeStep's in seconds
-timeStep1=60
-timeStep2=60
-
-calibratorBeam=
-18:29:31.8;+48:44:46;3C380;;;;;T;900
-BBS=3C380;;;T
-Demix=2;2;64;10;[CygA,CasA];
-#TAB:
-#c;05:34:51.94;+22:00:52.2
-
-# target beams and target pipelines
-# ra ;dec; targetname; subbandList; nrSubbands; nrTABrings; TABringSize; create_pipeline [;pipeline duration seconds]
-# optionally followed by BBS and/or demixing settings
-# BBS: SkyModel;BBS_baselines;BBS_correlations;BBS_beamModelEnable;BBS_solveParms;BBS_solveUVRange;BBS_strategyBaselines;BBS_strategyTimeRange
-# Demix: avg freq step; avg time step; demix freq step; demix time step; demix_always; demix_if_needed; ignore_target
-# Pulsar: pulsar; single-pulse; raw-to-8bit; dspsr-extra-opts; prepdata-extra-opts; 8bit-conversion-sigma; tsubint; norfi; nofold; nopdmp; skip-dspsr; rrats; 2bf2fits-extra-opts; decode-sigma; decode-nblocks; rfifind-extra-opts; prepfold-extra-opts; prepsubband-extra-opts; dynamic-spectrum-time-average; skip-dynamic-spectrum; skip-prepfold
-
-targetBeams=
-18:35:10.9;+62:04:08;B1834+620;;;;;T;1000
-Demix=2;2;64;10;[CygA,CasA];
-
-18:35:19.74;+61:19:39.1;J183519+611939;;;;;T;1000
-Demix=2;2;64;10;[CygA,CasA];
-
-
-nrSubbandsPerImage=10 # (integer, mandatory) the number of subbands grouped together to form one image
-# the following imaging parameters are all optional, if not specified the default value from the default template is used for that parameter
-maxBaseline_m=10000 # (integer) the maximum base-line in meters used in the imaging
-fieldOfView_deg=5.0 # (float) the field of view in degrees
-
-BLOCK
+
+projectName=Commissioning2015
+mainFolderName=B1834+620_phase_shift3
+mainFolderDescription=B1834+620_phase_shift HBA LB
+
+BLOCK
+
+split_targets = F                       # true:create a separate target observation for every target (beam) line or false:combine them in a multi-beam observation
+calibration = external                  # internal / external / none
+create_calibrator_observations = T      # create calibration observations before target observations? (ignored if calibration = none)
+create_target_cal_beam = F              # create a calibration beam in the target observation(s)? (ignored if calibration = none)
+processing= LongBaseline                 # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
+imagingPipeline=none                    # can be one of MSSS, standard, none
+repeat=2                                # the number of repeats of this block
+
+packageName=20150907_B1834+620              # the name that will be given to the package folder that contains this block's observation and pipelines
+packageDescription=B1834+620_phase_shift HBA LB
+packageTag=                             # optional tag that will be prepended before every observation and pipeline name/description (Max 8 characters).
+
+antennaMode=HBA Dual Inner
+clock=200 MHz
+instrumentFilter=110-190 MHz
+numberOfBitsPerSample=8
+integrationTime=1.0
+channelsPerSubband=64
+stationList=all                        # comma-separated list of station names and/or the following aliases: core, superterp, remote, international, all, nl
+tbbPiggybackAllowed=T
+aartfaacPiggybackAllowed=F
+subbandsPerSubbandGroup=16
+subbandGroupsPerMS=1
+###### Which data types should be produced: ######
+correlatedData=T
+coherentStokesData=F
+incoherentStokesData=F
+flysEye=F
+coherentDedisperseChannels=False
+###### Coherent Stokes parameters ######
+subbandsPerFileCS=512
+numberCollapsedChannelsCS=16
+stokesDownsamplingStepsCS=128
+whichCS=IQUV
+###### Incoherent Stokes parameters ######
+#subbandsPerFileIS=16
+#numberCollapsedChannelsIS=4
+#stokesDownsamplingStepsIS=12
+#whichIS=IQUV
+flaggingStrategy=HBAdefault             # flagging strategy used for AOflagger
+calibratorDuration_s=600                # duration of calibration observations in seconds
+targetDuration_s=3600                    # duration of target observations in seconds
+
+###### Globals are used when a target/calibration line does not have its own specific parameter specified for that property ######
+#Global_TAB=
+#c;05:34:51.94;+22:00:52.2              # a coherent tab with absolute coordinates 05:34:51.94, +22:00:52.2
+#Global_TABrings=1;0.1                   # nr of tabrings and tabring size
+#Global_Pulsar=B0329+54;T;T;DSPSR EXTRA OPTIONS;PREPDATA;5.1;-2;F;F;F;F;F;2BF2FITS;4;101;RFI FIND EXTRA OPTIONS;PREPFOLD EXTRA;PREPSUBBAND Extra; 0.6;T;T
+#Global_Demix=16;4;64;10;CasA,CygA;
+Global_Subbands=76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,300,301,302,303,304,305,306,307,308,309,310,311,312,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377,378,379,380,381,382,383,384;240
+
+# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
+# un-comment the startTimeUTC to have the observation start times generated
+startTimeUTC=2015-09-07 18:45:00
+# timeStep's in seconds
+timeStep1=60
+timeStep2=60
+
+calibratorBeam=
+18:29:31.8;+48:44:46;3C380;;;;;T;900
+BBS=3C380;;;T
+Demix=2;2;64;10;[CygA,CasA];
+#TAB:
+#c;05:34:51.94;+22:00:52.2
+
+# target beams and target pipelines
+# ra ;dec; targetname; subbandList; nrSubbands; nrTABrings; TABringSize; create_pipeline [;pipeline duration seconds]
+# optionally followed by BBS and/or demixing settings
+# BBS: SkyModel;BBS_baselines;BBS_correlations;BBS_beamModelEnable;BBS_solveParms;BBS_solveUVRange;BBS_strategyBaselines;BBS_strategyTimeRange
+# Demix: avg freq step; avg time step; demix freq step; demix time step; demix_always; demix_if_needed; ignore_target
+# Pulsar: pulsar; single-pulse; raw-to-8bit; dspsr-extra-opts; prepdata-extra-opts; 8bit-conversion-sigma; tsubint; norfi; nofold; nopdmp; skip-dspsr; rrats; 2bf2fits-extra-opts; decode-sigma; decode-nblocks; rfifind-extra-opts; prepfold-extra-opts; prepsubband-extra-opts; dynamic-spectrum-time-average; skip-dynamic-spectrum; skip-prepfold
+
+targetBeams=
+18:35:10.9;+62:04:08;B1834+620;;;;;T;1000
+Demix=2;2;64;10;[CygA,CasA];
+
+18:35:19.74;+61:19:39.1;J183519+611939;;;;;T;1000
+Demix=2;2;64;10;[CygA,CasA];
+
+
+nrSubbandsPerImage=10 # (integer, mandatory) the number of subbands grouped together to form one image
+# the following imaging parameters are all optional, if not specified the default value from the default template is used for that parameter
+maxBaseline_m=10000 # (integer) the maximum base-line in meters used in the imaging
+fieldOfView_deg=5.0 # (float) the field of view in degrees
+
+BLOCK
diff --git a/SAS/XML_generator/test/test_regression.in_data/txt/LC4_020_20150813.txt b/SAS/XML_generator/test/test_regression.in_data/txt/LC4_020_20150813.txt
index a2ec8a2470a9bc02de23e43e7d47a7c7c5804076..bd15388c670f2b39bf74ff5652f9aeee056c2c23 100644
--- a/SAS/XML_generator/test/test_regression.in_data/txt/LC4_020_20150813.txt
+++ b/SAS/XML_generator/test/test_regression.in_data/txt/LC4_020_20150813.txt
@@ -1,107 +1,107 @@
-################################################################################
-## Parameters for project: LC4_020
-################################################################################
-
-################################################################################
-## RUN 01: 1 targets -- duration:  37800s ( 10.50h)
-## Abell 2069, 15:23:57.9,+29:53:26.0
- 
-## PARAMETER FILE SETUP 
-projectName=LC4_020
-mainFolderName=A2069_20150813
-mainFolderDescription=A2069_20150813  15:23:57.9,+29:53:26.0
-
-BLOCK
-
-split_targets = F # T/F
-calibration = internal # internal / external / none
-create_calibrator_observations = F # create cal-observations before target (T/F ignored if calibration is none)
-create_target_cal_beam = T # create cal-beam in target observation (T/F ignored if calibration is none)
-processing=Preprocessing # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
-imagingPipeline=none # can be one of MSSS, standard, none
-repeat=1 # the number of repeats of this block
-
-packageName=20150813_A2069 # name of package folder containing observation/pipelines
-packageDescription=
-packageTag= # optional tag to be prepended before every obs/pipeline name/description (max 8 chars)
-
-antennaMode=LBA Outer
-clock=200 MHz
-instrumentFilter=30-90 MHz
-numberOfBitsPerSample=8 # Change to 16 if requested
-integrationTime=1.0
-channelsPerSubband=64
-stationList=nl
-tbbPiggybackAllowed=T
-aartfaacPiggybackAllowed=T
-
-###### Which data types should be produced: ######
-
-correlatedData=T
-coherentStokesData=F
-incoherentStokesData=F
-flysEye=F
-coherentDedisperseChannels=False
-
-###### Coherent Stokes parameters ######
-#subbandsPerFileCS=
-#numberCollapsedChannelsCS=
-#stokesDownsamplingStepsCS=
-#whichCS=
-###### Coherent Stokes parameters ######
-#subbandsPerFileIS=
-#numberCollapsedChannelsIS=
-#stokesDownsamplingStepsIS=
-#whichIS=
-
-flaggingStrategy=LBAdefault
-calibratorDuration_s=37800 # duration of calibration observations in seconds
-targetDuration_s=37800 # duration of target observations in seconds
-
-
-# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss

-# un-comment the startTimeUTC to have the observation start times generated

-startTimeUTC=2015-08-13 13:00:00

-# timeStep's in seconds

-timeStep1=60

-timeStep2=60

-

-

-
-
-
-###### Pipeline settings ######
-## GENERAL INFO
-## Processing mode: Calibration
-## Demixing requested: CygA CasA
-
-# calibrator beam and calibrator pipeline
-# ra; dec; target name; subband list; nrSubbands; nrTABrings; TABringSize; create_pipeline [;pipeline duration seconds]
-# optionally followed by its own 'BBS:' and/or 'Demix:' settings
-# BBS:SkyModel;BBS_baselines;BBS_correlations;BBS_beamModelEnable;BBS_solveParms;BBS_solveUVRange;BBS_strategyBaselines;BBS_strategyTimeRange
-# Demix:avg freq step; avg time step; demix freq step; demix time step; demix_always; demix_if_needed; ignore_target
-calibratorBeam=
-14:11:20.519;+52:12:09.97;3C295;154..397;244;;;T;37800
-Demix=4;2;64;10;CygA,CasA;
-
-# target beams and target pipelines
-# ra ;dec; targetname; subbandList; nrSubbands; nrTABrings; TABringSize; create_pipeline [;pipeline duration seconds]
-# optionally followed by BBS and/or demixing settings
-# BBS: SkyModel;BBS_baselines;BBS_correlations;BBS_beamModelEnable;BBS_solveParms;BBS_solveUVRange;BBS_strategyBaselines;BBS_strategyTimeRange
-# Demix: avg freq step; avg time step; demix freq step; demix time step; demix_always; demix_if_needed; ignore_target
-# Pulsar: pulsar; single-pulse; raw-to-8bit; dspsr-extra-opts; prepdata-extra-opts; 8bit-conversion-sigma; tsubint; norfi; nofold; nopdmp; skip-dspsr; rrats; 2bf2fits-extra-opts; decode-sigma; decode-nblocks; rfifind-extra-opts; prepfold-extra-opts; prepsubband-extra-opts; dynamic-spectrum-time-average; skip-dynamic-spectrum; skip-prepfold
-
-targetBeams= 
-15:23:57.9;+29:53:26.0;Abell 2069;154..397;244;;;T;37800
-Demix=4;2;64;10;CygA,CasA;
-# BBS: Add parameters if BBS needed
-# Pulsar: Add parameters if pulsar pipeline needed
-
-
-
-
-
-
-
-
- 
+################################################################################
+## Parameters for project: LC4_020
+################################################################################
+
+################################################################################
+## RUN 01: 1 targets -- duration:  37800s ( 10.50h)
+## Abell 2069, 15:23:57.9,+29:53:26.0
+ 
+## PARAMETER FILE SETUP 
+projectName=LC4_020
+mainFolderName=A2069_20150813
+mainFolderDescription=A2069_20150813  15:23:57.9,+29:53:26.0
+
+BLOCK
+
+split_targets = F # T/F
+calibration = internal # internal / external / none
+create_calibrator_observations = F # create cal-observations before target (T/F ignored if calibration is none)
+create_target_cal_beam = T # create cal-beam in target observation (T/F ignored if calibration is none)
+processing=Preprocessing # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
+imagingPipeline=none # can be one of MSSS, standard, none
+repeat=1 # the number of repeats of this block
+
+packageName=20150813_A2069 # name of package folder containing observation/pipelines
+packageDescription=
+packageTag= # optional tag to be prepended before every obs/pipeline name/description (max 8 chars)
+
+antennaMode=LBA Outer
+clock=200 MHz
+instrumentFilter=30-90 MHz
+numberOfBitsPerSample=8 # Change to 16 if requested
+integrationTime=1.0
+channelsPerSubband=64
+stationList=nl
+tbbPiggybackAllowed=T
+aartfaacPiggybackAllowed=T
+
+###### Which data types should be produced: ######
+
+correlatedData=T
+coherentStokesData=F
+incoherentStokesData=F
+flysEye=F
+coherentDedisperseChannels=False
+
+###### Coherent Stokes parameters ######
+#subbandsPerFileCS=
+#numberCollapsedChannelsCS=
+#stokesDownsamplingStepsCS=
+#whichCS=
+###### Coherent Stokes parameters ######
+#subbandsPerFileIS=
+#numberCollapsedChannelsIS=
+#stokesDownsamplingStepsIS=
+#whichIS=
+
+flaggingStrategy=LBAdefault
+calibratorDuration_s=37800 # duration of calibration observations in seconds
+targetDuration_s=37800 # duration of target observations in seconds
+
+
+# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
+# un-comment the startTimeUTC to have the observation start times generated
+startTimeUTC=2015-08-13 13:00:00
+# timeStep's in seconds
+timeStep1=60
+timeStep2=60
+
+
+
+
+
+###### Pipeline settings ######
+## GENERAL INFO
+## Processing mode: Calibration
+## Demixing requested: CygA CasA
+
+# calibrator beam and calibrator pipeline
+# ra; dec; target name; subband list; nrSubbands; nrTABrings; TABringSize; create_pipeline [;pipeline duration seconds]
+# optionally followed by its own 'BBS:' and/or 'Demix:' settings
+# BBS:SkyModel;BBS_baselines;BBS_correlations;BBS_beamModelEnable;BBS_solveParms;BBS_solveUVRange;BBS_strategyBaselines;BBS_strategyTimeRange
+# Demix:avg freq step; avg time step; demix freq step; demix time step; demix_always; demix_if_needed; ignore_target
+calibratorBeam=
+14:11:20.519;+52:12:09.97;3C295;154..397;244;;;T;37800
+Demix=4;2;64;10;CygA,CasA;
+
+# target beams and target pipelines
+# ra ;dec; targetname; subbandList; nrSubbands; nrTABrings; TABringSize; create_pipeline [;pipeline duration seconds]
+# optionally followed by BBS and/or demixing settings
+# BBS: SkyModel;BBS_baselines;BBS_correlations;BBS_beamModelEnable;BBS_solveParms;BBS_solveUVRange;BBS_strategyBaselines;BBS_strategyTimeRange
+# Demix: avg freq step; avg time step; demix freq step; demix time step; demix_always; demix_if_needed; ignore_target
+# Pulsar: pulsar; single-pulse; raw-to-8bit; dspsr-extra-opts; prepdata-extra-opts; 8bit-conversion-sigma; tsubint; norfi; nofold; nopdmp; skip-dspsr; rrats; 2bf2fits-extra-opts; decode-sigma; decode-nblocks; rfifind-extra-opts; prepfold-extra-opts; prepsubband-extra-opts; dynamic-spectrum-time-average; skip-dynamic-spectrum; skip-prepfold
+
+targetBeams= 
+15:23:57.9;+29:53:26.0;Abell 2069;154..397;244;;;T;37800
+Demix=4;2;64;10;CygA,CasA;
+# BBS: Add parameters if BBS needed
+# Pulsar: Add parameters if pulsar pipeline needed
+
+
+
+
+
+
+
+
+ 
diff --git a/SAS/XML_generator/test/test_regression.in_data/txt/LC4_022_3C58_HBA_parameters.txt b/SAS/XML_generator/test/test_regression.in_data/txt/LC4_022_3C58_HBA_parameters.txt
index d8cfdcdbef6847c6decdb8574ae10cf197bb28b1..0abc57c150bbfea8a844c70cc3cb000a3f86a108 100644
--- a/SAS/XML_generator/test/test_regression.in_data/txt/LC4_022_3C58_HBA_parameters.txt
+++ b/SAS/XML_generator/test/test_regression.in_data/txt/LC4_022_3C58_HBA_parameters.txt
@@ -1,182 +1,182 @@
-## PARAMETER FILE SETUP 
-projectName=LC4_022
-mainFolderName=20151018_3C58
-mainFolderDescription=HBA_DUAL_INNER, 110-190 MHz, av: 16/2, imaging, 20SB/chk
-
-BLOCK
-
-split_targets = F # T/F
-calibration = external # internal / external / none
-create_calibrator_observations = T # create cal-observations before target (T/F ignored if calibration is none)
-create_target_cal_beam = F # create cal-beam in target observation (T/F ignored if calibration is none)
-processing=Imaging # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
-
-imagingPipeline=standard # can be one of MSSS, standard, none
-nrSubbandsPerImage=20
-fieldOfView_deg=5.0
-
-repeat=1 # the number of repeats of this block
-
-packageName=3C196/3C58 # name of package folder containing observation/pipelines
-packageDescription=3C196/3C58 Scan
-packageTag= # optional tag to be prepended before every obs/pipeline name/description (max 8 chars)
-
-antennaMode=HBA Dual Inner
-clock=200 MHz
-instrumentFilter=110-190 MHz
-numberOfBitsPerSample=8 # Change to 16 if requested
-integrationTime=2.0
-channelsPerSubband=64
-stationList=nl
-tbbPiggybackAllowed=T
-aartfaacPiggybackAllowed=T
-
-###### Which data types should be produced: ######
-
-correlatedData=T
-coherentStokesData=F
-incoherentStokesData=F
-flysEye=F
-coherentDedisperseChannels=False
-
-###### Coherent Stokes parameters ######
-#subbandsPerFileCS=
-#numberCollapsedChannelsCS=
-#stokesDownsamplingStepsCS=
-#whichCS=
-###### Coherent Stokes parameters ######
-#subbandsPerFileIS=
-#numberCollapsedChannelsIS=
-#stokesDownsamplingStepsIS=
-#whichIS=
-
-flaggingStrategy=HBAdefault
-calibratorDuration_s=600 # duration of calibration observations in seconds
-targetDuration_s=20400 # duration of target observations in seconds
-
-###### Pipeline settings ######
-## GENERAL INFO
-## Processing mode: Imaging
-
-
-Global_Demix=16;2;16;2;;;
-Global_Subbands=100..339;240
-
-# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
-# un-comment the startTimeUTC to have the observation start times generated
-startTimeUTC=2015-10-18 21:00:00
-# timeStep's in seconds
-timeStep1=60
-timeStep2=60
-
-calibratorBeam=
-08:13:36.07;+48:13:02.6;3C196;;;;;T;1000
-BBS=3C196;;;T
-#Demix=8;2;64;10;[CasA,CygA];;
-#TAB:
-#c;05:34:51.94;+22:00:52.2
-
-# target beams and target pipelines
-# ra ;dec; targetname; subbandList; nrSubbands; nrTABrings; TABringSize; create_pipeline [;pipeline duration seconds]
-# optionally followed by BBS and/or demixing settings
-# BBS: SkyModel;BBS_baselines;BBS_correlations;BBS_beamModelEnable;BBS_solveParms;BBS_solveUVRange;BBS_strategyBaselines;BBS_strategyTimeRange
-# Demix: avg freq step; avg time step; demix freq step; demix time step; demix_always; demix_if_needed; ignore_target
-# Pulsar: pulsar; single-pulse; raw-to-8bit; dspsr-extra-opts; prepdata-extra-opts; 8bit-conversion-sigma; tsubint; norfi; nofold; nopdmp; skip-dspsr; rrats; 2bf2fits-extra-opts; decode-sigma; decode-nblocks; rfifind-extra-opts; prepfold-extra-opts; prepsubband-extra-opts; dynamic-spectrum-time-average; skip-dynamic-spectrum; skip-prepfold
-
-targetBeams=
-02:05:38.00;+64:49:42.0;3C58;;;;;T;50000
-#Pulsar=B0531+21;;T;;;;;;;;;;;;;;;;;;
-#0.0417300951946;0.00558069028325;Sun;54..297;244;;;T;8100
-#Demix=8;2;64;10;[CasA,CygA];;
-
-#subbandsPerSubbandGroup = 16             # the number of subbands that will be concatenated in a subband-group
-#subbandGroupsPerMS = 1                  # the number of subband-groups that will be (virually) concatenated in each measurement set
-
-BLOCK1
-
-split_targets = F # T/F
-calibration = none # internal / external / none
-create_calibrator_observations = F # create cal-observations before target (T/F ignored if calibration is none)
-create_target_cal_beam = F # create cal-beam in target observation (T/F ignored if calibration is none)
-processing=Preprocessing # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
-
-#imagingPipeline=standard # can be one of MSSS, standard, none
-#nrSubbandsPerImage=20
-#fieldOfView_deg=5.0
-
-repeat=1 # the number of repeats of this block
-
-packageName=3C196 # name of package folder containing observation/pipelines
-packageDescription=3C196 Bookend Scan
-packageTag= # optional tag to be prepended before every obs/pipeline name/description (max 8 chars)
-
-antennaMode=HBA Dual Inner
-clock=200 MHz
-instrumentFilter=110-190 MHz
-numberOfBitsPerSample=8 # Change to 16 if requested
-integrationTime=2.0
-channelsPerSubband=64
-stationList=nl
-tbbPiggybackAllowed=T
-aartfaacPiggybackAllowed=T
-
-###### Which data types should be produced: ######
-
-correlatedData=T
-coherentStokesData=F
-incoherentStokesData=F
-flysEye=F
-coherentDedisperseChannels=False
-
-###### Coherent Stokes parameters ######
-#subbandsPerFileCS=
-#numberCollapsedChannelsCS=
-#stokesDownsamplingStepsCS=
-#whichCS=
-###### Coherent Stokes parameters ######
-#subbandsPerFileIS=
-#numberCollapsedChannelsIS=
-#stokesDownsamplingStepsIS=
-#whichIS=
-
-flaggingStrategy=HBAdefault
-#calibratorDuration_s=3600 # duration of calibration observations in seconds
-targetDuration_s=600 # duration of target observations in seconds
-
-###### Pipeline settings ######
-## GENERAL INFO
-## Processing mode: Imaging
-
-
-Global_Demix=16;2;16;2;;;
-Global_Subbands=100..339;240
-
-# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
-# un-comment the startTimeUTC to have the observation start times generated
-startTimeUTC=2015-10-19 02:52:00
-# timeStep's in seconds
-timeStep1=60
-timeStep2=60
-
-#calibratorBeam=
-#08:13:36.07;+48:13:02.6;3C196;;;;;T;50000
-#BBS=3C196;;;T
-#Demix=8;2;64;10;[CasA,CygA];;
-#TAB:
-#c;05:34:51.94;+22:00:52.2
-
-# target beams and target pipelines
-# ra ;dec; targetname; subbandList; nrSubbands; nrTABrings; TABringSize; create_pipeline [;pipeline duration seconds]
-# optionally followed by BBS and/or demixing settings
-# BBS: SkyModel;BBS_baselines;BBS_correlations;BBS_beamModelEnable;BBS_solveParms;BBS_solveUVRange;BBS_strategyBaselines;BBS_strategyTimeRange
-# Demix: avg freq step; avg time step; demix freq step; demix time step; demix_always; demix_if_needed; ignore_target
-# Pulsar: pulsar; single-pulse; raw-to-8bit; dspsr-extra-opts; prepdata-extra-opts; 8bit-conversion-sigma; tsubint; norfi; nofold; nopdmp; skip-dspsr; rrats; 2bf2fits-extra-opts; decode-sigma; decode-nblocks; rfifind-extra-opts; prepfold-extra-opts; prepsubband-extra-opts; dynamic-spectrum-time-average; skip-dynamic-spectrum; skip-prepfold
-
-targetBeams=
-08:13:36.07;+48:13:02.6;3C196;;;;;T;1000
-#Pulsar=B0531+21;;T;;;;;;;;;;;;;;;;;;
-#0.0417300951946;0.00558069028325;Sun;54..297;244;;;T;8100
-#Demix=8;2;64;10;[CasA,CygA];;
-
-#subbandsPerSubbandGroup = 16             # the number of subbands that will be concatenated in a subband-group
+## PARAMETER FILE SETUP 
+projectName=LC4_022
+mainFolderName=20151018_3C58
+mainFolderDescription=HBA_DUAL_INNER, 110-190 MHz, av: 16/2, imaging, 20SB/chk
+
+BLOCK
+
+split_targets = F # T/F
+calibration = external # internal / external / none
+create_calibrator_observations = T # create cal-observations before target (T/F ignored if calibration is none)
+create_target_cal_beam = F # create cal-beam in target observation (T/F ignored if calibration is none)
+processing=Imaging # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
+
+imagingPipeline=standard # can be one of MSSS, standard, none
+nrSubbandsPerImage=20
+fieldOfView_deg=5.0
+
+repeat=1 # the number of repeats of this block
+
+packageName=3C196/3C58 # name of package folder containing observation/pipelines
+packageDescription=3C196/3C58 Scan
+packageTag= # optional tag to be prepended before every obs/pipeline name/description (max 8 chars)
+
+antennaMode=HBA Dual Inner
+clock=200 MHz
+instrumentFilter=110-190 MHz
+numberOfBitsPerSample=8 # Change to 16 if requested
+integrationTime=2.0
+channelsPerSubband=64
+stationList=nl
+tbbPiggybackAllowed=T
+aartfaacPiggybackAllowed=T
+
+###### Which data types should be produced: ######
+
+correlatedData=T
+coherentStokesData=F
+incoherentStokesData=F
+flysEye=F
+coherentDedisperseChannels=False
+
+###### Coherent Stokes parameters ######
+#subbandsPerFileCS=
+#numberCollapsedChannelsCS=
+#stokesDownsamplingStepsCS=
+#whichCS=
+###### Coherent Stokes parameters ######
+#subbandsPerFileIS=
+#numberCollapsedChannelsIS=
+#stokesDownsamplingStepsIS=
+#whichIS=
+
+flaggingStrategy=HBAdefault
+calibratorDuration_s=600 # duration of calibration observations in seconds
+targetDuration_s=20400 # duration of target observations in seconds
+
+###### Pipeline settings ######
+## GENERAL INFO
+## Processing mode: Imaging
+
+
+Global_Demix=16;2;16;2;;;
+Global_Subbands=100..339;240
+
+# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
+# un-comment the startTimeUTC to have the observation start times generated
+startTimeUTC=2015-10-18 21:00:00
+# timeStep's in seconds
+timeStep1=60
+timeStep2=60
+
+calibratorBeam=
+08:13:36.07;+48:13:02.6;3C196;;;;;T;1000
+BBS=3C196;;;T
+#Demix=8;2;64;10;[CasA,CygA];;
+#TAB:
+#c;05:34:51.94;+22:00:52.2
+
+# target beams and target pipelines
+# ra ;dec; targetname; subbandList; nrSubbands; nrTABrings; TABringSize; create_pipeline [;pipeline duration seconds]
+# optionally followed by BBS and/or demixing settings
+# BBS: SkyModel;BBS_baselines;BBS_correlations;BBS_beamModelEnable;BBS_solveParms;BBS_solveUVRange;BBS_strategyBaselines;BBS_strategyTimeRange
+# Demix: avg freq step; avg time step; demix freq step; demix time step; demix_always; demix_if_needed; ignore_target
+# Pulsar: pulsar; single-pulse; raw-to-8bit; dspsr-extra-opts; prepdata-extra-opts; 8bit-conversion-sigma; tsubint; norfi; nofold; nopdmp; skip-dspsr; rrats; 2bf2fits-extra-opts; decode-sigma; decode-nblocks; rfifind-extra-opts; prepfold-extra-opts; prepsubband-extra-opts; dynamic-spectrum-time-average; skip-dynamic-spectrum; skip-prepfold
+
+targetBeams=
+02:05:38.00;+64:49:42.0;3C58;;;;;T;50000
+#Pulsar=B0531+21;;T;;;;;;;;;;;;;;;;;;
+#0.0417300951946;0.00558069028325;Sun;54..297;244;;;T;8100
+#Demix=8;2;64;10;[CasA,CygA];;
+
+#subbandsPerSubbandGroup = 16             # the number of subbands that will be concatenated in a subband-group
+#subbandGroupsPerMS = 1                  # the number of subband-groups that will be (virtually) concatenated in each measurement set
+
+BLOCK1
+
+split_targets = F # T/F
+calibration = none # internal / external / none
+create_calibrator_observations = F # create cal-observations before target (T/F ignored if calibration is none)
+create_target_cal_beam = F # create cal-beam in target observation (T/F ignored if calibration is none)
+processing=Preprocessing # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
+
+#imagingPipeline=standard # can be one of MSSS, standard, none
+#nrSubbandsPerImage=20
+#fieldOfView_deg=5.0
+
+repeat=1 # the number of repeats of this block
+
+packageName=3C196 # name of package folder containing observation/pipelines
+packageDescription=3C196 Bookend Scan
+packageTag= # optional tag to be prepended before every obs/pipeline name/description (max 8 chars)
+
+antennaMode=HBA Dual Inner
+clock=200 MHz
+instrumentFilter=110-190 MHz
+numberOfBitsPerSample=8 # Change to 16 if requested
+integrationTime=2.0
+channelsPerSubband=64
+stationList=nl
+tbbPiggybackAllowed=T
+aartfaacPiggybackAllowed=T
+
+###### Which data types should be produced: ######
+
+correlatedData=T
+coherentStokesData=F
+incoherentStokesData=F
+flysEye=F
+coherentDedisperseChannels=False
+
+###### Coherent Stokes parameters ######
+#subbandsPerFileCS=
+#numberCollapsedChannelsCS=
+#stokesDownsamplingStepsCS=
+#whichCS=
+###### Coherent Stokes parameters ######
+#subbandsPerFileIS=
+#numberCollapsedChannelsIS=
+#stokesDownsamplingStepsIS=
+#whichIS=
+
+flaggingStrategy=HBAdefault
+#calibratorDuration_s=3600 # duration of calibration observations in seconds
+targetDuration_s=600 # duration of target observations in seconds
+
+###### Pipeline settings ######
+## GENERAL INFO
+## Processing mode: Imaging
+
+
+Global_Demix=16;2;16;2;;;
+Global_Subbands=100..339;240
+
+# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
+# un-comment the startTimeUTC to have the observation start times generated
+startTimeUTC=2015-10-19 02:52:00
+# timeStep's in seconds
+timeStep1=60
+timeStep2=60
+
+#calibratorBeam=
+#08:13:36.07;+48:13:02.6;3C196;;;;;T;50000
+#BBS=3C196;;;T
+#Demix=8;2;64;10;[CasA,CygA];;
+#TAB:
+#c;05:34:51.94;+22:00:52.2
+
+# target beams and target pipelines
+# ra ;dec; targetname; subbandList; nrSubbands; nrTABrings; TABringSize; create_pipeline [;pipeline duration seconds]
+# optionally followed by BBS and/or demixing settings
+# BBS: SkyModel;BBS_baselines;BBS_correlations;BBS_beamModelEnable;BBS_solveParms;BBS_solveUVRange;BBS_strategyBaselines;BBS_strategyTimeRange
+# Demix: avg freq step; avg time step; demix freq step; demix time step; demix_always; demix_if_needed; ignore_target
+# Pulsar: pulsar; single-pulse; raw-to-8bit; dspsr-extra-opts; prepdata-extra-opts; 8bit-conversion-sigma; tsubint; norfi; nofold; nopdmp; skip-dspsr; rrats; 2bf2fits-extra-opts; decode-sigma; decode-nblocks; rfifind-extra-opts; prepfold-extra-opts; prepsubband-extra-opts; dynamic-spectrum-time-average; skip-dynamic-spectrum; skip-prepfold
+
+targetBeams=
+08:13:36.07;+48:13:02.6;3C196;;;;;T;1000
+#Pulsar=B0531+21;;T;;;;;;;;;;;;;;;;;;
+#0.0417300951946;0.00558069028325;Sun;54..297;244;;;T;8100
+#Demix=8;2;64;10;[CasA,CygA];;
+
+#subbandsPerSubbandGroup = 16             # the number of subbands that will be concatenated in a subband-group
 #subbandGroupsPerMS = 1                  # the number of subband-groups that will be (virually) concatenated in each measurement set
\ No newline at end of file
diff --git a/SAS/XML_generator/test/test_regression.in_data/txt/MSSS_20151207_testmultipipe.txt b/SAS/XML_generator/test/test_regression.in_data/txt/MSSS_20151207_testmultipipe.txt
index e26364647e71dc53ead3c8378cca2d497997b524..9e10038c20ab23b2bc8818a030f5485339592902 100644
--- a/SAS/XML_generator/test/test_regression.in_data/txt/MSSS_20151207_testmultipipe.txt
+++ b/SAS/XML_generator/test/test_regression.in_data/txt/MSSS_20151207_testmultipipe.txt
@@ -1,130 +1,130 @@
-
-projectName=LOFAROBS
-mainFolderName=multi_pipelines_20151207
-mainFolderDescription=multi_pipelines_20151207
-
-
-BLOCK1
-
-split_targets = F                       # true:create a separate target observation for every target (beam) line or false:combine them in a multi-beam observation
-calibration = none                  # internal / external / none
-create_calibrator_observations = F      # create calibration observations before target observations? (ignored if calibration = none)
-create_target_cal_beam = T              # create a calibration beam in the target observation(s)? (ignored if calibration = none)
-processing=Preprocessing              # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
-imagingPipeline=none                    # can be one of MSSS, standard, none
-repeat=1                                # the number of repeats of this block
-
-packageName=20151207_MSSS_LBA_test             # the name that will be given to the package folder that contains this block's observation and pipelines
-packageDescription=20151207_MSSS_LBA_test  LST 0
-packageTag=MSSSLBA                             # optional tag that will be prepended before every observation and pipeline name/description (Max 8 characters).
-
-antennaMode=LBA Inner
-clock=200 MHz
-instrumentFilter=30-90 MHz
-numberOfBitsPerSample=8
-integrationTime=1.0
-channelsPerSubband=64
-stationList=nl                        # comma-separated list of station names and/or the following aliasses: core, superterp, remote, international, all, nl
-tbbPiggybackAllowed=T
-aartfaacPiggybackAllowed=T
-subbandsPerSubbandGroup=1
-subbandGroupsPerMS=1
-###### Which data types should be produced: ######
-correlatedData=T
-coherentStokesData=F
-incoherentStokesData=F
-flysEye=F
-coherentDedisperseChannels=False
-###### Coherent Stokes parameters ######
-subbandsPerFileCS=512
-numberCollapsedChannelsCS=16
-stokesDownsamplingStepsCS=128
-whichCS=IQUV
-###### Incoherent Stokes parameters ######
-#subbandsPerFileIS=16
-#numberCollapsedChannelsIS=4
-#stokesDownsamplingStepsIS=12
-#whichIS=IQUV
-flaggingStrategy=LBAdefault             # flagging strategy used for AOflagger
-calibratorDuration_s=300                # duration of calibration observations in seconds
-targetDuration_s=300              # duration of target observations in seconds
-
-###### Globals are used when a target/calibration line does not have its own specific parameter specified for that property ######
-#Global_TAB=
-#c;05:34:51.94;+22:00:52.2              # a coherent tab with absolute coordinates 05:34:51.94, +22:00:52.2
-#Global_TABrings=1;0.1                   # nr of tabrings and tabring size
-#Global_Pulsar=B0329+54;T;T;DSPSR EXTRA OPTIONS;PREPDATA;5.1;-2;F;F;F;F;F;2BF2FITS;4;101;RFI FIND EXTRA OPTIONS;PREPFOLD EXTRA;PREPSUBBAND Extra; 0.6;T;T
-#Global_Demix=16;4;64;10;CasA,CygA;
-#Global_Subbands=
-
-# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
-# un-comment the startTimeUTC to have the observation start times generated
-startTimeUTC=2015-12-07 22:00:00
-# timeStep's in seconds
-timeStep1=60
-timeStep2=60
-
-calibratorBeam=
-19:59:28.3;+40:44:02;CygA;154,155,156,157,158,159,160,161,162,163,185,186,187,188,189,190,191,192,193,194,215,216,217,218,219,220,221,222,223,224,245,246,247,248,249,250,251,252,253,254,275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;80;;;T;7000
-#BBS=CygA;;;T
-Demix=8;2;64;10;[CasA]
-Demix=64;5;64;10;[CasA]
-#TAB:
-#c;05:34:51.94;+22:00:52.2
-
-# target beams and target pipelines
-# ra ;dec; targetname; subbandList; nrSubbands; nrTABrings; TABringSize; create_pipeline [;pipeline duration seconds]
-# optionally followed by BBS and/or demixing settings
-# BBS: SkyModel;BBS_baselines;BBS_correlations;BBS_beamModelEnable;BBS_solveParms;BBS_solveUVRange;BBS_strategyBaselines;BBS_strategyTimeRange
-# Demix: avg freq step; avg time step; demix freq step; demix time step; demix_always; demix_if_needed; ignore_target
-# Pulsar: pulsar; single-pulse; raw-to-8bit; dspsr-extra-opts; prepdata-extra-opts; 8bit-conversion-sigma; tsubint; norfi; nofold; nopdmp; skip-dspsr; rrats; 2bf2fits-extra-opts; decode-sigma; decode-nblocks; rfifind-extra-opts; prepfold-extra-opts; prepsubband-extra-opts; dynamic-spectrum-time-average; skip-dynamic-spectrum; skip-prepfold
-
-targetBeams=
-
-00:00:00.00;+30:00:00.0;L000+30;154,155,156,157,158,159,160,161,162,163,185,186,187,188,189,190,191,192,193,194,215,216,217,218,219,220,221,222,223,224,245,246,247,248,249,250,251,252,253,254,275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;80;;;T;5000
-Demix=8;2;64;10;[CygA,CasA]
-Demix=64;5;64;10;[CygA,CasA]
-
-00:22:33.02;+30:00:00.0;L005+30;154,155,156,157,158,159,160,161,162,163,185,186,187,188,189,190,191,192,193,194,215,216,217,218,219,220,221,222,223,224,245,246,247,248,249,250,251,252,253,254,275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;80;;;T;5000
-Demix=8;2;64;10;[CygA,CasA]
-Demix=64;5;64;10;[CygA,CasA]
-
-
-23:37:26.98;+30:00:00.0;L354+30;154,155,156,157,158,159,160,161,162,163,185,186,187,188,189,190,191,192,193,194,215,216,217,218,219,220,221,222,223,224,245,246,247,248,249,250,251,252,253,254,275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;80;;;T;5000
-Demix=8;2;64;10;[CygA,CasA]
-Demix=64;5;64;10;[CygA,CasA]
-
-
-23:48:43.49;+34:13:41.5;L357+34;275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;40;;;T;5000
-Demix=8;2;64;10;[CygA,CasA]
-Demix=64;5;64;10;[CygA,CasA]
-
-
-00:11:16.51;+34:13:41.5;L003+34;275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;40;;;T;5000
-Demix=8;2;64;10;[CygA,CasA]
-Demix=64;5;64;10;[CygA,CasA]
-
-
-23:48:43.49;+25:46:18.5;L357+25;275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;40;;;T;5000
-Demix=8;2;64;10;[CygA,CasA]
-Demix=64;5;64;10;[CygA,CasA]
-
-00:11:16.51;+25:46:18.5;L003+25;275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;40;;;T;5000
-Demix=8;2;64;10;[CygA,CasA]
-Demix=64;5;64;10;[CygA,CasA]
-
-00:00:00.00;+90:00:00.0;NCP;306,307,308,309,310,311,312,313;8;;;T;5000
-Demix=8;2;64;10;[CygA,CasA]
-Demix=64;5;64;10;[CygA,CasA]
-
-
-
-
-
-nrSubbandsPerImage=10 # (integer, mandatory) the number of subbands grouped together to form one image
-# the following imaging parameters are all optional, if not specified the default value from the default template is used for that parameter
-maxBaseline_m=10000 # (integer) the maximum base-line in meters used in the imaging
-fieldOfView_deg=5.0 # (float) the field of view in degrees
-
-BLOCK
-
+
+projectName=LOFAROBS
+mainFolderName=multi_pipelines_20151207
+mainFolderDescription=multi_pipelines_20151207
+
+
+BLOCK1
+
+split_targets = F                       # true:create a separate target observation for every target (beam) line or false:combine them in a multi-beam observation
+calibration = none                  # internal / external / none
+create_calibrator_observations = F      # create calibration observations before target observations? (ignored if calibration = none)
+create_target_cal_beam = T              # create a calibration beam in the target observation(s)? (ignored if calibration = none)
+processing=Preprocessing              # can be one of Calibration, Preprocessing, Imaging, Pulsar, LongBaseline, none
+imagingPipeline=none                    # can be one of MSSS, standard, none
+repeat=1                                # the number of repeats of this block
+
+packageName=20151207_MSSS_LBA_test             # the name that will be given to the package folder that contains this block's observation and pipelines
+packageDescription=20151207_MSSS_LBA_test  LST 0
+packageTag=MSSSLBA                             # optional tag that will be prepended before every observation and pipeline name/description (Max 8 characters).
+
+antennaMode=LBA Inner
+clock=200 MHz
+instrumentFilter=30-90 MHz
+numberOfBitsPerSample=8
+integrationTime=1.0
+channelsPerSubband=64
+stationList=nl                        # comma-separated list of station names and/or the following aliasses: core, superterp, remote, international, all, nl
+tbbPiggybackAllowed=T
+aartfaacPiggybackAllowed=T
+subbandsPerSubbandGroup=1
+subbandGroupsPerMS=1
+###### Which data types should be produced: ######
+correlatedData=T
+coherentStokesData=F
+incoherentStokesData=F
+flysEye=F
+coherentDedisperseChannels=False
+###### Coherent Stokes parameters ######
+subbandsPerFileCS=512
+numberCollapsedChannelsCS=16
+stokesDownsamplingStepsCS=128
+whichCS=IQUV
+###### Incoherent Stokes parameters ######
+#subbandsPerFileIS=16
+#numberCollapsedChannelsIS=4
+#stokesDownsamplingStepsIS=12
+#whichIS=IQUV
+flaggingStrategy=LBAdefault             # flagging strategy used for AOflagger
+calibratorDuration_s=300                # duration of calibration observations in seconds
+targetDuration_s=300              # duration of target observations in seconds
+
+###### Globals are used when a target/calibration line does not have its own specific parameter specified for that property ######
+#Global_TAB=
+#c;05:34:51.94;+22:00:52.2              # a coherent tab with absolute coordinates 05:34:51.94, +22:00:52.2
+#Global_TABrings=1;0.1                   # nr of tabrings and tabring size
+#Global_Pulsar=B0329+54;T;T;DSPSR EXTRA OPTIONS;PREPDATA;5.1;-2;F;F;F;F;F;2BF2FITS;4;101;RFI FIND EXTRA OPTIONS;PREPFOLD EXTRA;PREPSUBBAND Extra; 0.6;T;T
+#Global_Demix=16;4;64;10;CasA,CygA;
+#Global_Subbands=
+
+# startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
+# un-comment the startTimeUTC to have the observation start times generated
+startTimeUTC=2015-12-07 22:00:00
+# timeStep's in seconds
+timeStep1=60
+timeStep2=60
+
+calibratorBeam=
+19:59:28.3;+40:44:02;CygA;154,155,156,157,158,159,160,161,162,163,185,186,187,188,189,190,191,192,193,194,215,216,217,218,219,220,221,222,223,224,245,246,247,248,249,250,251,252,253,254,275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;80;;;T;7000
+#BBS=CygA;;;T
+Demix=8;2;64;10;[CasA]
+Demix=64;5;64;10;[CasA]
+#TAB:
+#c;05:34:51.94;+22:00:52.2
+
+# target beams and target pipelines
+# ra ;dec; targetname; subbandList; nrSubbands; nrTABrings; TABringSize; create_pipeline [;pipeline duration seconds]
+# optionally followed by BBS and/or demixing settings
+# BBS: SkyModel;BBS_baselines;BBS_correlations;BBS_beamModelEnable;BBS_solveParms;BBS_solveUVRange;BBS_strategyBaselines;BBS_strategyTimeRange
+# Demix: avg freq step; avg time step; demix freq step; demix time step; demix_always; demix_if_needed; ignore_target
+# Pulsar: pulsar; single-pulse; raw-to-8bit; dspsr-extra-opts; prepdata-extra-opts; 8bit-conversion-sigma; tsubint; norfi; nofold; nopdmp; skip-dspsr; rrats; 2bf2fits-extra-opts; decode-sigma; decode-nblocks; rfifind-extra-opts; prepfold-extra-opts; prepsubband-extra-opts; dynamic-spectrum-time-average; skip-dynamic-spectrum; skip-prepfold
+
+targetBeams=
+
+00:00:00.00;+30:00:00.0;L000+30;154,155,156,157,158,159,160,161,162,163,185,186,187,188,189,190,191,192,193,194,215,216,217,218,219,220,221,222,223,224,245,246,247,248,249,250,251,252,253,254,275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;80;;;T;5000
+Demix=8;2;64;10;[CygA,CasA]
+Demix=64;5;64;10;[CygA,CasA]
+
+00:22:33.02;+30:00:00.0;L005+30;154,155,156,157,158,159,160,161,162,163,185,186,187,188,189,190,191,192,193,194,215,216,217,218,219,220,221,222,223,224,245,246,247,248,249,250,251,252,253,254,275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;80;;;T;5000
+Demix=8;2;64;10;[CygA,CasA]
+Demix=64;5;64;10;[CygA,CasA]
+
+
+23:37:26.98;+30:00:00.0;L354+30;154,155,156,157,158,159,160,161,162,163,185,186,187,188,189,190,191,192,193,194,215,216,217,218,219,220,221,222,223,224,245,246,247,248,249,250,251,252,253,254,275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;80;;;T;5000
+Demix=8;2;64;10;[CygA,CasA]
+Demix=64;5;64;10;[CygA,CasA]
+
+
+23:48:43.49;+34:13:41.5;L357+34;275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;40;;;T;5000
+Demix=8;2;64;10;[CygA,CasA]
+Demix=64;5;64;10;[CygA,CasA]
+
+
+00:11:16.51;+34:13:41.5;L003+34;275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;40;;;T;5000
+Demix=8;2;64;10;[CygA,CasA]
+Demix=64;5;64;10;[CygA,CasA]
+
+
+23:48:43.49;+25:46:18.5;L357+25;275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;40;;;T;5000
+Demix=8;2;64;10;[CygA,CasA]
+Demix=64;5;64;10;[CygA,CasA]
+
+00:11:16.51;+25:46:18.5;L003+25;275,276,277,278,279,280,281,282,283,284,305,306,307,308,309,310,311,312,313,314,335,336,337,338,339,340,341,342,343,344,374,375,376,377,378,379,380,381,382,383;40;;;T;5000
+Demix=8;2;64;10;[CygA,CasA]
+Demix=64;5;64;10;[CygA,CasA]
+
+00:00:00.00;+90:00:00.0;NCP;306,307,308,309,310,311,312,313;8;;;T;5000
+Demix=8;2;64;10;[CygA,CasA]
+Demix=64;5;64;10;[CygA,CasA]
+
+
+
+
+
+nrSubbandsPerImage=10 # (integer, mandatory) the number of subbands grouped together to form one image
+# the following imaging parameters are all optional, if not specified the default value from the default template is used for that parameter
+maxBaseline_m=10000 # (integer) the maximum base-line in meters used in the imaging
+fieldOfView_deg=5.0 # (float) the field of view in degrees
+
+BLOCK
+
diff --git a/SAS/XML_generator/test/test_regression.in_data/txt/Ticket_6923.txt b/SAS/XML_generator/test/test_regression.in_data/txt/Ticket_6923.txt
index 9044bc3ae9f8725b5b0da4c10567bab4809e4760..f441a1a2aaf256eb39d712a436ae580f09bfc7cb 100644
--- a/SAS/XML_generator/test/test_regression.in_data/txt/Ticket_6923.txt
+++ b/SAS/XML_generator/test/test_regression.in_data/txt/Ticket_6923.txt
@@ -48,7 +48,7 @@ Global_Subbands=55..258;204
 
 # startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
 # un-comment the startTimeUTC to have the observation start times generated
-startTimeUTC=2014-11-16 05:00:00
+startTimeUTC=2014-11-16 05:00:00
 
 startTimeUTC=2014-11-16 05:00:00
 # timeStep's in seconds
@@ -120,7 +120,7 @@ Global_Subbands=256..459;204
 
 # startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
 # un-comment the startTimeUTC to have the observation start times generated
-startTimeUTC=2014-11-16 05:15:00
+startTimeUTC=2014-11-16 05:15:00
 
 startTimeUTC=2014-11-16 05:15:00
 # timeStep's in seconds
@@ -193,7 +193,7 @@ Global_Subbands=52..255;204
 
 # startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
 # un-comment the startTimeUTC to have the observation start times generated
-startTimeUTC=2014-11-16 05:30:00
+startTimeUTC=2014-11-16 05:30:00
 
 startTimeUTC=2014-11-16 05:30:00
 # timeStep's in seconds
@@ -266,7 +266,7 @@ Global_Subbands=54..257;204
 
 # startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
 # un-comment the startTimeUTC to have the observation start times generated
-startTimeUTC=2014-11-16 05:45:00
+startTimeUTC=2014-11-16 05:45:00
 
 startTimeUTC=2014-11-16 05:45:00
 # timeStep's in seconds
@@ -339,7 +339,7 @@ Global_Subbands=256..459;204
 
 # startTimeUTC, the start time of the first observation. format: yyyy-MM-dd hh:mm:ss
 # un-comment the startTimeUTC to have the observation start times generated
-startTimeUTC=2014-11-16 06:00:00
+startTimeUTC=2014-11-16 06:00:00
 
 startTimeUTC=2014-11-16 06:00:00
 # timeStep's in seconds
diff --git a/SubSystems/RAServices/Dockerfile b/SubSystems/RAServices/Dockerfile
deleted file mode 100644
index 3b09b0deab2f904ee783938f8c29b3adf8eb4216..0000000000000000000000000000000000000000
--- a/SubSystems/RAServices/Dockerfile
+++ /dev/null
@@ -1,18 +0,0 @@
-# Goal: this dockerfile provides a 'production'-like centos7 system which can be used for lofar RAServices software
-# should be equivalent to production scu
-FROM centos:centos7.6.1810 AS builder
-RUN yum install -y epel-release
-RUN yum install -y cmake gcc-c++ make log4cplus log4cplus-devel python3 python3-libs python3-devel boost readline-devel boost-devel binutils-devel boost-python36 boost-python36-devel gettext which openldap-devel npm nodejs git java-11-openjdk
-
-# see https://www.postgresql.org/download/linux/redhat/ on how to install postgresql-server > 9.2 on centos7
-RUN yum erase -y postgresql postgresql-server postgresql-devel
-RUN yum install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-7-x86_64/pgdg-redhat-repo-latest.noarch.rpm
-RUN yum install -y postgresql96 postgresql96-server postgresql96-devel
-ENV PATH /usr/pgsql-9.6/bin:$PATH
-
-RUN pip3 install cython kombu lxml requests pygcn xmljson mysql-connector-python python-dateutil django djangorestframework djangorestframework-xml ldap==1.0.2 flask fabric coverage python-qpid-proton PyGreSQL numpy h5py psycopg2 testing.postgresql Flask-Testing scipy Markdown django-filter python-ldap python-ldap-test ldap3 djangorestframework django-jsonforms django-json-widget django-jsoneditor drf-yasg flex swagger-spec-validator django-auth-ldap mozilla-django-oidc jsonschema
-RUN adduser lofarsys
-USER lofarsys
-
-
-