diff --git a/CEP/Pipeline/recipes/sip/bin/runPipeline.sh b/CEP/Pipeline/recipes/sip/bin/runPipeline.sh
index 89d5e2657115c096496c259a952a235605327692..8c40f4a2247de9408baf19e88ed548060444f282 100755
--- a/CEP/Pipeline/recipes/sip/bin/runPipeline.sh
+++ b/CEP/Pipeline/recipes/sip/bin/runPipeline.sh
@@ -25,6 +25,9 @@ PARSET=
 # Location of pipeline-framework configuration file
 PIPELINE_CONFIG=$LOFARROOT/share/pipeline/pipeline.cfg
 
+# Directory for parset and feedback files
+PARSET_DIR=$LOFARROOT/var/run
+
 # ======= Parse command-line parameters
 
 function usage() {
@@ -33,10 +36,11 @@ function usage() {
   echo "  -o OBSID           Task identifier"
   echo "  -c pipeline.cfg    Override pipeline configuration file (default: $PIPELINE_CONFIG)"
   echo "  -p pipeline.parset Provide parset (default: request through QPID)"
+  echo "  -P dir             Directory to which to save parset and feedback files (default: $PARSET_DIR)"
   exit 1
 }
 
-while getopts "ho:c:p:" opt; do
+while getopts "ho:c:p:P:" opt; do
   case $opt in
     h)  usage
         ;;
@@ -46,6 +50,8 @@ while getopts "ho:c:p:" opt; do
         ;;
     p)  PARSET="$OPTARG"
         ;;
+    P)  PARSET_DIR="$OPTARG"
+        ;;
     \?) error "Invalid option: -$OPTARG"
         ;;
     :)  error "Option requires an argument: -$OPTARG"
@@ -58,7 +64,7 @@ done
 
 if [ -z "$PARSET" ]; then
   # Fetch parset
-  PARSET=${LOFARROOT}/var/run/Observation${OBSID}.parset
+  PARSET=${PARSET_DIR}/Observation${OBSID}.parset
   getOTDBParset -o $OBSID >$PARSET
 fi
 
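For reference, a rough Python analogue (hypothetical, for illustration only, not part of the patch) of the extended command line: -P overrides the directory in which parset and feedback files are kept, and the default parset path is derived from it.

    import argparse
    import os

    parser = argparse.ArgumentParser()
    parser.add_argument('-o', dest='obsid', required=True,
                        help='Task identifier')
    parser.add_argument('-c', dest='config',
                        default=os.path.expandvars('$LOFARROOT/share/pipeline/pipeline.cfg'),
                        help='Override pipeline configuration file')
    parser.add_argument('-p', dest='parset', default=None,
                        help='Provide parset (default: request through QPID)')
    parser.add_argument('-P', dest='parset_dir',
                        default=os.path.expandvars('$LOFARROOT/var/run'),
                        help='Directory to which to save parset and feedback files')
    args = parser.parse_args()

    if args.parset is None:
        # Mirrors the shell logic: the fetched parset lands in the
        # (possibly overridden) parset directory.
        args.parset = os.path.join(args.parset_dir,
                                   'Observation%s.parset' % args.obsid)
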
diff --git a/CEP/Pipeline/recipes/sip/nodes/long_baseline.py b/CEP/Pipeline/recipes/sip/nodes/long_baseline.py
index 244107a677c9943ba580f2b6b228b3b806203339..2fe0aa3e405dcb572376a9ef3a6d7094fa883491 100644
--- a/CEP/Pipeline/recipes/sip/nodes/long_baseline.py
+++ b/CEP/Pipeline/recipes/sip/nodes/long_baseline.py
@@ -58,6 +58,8 @@ class long_baseline(LOFARnodeTCP):
             # I. Create the directories used in this recipe
             create_directory(processed_ms_dir)
             create_directory(working_dir)
+            create_directory(os.path.dirname(output_measurement_set))
+            create_directory(os.path.dirname(final_output_path))
 
             # time slice dir_to_remove: assure empty directory: Stale data
             # is problematic for dppp
@@ -499,12 +501,6 @@ class long_baseline(LOFARnodeTCP):
 
         table = pt.table(output_measurement_set)
 
-        try:
-          os.makedirs(os.path.dirname(final_output_path))
-        except:
-          pass # do nothing, the path already exists, we can output to this
-               # location
-
         table.copy(final_output_path, deep=True)
 
 
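Creating the output directories up front, and dropping the bare try/except around os.makedirs, avoids silently swallowing real errors. A minimal sketch of a race-safe helper, assuming the framework's create_directory behaves along these lines:

    import errno
    import os

    def create_directory(path):
        """Create path (and parents) if needed; a no-op if it exists.

        Unlike 'try: os.makedirs(...) except: pass', this only ignores
        the 'already exists' case and still raises real errors such as
        EACCES or ENOSPC.
        """
        try:
            os.makedirs(path)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
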
diff --git a/Docker/lofar-base/Dockerfile.tmpl b/Docker/lofar-base/Dockerfile.tmpl
index aef0133c364d962959a2919cdfeccf41504d7d07..77228e242bc737535557a3fbcdc5d4c78a52a1f6 100644
--- a/Docker/lofar-base/Dockerfile.tmpl
+++ b/Docker/lofar-base/Dockerfile.tmpl
@@ -1,7 +1,7 @@
 #
 # base
 #
-FROM ubuntu:14.04
+FROM ubuntu:16.04
 
 #
 # common-environment
@@ -18,10 +18,10 @@ ENV DEBIAN_FRONTEND=noninteractive \
 #
 # versions
 #
-ENV CASACORE_VERSION=2.1.0 \
-    CASAREST_VERSION=1.4.1 \
-    PYTHON_CASACORE_VERSION=2.0.1 \
-    BOOST_VERSION=1.54
+ENV CASACORE_VERSION=latest \
+    CASAREST_VERSION=latest \
+    PYTHON_CASACORE_VERSION=2.1.2 \
+    BOOST_VERSION=1.58
 
 #
 # set-uid
@@ -39,7 +39,11 @@ ENV J=6
 #RUN sed -i 's/archive.ubuntu.com/osmirror.rug.nl/' /etc/apt/sources.list 
 RUN apt-get update && \
     apt-get install -y python2.7 libpython2.7 && \
-    apt-get install -y libblas3 liblapacke python-numpy libcfitsio3 libwcs4 libfftw3-bin libhdf5-7 libboost-python${BOOST_VERSION}.0 && \
+    apt-get install -y libopenblas-base libcfitsio-bin libwcs5 libfftw3-bin libhdf5-10 libboost-python${BOOST_VERSION}.0 && \
+    apt-get install -y python-pip && \
+    pip install numpy && \
+    apt-get purge -y python-pip && \
+    apt-get autoremove -y && \
     apt-get install -y nano
 
 #
@@ -59,19 +63,19 @@ RUN mkdir -p ${INSTALLDIR}
 #   Casacore
 # *******************
 #
-RUN apt-get update && apt-get install -y wget git cmake g++ gfortran flex bison libblas-dev liblapacke-dev libfftw3-dev libhdf5-dev libboost-python-dev libcfitsio3-dev wcslib-dev && \
+RUN apt-get update && apt-get install -y wget git cmake g++ gfortran flex bison libopenblas-dev libfftw3-dev libhdf5-dev libboost-python-dev libcfitsio-dev wcslib-dev && \
     mkdir -p ${INSTALLDIR}/casacore/build && \
     mkdir -p ${INSTALLDIR}/casacore/data && \
     cd ${INSTALLDIR}/casacore && git clone https://github.com/casacore/casacore.git src && \
     if [ "${CASACORE_VERSION}" != "latest" ]; then cd ${INSTALLDIR}/casacore/src && git checkout tags/v${CASACORE_VERSION}; fi && \
     cd ${INSTALLDIR}/casacore/data && wget --retry-connrefused ftp://ftp.astron.nl/outgoing/Measures/WSRT_Measures.ztar && \
     cd ${INSTALLDIR}/casacore/data && tar xf WSRT_Measures.ztar  && rm -f WSRT_Measures.ztar && \
-    cd ${INSTALLDIR}/casacore/build && cmake -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/casacore/ -DDATA_DIR=${INSTALLDIR}/casacore/data -DBUILD_PYTHON=True -DUSE_OPENMP=True -DUSE_FFTW3=TRUE -DUSE_HDF5=ON ../src/ && \
+    cd ${INSTALLDIR}/casacore/build && cmake -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/casacore/ -DDATA_DIR=${INSTALLDIR}/casacore/data -DBUILD_PYTHON=True -DENABLE_TABLELOCKING=ON -DUSE_OPENMP=ON -DUSE_FFTW3=TRUE -DUSE_HDF5=ON -DCXX11=YES -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_FLAGS="-fsigned-char -O2 -DNDEBUG -march=native" ../src/ && \
     cd ${INSTALLDIR}/casacore/build && make -j ${J} && \
     cd ${INSTALLDIR}/casacore/build && make install && \
     bash -c "strip ${INSTALLDIR}/casacore/{lib,bin}/* || true" && \
     bash -c "rm -rf ${INSTALLDIR}/casacore/{build,src}" && \
-    apt-get purge -y wget git cmake g++ gfortran flex bison libblas-dev liblapacke-dev libfftw3-dev libhdf5-dev libboost-python-dev libcfitsio3-dev wcslib-dev && \
+    apt-get purge -y wget git cmake g++ gfortran flex bison libopenblas-dev libfftw3-dev libhdf5-dev libboost-python-dev libcfitsio-dev wcslib-dev && \
     apt-get autoremove -y
 
 #
@@ -79,16 +83,16 @@ RUN apt-get update && apt-get install -y wget git cmake g++ gfortran flex bison
 #   Casarest
 # *******************
 #
-RUN apt-get update && apt-get install -y git cmake g++ gfortran libboost-system-dev libboost-thread-dev libhdf5-dev libcfitsio3-dev wcslib-dev && \
+RUN apt-get update && apt-get install -y git cmake g++ gfortran libboost-system-dev libboost-thread-dev libhdf5-dev libcfitsio3-dev wcslib-dev libopenblas-dev && \
     mkdir -p ${INSTALLDIR}/casarest/build && \
     cd ${INSTALLDIR}/casarest && git clone https://github.com/casacore/casarest.git src && \
     if [ "${CASAREST_VERSION}" != "latest" ]; then cd ${INSTALLDIR}/casarest/src && git checkout tags/v${CASAREST_VERSION}; fi && \
-    cd ${INSTALLDIR}/casarest/build && cmake -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/casarest -DCASACORE_ROOT_DIR=${INSTALLDIR}/casacore ../src/ && \
+    cd ${INSTALLDIR}/casarest/build && cmake -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/casarest -DCASACORE_ROOT_DIR=${INSTALLDIR}/casacore -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_FLAGS="-std=c++11 -O2 -march=native -DNDEBUG" ../src/ && \
     cd ${INSTALLDIR}/casarest/build && make -j ${J} && \
     cd ${INSTALLDIR}/casarest/build && make install && \
     bash -c "strip ${INSTALLDIR}/casarest/{lib,bin}/* || true" && \
     bash -c "rm -rf ${INSTALLDIR}/casarest/{build,src}" && \
-    apt-get purge -y git cmake g++ gfortran libboost-system-dev libboost-thread-dev libhdf5-dev libcfitsio3-dev wcslib-dev && \
+    apt-get purge -y git cmake g++ gfortran libboost-system-dev libboost-thread-dev libhdf5-dev libcfitsio3-dev wcslib-dev libopenblas-dev && \
     apt-get autoremove -y
 
 #
diff --git a/Docker/lofar-outputproc/Dockerfile.tmpl b/Docker/lofar-outputproc/Dockerfile.tmpl
index eb5ef0c866b909da19962ba0b2e6c354a3a5ab91..c25fffe979accf5de9a8fd12bae4b73fa3702872 100644
--- a/Docker/lofar-outputproc/Dockerfile.tmpl
+++ b/Docker/lofar-outputproc/Dockerfile.tmpl
@@ -10,20 +10,20 @@ FROM lofar-base:${LOFAR_TAG}
 #
 
 # Run-time dependencies
-RUN apt-get update && apt-get install -y liblog4cplus-1.0-4 libxml2 libboost-thread${BOOST_VERSION}.0 libboost-filesystem${BOOST_VERSION}.0 libboost-date-time${BOOST_VERSION}.0 libpng12-0 libsigc++-2.0-dev libxml++2.6-2 libboost-regex${BOOST_VERSION}.0
+RUN apt-get update && apt-get install -y liblog4cplus-1.1-9 libxml2 libboost-thread${BOOST_VERSION}.0 libboost-filesystem${BOOST_VERSION}.0 libboost-date-time${BOOST_VERSION}.0 libpng12-0 libsigc++-2.0-dev libxml++2.6-2v5 libboost-regex${BOOST_VERSION}.0
 
 # Tell image build information
-ENV LOFAR_BRANCH=${LOFAR_BRANCH_NAME} \
+ENV LOFAR_BRANCH=${LOFAR_BRANCH} \
     LOFAR_REVISION=${LOFAR_REVISION} \
-    LOFAR_BUILDVARIANT=gnu_optarch
+    LOFAR_BUILDVARIANT=gnucxx11_optarch
 
 # Install
-RUN apt-get update && apt-get install -y subversion cmake g++ gfortran bison flex autogen liblog4cplus-dev libhdf5-dev libblitz0-dev libboost-dev libboost-python${BOOST_VERSION}-dev libxml2-dev pkg-config libpng12-dev libfftw3-dev libunittest++-dev libxml++2.6-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev libboost-regex${BOOST_VERSION} binutils-dev && \
-    mkdir -p ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} libcfitsio3-dev wcslib-dev && \
+RUN apt-get update && apt-get install -y subversion cmake g++ gfortran bison flex autogen liblog4cplus-dev libhdf5-dev libblitz0-dev libboost-dev libboost-python${BOOST_VERSION}-dev libxml2-dev pkg-config libpng12-dev libfftw3-dev libunittest++-dev libxml++2.6-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev libboost-regex${BOOST_VERSION} binutils-dev libopenblas-dev libcfitsio3-dev wcslib-dev && \
+    mkdir -p ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && \
     cd ${INSTALLDIR}/lofar && \
     svn --non-interactive -q co -r ${LOFAR_REVISION} -N ${LOFAR_BRANCH_URL} src; \
     svn --non-interactive -q up src/CMake && \
-    cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && cmake -DBUILD_PACKAGES=Online_OutputProc -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/lofar/ -DCASACORE_ROOT_DIR=${INSTALLDIR}/casacore/ -DLOG4CPLUS_ROOT_DIR=${INSTALLDIR}/log4cplus/ -DQPID_ROOT_DIR=/opt/qpid/ -DDAL_ROOT_DIR=${INSTALLDIR}/DAL -DUSE_OPENMP=True ${INSTALLDIR}/lofar/src/ && \
+    cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && cmake -DBUILD_PACKAGES=Online_OutputProc -DBUILD_TESTING=OFF -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/lofar/ -DCASACORE_ROOT_DIR=${INSTALLDIR}/casacore/ -DQPID_ROOT_DIR=/opt/qpid/ -DDAL_ROOT_DIR=${INSTALLDIR}/DAL -DUSE_OPENMP=True ${INSTALLDIR}/lofar/src/ && \
     cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && sed -i '29,31d' include/ApplCommon/PosixTime.h && \
     cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && make -j ${J} && \
     cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && make install && \
@@ -32,6 +32,6 @@ RUN apt-get update && apt-get install -y subversion cmake g++ gfortran bison fle
     bash -c "strip ${INSTALLDIR}/lofar/{bin,sbin,lib64}/* || true" && \
     bash -c "rm -rf ${INSTALLDIR}/lofar/{build,src}" && \
     setcap cap_sys_nice,cap_ipc_lock=ep ${INSTALLDIR}/lofar/bin/outputProc && \
-    apt-get purge -y subversion cmake g++ gfortran bison flex autogen liblog4cplus-dev libhdf5-dev libblitz0-dev libboost-dev libboost-python${BOOST_VERSION}-dev libxml2-dev pkg-config libpng12-dev libfftw3-dev libunittest++-dev libxml++2.6-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev binutils-dev libcfitsio3-dev wcslib-dev && \
+    apt-get purge -y subversion cmake g++ gfortran bison flex autogen liblog4cplus-dev libhdf5-dev libblitz0-dev libboost-dev libboost-python${BOOST_VERSION}-dev libxml2-dev pkg-config libpng12-dev libfftw3-dev libunittest++-dev libxml++2.6-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev binutils-dev libcfitsio3-dev wcslib-dev libopenblas-dev && \
     apt-get autoremove -y
 
diff --git a/Docker/lofar-pipeline/Dockerfile.tmpl b/Docker/lofar-pipeline/Dockerfile.tmpl
index eae03529529524638acfd3260f3260bc059aa6ee..94c6299eeffd5486c8b585e93793f7b72efc99fa 100644
--- a/Docker/lofar-pipeline/Dockerfile.tmpl
+++ b/Docker/lofar-pipeline/Dockerfile.tmpl
@@ -3,10 +3,10 @@
 #
 FROM lofar-base:${LOFAR_TAG}
 
-ENV AOFLAGGER_VERSION=2.7.1
+ENV AOFLAGGER_VERSION=2.8.0
 
 # Run-time dependencies
-RUN apt-get update && apt-get install -y python-xmlrunner python-scipy liblog4cplus-1.0-4 libxml2 libboost-thread${BOOST_VERSION}.0 libboost-filesystem${BOOST_VERSION}.0 libboost-date-time${BOOST_VERSION}.0 libboost-signals${BOOST_VERSION}.0 libpng12-0 libsigc++-2.0-dev libxml++2.6-2 libgsl0ldbl openssh-client libboost-regex${BOOST_VERSION}.0 gettext-base rsync python-matplotlib && \
+RUN apt-get update && apt-get install -y python-xmlrunner python-scipy liblog4cplus-1.1-9 libxml2 libboost-thread${BOOST_VERSION}.0 libboost-filesystem${BOOST_VERSION}.0 libboost-date-time${BOOST_VERSION}.0 libboost-signals${BOOST_VERSION}.0 libpng12-0 libsigc++-2.0-dev libxml++2.6-2v5 libgsl2 openssh-client libboost-regex${BOOST_VERSION}.0 gettext-base rsync python-matplotlib && \
     apt-get -y install python-pip python-dev && \
     pip install pyfits pywcs python-monetdb && \
     apt-get -y purge python-pip python-dev && \
@@ -18,17 +18,17 @@ RUN apt-get update && apt-get install -y python-xmlrunner python-scipy liblog4cp
 # *******************
 #
 
-RUN apt-get update && apt-get install -y wget cmake g++ libxml++2.6-dev libpng12-dev libfftw3-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-signals${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev libcfitsio3-dev && \
+RUN apt-get update && apt-get install -y wget cmake g++ libxml++2.6-dev libpng12-dev libfftw3-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-signals${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev libcfitsio3-dev libopenblas-dev && \
     mkdir -p ${INSTALLDIR}/aoflagger/build && \
     bash -c "cd ${INSTALLDIR}/aoflagger && wget --retry-connrefused http://downloads.sourceforge.net/project/aoflagger/aoflagger-${AOFLAGGER_VERSION%%.?}.0/aoflagger-${AOFLAGGER_VERSION}.tar.bz2" && \
     cd ${INSTALLDIR}/aoflagger && tar xf aoflagger-${AOFLAGGER_VERSION}.tar.bz2 && \
-    cd ${INSTALLDIR}/aoflagger/build && cmake -DCASACORE_ROOT_DIR=${INSTALLDIR}/casacore/ -DBUILD_SHARED_LIBS=ON -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/aoflagger ../aoflagger-${AOFLAGGER_VERSION} && \
+    cd ${INSTALLDIR}/aoflagger/build && cmake -DCASACORE_ROOT_DIR=${INSTALLDIR}/casacore/ -DBUILD_SHARED_LIBS=ON -DCMAKE_CXX_FLAGS="--std=c++11 -O2 -DNDEBUG" -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/aoflagger ../aoflagger-${AOFLAGGER_VERSION} && \
     cd ${INSTALLDIR}/aoflagger/build && make -j ${J} && \
     cd ${INSTALLDIR}/aoflagger/build && make install && \
     bash -c "strip ${INSTALLDIR}/aoflagger/{lib,bin}/* || true" && \
     bash -c "rm -rf ${INSTALLDIR}/aoflagger/{build,aoflagger-${AOFLAGGER_VERSION}}" && \
     bash -c "rm -rf ${INSTALLDIR}/aoflagger/aoflagger-${AOFLAGGER_VERSION}.tar.bz2" && \
-    apt-get -y purge wget cmake g++ libxml++2.6-dev libpng12-dev libfftw3-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-signals${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev libcfitsio3-dev && \
+    apt-get -y purge wget cmake g++ libxml++2.6-dev libpng12-dev libfftw3-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-signals${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev libcfitsio3-dev libopenblas-dev && \
     apt-get -y autoremove
 
 #
@@ -40,15 +40,16 @@ RUN apt-get update && apt-get install -y wget cmake g++ libxml++2.6-dev libpng12
 # Tell image build information
 ENV LOFAR_BRANCH=${LOFAR_BRANCH_NAME} \
     LOFAR_REVISION=${LOFAR_REVISION} \
-    LOFAR_BUILDVARIANT=gnu_optarch
+    LOFAR_BUILDVARIANT=gnucxx11_optarch
+
 
 # Install
-RUN apt-get update && apt-get install -y subversion cmake g++ gfortran bison flex liblog4cplus-dev libhdf5-dev libblitz0-dev libboost-dev libboost-python-dev python-dev libxml2-dev pkg-config libpng12-dev libfftw3-dev libunittest++-dev libxml++2.6-dev libgsl0-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev libboost-regex${BOOST_VERSION} binutils-dev libcfitsio3-dev wcslib-dev && \
+RUN apt-get update && apt-get install -y subversion cmake g++ gfortran bison flex liblog4cplus-dev libhdf5-dev libblitz0-dev libboost-dev libboost-python-dev python-dev libxml2-dev pkg-config libpng12-dev libfftw3-dev libunittest++-dev libxml++2.6-dev libgsl-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev libboost-regex${BOOST_VERSION} binutils-dev libcfitsio3-dev wcslib-dev libopenblas-dev && \
     mkdir -p ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && \
     cd ${INSTALLDIR}/lofar && \
     svn --non-interactive -q co -r ${LOFAR_REVISION} -N ${LOFAR_BRANCH_URL} src; \
     svn --non-interactive -q up src/CMake && \
-    cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && cmake -DBUILD_PACKAGES=Offline -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/lofar/ -DCASAREST_ROOT_DIR=${INSTALLDIR}/casarest/ -DCASACORE_ROOT_DIR=${INSTALLDIR}/casacore/ -DAOFLAGGER_ROOT_DIR=${INSTALLDIR}/aoflagger/ -DLOG4CPLUS_ROOT_DIR=${INSTALLDIR}/log4cplus/ -DQPID_ROOT_DIR=/opt/qpid/ -DUSE_OPENMP=True ${INSTALLDIR}/lofar/src/ && \
+    cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && cmake -DBUILD_PACKAGES=Offline -DBUILD_TESTING=OFF -DCMAKE_INSTALL_PREFIX=${INSTALLDIR}/lofar/ -DCASAREST_ROOT_DIR=${INSTALLDIR}/casarest/ -DCASACORE_ROOT_DIR=${INSTALLDIR}/casacore/ -DAOFLAGGER_ROOT_DIR=${INSTALLDIR}/aoflagger/ -DQPID_ROOT_DIR=/opt/qpid/ -DUSE_OPENMP=True ${INSTALLDIR}/lofar/src/ && \
     cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && sed -i '29,31d' include/ApplCommon/PosixTime.h && \
     cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && make -j ${J} && \
     cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && make install && \
@@ -56,6 +57,6 @@ RUN apt-get update && apt-get install -y subversion cmake g++ gfortran bison fle
     bash -c "chmod a+rwx  ${INSTALLDIR}/lofar/var/{log,run}" && \
     bash -c "strip ${INSTALLDIR}/lofar/{bin,sbin,lib64}/* || true" && \
     bash -c "rm -rf ${INSTALLDIR}/lofar/{build,src}" && \
-    apt-get purge -y subversion cmake g++ gfortran bison flex liblog4cplus-dev libhdf5-dev libblitz0-dev libboost-dev libboost-python-dev python-dev libxml2-dev pkg-config libpng12-dev libfftw3-dev libunittest++-dev libxml++2.6-dev libgsl0-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev binutils-dev wcslib-dev && \
+    apt-get purge -y subversion cmake g++ gfortran bison flex liblog4cplus-dev libhdf5-dev libblitz0-dev libboost-dev libboost-python-dev python-dev libxml2-dev pkg-config libpng12-dev libfftw3-dev libunittest++-dev libxml++2.6-dev libgsl-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev binutils-dev libcfitsio3-dev wcslib-dev libopenblas-dev && \
     apt-get autoremove -y
 
diff --git a/MAC/Services/src/PipelineControl.py b/MAC/Services/src/PipelineControl.py
index 40ab25b34425d4aee1c5ae5d69810c40930b1c27..f3c535d09b41a9be6e6c1a415024672c572f9e4d 100755
--- a/MAC/Services/src/PipelineControl.py
+++ b/MAC/Services/src/PipelineControl.py
@@ -93,12 +93,13 @@ def runCommand(cmdline, input=None):
     )
 
   # Feed input and wait for termination
-  logger.info("runCommand input: %s", input)
+  logger.debug("runCommand input: %s", input)
   stdout, _ = proc.communicate(input)
-  logger.info("runCommand output: %s", stdout)
+  logger.debug("runCommand output: %s", stdout)
 
   # Check exit status, bail on error
   if proc.returncode != 0:
+    logger.warn("runCommand(%s) had exit status %s with output: %s", cmdline, proc.returncode, stdout)
     raise subprocess.CalledProcessError(proc.returncode, cmdline)
 
   # Return output
@@ -154,7 +155,7 @@ class Parset(dict):
     return int(self[PARSET_PREFIX + "Observation.otdbID"])
 
 class Slurm(object):
-  def __init__(self, headnode="head01.cep4"):
+  def __init__(self, headnode="head01.cep4.control.lofar"):
     self.headnode = headnode
 
     # TODO: Derive SLURM partition name
@@ -190,6 +191,10 @@ class Slurm(object):
     return stdout != ""
 
 class PipelineDependencies(object):
+  class TaskNotFoundException(Exception):
+    """ Raised when a task cannot be found in the RADB. """
+    pass
+
   def __init__(self, ra_service_busname=DEFAULT_RAS_SERVICE_BUSNAME):
     self.rarpc = RARPC(busname=ra_service_busname)
 
@@ -220,10 +225,16 @@ class PipelineDependencies(object):
     """
     radb_task = self.rarpc.getTask(otdb_id=otdb_id)
 
-    predecessor_ids = radb_task['predecessor_ids']
-    predecessor_tasks = self.rarpc.getTasks(task_ids=predecessor_ids)
+    if radb_task is None:
+      raise PipelineDependencies.TaskNotFoundException("otdb_id %s not found in RADB" % (otdb_id,))
+
+    predecessor_radb_ids = radb_task['predecessor_ids']
+    predecessor_tasks = self.rarpc.getTasks(task_ids=predecessor_radb_ids)
+    predecessor_states = {t["otdb_id"]: t["status"] for t in predecessor_tasks}
+
+    logger.debug("getPredecessorStates(%s) = %s", otdb_id, predecessor_states)
 
-    return {t["otdb_id"]: t["status"] for t in predecessor_tasks}
+    return predecessor_states
 
   def getSuccessorIds(self, otdb_id):
     """
@@ -231,19 +242,35 @@ class PipelineDependencies(object):
     """
     radb_task = self.rarpc.getTask(otdb_id=otdb_id)
 
-    successor_ids = radb_task['successor_ids']
-    successor_tasks = self.rarpc.getTasks(task_ids=successor_ids) if successor_ids else []
+    if radb_task is None:
+      raise PipelineDependencies.TaskNotFoundException("otdb_id %s not found in RADB" % (otdb_id,))
 
-    return [t["otdb_id"] for t in successor_tasks]
+    successor_radb_ids = radb_task['successor_ids']
+    successor_tasks = self.rarpc.getTasks(task_ids=successor_radb_ids) if successor_radb_ids else []
+    successor_otdb_ids = [t["otdb_id"] for t in successor_tasks]
+
+    logger.debug("getSuccessorIds(%s) = %s", otdb_id, successor_otdb_ids)
+
+    return successor_otdb_ids
 
   def canStart(self, otdbId):
     """
       Return whether `otdbId' can start, according to the status of the predecessors
       and its own status.
     """
+
+    try:
+      myState = self.getState(otdbId)
+      predecessorStates = self.getPredecessorStates(otdbId)
+    except PipelineDependencies.TaskNotFoundException, e:
+      logger.error("canStart(%s): Error obtaining task states, not starting pipeline: %s", otdbId, e)
+      return False
+
+    logger.debug("canStart(%s)? state = %s, predecessors = %s", otdbId, myState, predecessorStates)
+
     return (
-      self.getState(otdbId) == "scheduled" and
-      all([x == "finished" for x in self.getPredecessorStates(otdbId).values()])
+      myState == "scheduled" and
+      all([x == "finished" for x in predecessorStates.values()])
     )
 
 class PipelineControl(OTDBBusListener):
@@ -314,13 +341,16 @@ class PipelineControl(OTDBBusListener):
 
                      # Maximum run time for job (31 days)
                      "--time=31-0",
+
+                     # Lower priority to drop below inspection plots
+                     "--nice=1000",
                    
                      "--partition=%s" % parset.processingPartition(),
                      "--nodes=%s" % parset.processingNumberOfTasks(),
                      "--cpus-per-task=%s" % parset.processingNumberOfCoresPerTask(),
                     
                      # Define better places to write the output
-                     os.path.expandvars("--output=/data/log/runPipeline-%s.log" % (otdbId,)),
+                     os.path.expandvars("--output=/data/log/pipeline-%s-%%j.log" % (otdbId,)),
                      ]
 
     def setStatus_cmdline(status):
@@ -344,11 +374,11 @@ class PipelineControl(OTDBBusListener):
       "{setStatus_active}\n"
       # pull docker image from repository on all nodes
       "srun --nodelist=$SLURM_NODELIST --cpus-per-task=1 --job-name=docker-pull"
-        " --no-kill"
+        " --kill-on-bad-exit=0 --wait=0"
         " docker pull {repository}/{image}\n"
       # put a local tag on the pulled image
       "srun --nodelist=$SLURM_NODELIST --cpus-per-task=1 --job-name=docker-tag"
-        " --no-kill"
+        " --kill-on-bad-exit=0 --wait=0"
         " docker tag -f {repository}/{image} {image}\n"
       # call runPipeline.sh in the image on this node
       "docker run --rm"
@@ -361,7 +391,7 @@ class PipelineControl(OTDBBusListener):
         " -e SLURM_JOB_ID=$SLURM_JOB_ID"
         " -v /data:/data"
         " {image}"
-        " runPipeline.sh -o {obsid} -c /opt/lofar/share/pipeline/pipeline.cfg.{cluster}\n"
+        " runPipeline.sh -o {obsid} -c /opt/lofar/share/pipeline/pipeline.cfg.{cluster} -P /data/parsets || exit $?\n"
 
         # notify that we're tearing down
         "{setStatus_completing}\n"
@@ -426,7 +456,13 @@ class PipelineControl(OTDBBusListener):
     cancel(jobName)
 
   def _startSuccessors(self, otdbId):
-    for s in self.dependencies.getSuccessorIds(otdbId):
+    try:
+      successor_ids = self.dependencies.getSuccessorIds(otdbId)
+    except PipelineDependencies.TaskNotFoundException, e:
+      logger.error("_startSuccessors(%s): Error obtaining task successors, not starting them: %s", otdbId, e)
+      return
+
+    for s in successor_ids:
       parset = self._getParset(s)
       if not self._shouldHandle(parset):
         continue
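
Note that TaskNotFoundException is nested inside PipelineDependencies, so it must be referenced through the class (or through self) in method bodies and from other classes; a bare name raises NameError at runtime. A self-contained sketch of the pattern (toy lookup, not the RADB code):

    class PipelineDependencies(object):
        class TaskNotFoundException(Exception):
            """ Raised when a task cannot be found in the RADB. """

        def getTask(self, otdb_id, known_tasks=None):
            # A class body is not a scope for its methods, so the nested
            # exception must be qualified via the class (or via self).
            known_tasks = known_tasks or {}
            if otdb_id not in known_tasks:
                raise PipelineDependencies.TaskNotFoundException(
                    "otdb_id %s not found in RADB" % (otdb_id,))
            return known_tasks[otdb_id]

    try:
        PipelineDependencies().getTask(123456)
    except PipelineDependencies.TaskNotFoundException as e:
        print("task lookup failed: %s" % (e,))
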
diff --git a/MAC/Services/src/pipelinecontrol.ini b/MAC/Services/src/pipelinecontrol.ini
index c5a07a437f95370262db0962c693c5986d2f34e5..e6bbb791fcc47ac9d88fc9732757c6880fa2f636 100644
--- a/MAC/Services/src/pipelinecontrol.ini
+++ b/MAC/Services/src/pipelinecontrol.ini
@@ -4,4 +4,5 @@ user=lofarsys
 stopsignal=INT ; KeyboardInterrupt
 stopasgroup=true
 stdout_logfile=%(program_name)s.log
-stderr_logfile=%(program_name)s.stderr
+redirect_stderr=true
+stderr_logfile=NONE
diff --git a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/propagator.py b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/propagator.py
index 8d6f8e6b6facf13cd13f0e12c77040bb53e5378c..081a83859ed0fff53e3669c5b8262d0240b34fb8 100755
--- a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/propagator.py
+++ b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/propagator.py
@@ -137,7 +137,7 @@ class RAtoOTDBPropagator():
                 project_name = 'unknown'
 
             otdb_info = self.translator.CreateParset(otdb_id, ra_info, project_name)
-            logger.debug("Parset info for OTDB: %s" %otdb_info)
+            logger.info("Parset info for OTDB: %s" %otdb_info)
             self.setOTDBinfo(otdb_id, otdb_info, 'scheduled')
         except Exception as e:
             logger.error(e)
diff --git a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/translator.py b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/translator.py
index be12371ee9b2492eba63907019e4b1d91c13cad2..e1d1e2cd4e532f1e82934a08c66b045c529502bf 100755
--- a/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/translator.py
+++ b/SAS/ResourceAssignment/RAtoOTDBTaskSpecificationPropagator/lib/translator.py
@@ -184,17 +184,17 @@ class RAtoOTDBTranslator():
     def CreateStorageKeys(self, otdb_id, storage_properties, project_name, io_type):
         result = {}
         if 'nr_of_uv_files' in storage_properties:
-            result.update(self.CreateCorrelated(otdb_id, storage_properties, project_name, io_type))
+            result.update(self.CreateCorrelated(storage_properties['uv_otdb_id'], storage_properties, project_name, io_type))
         if 'nr_of_cs_files' in storage_properties:
-            result.update(self.CreateCoherentStokes(otdb_id, storage_properties, project_name, io_type))
+            result.update(self.CreateCoherentStokes(storage_properties['cs_otdb_id'], storage_properties, project_name, io_type))
         if 'nr_of_is_files' in storage_properties:
-            result.update(self.CreateIncoherentStokes(otdb_id, storage_properties, project_name, io_type))
+            result.update(self.CreateIncoherentStokes(storage_properties['is_otdb_id'], storage_properties, project_name, io_type))
         if 'nr_of_im_files' in storage_properties:
-            result.update(self.CreateInstrumentModel(otdb_id, storage_properties, project_name, io_type))
+            result.update(self.CreateInstrumentModel(storage_properties['im_otdb_id'], storage_properties, project_name, io_type))
         if 'nr_of_img_files' in storage_properties:
-            result.update(self.CreateSkyImage(otdb_id, storage_properties, project_name, io_type))
+            result.update(self.CreateSkyImage(storage_properties['img_otdb_id'], storage_properties, project_name, io_type))
         if 'nr_of_pulp_files' in storage_properties:
-            result.update(self.CreatePulsarPipeline(otdb_id, storage_properties, project_name, io_type))
+            result.update(self.CreatePulsarPipeline(storage_properties['pulp_otdb_id'], storage_properties, project_name, io_type))
         return result
 
     def ProcessStorageInfo(self, otdb_id, storage_info, project_name):
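
The change routes each data product type to the otdb_id of the task that actually produces it (carried in the storage properties as uv_otdb_id, cs_otdb_id, etc.) instead of the pipeline's own otdb_id. A table-driven sketch of the same routing, under the assumption that the Create* methods keep the signature shown above:

    def create_storage_keys(translator, storage_properties, project_name, io_type):
        # (count key in storage_properties, otdb_id key, creator method)
        creators = [
            ('nr_of_uv_files',   'uv_otdb_id',   translator.CreateCorrelated),
            ('nr_of_cs_files',   'cs_otdb_id',   translator.CreateCoherentStokes),
            ('nr_of_is_files',   'is_otdb_id',   translator.CreateIncoherentStokes),
            ('nr_of_im_files',   'im_otdb_id',   translator.CreateInstrumentModel),
            ('nr_of_img_files',  'img_otdb_id',  translator.CreateSkyImage),
            ('nr_of_pulp_files', 'pulp_otdb_id', translator.CreatePulsarPipeline),
        ]
        result = {}
        for count_key, id_key, creator in creators:
            if count_key in storage_properties:
                result.update(creator(storage_properties[id_key],
                                      storage_properties, project_name, io_type))
        return result
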
diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/assignment.py b/SAS/ResourceAssignment/ResourceAssigner/lib/assignment.py
index 0516cbf8040b43a177e05e0ade6ab1b53dd083d4..f0e31b9c13b0b0431f8c86a8852a0e062ac30eac 100755
--- a/SAS/ResourceAssignment/ResourceAssigner/lib/assignment.py
+++ b/SAS/ResourceAssignment/ResourceAssigner/lib/assignment.py
@@ -127,8 +127,7 @@ class ResourceAssigner():
             startTime = datetime.strptime(mainParset.getString('Observation.startTime'), '%Y-%m-%d %H:%M:%S')
             endTime = datetime.strptime(mainParset.getString('Observation.stopTime'), '%Y-%m-%d %H:%M:%S')
         except ValueError:
-            logger.warning('cannot parse for start/end time from specification for otdb_id=%s. skipping specification.', (otdb_id, ))
-            return
+            logger.warning('cannot parse start/end time from specification for otdb_id=%s. searching for sane defaults...', otdb_id)
             maxPredecessorEndTime = self.getMaxPredecessorEndTime(specification_tree)
 
             if maxPredecessorEndTime:
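
The warning no longer aborts the assignment; the code now falls through to derive defaults. A sketch of the fallback, treating the parset as a plain dict and with an assumed one-hour default duration (the actual defaults are outside this hunk):

    from datetime import datetime, timedelta

    def parse_task_times(parset, max_predecessor_endtime=None, now=None):
        fmt = '%Y-%m-%d %H:%M:%S'
        now = now or datetime.utcnow()
        try:
            start = datetime.strptime(parset['Observation.startTime'], fmt)
            end = datetime.strptime(parset['Observation.stopTime'], fmt)
        except (KeyError, ValueError):
            # Sane defaults: start after the latest predecessor (or now),
            # with an assumed one-hour duration.
            start = (max_predecessor_endtime or now) + timedelta(minutes=1)
            end = start + timedelta(hours=1)
        return start, end
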
diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/schedulechecker.py b/SAS/ResourceAssignment/ResourceAssigner/lib/schedulechecker.py
index c8e193d4fc16bc816649ba519d76bc8ee09577dc..51a5758404655ace18b7efe4a7ded22134253d9c 100644
--- a/SAS/ResourceAssignment/ResourceAssigner/lib/schedulechecker.py
+++ b/SAS/ResourceAssignment/ResourceAssigner/lib/schedulechecker.py
@@ -83,8 +83,10 @@ class ScheduleChecker():
 
             try:
                 scheduled_pipelines = self._radbrpc.getTasks(task_status='scheduled', task_type='pipeline')
+                queued_pipelines = self._radbrpc.getTasks(task_status='queued', task_type='pipeline')
+                sq_pipelines = scheduled_pipelines + queued_pipelines
 
-                for task in scheduled_pipelines:
+                for task in sq_pipelines:
                     if task['starttime'] <= now:
                         logger.info("Moving ahead scheduled pipeline radb_id=%s otdb_id=%s to 1 minute from now", task['id'], task['otdb_id'])
                         self._radbrpc.updateTaskAndResourceClaims(task['id'], starttime=now+timedelta(seconds=60), endtime=now+timedelta(seconds=60+task['duration']))
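
Both 'scheduled' and 'queued' pipelines whose start time has passed are now moved ahead. As a stand-alone sketch, assuming the RADB RPC methods used above:

    from datetime import datetime, timedelta

    def move_pending_pipelines_ahead(radbrpc, now=None):
        now = now or datetime.utcnow()
        pending = (radbrpc.getTasks(task_status='scheduled', task_type='pipeline') +
                   radbrpc.getTasks(task_status='queued', task_type='pipeline'))
        for task in pending:
            if task['starttime'] <= now:
                # Shift the task to start one minute from now, keeping its duration.
                radbrpc.updateTaskAndResourceClaims(
                    task['id'],
                    starttime=now + timedelta(seconds=60),
                    endtime=now + timedelta(seconds=60 + task['duration']))
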
diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/sql/add_resource_allocation_statics.sql b/SAS/ResourceAssignment/ResourceAssignmentDatabase/sql/add_resource_allocation_statics.sql
index 5aee075a79a56fd90d26b544a3da4f806124b00e..512e5634beb9f8ab4385bd79d685582546c27d81 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/sql/add_resource_allocation_statics.sql
+++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/sql/add_resource_allocation_statics.sql
@@ -7,7 +7,7 @@ INSERT INTO resource_allocation.task_status VALUES (200, 'prepared'), (300, 'app
 (1150, 'error'), (1200, 'obsolete'); -- This is the list from OTDB, we'll need to merge it with the list from MoM in the future, might use different indexes?
 INSERT INTO resource_allocation.task_type VALUES (0, 'observation'),(1, 'pipeline'); -- We'll need more types
 INSERT INTO resource_allocation.resource_claim_status VALUES (0, 'claimed'), (1, 'allocated'), (2, 'conflict');
-INSERT INTO resource_allocation.resource_claim_property_type VALUES (0, 'nr_of_is_files'),(1, 'nr_of_cs_files'),(2, 'nr_of_uv_files'),(3, 'nr_of_im_files'),(4, 'nr_of_img_files'),(5, 'nr_of_pulp_files'),(6, 'nr_of_cs_stokes'),(7, 'nr_of_is_stokes'),(8, 'is_file_size'),(9, 'cs_file_size'),(10, 'uv_file_size'),(11, 'im_file_size'),(12, 'img_file_size'),(13, 'nr_of_pulp_files'),(14, 'nr_of_tabs'),(15, 'start_sb_nr');
+INSERT INTO resource_allocation.resource_claim_property_type VALUES (0, 'nr_of_is_files'),(1, 'nr_of_cs_files'),(2, 'nr_of_uv_files'),(3, 'nr_of_im_files'),(4, 'nr_of_img_files'),(5, 'nr_of_pulp_files'),(6, 'nr_of_cs_stokes'),(7, 'nr_of_is_stokes'),(8, 'is_file_size'),(9, 'cs_file_size'),(10, 'uv_file_size'),(11, 'im_file_size'),(12, 'img_file_size'),(13, 'nr_of_pulp_files'),(14, 'nr_of_tabs'),(15, 'start_sb_nr'),(16,'uv_otdb_id'),(17,'cs_otdb_id'),(18,'is_otdb_id'),(19,'im_otdb_id'),(20,'img_otdb_id'),(21,'pulp_otdb_id');
 INSERT INTO resource_allocation.resource_claim_property_io_type VALUES (0, 'output'),(1, 'input');
 INSERT INTO resource_allocation.config VALUES (0, 'max_fill_percentage_cep4', '85.00'), (1, 'claim_timeout', '172800'), (2, 'min_inter_task_delay', '60'); -- Just some values 172800 is two days in seconds
 INSERT INTO resource_allocation.conflict_reason
diff --git a/SAS/ResourceAssignment/ResourceAssignmentEditor/CMakeLists.txt b/SAS/ResourceAssignment/ResourceAssignmentEditor/CMakeLists.txt
index 62f065324294a663a75ac8b8e6b547663e30fb4d..b02ba541d0f43e6c2676beee4d46153511ddb7a9 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentEditor/CMakeLists.txt
+++ b/SAS/ResourceAssignment/ResourceAssignmentEditor/CMakeLists.txt
@@ -1,9 +1,8 @@
 # $Id: CMakeLists.txt 30355 2014-11-04 13:46:05Z loose $
 
-lofar_package(ResourceAssignmentEditor 0.1)
+lofar_package(ResourceAssignmentEditor 1.0 DEPENDS MoMQueryService ResourceAssignmentService PyMessaging)
 
 include(PythonInstall)
-set(USE_PYTHON_COMPILATION Off)
 
 add_subdirectory(lib)
 add_subdirectory(bin)
diff --git a/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/datacontroller.js b/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/datacontroller.js
index 1142b3e47eb46fc0d020237f68efe9b180b95cf3..d6a0a7cf089a2f05f592ba8b83a47818ea1a8a74 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/datacontroller.js
+++ b/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/datacontroller.js
@@ -39,6 +39,7 @@ angular.module('raeApp').factory("dataService", ['$http', '$q', function($http,
 
     self.initialLoadComplete = false;
     self.taskChangeCntr = 0;
+    self.filteredTaskChangeCntr = 0;
     self.claimChangeCntr = 0;
     self.resourceUsagesChangeCntr = 0;
 
@@ -113,9 +114,11 @@ angular.module('raeApp').factory("dataService", ['$http', '$q', function($http,
 
     self.toIdBasedDict = function(list) {
         var dict = {}
-        for(var i = list.length-1; i >=0; i--) {
-            var item = list[i];
-            dict[item.id] = item;
+        if(list) {
+            for(var i = list.length-1; i >=0; i--) {
+                var item = list[i];
+                dict[item.id] = item;
+            }
         }
         return dict;
     };
@@ -177,13 +180,11 @@ angular.module('raeApp').factory("dataService", ['$http', '$q', function($http,
         }
 
         self.tasks = visibleTasks;
-        self.taskDict = self.toIdBasedDict(self.tasks);
-        self.filteredTasks = self.tasks;
-        self.filteredTaskDict = self.taskDict;
+        self.taskDict = self.toIdBasedDict(visibleTasks);
+        self.taskChangeCntr++;
 
         self.computeMinMaxTaskTimes();
 
-
         var numClaims = self.resourceClaims.length;
         var visibleClaims = [];
         for(var i = 0; i < numClaims; i++) {
@@ -240,10 +241,7 @@ angular.module('raeApp').factory("dataService", ['$http', '$q', function($http,
                 }
             }
 
-            self.filteredTasks = self.tasks;
-            self.filteredTaskDict = self.taskDict;
             self.taskChangeCntr++;
-
             self.computeMinMaxTaskTimes();
 
             if(initialTaskLoad && self.tasks.length > 0) {
@@ -596,6 +594,7 @@ angular.module('raeApp').factory("dataService", ['$http', '$q', function($http,
                             }
 
                             self.taskChangeCntr++;
+
                             self.computeMinMaxTaskTimes();
                         } else if(change.objectType == 'resourceClaim') {
                             anyResourceClaims = true;
@@ -683,7 +682,7 @@ dataControllerMod.controller('DataController',
     $scope.openViewFromDatePopup = function() { $scope.viewFromDatePopupOpened = true; };
     $scope.openViewToDatePopup = function() { $scope.viewToDatePopupOpened = true; };
     $scope.zoomTimespans = [{value:30, name:'30 Minutes'}, {value:60, name:'1 Hour'}, {value:3*60, name:'3 Hours'}, {value:6*60, name:'6 Hours'}, {value:12*60, name:'12 Hours'}, {value:24*60, name:'1 Day'}, {value:2*24*60, name:'2 Days'}, {value:3*24*60, name:'3 Days'}, {value:5*24*60, name:'5 Days'}, {value:7*24*60, name:'1 Week'}, {value:14*24*60, name:'2 Weeks'}, {value:28*24*60, name:'4 Weeks'}, {value:1, name:'Custom (1 min)'}];
-    $scope.zoomTimespan = $scope.zoomTimespans[5];
+    $scope.zoomTimespan = $scope.zoomTimespans[4];
     $scope.jumpToNow = function() {
         var floorLofarTime = dataService.floorDate(dataService.lofarTime, 1, 5);
         dataService.viewTimeSpan = {
@@ -785,17 +784,19 @@ dataControllerMod.controller('DataController',
         }
     };
 
-    $scope.$watch('dataService.viewTimeSpan.from', function() {
+    $scope.onViewTimeSpanFromChanged = function() {
+        dataService.autoFollowNow = false;
         if(dataService.viewTimeSpan.from >= dataService.viewTimeSpan.to) {
-            dataService.viewTimeSpan.from = dataService.floorDate(new Date(dataService.viewTimeSpan.to.getTime() - 5*60*1000), 1, 5);
+            dataService.viewTimeSpan.to = dataService.floorDate(new Date(dataService.viewTimeSpan.from.getTime() + $scope.zoomTimespan.value*60*1000), 1, 5);
         }
-    });
+    };
 
-    $scope.$watch('dataService.viewTimeSpan.to', function() {
+    $scope.onViewTimeSpanToChanged = function() {
+        dataService.autoFollowNow = false;
         if(dataService.viewTimeSpan.to <= dataService.viewTimeSpan.from) {
-            dataService.viewTimeSpan.to = dataService.floorDate(new Date(dataService.viewTimeSpan.from.getTime() + 5*60*1000), 1, 5);
+            dataService.viewTimeSpan.from = dataService.floorDate(new Date(dataService.viewTimeSpan.to.getTime() - $scope.zoomTimespan.value*60*1000), 1, 5);
         }
-    });
+    };
 
     $scope.$watch('dataService.viewTimeSpan', function() {
         $scope.selectZoomTimespan();
diff --git a/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/ganttprojectcontroller.js b/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/ganttprojectcontroller.js
index 5d5a353fb0abaaf1614714124403adc64bcacac3..ef5752e6ffe17d26e4a29d0466e0ee4953bd441f 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/ganttprojectcontroller.js
+++ b/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/ganttprojectcontroller.js
@@ -261,19 +261,15 @@ ganttProjectControllerMod.controller('GanttProjectController', ['$scope', 'dataS
         $scope.ganttData = ganntRows;
     };
 
-    $scope.$watch('dataService.initialLoadComplete', updateGanttData);
-    $scope.$watch('dataService.selected_task_id', updateGanttData);
-    $scope.$watch('dataService.tasks', updateGanttData);
-    $scope.$watch('dataService.resources', updateGanttData);
-    $scope.$watch('dataService.resourceClaims', updateGanttData);
-    $scope.$watch('dataService.resourceGroups', updateGanttData);
-    $scope.$watch('dataService.resourceGroupMemberships', updateGanttData);
-    $scope.$watch('dataService.filteredTaskDict', updateGanttData);
-    $scope.$watch('dataService.momProjectsDict', updateGanttData);
-    $scope.$watch('dataService.viewTimeSpan', updateGanttData, true);
-    $scope.$watch('dataService.taskChangeCntr', function() { $scope.$evalAsync(updateGanttData); });
+    $scope.$watch('dataService.initialLoadComplete', function() { $scope.$evalAsync(updateGanttData); });
+    $scope.$watch('dataService.selected_task_id', function() { $scope.$evalAsync(updateGanttData); });
+    $scope.$watch('dataService.viewTimeSpan', function() { $scope.$evalAsync(updateGanttData); }, true);
+    $scope.$watch('dataService.filteredTaskChangeCntr', function() { $scope.$evalAsync(updateGanttData); });
     $scope.$watch('dataService.lofarTime', function() {
-        if($scope.dataService.lofarTime.getSeconds() % 5 == 0) {
-            $scope.options.currentDateValue= $scope.dataService.lofarTime;}});
+        $scope.$evalAsync(function() {
+            if($scope.dataService.lofarTime.getSeconds() % 5 == 0) {
+                $scope.options.currentDateValue= $scope.dataService.lofarTime;}
+        });
+    });
 }
 ]);
diff --git a/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/ganttresourcecontroller.js b/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/ganttresourcecontroller.js
index 1c17751ae6cd925e407389fe627e155927a04101..9ec685de16ea6e07b287373f288b284197c50370 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/ganttresourcecontroller.js
+++ b/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/ganttresourcecontroller.js
@@ -468,18 +468,16 @@ ganttResourceControllerMod.controller('GanttResourceController', ['$scope', 'dat
         }
     };
 
-    $scope.$watch('dataService.initialLoadComplete', updateGanttData);
-    $scope.$watch('dataService.selected_task_id', updateGanttData);
-    $scope.$watch('dataService.tasks', updateGanttData);
-    $scope.$watch('dataService.resources', updateGanttData);
-    $scope.$watch('dataService.resourceClaims', updateGanttData);
-    $scope.$watch('dataService.resourceGroups', updateGanttData);
-    $scope.$watch('dataService.resourceGroupMemberships', updateGanttData);
-    $scope.$watch('dataService.filteredTaskDict', updateGanttData);
-    $scope.$watch('dataService.viewTimeSpan', updateGanttData, true);
+    $scope.$watch('dataService.initialLoadComplete', function() { $scope.$evalAsync(updateGanttData); });
+    $scope.$watch('dataService.selected_task_id', function() { $scope.$evalAsync(updateGanttData); });
+    $scope.$watch('dataService.viewTimeSpan', function() { $scope.$evalAsync(updateGanttData); }, true);
     $scope.$watch('dataService.claimChangeCntr', function() { $scope.$evalAsync(updateGanttData); });
+    $scope.$watch('dataService.filteredTaskChangeCntr', function() { $scope.$evalAsync(updateGanttData); });
     $scope.$watch('dataService.lofarTime', function() {
-        if($scope.dataService.lofarTime.getSeconds() % 5 == 0) {
-            $scope.options.currentDateValue= $scope.dataService.lofarTime;}});
+        $scope.$evalAsync(function() {
+            if($scope.dataService.lofarTime.getSeconds() % 5 == 0) {
+                $scope.options.currentDateValue= $scope.dataService.lofarTime;}
+        });
+    });
 }
 ]);
diff --git a/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/gridcontroller.js b/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/gridcontroller.js
index 18f3389fdcfbedffd5c1876e4ad7ee8527f5dbdf..835b7588f34ba2539bdab4e6154d4252278486bc 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/gridcontroller.js
+++ b/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/static/app/controllers/gridcontroller.js
@@ -55,6 +55,7 @@ $scope.columns = [
         enableCellEdit: true,
         width: '6%',
         filter: {
+            condition: uiGridConstants.filter.EXACT,
             type: uiGridConstants.filter.SELECT,
             selectOptions: []
         },
@@ -69,16 +70,18 @@ $scope.columns = [
         enableCellEdit: false,
         width: '6%',
         filter: {
+            condition: uiGridConstants.filter.EXACT,
             type: uiGridConstants.filter.SELECT,
             selectOptions: []
         }
     },
-    { field: 'mom_object_group_name',
+    { field: 'mom_object_group_id',
         displayName: 'Group',
         enableCellEdit: false,
-        cellTemplate:'<a target="_blank" href="https://lofar.astron.nl/mom3/user/project/setUpMom2ObjectDetails.do?view=generalinfo&mom2ObjectId={{row.entity.mom_object_group_mom2object_id}}">{{row.entity[col.field]}}</a>',
+        cellTemplate:'<a target="_blank" href="https://lofar.astron.nl/mom3/user/project/setUpMom2ObjectDetails.do?view=generalinfo&mom2ObjectId={{row.entity.mom_object_group_mom2object_id}}">{{row.entity.mom_object_group_name}} {{row.entity.mom_object_group_id}}</a>',
         width: '13%',
         filter: {
+            condition: uiGridConstants.filter.EXACT,
             type: uiGridConstants.filter.SELECT,
             selectOptions: []
         }
@@ -120,7 +123,29 @@ $scope.columns = [
         onRegisterApi: function(gridApi){
             $scope.gridApi = gridApi;
 
-            $scope.gridApi.core.on.rowsRendered($scope, filterTasks);
+            $scope.gridApi.core.on.rowsRendered($scope, function() {
+                //on.rowsRendered is called whenever the data/filtering of the grid changed
+                //update the filteredTasks in the dataService from the resulting new grid rows
+                $scope.$evalAsync(function() {
+                    var taskDict = $scope.dataService.taskDict;
+                    $scope.dataService.filteredTasks = [];
+                    var rows = $scope.gridApi.grid.rows;
+                    var numRows = rows.length;
+                    for(var i = 0; i < numRows; i++) {
+                        var row = rows[i];
+                        if(row.visible)
+                        {
+                            var task = taskDict[row.entity.id];
+                            if(task) {
+                                $scope.dataService.filteredTasks.push(task);
+                            }
+                        }
+                    }
+
+                    $scope.dataService.filteredTaskDict = $scope.dataService.toIdBasedDict($scope.dataService.filteredTasks);
+                    $scope.dataService.filteredTaskChangeCntr++;
+                });
+            });
 
             gridApi.edit.on.afterCellEdit($scope,function(rowEntity, colDef, newValue, oldValue){
                 var task = $scope.dataService.taskDict[rowEntity.id];
@@ -136,26 +161,6 @@ $scope.columns = [
         }
     };
 
-    function filterTasks() {
-        var taskDict = $scope.dataService.taskDict;
-        var filteredTasks = [];
-        var filteredTaskDict = {};
-        var rows = $scope.gridApi.grid.rows;
-        var numRows = rows.length;
-        for(var i = 0; i < numRows; i++) {
-            var row = rows[i];
-            if(row.visible)
-            {
-                var task = taskDict[row.entity.id];
-                filteredTasks.push(task);
-                filteredTaskDict[task.id] = task;
-            }
-        }
-
-        $scope.dataService.filteredTasks = filteredTasks;
-        $scope.dataService.filteredTaskDict = filteredTaskDict;
-    };
-
     function fillColumFilterSelectOptions(options, columnDef) {
         var columnSelectOptions = [];
 
@@ -163,7 +168,12 @@ $scope.columns = [
             for(var i = 0; i < options.length; i++)
             {
                 var option = options[i];
-                columnSelectOptions.push({ value: option, label: option })
+                if(option.hasOwnProperty('value') && option.hasOwnProperty('label')) {
+                    columnSelectOptions.push({ value: option.value, label: option.label })
+                }
+                else {
+                    columnSelectOptions.push({ value: option, label: option })
+                }
             }
         }
 
@@ -175,10 +185,13 @@ $scope.columns = [
             var viewFrom = $scope.dataService.viewTimeSpan.from;
             var viewTo = $scope.dataService.viewTimeSpan.to;
 
-            var tasks = [];
+            $scope.dataService.filteredTasks = [];
+            var gridTasks = [];
 
             for(var task of $scope.dataService.tasks) {
                 if(task.endtime >= viewFrom && task.starttime <= viewTo) {
+                    $scope.dataService.filteredTasks.push(task);
+
                     var gridTask = {
                         id: task.id,
                         name: task.name,
@@ -196,11 +209,11 @@ $scope.columns = [
                         mom_object_group_name: task.mom_object_group_name,
                         mom_object_group_mom2object_id: task.mom_object_group_mom2object_id
                     };
-                    tasks.push(gridTask);
+                    gridTasks.push(gridTask);
                 }
             }
 
-            $scope.gridOptions.data = tasks;
+            $scope.gridOptions.data = gridTasks;
         } else
             $scope.gridOptions.data = []
 
@@ -219,19 +232,21 @@ $scope.columns = [
 
     $scope.$watch('dataService.taskChangeCntr', function() { $scope.$evalAsync(populateList); });
     $scope.$watch('dataService.viewTimeSpan', function() {
-        populateList();
-        setTimeout(jumpToSelectedTaskRow, 250);
+        $scope.$evalAsync(populateList);
+        $scope.$evalAsync(jumpToSelectedTaskRow);
     }, true);
 
-    $scope.$watch('dataService.taskstatustypes', function() {
-        taskstatustypenames = $scope.dataService.taskstatustypes.map(function(x) { return x.name; });
-        fillColumFilterSelectOptions(taskstatustypenames, $scope.columns[5]);
-        $scope.columns[6].editDropdownOptionsArray = $scope.dataService.taskstatustypes.map(function(x) { return {id:x.name, value:x.name}; });
-    });
+    $scope.$watch('dataService.initialLoadComplete', function() {
+        $scope.$evalAsync(function() {
+            taskstatustypenames = $scope.dataService.taskstatustypes.map(function(x) { return x.name; });
+            fillColumFilterSelectOptions(taskstatustypenames, $scope.columns[5]);
+            $scope.columns[6].editDropdownOptionsArray = $scope.dataService.taskstatustypes.map(function(x) { return {id:x.name, value:x.name}; });
 
-    $scope.$watch('dataService.tasktypes', function() {
-        tasktypenames = $scope.dataService.tasktypes.map(function(x) { return x.name; });
-        fillColumFilterSelectOptions(tasktypenames, $scope.columns[6]);
+            tasktypenames = $scope.dataService.tasktypes.map(function(x) { return x.name; });
+            fillColumFilterSelectOptions(tasktypenames, $scope.columns[6]);
+
+            fillProjectsColumFilterSelectOptions();
+        });
     });
 
     function fillProjectsColumFilterSelectOptions() {
@@ -257,14 +272,15 @@ $scope.columns = [
     function fillGroupsColumFilterSelectOptions() {
         var tasks = $scope.dataService.tasks;
         //get unique groupNames from tasks
-        var groupNames = tasks.map(function(t) { return t.mom_object_group_name; });
-        groupNames = groupNames.filter(function(value, index, arr) { return arr.indexOf(value) == index;})
-        groupNames.sort();
+        var groupNamesAndIds = tasks.map(function(t) { return { name: t.mom_object_group_name, id: t.mom_object_group_id }; });
+        groupNamesAndIds = groupNamesAndIds.filter(function(value, index, arr) { return arr.map(function(g) { return g.id; }).indexOf(value.id) == index; }); //dedupe by id; indexOf on the objects themselves compares references
+        groupNamesAndIds.sort(function(a, b) { return a.id - b.id; });
+
+        var groupSelectOptions = groupNamesAndIds.map(function(obj) { return { value: obj.id, label: obj.name + ' ' + obj.id }; });
 
-        fillColumFilterSelectOptions(groupNames, $scope.columns[7]);
+        fillColumFilterSelectOptions(groupSelectOptions, $scope.columns[7]);
     };
 
-    $scope.$watch('dataService.momProjectsDict', fillProjectsColumFilterSelectOptions);
     $scope.$watch('dataService.selected_task_id', jumpToSelectedTaskRow);}
 ]);
 
@@ -334,7 +350,6 @@ gridControllerMod.directive('contextMenu', ['$document', function($document) {
         $element.bind('contextmenu', handleContextMenuEvent);
 
         $scope.$on('$destroy', function() {
-            console.log('destroy');
             $element.unbind('contextmenu', handleContextMenuEvent);
         });
       }
diff --git a/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/templates/index.html b/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/templates/index.html
index 61a534f91acdd16708cc2c24926ba81980c76ee2..5942abcf8fd350942f648f3f62b14450c5683ff0 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/templates/index.html
+++ b/SAS/ResourceAssignment/ResourceAssignmentEditor/lib/templates/index.html
@@ -60,7 +60,7 @@
                     <span class="input-group-btn">
                         <button type="button" class="btn btn-default" ng-click="openViewFromDatePopup()"><i class="glyphicon glyphicon-calendar"></i></button>
                     </span>
-                    <uib-timepicker ng-model="dataService.viewTimeSpan.from" hour-step="1" minute-step="5" show-meridian="false" show-spinners="false"></uib-timepicker>
+                    <uib-timepicker ng-model="dataService.viewTimeSpan.from" ng-change="onViewTimeSpanFromChanged()" hour-step="1" minute-step="5" show-meridian="false" show-spinners="false"></uib-timepicker>
                 </p>
             </div>
             <div class="col-md-3">
@@ -70,7 +70,7 @@
                     <span class="input-group-btn">
                         <button type="button" class="btn btn-default" ng-click="openViewToDatePopup()"><i class="glyphicon glyphicon-calendar"></i></button>
                     </span>
-                    <uib-timepicker ng-model="dataService.viewTimeSpan.to" hour-step="1" minute-step="5" show-meridian="false" show-spinners="false"></uib-timepicker>
+                    <uib-timepicker ng-model="dataService.viewTimeSpan.to" ng-change="onViewTimeSpanToChanged()" hour-step="1" minute-step="5" show-meridian="false" show-spinners="false"></uib-timepicker>
                 </p>
             </div>
             <div class="col-md-2">
diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py
index 78c707874caad6cd1da80cab23971a8938ceffa7..55cb19d86e5a8c6a01280c1da45e3d64c6c9596c 100644
--- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py
+++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py
@@ -27,12 +27,22 @@ class ResourceEstimatorHandler(MessageHandlerInterface):
         specification_tree = content["specification_tree"]
         return self._get_estimated_resources(specification_tree) ##TODO also handle MoM tasks in RA 1.2
 
+    ##FIXME dirty hack
+    def add_id(self, estimate, otdb_id):
+        if 'storage' in estimate.values()[0]:
+            if 'output_files' in estimate.values()[0]['storage']: #We only need to do output files, it will be someone else's input
+                for data_type in estimate.values()[0]['storage']['output_files'].keys():
+                    if data_type != 'saps':
+                        estimate.values()[0]['storage']['output_files'][data_type][data_type + '_otdb_id'] = otdb_id
+                        logger.info('added %s to %s' % (otdb_id, str(estimate.values()[0]['storage']['output_files'][data_type])))
+        return estimate
+
     #TODO use something else than .values()[0]['storage'] ??
     def get_subtree_estimate(self, specification_tree):
         otdb_id = specification_tree['otdb_id']
         parset = specification_tree['specification']
         if specification_tree['task_type'] == 'observation':
-            return {str(otdb_id): self.observation.verify_and_estimate(parset)}
+            return {str(otdb_id): self.add_id(self.observation.verify_and_estimate(parset), otdb_id)}
         elif specification_tree['task_type'] == 'pipeline':
             branch_estimates = {}
             for branch in specification_tree['predecessors']:
@@ -40,28 +50,33 @@ class ResourceEstimatorHandler(MessageHandlerInterface):
             logger.info(str(branch_estimates)) 
             if specification_tree['task_subtype'] in ['averaging pipeline', 'calibration pipeline']:
+                input_files = {}
                 for id, estimate in branch_estimates.iteritems():
                     if not 'im' in estimate.values()[0]['storage']['output_files'] and 'uv' in estimate.values()[0]['storage']['output_files']: # Not a calibrator pipeline
                         logger.info('found %s as the target of pipeline %s' % (id, otdb_id))
-                        input_files = estimate.values()[0]['storage']['output_files'] # Need sap here as well
-                return {str(otdb_id): self.calibration_pipeline.verify_and_estimate(parset, input_files)}
+                        input_files['uv'] = estimate.values()[0]['storage']['output_files']['uv']
+                        if 'saps' in estimate.values()[0]['storage']['output_files']:
+                            input_files['saps'] = estimate.values()[0]['storage']['output_files']['saps']
+                    elif 'im' in estimate.values()[0]['storage']['output_files']:
+                        input_files['im'] = estimate.values()[0]['storage']['output_files']['im']
+                return {str(otdb_id): self.add_id(self.calibration_pipeline.verify_and_estimate(parset, input_files), otdb_id)}
                 
             if specification_tree['task_subtype'] in ['imaging pipeline', 'imaging pipeline msss']:
                 if len(branch_estimates) > 1:
                     logger.error('Imaging pipeline %d should not have multiple predecessors: %s' % (otdb_id, branch_estimates.keys() ) )
                 input_files = branch_estimates.values()[0].values()[0]['storage']['output_files']
-                return {str(otdb_id): self.imaging_pipeline.verify_and_estimate(parset, input_files)}
+                return {str(otdb_id): self.add_id(self.imaging_pipeline.verify_and_estimate(parset, input_files), otdb_id)}
                 
             if specification_tree['task_subtype'] in ['long baseline pipeline']:
                 if len(branch_estimates) > 1:
                     logger.error('Long baseline pipeline %d should not have multiple predecessors: %s' % (otdb_id, branch_estimates.keys() ) )
                 input_files = branch_estimates.values()[0].values()[0]['storage']['output_files']
-                return {str(otdb_id): self.longbaseline_pipeline.verify_and_estimate(parset, input_files)}
+                return {str(otdb_id): self.add_id(self.longbaseline_pipeline.verify_and_estimate(parset, input_files), otdb_id)}
 
             if specification_tree['task_subtype'] in ['pulsar pipeline']:
                 if len(branch_estimates) > 1:
                     logger.error('Pulsar pipeline %d should not have multiple predecessors: %s' % (otdb_id, branch_estimates.keys() ) )
                 input_files = branch_estimates.values()[0].values()[0]['storage']['output_files']
-                return {str(otdb_id): self.pulsar_pipeline.verify_and_estimate(parset, input_files)}
+                return {str(otdb_id): self.add_id(self.pulsar_pipeline.verify_and_estimate(parset, input_files), otdb_id)}
             
         else: # reservation, maintenance, system tasks?
             logger.info("It's not a pipeline or observation: %s" % otdb_id)
diff --git a/SAS/ResourceAssignment/Services/src/rataskspecifiedservice.ini b/SAS/ResourceAssignment/Services/src/rataskspecifiedservice.ini
index 1aaf8b5a77e8dcbeb946df7b7cb8532c6ed8fc5c..23d43f56ee0fa5cc956418d5ee77c97030e952eb 100644
--- a/SAS/ResourceAssignment/Services/src/rataskspecifiedservice.ini
+++ b/SAS/ResourceAssignment/Services/src/rataskspecifiedservice.ini
@@ -4,4 +4,5 @@ user=lofarsys
 stopsignal=INT ; KeyboardInterrupt
 stopasgroup=true
 stdout_logfile=%(program_name)s.log
-stderr_logfile=%(program_name)s.stderr
+redirect_stderr=true
+stderr_logfile=NONE
diff --git a/SAS/Scheduler/src/Controller.cpp b/SAS/Scheduler/src/Controller.cpp
index 504b8b04a4099cb4ba2b5f677a079a245e9901b2..2342513510d8c2cece1b72ef98bf05334b534abf 100644
--- a/SAS/Scheduler/src/Controller.cpp
+++ b/SAS/Scheduler/src/Controller.cpp
@@ -4544,6 +4544,9 @@ bool Controller::checkEarlyTasksStatus(void) {
 	int treeID;
 	for (std::vector<Task *>::const_iterator it = tasks.begin(); it != tasks.end(); ++it) {
         if ((*it)->getScheduledStart() <= now()) {
+            if (((*it)->getOutputDataproductCluster() == "CEP4") && (*it)->isPipeline()) {
+                continue; //Pipelines on CEP4: we don't care as SLURM sorts it out.
+            }
 			treeID = (*it)->getSASTreeID();
 			if ((itsSASConnection->connect() == 0) && (treeID != 0)) { // only do the sas check for apparently too early tasks that are already in SAS , not for new tasks
 				Task::task_status status(itsSASConnection->getTaskStatus(treeID));
diff --git a/SAS/Scheduler/src/SASConnection.cpp b/SAS/Scheduler/src/SASConnection.cpp
index c33f4dd27294a16c114070092199953ebeedfdd6..66edc07fc541db8edb5932adbd0f5a806c52c7f9 100644
--- a/SAS/Scheduler/src/SASConnection.cpp
+++ b/SAS/Scheduler/src/SASConnection.cpp
@@ -2576,6 +2576,9 @@ bool SASConnection::saveStationSettings(int treeID, const StationTask &task, con
 
 bool SASConnection::saveInputStorageSettings(int treeID, const Task &task) {
     bool bResult(true);
+    if (task.getOutputDataproductCluster() == "CEP4") { //For CEP4 we're skipping this. /AR
+        return bResult;
+    }
     const TaskStorage *task_storage(task.storage());
     if (task_storage) {
         const std::map<dataProductTypes, TaskStorage::inputDataProduct> &inputDataProducts(task_storage->getInputDataProducts());