diff --git a/.DS_Store b/.DS_Store
deleted file mode 100644
index 772182382fcc07c3b3b57de4515e7fbde30dc0ef..0000000000000000000000000000000000000000
Binary files a/.DS_Store and /dev/null differ
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7f7bd02c93f9369e87765dec61acdd72a6fb5ddb..7f96632c36e9b1caa7887e0fb323adc3fcf69678 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -241,7 +241,10 @@ dockerize_TMSS:
     - docker push nexus.cep4.control.lofar:18080/tmss_testprovider:$CI_COMMIT_SHORT_SHA
     - docker logout $CI_NEXUS_REGISTRY
   needs:
-    - integration_test_TMSS
+    - job: build_TMSS
+      artifacts: true
+    - job: integration_test_TMSS
+      artifacts: false
 
 #
 # INTEGRATION TEST STAGE
@@ -262,6 +265,7 @@ integration_test_TMSS:
     RABBITMQ_DEFAULT_PASS: guest
     LOFAR_DEFAULT_BROKER: 'rabbitmq' # override default 'localhost' which does not work for CI service rabbitmq.
   needs:
+    - build_TMSS
     - unit_test_TMSS
   artifacts:
     name: integration-test-report
@@ -286,6 +290,7 @@ integration_test_RAServices:
     - cd build/gnucxx11_opt
     - SKIP_INTEGRATION_TESTS=false SKIP_UNIT_TESTS=true ctest
   needs:
+    - build_RAServices
     - unit_test_RAServices
   artifacts:
     name: integration-test-report
@@ -310,6 +315,7 @@ integration_test_LTAIngest:
     RABBITMQ_DEFAULT_PASS: guest
     LOFAR_DEFAULT_BROKER: 'rabbitmq' # override default 'localhost' which does not work for CI service rabbitmq.
   needs:
+    - build_LTAIngest
     - unit_test_LTAIngest
   artifacts:
     name: integration-test-report
diff --git a/Docker/lofar-ci/Dockerfile_ci_sas b/Docker/lofar-ci/Dockerfile_ci_sas
index 527639e256c50c98b1ef0550b41a7cbf69b3e1c3..1aa8f6689b56f7529d3a0a17e0022128a9ab2bbc 100644
--- a/Docker/lofar-ci/Dockerfile_ci_sas
+++ b/Docker/lofar-ci/Dockerfile_ci_sas
@@ -16,7 +16,7 @@ RUN yum erase -y postgresql postgresql-server postgresql-devel && \
     cd /bin && ln -s /usr/pgsql-9.6/bin/initdb && ln -s /usr/pgsql-9.6/bin/postgres
 ENV PATH /usr/pgsql-9.6/bin:$PATH 
 
-RUN pip3 install cython kombu lxml requests pygcn xmljson mysql-connector-python python-dateutil Django==3.0.9 djangorestframework==3.11.1 djangorestframework-xml ldap==1.0.2 flask fabric coverage python-qpid-proton PyGreSQL numpy h5py psycopg2 testing.postgresql Flask-Testing scipy Markdown django-filter python-ldap python-ldap-test ldap3 django-jsonforms django-json-widget django-jsoneditor drf-yasg flex swagger-spec-validator django-auth-ldap mozilla-django-oidc jsonschema comet pyxb==1.2.5 graphviz isodate astropy packaging
+RUN pip3 install cython kombu lxml requests pygcn xmljson mysql-connector-python python-dateutil Django==3.0.9 djangorestframework==3.11.1 djangorestframework-xml ldap==1.0.2 flask fabric coverage python-qpid-proton PyGreSQL numpy h5py psycopg2 testing.postgresql Flask-Testing scipy Markdown django-filter python-ldap python-ldap-test ldap3 django-jsonforms django-json-widget django-jsoneditor drf-yasg flex swagger-spec-validator django-auth-ldap mozilla-django-oidc jsonschema comet pyxb==1.2.5 graphviz isodate astropy packaging django-debug-toolbar
 
 #Viewflow package 
 RUN pip3 install django-material django-viewflow
diff --git a/LCS/Messaging/python/messaging/config.py b/LCS/Messaging/python/messaging/config.py
index e7f350333f565688590b9849f9146643e86fc12f..c8ea8f0763e0d97779fc78a78caa9abbf0e5e63c 100644
--- a/LCS/Messaging/python/messaging/config.py
+++ b/LCS/Messaging/python/messaging/config.py
@@ -61,4 +61,4 @@ for port in possible_ports:
                      DEFAULT_BROKER, port, DEFAULT_USER, e)
 
 # default exchange to use for publishing messages
-DEFAULT_BUSNAME = adaptNameToEnvironment("lofar")
+DEFAULT_BUSNAME = adaptNameToEnvironment(os.environ.get('LOFAR_DEFAULT_EXCHANGE', 'lofar'))
diff --git a/LCS/PyCommon/json_utils.py b/LCS/PyCommon/json_utils.py
index f1aee10a64206f6c4897c69868991fd724b92f72..956fd3b0a29c34bc25bc3e204ff877943e266ca1 100644
--- a/LCS/PyCommon/json_utils.py
+++ b/LCS/PyCommon/json_utils.py
@@ -134,23 +134,13 @@ def resolved_refs(schema):
     '''return the given schema with all $ref fields replaced by the referred json (sub)schema that they point to.'''
     if isinstance(schema, dict):
         updated_schema = {}
-        for key, value in schema.items():
-            if key in "$ref" and isinstance(value, str):
-                if value.startswith('#'):
-                    # reference to local document, no need for http injection
-                    updated_schema[key] = value
-                else:
-                    try:
-                        # by returning the referenced (sub)schema, the $ref-key and url-value are replaced from the caller's perspective.
-                        # also, recursively resolve refs in referenced_subschema
-                        referenced_subschema = get_referenced_subschema(value)
-                        return resolved_refs(referenced_subschema)
-                    except:
-                        # can't get the referenced schema
-                        # so, just accept the original value and assume that the user uploaded a proper schema
-                        updated_schema[key] = value
-            else:
-                updated_schema[key] = resolved_refs(value)
+        keys = list(schema.keys())
+        if "$ref" in keys and isinstance(schema['$ref'], str) and schema['$ref'].startswith('http'):
+            keys.remove("$ref")
+            updated_schema = resolved_refs(get_referenced_subschema(schema['$ref']))
+
+        for key in keys:
+            updated_schema[key] = resolved_refs(schema[key])
         return updated_schema
 
     if isinstance(schema, list):
diff --git a/LCS/PyCommon/test/t_json_utils.py b/LCS/PyCommon/test/t_json_utils.py
index 2237f0f8d68717fe304b4babb301887b6bf89546..78609dbf21d6deb06cdc0cc663d6edd3a8f881fe 100755
--- a/LCS/PyCommon/test/t_json_utils.py
+++ b/LCS/PyCommon/test/t_json_utils.py
@@ -94,14 +94,15 @@ class TestJSONUtils(unittest.TestCase):
                            "name": {
                                "type": "string",
                                "minLength": 2 },
-                                "email": {
-                                    "$ref": base_url + "/base_schema.json" + "#/definitions/email"
-                                },
+                           "email": {
+                               "$ref": base_url + "/base_schema.json" + "#/definitions/email",
+                               "extra_prop": "very important"
+                           },
                            "other_emails": {
                                "type": "array",
                                "items": {
-                                    "$ref": base_url + "/base_schema.json" + "#/definitions/email"
-                                }
+                                   "$ref": base_url + "/base_schema.json" + "#/definitions/email"
+                               }
                            } } }
 
         class TestRequestHandler(http.server.BaseHTTPRequestHandler):
@@ -134,7 +135,11 @@ class TestJSONUtils(unittest.TestCase):
             print('resolved_user_schema: ', json.dumps(resolved_user_schema, indent=2))
 
             self.assertNotEqual(user_schema['properties']['email'], resolved_user_schema['properties']['email'])
-            self.assertEqual(base_schema['definitions']['email'], resolved_user_schema['properties']['email'])
+            for key,value in base_schema['definitions']['email'].items():
+                self.assertEqual(value, resolved_user_schema['properties']['email'][key])
+            self.assertTrue('extra_prop' in resolved_user_schema['properties']['email'])
+            self.assertEqual('very important', resolved_user_schema['properties']['email']['extra_prop'])
+
 
             httpd.shutdown()
             thread.join(timeout=2)
diff --git a/RTCP/Cobalt/GPUProc/etc/parset-additions.d/default/PVSS-feedback.parset b/RTCP/Cobalt/GPUProc/etc/parset-additions.d/default/PVSS-feedback.parset
index 4e062233e380cd0bc57b388f0ecec42bf1ccfe33..3f8654919634d6601b5d6c9bb635a067058ed53f 100644
--- a/RTCP/Cobalt/GPUProc/etc/parset-additions.d/default/PVSS-feedback.parset
+++ b/RTCP/Cobalt/GPUProc/etc/parset-additions.d/default/PVSS-feedback.parset
@@ -4,4 +4,4 @@
 # If empty, data points are never sent.
 # One can also start a PVSSGateway_Stub
 #   on localhost, which writes to a file.
-Cobalt.PVSSGateway.host = ccu001
+Cobalt.PVSSGateway.host = 
diff --git a/SAS/TMSS/docker-compose-ua.yml b/SAS/TMSS/docker-compose-ua.yml
index 74752f8596f9daa35763a85b7f5e355288b38cbd..73b699d14c94f2d0606c3242d4b964f593d05b68 100644
--- a/SAS/TMSS/docker-compose-ua.yml
+++ b/SAS/TMSS/docker-compose-ua.yml
@@ -17,9 +17,10 @@ services:
       - "8088:8088"
   web:
     image: nexus.cep4.control.lofar:18080/tmss_django:latest
+    hostname: tmss-ua
     restart: on-failure
     env_file:
       - ./.env
-    command: bash -c 'source /opt/lofar/lofarinit.sh && python3 lib64/python3.6/site-packages/lofar/sas/tmss/manage.py runserver 0.0.0.0:8008'
+    command: bash -c 'source /opt/lofar/lofarinit.sh && ALLOWED_HOSTS=* tmss_test_environment -H 0.0.0.0 -P tmss-ua -p 8008 --data'
     ports:
       - "8008:8008"
diff --git a/SAS/TMSS/frontend/tmss_webapp/debug.log b/SAS/TMSS/frontend/tmss_webapp/debug.log
new file mode 100644
index 0000000000000000000000000000000000000000..2d8c637aed6551186839ffb67b3120ab5e4487b9
--- /dev/null
+++ b/SAS/TMSS/frontend/tmss_webapp/debug.log
@@ -0,0 +1,2 @@
+[1013/111617.035:ERROR:directory_reader_win.cc(43)] FindFirstFile: The system cannot find the path specified. (0x3)
+[1015/122332.151:ERROR:directory_reader_win.cc(43)] FindFirstFile: The system cannot find the path specified. (0x3)
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/App.css b/SAS/TMSS/frontend/tmss_webapp/src/App.css
index d19517b7d6d383cb7d804784960ec03a8c68dc2f..afca29b115546e020b56ad71b4a94fbe82d6c65d 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/App.css
+++ b/SAS/TMSS/frontend/tmss_webapp/src/App.css
@@ -224,3 +224,11 @@ thead {
     transform: rotate(360deg);
   }
 }
+
+div[data-schemapath='root.$schema'] {
+  display: none;
+}
+
+.app-header-menu ul li a span {
+  display: inline !important;
+}
\ No newline at end of file
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js b/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js
index 80552d80e7059f28dee4491223c79ab6484e9bdc..06f9ead9e0cfceb602f0f859e4c249c958cbeb9f 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js
@@ -5,20 +5,77 @@
 import React, {useEffect, useRef} from 'react';
 import _ from 'lodash';
 import flatpickr from 'flatpickr';
-
+import $RefParser from "@apidevtools/json-schema-ref-parser";
 import "@fortawesome/fontawesome-free/css/all.css";
 import "flatpickr/dist/flatpickr.css";
 const JSONEditor = require("@json-editor/json-editor").JSONEditor;
 
 function Jeditor(props) {
-    // console.log("In JEditor");
+    // console.log("In JEditor", props.schema);
     const editorRef = useRef(null);
     let pointingProps = useRef(null);
     let editor = null;
-    useEffect(() => {
-        const element = document.getElementById('editor_holder');
+
+    /**
+     * Function to resolve external references
+     */
+    const resolveExternalRef = async () => {
         let schema = {};
-        Object.assign(schema, props.schema?props.schema:{});
+        Object.assign(schema, props.schema ? props.schema : {});
+        schema.definitions = schema.definitions?schema.definitions:{};
+        return (await resolveSchema(schema));
+    };
+
+    /**
+     * Function to resolve external reference in part based on the depth of schema iteration.
+     * @param {JSON Object} schema 
+     */
+    const resolveSchema = async (schema) => {
+        let properties = schema.properties;
+        schema.definitions = schema.definitions?schema.definitions:{};
+        if (properties) {
+            for (const propertyKey in properties) {
+                let property = properties[propertyKey];
+                if (property["$ref"] && !property["$ref"].startsWith("#")) {    // 1st level reference of the object
+                    const refUrl = property["$ref"];
+                    let newRef = refUrl.substring(refUrl.indexOf("#"));
+                    if (refUrl.endsWith("/pointing")) {                         // For type pointing
+                        schema.definitions["pointing"] = (await $RefParser.resolve(refUrl)).get(newRef);
+                        property["$ref"] = newRef;
+                    }   else {                   // General object to resolve if any reference in child level
+                        property = await resolveSchema((await $RefParser.resolve(refUrl)).get(newRef));
+                    }
+                }   else if(property["type"] === "array") {             // reference in array items definition
+                    let resolvedItems = await resolveSchema(property["items"]);
+                    schema.definitions = {...schema.definitions, ...resolvedItems.definitions};
+                    delete resolvedItems['definitions'];
+                    property["items"] = resolvedItems;
+                }
+                properties[propertyKey] = property;
+            }
+        }   else if (schema["oneOf"]) {             // Reference in OneOf array
+            let resolvedOneOfList = []
+            for (const oneOfProperty of schema["oneOf"]) {
+                const resolvedOneOf = await resolveSchema(oneOfProperty);
+                resolvedOneOfList.push(resolvedOneOf);
+            }
+            schema["oneOf"] = resolvedOneOfList;
+        }   else if (schema["$ref"] && !schema["$ref"].startsWith("#")) {   //reference in oneOf list item
+            const refUrl = schema["$ref"];
+            let newRef = refUrl.substring(refUrl.indexOf("#"));
+            if (refUrl.endsWith("/pointing")) {
+                schema.definitions["pointing"] = (await $RefParser.resolve(refUrl)).get(newRef);
+                schema["$ref"] = newRef;
+            }   else {
+                schema = await resolveSchema((await $RefParser.resolve(refUrl)).get(newRef));
+            }
+        }
+        return schema;
+    }
+
+    const init = async () => {
+        const element = document.getElementById('editor_holder');
+        let schema = await resolveExternalRef();
         pointingProps = [];
         // Customize the pointing property to capture angle1 and angle2 to specified format
         for (const definitionKey in schema.definitions) {
@@ -90,7 +147,8 @@ function Jeditor(props) {
             disable_properties: true,
             disable_collapse: true,
             show_errors: props.errorsOn?props.errorsOn:'change',        // Can be 'interaction', 'change', 'always', 'never'
-            compact: true
+            compact: true,
+            ajax: true
         };
         // Set Initial value to the editor
         if (props.initValue) {
@@ -99,13 +157,19 @@ function Jeditor(props) {
         editor = new JSONEditor(element, editorOptions);
         // editor.getEditor('root').disable();
         if (props.disabled) {
-            editor.disable();
+            editor.on('ready',() => {
+                editor.disable();
+            });
         }
         if (props.parentFunction) {
             props.parentFunction(editorFunction);
         }
         editorRef.current = editor;
         editor.on('change', () => {setEditorOutput()});
+    };
+
+    useEffect(() => {
+        init();
     }, [props.schema]);
 
     /**
@@ -134,46 +198,6 @@ function Jeditor(props) {
      * @param {Boolean} isDegree 
      */
     function getAngleProperty(defProperty, isDegree) {
-        /*let newProperty = {
-            "type": "object",
-            "additionalProperties": false,
-            "format": "grid",
-            // "title": defProperty.title,
-            // "description": defProperty.description};
-            "title": "Duration",
-            "description": "Duration of the observation"};
-        let subProperties = {};
-        if (isDegree) {
-            subProperties["dd"] = {  "type": "number",
-                                      "title": "DD",
-                                      "description": "Degrees",
-                                      "default": 0,
-                                      "minimum": 0,
-                                      "maximum": 90 };
-        }   else {
-            subProperties["hh"] = {  "type": "number",
-                                      "title": "HH",
-                                      "description": "Hours",
-                                      "default": 0,
-                                      "minimum": 0,
-                                      "maximum": 23 };
-            
-        }
-        subProperties["mm"] = {  "type": "number",
-                                      "title": "MM",
-                                      "description": "Minutes",
-                                      "default": 0,
-                                      "minimum": 0,
-                                      "maximum": 59 };
-        subProperties["ss"] = {  "type": "number",
-                                      "title": "SS",
-                                      "description": "Seconds",
-                                      "default": 0,
-                                      "minimum": 0,
-                                      "maximum": 59 };
-
-        newProperty.properties = subProperties;
-        newProperty.required = isDegree?["dd", "mm", "ss"]:["hh", "mm", "ss"];*/
         let newProperty = {
             type: "string",
             title: defProperty.title,
@@ -210,39 +234,8 @@ function Jeditor(props) {
                 newProperty.default = '';
                 newProperty.description = "For Range enter Start and End seperated by 2 dots. Mulitple ranges can be separated by comma. Minimum should be 0 and maximum should be 511. For exmaple 11..20, 30..50";
                 newProperty.validationType = 'subband_list';
-                // newProperty.options = {
-                //     grid_columns: 4
-                // };
                 properties[propertyKey] = newProperty;
             }   else if (propertyKey.toLowerCase() === 'duration') {
-                /*propertyValue.title = "Duration (minutes)";
-                propertyValue.default = "1";
-                propertyValue.description = "Duration of this observation. Enter in decimal for seconds. For example 0.5 for 30 seconds";
-                propertyValue.minimum = 0.25;
-                propertyValue.options = {
-                    grid_columns: 6
-                };*/
-                /*propertyValue.title = "Duration";
-                propertyValue.default = "1H20M30S";
-                propertyValue.type = "string";
-                propertyValue.description = "Duration of the observation (H-hours,M-minutes,S-seconds & should be in the order of H, M and S respectively)";
-                /*let newProperty = {
-                    type: "string",
-                    title: "Duration",
-                    description: `${propertyValue.description} (Hours:Minutes:Seconds)`,
-                    default: "00:00:00",
-                    "options": {
-                        "grid_columns": 5,
-                        "inputAttributes": {
-                            "placeholder": "HH:mm:ss"
-                        },
-                        "cleave": {
-                            date: true,
-                            datePattern: ['HH','mm','ss'],
-                            delimiter: ':'
-                        }
-                    }
-                }*/
                 let newProperty = {
                     "type": "string",
                     "format": "time",
@@ -522,7 +515,6 @@ function Jeditor(props) {
     return (
         <React.Fragment>
             <div id='editor_holder'></div>
-            {/* <div><input type="button" onClick={setEditorOutput} value="Show Output" /></div> */}
         </React.Fragment>
     );
 };
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js b/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js
index 1825ef55a1a8191016e852cba1b9206e0b884c2b..ba27d387d396ba1292a334e6907d58f0f6f91561 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js
@@ -18,6 +18,7 @@ import UtilService from '../../services/util.service';
 
 import 'react-calendar-timeline/lib/Timeline.css';
 import { Calendar } from 'primereact/calendar';
+import { Checkbox } from 'primereact/checkbox';
 
 // Label formats for day headers based on the interval label width
 const DAY_HEADER_FORMATS = [{ name: "longer", minWidth: 300, maxWidth: 50000, format: "DD dddd, MMMM YYYY"},
@@ -30,7 +31,8 @@ const DAY_HEADER_FORMATS = [{ name: "longer", minWidth: 300, maxWidth: 50000, fo
                             {name: "nano", minWidth: 0, maxWidth: 0, format: ""}];
 
 //>>>>>> Constants for date/time formats, zoom level definition & defaults
-const UTC_DISPLAY_FORMAT = "YYYY-MM-DDTHH:mm:ss";
+const UTC_DATE_FORMAT = "YYYY-MM-DD";
+const UTC_TIME_FORMAT = "HH:mm:ss";
 const UTC_LST_KEY_FORMAT = "YYYY-MM-DDTHH:mm:00";
 const UTC_LST_HOUR_FORMAT = "YYYY-MM-DDTHH:00:00";
 const UTC_LST_DAY_FORMAT = "YYYY-MM-DDT00:00:00";
@@ -95,7 +97,8 @@ export class CalendarTimeline extends Component {
         lstDateHeaderUnit: 'hour',                                  // Unit to be considered for the LST axis header based on the visible duration
         isLSTDateHeaderLoading: true,
         dayHeaderVisible: true,                                     // To control the Day header visibility based on the zoom level
-        weekHeaderVisible: false                                    // To control the Week header visibility based on the zoom level
+        weekHeaderVisible: false,                                   // To control the Week header visibility based on the zoom level
+        isLive: false
       }
       this.itemClickCallback = props.itemClickCallback;             // Pass timeline item click event back to parent
       
@@ -125,6 +128,10 @@ export class CalendarTimeline extends Component {
       this.zoomOut = this.zoomOut.bind(this);
       this.setZoomRange = this.setZoomRange.bind(this);
       //<<<<<< Functions of this component
+      
+      //>>>>>> Public functions of the component
+      this.updateTimeline = this.updateTimeline.bind(this);
+      //<<<<<< Public functions of the component
     }
 
     componentDidMount() {
@@ -158,12 +165,17 @@ export class CalendarTimeline extends Component {
                     const currentUTC = moment.utc(utcString);
                     this.setState({currentUTC: currentUTC});
                     let currentLST = await UtilService.getLST(utcString);
-                    this.setState({currentLST: moment(currentUTC.format('DD-MMM-YYYY ') + currentLST)})
+                    this.setState({currentLST: moment(currentUTC.format('DD-MMM-YYYY ') + currentLST.split('.')[0], 'DD-MMM-YYYY HH:mm:ss')})
                 } );
         }   else {
             this.setState({currentUTC: this.state.currentUTC.add(1, 'second'), 
                             currentLST: this.state.currentLST?this.state.currentLST.add(1, 'second'):null});
         }
+        if (this.state.isLive) {
+            this.props.dateRangeCallback(this.state.defaultStartTime.add(1, 'second'), this.state.defaultEndTime.add(1, 'second'));
+            // const result = this.props.dateRangeCallback(this.state.defaultStartTime.add(1, 'second'), this.state.defaultEndTime.add(1, 'second'));
+            // let group = DEFAULT_GROUP.concat(result.group);
+        }
     }
 
     /**
@@ -183,7 +195,7 @@ export class CalendarTimeline extends Component {
             const formattedColUTC = colUTC.format(lstDateHeaderUnit==="hour"?UTC_LST_HOUR_FORMAT:UTC_LST_DAY_FORMAT);
             // if (!lstDateHeaderMap[formattedColUTC]) {
                 const lst = await UtilService.getLST(formattedColUTC);
-                const lstDate = moment(colUTC.format(`DD-MMM-YYYY ${lst}`)).add(30, 'minutes');
+                const lstDate = moment(colUTC.format(`MM-DD-YYYY ${lst.split('.')[0]}`), 'MM-DD-YYYY HH:mm:ss').add(30, 'minutes');
                 lstDateHeaderMap[formattedColUTC] = lstDateHeaderUnit==="hour"?lstDate.format('HH'):lstDate.format('DD');
             // }
         }
@@ -506,6 +518,7 @@ export class CalendarTimeline extends Component {
     onTimeChange(visibleTimeStart, visibleTimeEnd, updateScrollCanvas) {
         this.loadLSTDateHeaderMap(moment(visibleTimeStart).utc(), moment(visibleTimeEnd).utc(), this.state.lstDateHeaderUnit);
         updateScrollCanvas(visibleTimeStart, visibleTimeEnd);
+        this.props.dateRangeCallback(moment(visibleTimeStart).utc(), moment(visibleTimeEnd).utc());
         this.setState({defaultStartTime: moment(visibleTimeStart), defaultEndTime: moment(visibleTimeEnd)})
     }
 
@@ -570,8 +583,11 @@ export class CalendarTimeline extends Component {
         let visibleTimeEnd = this.state.defaultEndTime;
         const visibleTimeDiff = visibleTimeEnd.valueOf()-visibleTimeStart.valueOf();
         const secondsToMove = visibleTimeDiff / 1000 / 10 ;
+        const result = this.props.dateRangeCallback(visibleTimeStart, visibleTimeEnd);
+        let group = DEFAULT_GROUP.concat(result.group);
         this.setState({defaultStartTime: visibleTimeStart.add(-1 * secondsToMove, 'seconds'),
-                        defaultEndTime: visibleTimeEnd.add(-1 * secondsToMove, 'seconds')});
+                        defaultEndTime: visibleTimeEnd.add(-1 * secondsToMove, 'seconds'),
+                        group: group, items: result.items});
     }
 
     /**
@@ -582,22 +598,17 @@ export class CalendarTimeline extends Component {
         let visibleTimeEnd = this.state.defaultEndTime;
         const visibleTimeDiff = visibleTimeEnd.valueOf()-visibleTimeStart.valueOf();
         const secondsToMove = visibleTimeDiff / 1000 / 10 ;
+        const result = this.props.dateRangeCallback(visibleTimeStart, visibleTimeEnd);
+        let group = DEFAULT_GROUP.concat(result.group);
         this.setState({defaultStartTime: visibleTimeStart.add(1 * secondsToMove, 'seconds'),
-                        defaultEndTime: visibleTimeEnd.add(1 * secondsToMove, 'seconds')});
+                        defaultEndTime: visibleTimeEnd.add(1 * secondsToMove, 'seconds'),
+                        group: group, items: result.items});
     }
 
     /**
      * Zooms In to the next pre-defined zoom level
      */
     zoomIn() {
-        /*let visibleTimeStart = this.state.defaultStartTime;
-        let visibleTimeEnd = this.state.defaultEndTime;
-        const visibleTimeDiff = visibleTimeEnd.valueOf()-visibleTimeStart.valueOf();
-        if (visibleTimeDiff > this.state.minZoom) {
-            const secondsToZoom = visibleTimeDiff / 1000 / 2 / 4 * 3 ;
-            this.setState({defaultStartTime: visibleTimeStart.add(1*secondsToZoom, 'seconds'),
-                            defaultEndTime: visibleTimeEnd.add(-1*secondsToZoom, 'seconds')});
-        }*/
         let prevZoomLevel = this.state.zoomLevel;
         const prevZoomObject = _.find(ZOOM_LEVELS, {'name': prevZoomLevel});
         const prevZoomIndex = ZOOM_LEVELS.indexOf(prevZoomObject);
@@ -610,14 +621,6 @@ export class CalendarTimeline extends Component {
      * Zooms out to the next pre-defined zoom level
      */
     zoomOut() {
-        /*let visibleTimeStart = this.state.defaultStartTime;
-        let visibleTimeEnd = this.state.defaultEndTime;
-        const visibleTimeDiff = visibleTimeEnd.valueOf()-visibleTimeStart.valueOf();
-        if (visibleTimeDiff < this.state.maxZoom) {
-            const secondsToZoom = visibleTimeDiff / 1000 * 3 / 2;
-            this.setState({defaultStartTime: visibleTimeStart.add(-1*secondsToZoom, 'seconds'),
-                            defaultEndTime: visibleTimeEnd.add(1*secondsToZoom, 'seconds')});
-        }*/
         let prevZoomLevel = this.state.zoomLevel;
         const prevZoomObject = _.find(ZOOM_LEVELS, {'name': prevZoomLevel});
         const prevZoomIndex = ZOOM_LEVELS.indexOf(prevZoomObject);
@@ -663,15 +666,27 @@ export class CalendarTimeline extends Component {
         }
     }
 
+    /**
+     * Public function that parent components or functions can call to pass the required
+     * data and parameters as a single object
+     * @param {Object} props 
+     */
+    updateTimeline(props) {
+        this.setState({group: DEFAULT_GROUP.concat(props.group), items: props.items});
+    }
+
     render() {
         return (
             <React.Fragment>
                 {/* Toolbar for the timeline */}
                 <div className="p-fluid p-grid timeline-toolbar">
                     {/* Clock Display */}
-                    <div className="p-col-3" style={{padding: '0px 0px 0px 10px'}}>
+                    <div className="p-col-2" style={{padding: '0px 0px 0px 10px'}}>
                         <div style={{marginTop: "0px"}}>
-                            <label style={{marginBottom: "0px"}}>UTC:</label><span>{this.state.currentUTC.format(UTC_DISPLAY_FORMAT)}</span>
+                            <label style={{marginBottom: "0px"}}>Date:</label><span>{this.state.currentUTC.format(UTC_DATE_FORMAT)}</span>
+                        </div>
+                        <div style={{marginTop: "0px"}}>
+                            <label style={{marginBottom: "0px"}}>UTC:</label><span>{this.state.currentUTC.format(UTC_TIME_FORMAT)}</span>
                         </div>
                         {this.state.currentLST && 
                             <div style={{marginTop: "0px"}}>
@@ -679,8 +694,12 @@ export class CalendarTimeline extends Component {
                             </div>
                         }
                     </div>
+                    <div className="p-col-1 timeline-filters">
+                        <label style={{paddingRight: "3px"}}>Live </label>
+                        <Checkbox checked={this.state.isLive} label="Live" onChange={(e) => { this.setState({'isLive': e.checked})}} ></Checkbox>
+                    </div>
                     {/* Date Range Selection */}
-                    <div className="p-col-4">
+                    <div className="p-col-4 timeline-filters">
                         {/* <span className="p-float-label"> */}
                         <Calendar id="range" placeholder="Select Date Range" selectionMode="range" showIcon={!this.state.zoomRange}
                                 value={this.state.zoomRange} onChange={(e) => this.setZoomRange( e.value )} readOnlyInput />
@@ -690,11 +709,11 @@ export class CalendarTimeline extends Component {
                                                     onClick={() => {this.setZoomRange( null)}}></i>}
                     </div>
                     {/* Reset to default zoom and current timeline */}
-                    <div className="p-col-1" style={{padding: '5px 0px'}}>
+                    <div className="p-col-1 timeline-button" >
                         <Button label="" icon="pi pi-arrow-down" className="p-button-rounded p-button-success" id="now-btn" onClick={this.resetToCurrentTime} title="Rest Zoom & Move to Current Time"/>
                     </div>
                     {/* Zoom Select */}
-                    <div className="p-col-2" style={{paddingRight: '0px'}}>
+                    <div className="p-col-2 timeline-filters" style={{paddingRight: '0px'}}>
                         <Dropdown optionLabel="name" optionValue="name" 
                                 style={{fontSize: '10px'}}
                                 value={this.state.zoomLevel} 
@@ -704,7 +723,7 @@ export class CalendarTimeline extends Component {
                                 placeholder="Zoom"/>
                     </div>
                     {/* Zoom and Move Action */}
-                    <div className="p-col-2 timeline-actionbar">
+                    <div className="p-col-2 timeline-actionbar timeline-filters">
                         <button className="p-link" title="Move Left" onClick={e=> { this.moveLeft() }}><i className="pi pi-angle-left"></i></button>
                         <button className="p-link" title="Zoom Out" onClick={e=> { this.zoomOut() }} disabled={this.state.zoomLevel.startsWith('Custom')}><i className="pi pi-minus-circle"></i></button>
                         <button className="p-link" title="Zoom In" onClick={e=> { this.zoomIn() }} disabled={this.state.zoomLevel.startsWith('Custom')}><i className="pi pi-plus-circle"></i></button>
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js b/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js
index d1c5da28fd14540b005efdcc06283b66e3c93937..d471ef85a97a5486db803f086dc32be6981fb1c6 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js
@@ -14,11 +14,15 @@ import { Slider } from 'primereact/slider';
 import { Button } from "react-bootstrap";
 import { InputNumber } from "primereact/inputnumber";
 
-
-let tbldata =[];
+let tbldata =[], filteredData = [] ;
 let isunittest = false;
 let showTopTotal = true;
+let showGlobalFilter = true;
+let showColumnFilter = true;
+let allowColumnSelection = true;
 let columnclassname =[];
+let parentCallbackFunction;
+
 // Define a default UI for filtering
 function GlobalFilter({
     preGlobalFilteredRows,
@@ -42,7 +46,7 @@ function GlobalFilter({
 
 // Define a default UI for filtering
 function DefaultColumnFilter({
-  column: { filterValue, preFilteredRows, setFilter },
+  column: { filterValue, preFilteredRows, setFilter, filteredRows },
 }) {
   const [value, setValue] = useState('');
   React.useEffect(() => {
@@ -53,7 +57,7 @@ function DefaultColumnFilter({
   return (
     <div className="table-filter" onClick={e => { e.stopPropagation() }}>
       <input
-        value={value}
+        value={value}   // TODO: remove after merge review — incoming change was value={filterValue || ''}
         onChange={e => {
           setValue(e.target.value);
           setFilter(e.target.value || undefined) // Set undefined to remove the filter entirely
@@ -416,8 +420,7 @@ const defaultColumn = React.useMemo(
       useGlobalFilter,
       useSortBy,   
       usePagination
-    )
-
+    );
   React.useEffect(() => {
     setHiddenColumns(
       columns.filter(column => !column.isVisible).map(column => column.accessor)
@@ -471,9 +474,15 @@ const defaultColumn = React.useMemo(
     localStorage.setItem(tablename,JSON.stringify(lsToggleColumns))
   }
 
+  filteredData = _.map(rows, 'values');
+  if (parentCallbackFunction) {
+    parentCallbackFunction(filteredData);
+  }
+  
   return (
     <>
      <div id="block_container"> 
+     { allowColumnSelection &&
           <div   style={{textAlign:'left', marginRight:'30px'}}>
                 <i className="fa fa-columns col-filter-btn" label="Toggle Columns" onClick={(e) => op.current.toggle(e)}  />
                 <div style={{position:"relative",top: "-25px",marginLeft: "50px",color: "#005b9f"}} onClick={() => setAllFilters([])} >
@@ -505,9 +514,9 @@ const defaultColumn = React.useMemo(
                   </div>
                 </OverlayPanel>
             </div> 
-                
+      }
         <div  style={{textAlign:'right'}}>
-        {tbldata.length>0 && !isunittest && 
+        {tbldata.length>0 && !isunittest && showGlobalFilter &&
               <GlobalFilter
                 preGlobalFilteredRows={preGlobalFilteredRows}
                 globalFilter={state.globalFilter}
@@ -600,11 +609,15 @@ filterGreaterThan.autoRemove = val => typeof val !== 'number'
 function ViewTable(props) {
     const history = useHistory();
     // Data to show in table
-    tbldata = props.data; 
+    tbldata = props.data;
+    parentCallbackFunction = props.filterCallback; 
     isunittest = props.unittest;
     columnclassname = props.columnclassname;
-    showTopTotal = props.showTopTotal==='false'? false:true;
-     // Default Header to show in table and other columns header will not show until user action on UI
+    showTopTotal = props.showTopTotal===undefined?true:props.showTopTotal;
+    showGlobalFilter = props.showGlobalFilter===undefined?true:props.showGlobalFilter;
+    showColumnFilter = props.showColumnFilter===undefined?true:props.showColumnFilter;
+    allowColumnSelection = props.allowColumnSelection===undefined?true:props.allowColumnSelection;
+    // Default Header to show in table and other columns header will not show until user action on UI
     let defaultheader = props.defaultcolumns;
     let optionalheader = props.optionalcolumns;
     let defaultSortColumn = props.defaultSortColumn;
@@ -645,14 +658,17 @@ function ViewTable(props) {
   //Default Columns
   defaultdataheader.forEach(header => {
     const isString = typeof defaultheader[0][header] === 'string';
-    const filterFn = isString ? DefaultColumnFilter : (filterTypes[defaultheader[0][header].filter].fn ? filterTypes[defaultheader[0][header].filter].fn : DefaultColumnFilter);
-    const filtertype = (!isString && filterTypes[defaultheader[0][header].filter].type) ? filterTypes[defaultheader[0][header].filter].type : 'fuzzyText'
+    const filterFn = (showColumnFilter?(isString ? DefaultColumnFilter : (filterTypes[defaultheader[0][header].filter].fn ? filterTypes[defaultheader[0][header].filter].fn : DefaultColumnFilter)):"");
+    const filtertype = (showColumnFilter?(!isString && filterTypes[defaultheader[0][header].filter].type) ? filterTypes[defaultheader[0][header].filter].type : 'fuzzyText':"");
     columns.push({
       Header: isString ? defaultheader[0][header] : defaultheader[0][header].name,
       id: isString ? defaultheader[0][header] : defaultheader[0][header].name,
       accessor: header,
       filter: filtertype,
       Filter: filterFn,
+      // TODO: remove after merge review — incoming change kept below for reference
+      // filter: (showColumnFilter?((!isString && defaultheader[0][header].filter=== 'date') ? 'includes' : 'fuzzyText'):""),
+      // Filter: (showColumnFilter?(isString ? DefaultColumnFilter : (filterTypes[defaultheader[0][header].filter] ? filterTypes[defaultheader[0][header].filter] : DefaultColumnFilter)):""),
       isVisible: true,
       Cell: props => <div> {updatedCellvalue(header, props.value)} </div>,
    })
@@ -661,8 +677,8 @@ function ViewTable(props) {
 //Optional Columns
 optionaldataheader.forEach(header => {
   const isString = typeof optionalheader[0][header] === 'string';
-  const filterFn = isString ? DefaultColumnFilter : (filterTypes[optionalheader[0][header].filter].fn ? filterTypes[optionalheader[0][header].filter].fn : DefaultColumnFilter);
-    const filtertype = (!isString && filterTypes[optionalheader[0][header].filter].type) ? filterTypes[optionalheader[0][header].filter].type : 'fuzzyText'
+  const filterFn = (showColumnFilter?(isString ? DefaultColumnFilter : (filterTypes[optionalheader[0][header].filter].fn ? filterTypes[optionalheader[0][header].filter].fn : DefaultColumnFilter)):"");
+    const filtertype = (showColumnFilter?(!isString && filterTypes[optionalheader[0][header].filter].type) ? filterTypes[optionalheader[0][header].filter].type : 'fuzzyText':"");
     columns.push({
       Header: isString ? optionalheader[0][header] : optionalheader[0][header].name,
       id: isString ? header : optionalheader[0][header].name,
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/_overrides.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/_overrides.scss
index 28a3f0bd8ff5a14a6d37af4f3aabe475a1b88102..9b878f2e0f6514e0d7b171013ed3e62f12ed2016 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/layout/_overrides.scss
+++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/_overrides.scss
@@ -104,3 +104,5 @@
     }
 }
 
+
+
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/components/PageHeader.js b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/PageHeader.js
index a1ccb421b626562153020c49fab00e83c8c4db46..fb95ec75a094fc8a2d86bdf74ba78fab8c885a39 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/layout/components/PageHeader.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/PageHeader.js
@@ -15,7 +15,6 @@ export default ({ title, subTitle, actions, ...props}) => {
     }, [props.location.pathname]);
 
     const onClickLink = (action) => {
-        console.log('Hi')
         if (action.link) {
             action.link();
         }
@@ -40,10 +39,10 @@ export default ({ title, subTitle, actions, ...props}) => {
                 {(page.subTitle || subTitle) && <h6 className="page-subtitle">{subTitle || page.subTitle}</h6>}
             </div>
             <div className="page-action-menu">
-                {(actions || []).map(action => {
+                {(actions || []).map((action, index) =>{
                     if (action.type === 'button') {
                         return (
-                            <button className="p-link">
+                            <button className="p-link" key={index}>
                                 <i className={`fa ${action.icon}`}  
                                     onMouseOver={(e) => onButtonMouseOver(e, action)}
                                     onClick={(e) => onButtonClick(e, action)} />
@@ -51,7 +50,7 @@ export default ({ title, subTitle, actions, ...props}) => {
                         );
                     }   else {
                         return (
-                            <Link className={action.classname} to={{ ...action.props }} title={action.title || ''} onClick={() => onClickLink(action)}>
+                            <Link key={index} className={action.classname} to={{ ...action.props }} title={action.title || ''} onClick={() => onClickLink(action)}>
                                 <i className={`fa ${action.icon}`}></i>
                             </Link>
                         );
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss
index c2b32f1d6f45d477613e4d68d7257c3fedaff4ab..44fd846f2ef832ee7249f06e5f1d732764fb9ecf 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss
+++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss
@@ -19,6 +19,7 @@
 .timeline-actionbar button {
     padding-top: 3px;
     font-size: 1.0rem;
+    padding-left: 3px;
     // float: right;
 }
 
@@ -31,8 +32,23 @@
     white-space: nowrap;
 }
 
+.timeline-filters,.timeline-bottom {
+    padding-top: 25px;
+}
+
+.timeline-button {
+    padding-bottom: 5px;
+    padding-left: 0px;
+    padding-right: 0px;
+    padding-top: 25px;
+}
+
+.timeline-details-pane {
+    font-size: 14px;
+}
+
 #now-btn {
-    margin-left: 20px;
+    margin-left: 10px;
 }
 
 .resize-div,
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_viewtable.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_viewtable.scss
index bba9c83de732a18f5b5e0be828cfdf77d1c3f56e..682daca13fee4f45bb0d782e537d014640336762 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_viewtable.scss
+++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_viewtable.scss
@@ -102,27 +102,31 @@ body .p-paginator {
   max-width: 175px;
 }
 
-.filter-input-50 input, .filter-input-50 .p-slider {
+.filter-input-0 input{
+  display: none;
+}
+
+.filter-input-50, .filter-input-50 input, .filter-input-50 .p-slider {
   width: 50px;
 }
 
-.filter-input-75 input, .filter-input-75 .p-slider {
+.filter-input-75, .filter-input-75 input, .filter-input-75 .p-slider {
   width: 75px;
 }
 
-.filter-input-100 input, .filter-input-100 .p-slider {
+.filter-input-100, .filter-input-100 input, .filter-input-100 .p-slider {
   width: 100px;
 }
 
-.filter-input-125 input, .filter-input-125 .p-slider {
+.filter-input-125, .filter-input-125 input, .filter-input-125 .p-slider {
   width: 125px;
 }
 
-.filter-input-150 input, .filter-input-150 .p-slider {
+.filter-input-150, .filter-input-150 input, .filter-input-150 .p-slider {
   width: 150px;
 }
 
-.filter-input-175 input, .filter-input-175 .p-slider {
+.filter-input-175, .filter-input-175 input, .filter-input-175 .p-slider {
   width: 175px;
 }
  
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/view.js
index 0cb42a98add0d439122d25ecf637921ec73fc6c2..1dce9c77522247d114a91f6181ce3f05a00e1861 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/view.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/view.js
@@ -111,7 +111,7 @@ export class ProjectView extends Component {
         
         return (
             <React.Fragment>
-                <TieredMenu model={this.menuOptions} popup ref={el => this.optionsMenu = el} />
+                <TieredMenu className="app-header-menu" model={this.menuOptions} popup ref={el => this.optionsMenu = el} />
                 <PageHeader location={this.props.location} title={'Project - View'} 
                             actions={[  {icon:'fa-bars',title: '', type:'button',
                                          actOn:'mouseOver', props : { callback: this.showOptionMenu},
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js
index 8b7a3f54381ca0d1bc2c0a741a194b16b685a564..9bb43e3f0c7355dbe0c1c0fdf825715090e9ad2a 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js
@@ -37,15 +37,17 @@ class SchedulingUnitList extends Component{
                 },
                 start_time:"Start Time",
                 stop_time:"End time",
-                duration:"Duration (H:mm:ss)"
+                duration:"Duration (HH:mm:ss)",
+                status:"Status"
                 }],
             optionalcolumns:  [{
                 actionpath:"actionpath",
             }],
             columnclassname: [{
                 "Template":"filter-input-50",
-                "Duration":"filter-input-50",
+                "Duration (HH:mm:ss)":"filter-input-75",
                 "Type": "filter-input-75",
+                "Status":"filter-input-100"
             }],
             defaultSortColumn: [{id: "Name", desc: false}],
         }
@@ -69,7 +71,7 @@ class SchedulingUnitList extends Component{
                 for( const scheduleunit  of scheduleunits){
                     const blueprintdata = bluePrint.data.results.filter(i => i.draft_id === scheduleunit.id);
                     blueprintdata.map(blueP => { 
-                        blueP.duration = moment.utc(blueP.duration*1000).format('HH:mm:ss'); 
+                        blueP.duration = moment.utc((blueP.duration || 0)*1000).format('HH:mm:ss'); 
                         blueP.type="Blueprint"; 
                         blueP['actionpath'] ='/schedulingunit/view/blueprint/'+blueP.id;
                         blueP['created_at'] = moment(blueP['created_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss");
@@ -79,7 +81,7 @@ class SchedulingUnitList extends Component{
                     output.push(...blueprintdata);
                     scheduleunit['actionpath']='/schedulingunit/view/draft/'+scheduleunit.id;
                     scheduleunit['type'] = 'Draft';
-                    scheduleunit['duration'] = moment.utc(scheduleunit.duration*1000).format('HH:mm:ss');
+                    scheduleunit['duration'] = moment.utc((scheduleunit.duration || 0)*1000).format('HH:mm:ss');
                     scheduleunit['created_at'] = moment(scheduleunit['created_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss");
                     scheduleunit['updated_at'] = moment(scheduleunit['updated_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss");
                     output.push(scheduleunit);
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js
index 30cab3f14df153befbdfdc94c66762cb12eeb449..088903ca8f4058548f6a60fa5e7dcc2d1950db8b 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js
@@ -9,6 +9,8 @@ import PageHeader from '../../layout/components/PageHeader';
 import ViewTable from './../../components/ViewTable';
 import ScheduleService from '../../services/schedule.service';
 import moment from 'moment';
+import { Dialog } from 'primereact/dialog';
+import TaskStatusLogs from '../Task/state_logs';
 
 class ViewSchedulingUnit extends Component{
     constructor(props){
@@ -17,11 +19,13 @@ class ViewSchedulingUnit extends Component{
             scheduleunit: null,
             schedule_unit_task: [],
             isLoading: true,
+            showStatusLogs: false,
             paths: [{
                 "View": "/task",
             }],
 
             defaultcolumns: [ {
+                status_logs: "Status Logs",
                 tasktype:{
                     name:"Type",
                     filter:"select"
@@ -44,6 +48,7 @@ class ViewSchedulingUnit extends Component{
                 start_time:"Start Time",
                 stop_time:"End Time",
                 duration:"Duration (HH:mm:ss)",
+                status:"Status"
             }],
             optionalcolumns:  [{
                 relative_start_time:"Relative Start Time (HH:mm:ss)",
@@ -54,6 +59,7 @@ class ViewSchedulingUnit extends Component{
                 actionpath:"actionpath"
             }],
             columnclassname: [{
+                "Status Logs": "filter-input-0",
                 "Type":"filter-input-75",
                 "ID":"filter-input-50",
                 "Cancelled":"filter-input-50",
@@ -62,14 +68,15 @@ class ViewSchedulingUnit extends Component{
                 "BluePrint / Task Draft link": "filter-input-100",
                 "Relative Start Time (HH:mm:ss)": "filter-input-75",
                 "Relative End Time (HH:mm:ss)": "filter-input-75",
+                "Status":"filter-input-100"
             }]
         }
         this.actions = [
             {icon: 'fa-window-close',title:'Click to Close Scheduling Unit View', link: this.props.history.goBack} 
         ];
         if (this.props.match.params.type === 'draft') {
-            this.actions.unshift({icon: 'fa-edit', title: 'Click to edit',  props : { pathname:`/schedulingunit/edit/${ this.props.match.params.id}`
-        } });
+            this.actions.unshift({icon: 'fa-edit', title: 'Click to edit',  props : { pathname:`/schedulingunit/edit/${ this.props.match.params.id}`}
+            });
         } else {
             this.actions.unshift({icon: 'fa-lock', title: 'Cannot edit blueprint'});
         }
@@ -85,17 +92,22 @@ class ViewSchedulingUnit extends Component{
         let schedule_id = this.state.scheduleunitId;
         let schedule_type = this.state.scheduleunitType;
         if (schedule_type && schedule_id) {
+            const subtaskComponent = (task)=> {
+                return (
+                    <button className="p-link" onClick={(e) => {this.setState({showStatusLogs: true, task: task})}}>
+                        <i className="fa fa-history"></i>
+                    </button>
+                );
+            };
             this.getScheduleUnit(schedule_type, schedule_id)
             .then(schedulingUnit =>{
                 if (schedulingUnit) {
                     this.getScheduleUnitTasks(schedule_type, schedulingUnit)
                         .then(tasks =>{
-                    /* tasks.map(task => {
-                            task.duration = moment.utc(task.duration*1000).format('HH:mm:ss'); 
-                            task.relative_start_time = moment.utc(task.relative_start_time*1000).format('HH:mm:ss'); 
-                            task.relative_stop_time = moment.utc(task.relative_stop_time*1000).format('HH:mm:ss'); 
+                        tasks.map(task => {
+                            task.status_logs = task.tasktype === "Blueprint"?subtaskComponent(task):"";
                             return task;
-                        });*/
+                        });
                         this.setState({
                             scheduleunit : schedulingUnit,
                             schedule_unit_task : tasks,
@@ -148,36 +160,42 @@ class ViewSchedulingUnit extends Component{
 				{ this.state.isLoading ? <AppLoader/> :this.state.scheduleunit &&
 			    <>
 		            <div className="main-content">
-                    <div className="p-grid">
-                        <label  className="col-lg-2 col-md-2 col-sm-12">Name</label>
-                        <span className="p-col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.name}</span>
-                        <label  className="col-lg-2 col-md-2 col-sm-12">Description</label>
-                        <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.description}</span>
-                    </div>
-                    <div className="p-grid">
-                        <label className="col-lg-2 col-md-2 col-sm-12">Created At</label>
-                        <span className="col-lg-4 col-md-4 col-sm-12">{moment(this.state.scheduleunit.created_at).format("YYYY-MMM-DD HH:mm:SS")}</span>
-                        <label className="col-lg-2 col-md-2 col-sm-12">Updated At</label>
-                        <span className="col-lg-4 col-md-4 col-sm-12">{moment(this.state.scheduleunit.updated_at).format("YYYY-MMM-DD HH:mm:SS")}</span>
-                    </div>
-                    <div className="p-grid">
-                        <label className="col-lg-2 col-md-2 col-sm-12">Start Time</label>
-                        <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.start_time && moment(this.state.scheduleunit.start_time).format("YYYY-MMM-DD HH:mm:SS")}</span>
-                        <label className="col-lg-2 col-md-2 col-sm-12">End Time</label>
-                        <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.stop_time && moment(this.state.scheduleunit.stop_time).format("YYYY-MMM-DD HH:mm:SS")}</span>
-                    </div>
-                    <div className="p-grid">
-                        <label className="col-lg-2 col-md-2 col-sm-12">Template ID</label>
-                        <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.requirements_template_id}</span>
-                        <label  className="col-lg-2 col-md-2 col-sm-12">Scheduling set</label>
-                        <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.scheduling_set_object.name}</span>
-                    </div>
-                    <div className="p-grid">
-                        <label className="col-lg-2 col-md-2 col-sm-12">Duration (HH:mm:ss)</label>
-                        <span className="col-lg-4 col-md-4 col-sm-12">{moment.utc(this.state.scheduleunit.duration*1000).format('HH:mm:ss')}</span>
+                        <div className="p-grid">
+                            <label  className="col-lg-2 col-md-2 col-sm-12">Name</label>
+                            <span className="p-col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.name}</span>
+                            <label  className="col-lg-2 col-md-2 col-sm-12">Description</label>
+                            <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.description}</span>
+                        </div>
+                        <div className="p-grid">
+                            <label className="col-lg-2 col-md-2 col-sm-12">Created At</label>
+                            <span className="col-lg-4 col-md-4 col-sm-12">{moment(this.state.scheduleunit.created_at).format("YYYY-MMM-DD HH:mm:SS")}</span>
+                            <label className="col-lg-2 col-md-2 col-sm-12">Updated At</label>
+                            <span className="col-lg-4 col-md-4 col-sm-12">{moment(this.state.scheduleunit.updated_at).format("YYYY-MMM-DD HH:mm:SS")}</span>
+                        </div>
+                        <div className="p-grid">
+                            <label className="col-lg-2 col-md-2 col-sm-12">Start Time</label>
+                            <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.start_time && moment(this.state.scheduleunit.start_time).format("YYYY-MMM-DD HH:mm:SS")}</span>
+                            <label className="col-lg-2 col-md-2 col-sm-12">End Time</label>
+                            <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.stop_time && moment(this.state.scheduleunit.stop_time).format("YYYY-MMM-DD HH:mm:SS")}</span>
+                        </div>
+                        <div className="p-grid">
+                            <label className="col-lg-2 col-md-2 col-sm-12">Template ID</label>
+                            <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.requirements_template_id}</span>
+                            <label  className="col-lg-2 col-md-2 col-sm-12">Scheduling set</label>
+                            <span className="col-lg-4 col-md-4 col-sm-12">{this.state.scheduleunit.scheduling_set_object.name}</span>
+                        </div>
+                        <div className="p-grid">
+                            <label className="col-lg-2 col-md-2 col-sm-12" >Duration (HH:mm:ss)</label>
+                            <span className="col-lg-4 col-md-4 col-sm-12">{moment.utc((this.state.scheduleunit.duration?this.state.scheduleunit.duration:0)*1000).format('HH:mm:ss')}</span>
+                            {this.props.match.params.type === 'blueprint' &&
+                            <label className="col-lg-2 col-md-2 col-sm-12 ">Status</label> }
+                             {this.props.match.params.type === 'blueprint' &&
+                            <span className="col-lg-2 col-md-2 col-sm-12">{this.state.scheduleunit.status}</span>}
+                         </div>
+                     <div className="p-grid">
                         <label  className="col-lg-2 col-md-2 col-sm-12">Tags</label>
                         <Chips className="p-col-4 chips-readonly" disabled value={this.state.scheduleunit.tags}></Chips>
-                    </div>
+                        </div>
                     </div>
                 </>
 			 
@@ -208,7 +226,14 @@ class ViewSchedulingUnit extends Component{
                         unittest={this.state.unittest}
                         tablename="scheduleunit_task_list"
                     />
-                 } 
+                 }
+                 {this.state.showStatusLogs &&
+                    <Dialog header={`Status change logs - ${this.state.task?this.state.task.name:""}`} 
+                            visible={this.state.showStatusLogs} maximizable maximized={false} position="left" style={{ width: '50vw' }} 
+                            onHide={() => {this.setState({showStatusLogs: false})}}>
+                            <TaskStatusLogs taskId={this.state.task.id}></TaskStatusLogs>
+                    </Dialog>
+                 }
             </>
         )
     }
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js
index c10c070b09658bd0874a3322a85c5172ae3690be..62db24996287f74d5e98fc1c816d7d519f944898 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js
@@ -17,6 +17,7 @@ import ProjectService from '../../services/project.service';
 import ScheduleService from '../../services/schedule.service';
 import TaskService from '../../services/task.service';
 import UIConstants from '../../utils/ui.constants';
+import PageHeader from '../../layout/components/PageHeader';
 
 /**
  * Component to create a new SchedulingUnit from Observation strategy template
@@ -106,8 +107,8 @@ export class SchedulingUnitCreate extends Component {
         let schema = { type: 'object', additionalProperties: false, 
                         properties: {}, definitions:{}
                      };
-
-        for (const taskName in tasks)  {
+                     
+            for (const taskName of _.keys(tasks)) {
             const task = tasks[taskName];
             //Resolve task from the strategy template
             const $taskRefs = await $RefParser.resolve(task);
@@ -116,23 +117,35 @@ export class SchedulingUnitCreate extends Component {
             const taskTemplate = _.find(this.taskTemplates, {'name': task['specifications_template']});
             schema['$id'] = taskTemplate.schema['$id'];
             schema['$schema'] = taskTemplate.schema['$schema'];
-            observStrategy.template.parameters.forEach(async(param, index) => {
+            let index = 0;
+            for (const param of observStrategy.template.parameters) {
                 if (param.refs[0].indexOf(`/tasks/${taskName}`) > 0) {
                     // Resolve the identified template
                     const $templateRefs = await $RefParser.resolve(taskTemplate);
                     let property = { };
                     let tempProperty = null;
+                    const taskPaths = param.refs[0].split("/");
                     // Get the property type from the template and create new property in the schema for the parameters
                     try {
-                        tempProperty = $templateRefs.get(param.refs[0].replace(`#/tasks/${taskName}/specifications_doc`, '#/schema/properties'))
+                        const parameterRef = param.refs[0];//.replace(`#/tasks/${taskName}/specifications_doc`, '#/schema/properties');
+                        tempProperty = $templateRefs.get(parameterRef);
+                    //    property = _.cloneDeep(taskTemplate.schema.properties[taskPaths[4]]);
+                       
                     }   catch(error) {
-                        const taskPaths = param.refs[0].split("/");
                         tempProperty = _.cloneDeep(taskTemplate.schema.properties[taskPaths[4]]);
                         if (tempProperty.type === 'array') {
                             tempProperty = tempProperty.items.properties[taskPaths[6]];
                         }
                         property = tempProperty;
                     }
+                  /*  if (property['$ref'] && !property['$ref'].startsWith("#")) {
+                        const $propDefinition = await $RefParser.resolve(property['$ref']);
+                        const propDefinitions = $propDefinition.get("#/definitions");
+                        for (const propDefinition in propDefinitions) {
+                            schema.definitions[propDefinition] = propDefinitions[propDefinition];
+                            property['$ref'] = "#/definitions/"+ propDefinition ;
+                        } 
+                    } */
                     property.title = param.name;
                     property.default = $taskRefs.get(param.refs[0].replace(`#/tasks/${taskName}`, '#'));
                     paramsOutput[`param_${index}`] = property.default;
@@ -140,9 +153,13 @@ export class SchedulingUnitCreate extends Component {
                     // Set property defintions taken from the task template in new schema
                     for (const definitionName in taskTemplate.schema.definitions) {
                         schema.definitions[definitionName] = taskTemplate.schema.definitions[definitionName];
+                        
                     }
                 }
-            });
+                index++;
+               
+            }
+            
         }
         this.setState({observStrategy: observStrategy, paramsSchema: schema, paramsOutput: paramsOutput});
 
@@ -299,27 +316,19 @@ export class SchedulingUnitCreate extends Component {
         
         let jeditor = null;
         if (schema) {
-		    jeditor = React.createElement(Jeditor, {title: "Task Parameters", 
+            
+		   jeditor = React.createElement(Jeditor, {title: "Task Parameters", 
                                                         schema: schema,
                                                         initValue: this.state.paramsOutput, 
                                                         callback: this.setEditorOutput,
                                                         parentFunction: this.setEditorFunction
-                                                    });
+                                                    }); 
         }
         return (
             <React.Fragment>
-                <div className="p-grid">
-                    <Growl ref={(el) => this.growl = el} />
-                
-                    <div className="p-col-10 p-lg-10 p-md-10">
-                        <h2>Scheduling Unit - Add</h2>
-                    </div>
-                    <div className="p-col-2 p-lg-2 p-md-2">
-                        <Link to={{ pathname: '/schedulingunit'}} tite="Close" style={{float: "right"}}>
-                            <i className="fa fa-window-close" link={this.props.history.goBack()} style={{marginTop: "10px"}}></i>
-                        </Link>
-                    </div>
-                </div>
+                <Growl ref={(el) => this.growl = el} />
+                <PageHeader location={this.props.location} title={'Scheduling Unit - Add'} 
+                           actions={[{icon: 'fa-window-close',link: this.props.history.goBack,title:'Click to close Scheduling Unit creation', props : { pathname: `/schedulingunit`}}]}/>
                 { this.state.isLoading ? <AppLoader /> :
                 <>
                 <div>
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/edit.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/edit.js
index b333f66d341c9d18a5b4207ffcbdcc2fa5fda4ff..23c61c0f6fb0d6893913e3275699ea54224b2acb 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/edit.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/edit.js
@@ -90,18 +90,20 @@ export class EditSchedulingUnit extends Component {
             const taskTemplate = _.find(this.taskTemplates, {'name': task['specifications_template']});
             schema['$id'] = taskTemplate.schema['$id'];
             schema['$schema'] = taskTemplate.schema['$schema'];
-            observStrategy.template.parameters.forEach(async(param, index) => {
+            let index = 0;
+            for (const param of observStrategy.template.parameters) {
                 if (param.refs[0].indexOf(`/tasks/${taskName}`) > 0) {
                     tasksToUpdate[taskName] = taskName;
                     // Resolve the identified template
                     const $templateRefs = await $RefParser.resolve(taskTemplate);
                     let property = { };
                     let tempProperty = null;
+                    const taskPaths = param.refs[0].split("/");
                     // Get the property type from the template and create new property in the schema for the parameters
                     try {
-                        tempProperty = $templateRefs.get(param.refs[0].replace(`#/tasks/${taskName}/specifications_doc`, '#/schema/properties'))
+                        const parameterRef = param.refs[0];//.replace(`#/tasks/${taskName}/specifications_doc`, '#/schema/properties');
+                        tempProperty = $templateRefs.get(parameterRef);
                     }   catch(error) {
-                        const taskPaths = param.refs[0].split("/");
                         tempProperty = _.cloneDeep(taskTemplate.schema.properties[taskPaths[4]]);
                         if (tempProperty.type === 'array') {
                             tempProperty = tempProperty.items.properties[taskPaths[6]];
@@ -117,7 +119,8 @@ export class EditSchedulingUnit extends Component {
                         schema.definitions[definitionName] = taskTemplate.schema.definitions[definitionName];
                     }
                 }
-            });
+                index++;
+            }
         }
         this.setState({observStrategy: observStrategy, paramsSchema: schema, paramsOutput: paramsOutput, tasksToUpdate: tasksToUpdate});
 
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/summary.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/summary.js
new file mode 100644
index 0000000000000000000000000000000000000000..e5ca99a752c5d3e59b739ecb292b5658b4f65bfd
--- /dev/null
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/summary.js
@@ -0,0 +1,71 @@
+import React, {Component} from 'react';
+import { Link } from 'react-router-dom/cjs/react-router-dom.min';
+import moment from 'moment';
+import ViewTable from '../../components/ViewTable';
+
+/**
+ * Component to view summary of the scheduling unit with limited task details
+ */
+export class SchedulingUnitSummary extends Component {
+
+    constructor(props) {
+        super(props);
+        this.state = {
+            schedulingUnit: props.schedulingUnit || null
+        }
+        this.closeSUDets = this.closeSUDets.bind(this);
+    }
+
+    componentDidMount() {}
+
+    closeSUDets() {
+        if(this.props.closeCallback) {
+            this.props.closeCallback();
+        }
+    }
+
+    render() {
+        const schedulingUnit = this.props.schedulingUnit;
+        const suTaskList = this.props.suTaskList;
+        return (
+            <React.Fragment>
+            { schedulingUnit &&
+                <div className="p-grid timeline-details-pane" style={{marginTop: '10px'}}>
+                    <h6 className="col-lg-10 col-sm-10">Details</h6>
+                    <Link to={`/schedulingunit/view/blueprint/${schedulingUnit.id}`} title="View Full Details"><i className="fa fa-eye"></i></Link>
+                    <Link to={`/su/timelineview`} onClick={this.closeSUDets} title="Close Details"><i className="fa fa-times"></i></Link>
+                    <div className="col-4"><label>Name:</label></div>
+                    <div className="col-8">{schedulingUnit.name}</div>
+                    <div className="col-4"><label>Project:</label></div>
+                    <div className="col-8">{schedulingUnit.project.name}</div>
+                    <div className="col-4"><label>Start Time:</label></div>
+                    <div className="col-8">{moment.utc(schedulingUnit.start_time).format("DD-MMM-YYYY HH:mm:ss")}</div>
+                    <div className="col-4"><label>Stop Time:</label></div>
+                    <div className="col-8">{moment.utc(schedulingUnit.stop_time).format("DD-MMM-YYYY HH:mm:ss")}</div>
+                    <div className="col-4"><label>Status:</label></div>
+                    <div className="col-8">{schedulingUnit.status}</div>
+                    <div className="col-12">
+                        <ViewTable 
+                            data={suTaskList} 
+                            defaultcolumns={[{id: "ID", start_time:"Start Time", stop_time:"End Time", status: "Status", 
+                                                antenna_set: "Antenna Set", band: 'Band'}]}
+                            optionalcolumns={[{actionpath: "actionpath"}]}
+                            columnclassname={[{"ID": "filter-input-50", "Start Time": "filter-input-75", "End Time": "filter-input-75",
+                                                "Status": "filter-input-75", "Antenna Set": "filter-input-75", "Band": "filter-input-75"}]}
+                            defaultSortColumn= {[{id: "ID", desc: false}]}
+                            showaction="false"
+                            tablename="timeline_su_taskslist"
+                            showTopTotal={false}
+                            showGlobalFilter={false}
+                            showColumnFilter={false}
+                            allowColumnSelection={false}
+                        />
+                    </div>
+                </div>
+            }
+            </React.Fragment>
+        );
+    }
+}
+
+export default SchedulingUnitSummary;
\ No newline at end of file
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Task/edit.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Task/edit.js
index 19272db95bfe5ca90f4ba45105a1cc0689155583..0b3f2d234e0e0272736f2a234d91819bf5695137 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Task/edit.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Task/edit.js
@@ -280,4 +280,4 @@ export class TaskEdit extends Component {
             </React.Fragment>
         );
     }
-}
\ No newline at end of file
+}
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Task/state_logs.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Task/state_logs.js
new file mode 100644
index 0000000000000000000000000000000000000000..bd33d13b192c7efd087bf6580d4bfbd511ced14a
--- /dev/null
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Task/state_logs.js
@@ -0,0 +1,53 @@
+import React, {Component} from 'react';
+import _ from 'lodash';
+import TaskService from '../../services/task.service';
+import ViewTable from '../../components/ViewTable';
+import AppLoader from '../../layout/components/AppLoader';
+
+/**
+ * Component that lists the status change logs of subtasks
+ */
+export class TaskStatusLogs extends Component {
+
+    constructor(props) {
+        super(props);
+        this.state = {
+            isLoading: true,
+            logs: []
+        }
+    }
+
+    async componentDidMount() {
+        let logs = await TaskService.getTaskStatusLogs(this.props.taskId);
+        logs = _.sortBy(logs, ['subtask_id', 'updated_at']);
+        this.setState({logs: logs, isLoading: false});
+    }
+
+    render() {
+        return(
+            <React.Fragment>
+            { this.state.isLoading? <AppLoader /> : 
+                <ViewTable 
+                    data={this.state.logs} 
+                    defaultcolumns={[{subtask_id: "Subtask Id", subtask_type: "Type", updated_at: "Updated At", 
+                                        old_state_value: "From State", new_state_value: "To State", user: 'User'}]} 
+                    optionalcolumns={[{}]}
+                    columnclassname={[{"Subtask Id": "filter-input-75", "Type": "filter-input-75",
+                                        "Updated At": "filter-input-75", "From State": "filter-input-75",
+                                        "To State": "filter-input-75", "User": "filter-input-75"}]}
+                    defaultSortColumn={[{}]}
+                    showaction="false"
+                    keyaccessor="id"
+                    paths={this.state.paths}
+                    defaultpagesize={this.state.logs.length}
+                    showTopTotal={false}
+                    showGlobalFilter={false}
+                    allowColumnSelection={false}
+                />
+            }
+            </React.Fragment>
+        );
+    }
+}
+
+export default TaskStatusLogs;
\ No newline at end of file
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Task/view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Task/view.js
index f90a3db7820620c7ef72ae2796000a2f4ac76bd4..dbf432aadae315148ea4560534b6926f454b3a61 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Task/view.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Task/view.js
@@ -6,8 +6,11 @@ import Jeditor from '../../components/JSONEditor/JEditor';
 
 import TaskService from '../../services/task.service';
 import { Chips } from 'primereact/chips';
+import { Dialog } from 'primereact/dialog';
+
 import AppLoader from '../../layout/components/AppLoader';
 import PageHeader from '../../layout/components/PageHeader';
+import TaskStatusLogs from './state_logs';
 
 export class TaskView extends Component {
     DATE_FORMAT = 'YYYY-MMM-DD HH:mm:ss';
@@ -122,15 +125,13 @@ export class TaskView extends Component {
                             props : { pathname:'/task/edit',
                                         state: {taskId: this.state.task?this.state.task.id:''} 
                                     } 
-                        },
-                        {   icon: 'fa-window-close',
-                            link: this.props.history.goBack,
-                            title:'Click to Close Task', 
-                            props : { pathname:'/task' }}];
+                        }];
         }   else {
             actions = [{    icon: 'fa-lock',
                             title: 'Cannot edit blueprint'}];
         }
+        actions.push({  icon: 'fa-window-close', link: this.props.history.goBack,
+                        title:'Click to Close Task', props : { pathname:'/schedulingunit' }});
 
         // Child component to render predecessors and successors list
         const TaskRelationList = ({ list }) => (
@@ -233,10 +234,16 @@ export class TaskView extends Component {
                             <div className="p-grid">
                                 <label className="col-lg-2 col-md-2 col-sm-12">Data Product</label>
                                 <div className="col-lg-4 col-md-4 col-sm-12">
-                                     
-                                <Link to={ { pathname:`/task/view/blueprint/${this.state.taskId}/dataproducts`}}> View Data Product</Link>
+                                    <Link to={ { pathname:`/task/view/blueprint/${this.state.taskId}/dataproducts`}}> View Data Product</Link>
+                                </div>
+                                <label className="col-lg-2 col-md-2 col-sm-12">Status Logs</label>
+                                <div className="col-lg-4 col-md-4 col-sm-12">
+                                    <button className="p-link" onMouseOver={(e) => { this.setState({showStatusLogs: true})}}><i className="fa fa-history"></i></button>
+                                    <Dialog header="State change logs" visible={this.state.showStatusLogs} maximizable position="right" style={{ width: '50vw' }} 
+                                            onHide={() => {this.setState({showStatusLogs: false})}}>
+                                        <TaskStatusLogs taskId={this.state.taskId}></TaskStatusLogs>
+                                    </Dialog>
                                 </div>
-                            
                             </div>
                         }
                         <div className="p-fluid">
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js
index 7b1577973c9b081ff5534cb129c013846e890b5b..3d4ad6a1a31004c9c31dcb1b9f77adcf94cc15bc 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js
@@ -15,6 +15,7 @@ import ScheduleService from '../../services/schedule.service';
 import UtilService from '../../services/util.service';
 
 import UnitConverter from '../../utils/unit.converter';
+import SchedulingUnitSummary from '../Scheduling/summary';
 
 // Color constant for status
 const STATUS_COLORS = { "ERROR": "FF0000", "CANCELLED": "#00FF00", "DEFINED": "#00BCD4", 
@@ -39,13 +40,16 @@ export class TimelineView extends Component {
             isSUDetsVisible: false,
             canExtendSUList: true,
             canShrinkSUList: false,
-            selectedItem: null
+            selectedItem: null,
+            suTaskList:[],
+            isSummaryLoading: false
         }
 
         this.onItemClick = this.onItemClick.bind(this);
         this.closeSUDets = this.closeSUDets.bind(this);
         this.dateRangeCallback = this.dateRangeCallback.bind(this);
         this.resizeSUList = this.resizeSUList.bind(this);
+        this.suListFilterCallback = this.suListFilterCallback.bind(this);
     }
 
     async componentDidMount() {
@@ -126,7 +130,24 @@ export class TimelineView extends Component {
         if (this.state.isSUDetsVisible && item.id===this.state.selectedItem.id) {
             this.closeSUDets();
         }   else {
-            this.setState({selectedItem: item, isSUDetsVisible: true, canExtendSUList: false, canShrinkSUList:false});
+            const fetchDetails = !this.state.selectedItem || item.id!==this.state.selectedItem.id
+            this.setState({selectedItem: item, isSUDetsVisible: true, 
+                isSummaryLoading: fetchDetails,
+                suTaskList: !fetchDetails?this.state.suTaskList:[],
+                canExtendSUList: false, canShrinkSUList:false});
+            if (fetchDetails) {
+                const suBlueprint = _.find(this.state.suBlueprints, {id: item.id});
+                ScheduleService.getTaskBlueprintsBySchedulingUnit(suBlueprint, true)
+                    .then(taskList => {
+                        for (let task of taskList) {
+                            if (task.template.type_value.toLowerCase() === "observation") {
+                                task.antenna_set = task.specifications_doc.antenna_set;
+                                task.band = task.specifications_doc.filter;
+                            }
+                        }
+                        this.setState({suTaskList: _.sortBy(taskList, "id"), isSummaryLoading: false})
+                    });
+            }
         }
     }
 
@@ -162,7 +183,7 @@ export class TimelineView extends Component {
         }
         this.setState({suBlueprintList: _.filter(suBlueprintList, (suBlueprint) => {return suBlueprint.start_time!=null})});
         // On range change close the Details pane
-        this.closeSUDets();
+        // this.closeSUDets();
         return {group: group, items: items};
     }
 
@@ -185,6 +206,25 @@ export class TimelineView extends Component {
         this.setState({canExtendSUList: canExtendSUList, canShrinkSUList: canShrinkSUList});
     }
 
+    /**
+     * Callback function passed to the ViewTable component to receive the filtered data
+     * @param {Array} filteredData 
+     */
+    suListFilterCallback(filteredData) {
+        let group=[], items = [];
+        const suBlueprints = this.state.suBlueprints;
+        for (const data of filteredData) {
+            const suBlueprint = _.find(suBlueprints, {actionpath: data.actionpath});
+            items.push(this.getTimelineItem(suBlueprint));
+            if (!_.find(group, {'id': suBlueprint.suDraft.id})) {
+                group.push({'id': suBlueprint.suDraft.id, title: suBlueprint.suDraft.name});
+            }
+        }
+        if (this.timeline) {
+            this.timeline.updateTimeline({group: group, items: items});
+        }
+    }
+
     render() {
         if (this.state.redirect) {
             return <Redirect to={ {pathname: this.state.redirect} }></Redirect>
@@ -192,21 +232,15 @@ export class TimelineView extends Component {
         const isSUDetsVisible = this.state.isSUDetsVisible;
         const canExtendSUList = this.state.canExtendSUList;
         const canShrinkSUList = this.state.canShrinkSUList;
+        let suBlueprint = null;
+        if (isSUDetsVisible) {
+            suBlueprint = _.find(this.state.suBlueprints, {id: this.state.selectedItem.id});
+        }
         return (
             <React.Fragment>
                 <PageHeader location={this.props.location} title={'Scheduling Units - Timeline View'} />
                 { this.state.isLoading ? <AppLoader /> :
                         <div className="p-grid">
-                        {/* <SplitPane split="vertical" defaultSize={600} style={{height: 'auto'}} primary="second"> */}
-                            {/* <div className={isSUDetsVisible || (canExtendSUList && !canShrinkSUList)?"resize-div-min col-lg-4 col-md-4 col-sm-12":((canExtendSUList && canShrinkSUList)?"resize-div-avg col-lg-5 col-md-5 col-sm-12":"resize-div-max col-lg-6 col-md-6 col-sm-12")}>
-                                    <button className="p-link resize-btn" disabled={!this.state.canExtendSUList} 
-                                        onClick={(e)=> { this.resizeSUList(1)}}>
-                                        <i className="pi pi-step-forward"></i>
-                                    </button>
-                                    <button className="p-link resize-btn" disabled={!this.state.canShrinkSUList} 
-                                            onClick={(e)=> { this.resizeSUList(-1)}}>
-                                        <i className="pi pi-step-backward"></i>
-                                    </button></div>  */}
                             {/* SU List Panel */}
                             <div className={isSUDetsVisible || (canExtendSUList && !canShrinkSUList)?"col-lg-4 col-md-4 col-sm-12":((canExtendSUList && canShrinkSUList)?"col-lg-5 col-md-5 col-sm-12":"col-lg-6 col-md-6 col-sm-12")}
                                  style={{position: "inherit", borderRight: "5px solid #efefef", paddingTop: "10px"}}>
@@ -221,10 +255,11 @@ export class TimelineView extends Component {
                                     showaction="true"
                                     tablename="timeline_scheduleunit_list"
                                     showTopTotal="false"
+                                    filterCallback={this.suListFilterCallback}
                                 />
                             </div>
                             {/* Timeline Panel */}
-                            <div className={isSUDetsVisible || (!canExtendSUList && canShrinkSUList)?"col-lg-6 col-md-6 col-sm-12":((canExtendSUList && canShrinkSUList)?"col-lg-7 col-md-7 col-sm-12":"col-lg-8 col-md-8 col-sm-12")}>
+                            <div className={isSUDetsVisible || (!canExtendSUList && canShrinkSUList)?"col-lg-5 col-md-5 col-sm-12":((canExtendSUList && canShrinkSUList)?"col-lg-7 col-md-7 col-sm-12":"col-lg-8 col-md-8 col-sm-12")}>
                                 {/* Panel Resize buttons */}
                                 <div className="resize-div">
                                     <button className="p-link resize-btn" disabled={!this.state.canShrinkSUList} 
@@ -245,21 +280,14 @@ export class TimelineView extends Component {
                                         rowHeight={30} itemClickCallback={this.onItemClick}
                                         dateRangeCallback={this.dateRangeCallback}></Timeline>
                             </div>
-                        {/* </SplitPane> */}
                             {/* Details Panel */}
                             {this.state.isSUDetsVisible &&
-                                <div className="col-lg-2 col-md-2 col-sm-12" 
+                                <div className="col-lg-3 col-md-3 col-sm-12" 
                                      style={{borderLeft: "1px solid #efefef", marginTop: "0px", backgroundColor: "#f2f2f2"}}>
-                                    <div className="p-grid" style={{marginTop: '10px'}}>
-                                        <h6 className="col-lg-10 col-sm-10">Details</h6>
-                                        <button className="p-link" onClick={this.closeSUDets}><i className="fa fa-times"></i></button>
-                                    
-                                        <div className="col-12">
-                                            {this.state.selectedItem.title}
-                                        </div>
-
-                                        <div className="col-12">Still In Development</div>
-                                    </div>
+                                    {this.state.isSummaryLoading?<AppLoader /> :
+                                        <SchedulingUnitSummary schedulingUnit={suBlueprint} suTaskList={this.state.suTaskList}
+                                                closeCallback={this.closeSUDets}></SchedulingUnitSummary>
+                                    }
                                 </div>
                             }  
                         
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/index.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/index.js
index 38db5418a30031416ab3cd8770b326494ce49d2c..f13587989bebe2c3dbc2f796fcbf0c443d9b54fb 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/index.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/index.js
@@ -34,7 +34,8 @@ export const routes = [
     },{
         path: "/schedulingunit/create",
         component: SchedulingUnitCreate,
-        name: 'Scheduling Unit Add'
+        name: 'Scheduling Unit Add',
+        title: 'Scheduling Unit - Add'
     },{
         path: "/task",
         component: TaskView,
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js
index f5c26c533265c2efc0bb5e9e15fec488f06ebd50..db5256d215dc791d446bad4caee7f9d0e20c1869 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js
@@ -53,29 +53,31 @@ const ScheduleService = {
             return null;
         }
     },
-    getTaskBlueprintById: async function(id){
-        let res = [];
-        await axios.get('/api/task_blueprint/'+id)
-        .then(response => {
-            res= response; 
-        }).catch(function(error) {
+    getTaskBlueprintById: async function(id, loadTemplate){
+        let result;
+        try {
+            result = await axios.get('/api/task_blueprint/'+id);
+            if (result.data && loadTemplate) {
+                result.data.template = await TaskService.getTaskTemplate(result.data.specifications_template_id);
+            }
+        }   catch(error) {
             console.error('[schedule.services.getTaskBlueprintById]',error);
-        });
-        return res;
+        }
+        return result;
     },
-    getTaskBlueprintsBySchedulingUnit: async function(scheduleunit){
+    getTaskBlueprintsBySchedulingUnit: async function(scheduleunit, loadTemplate){
         // there no single api to fetch associated task_blueprint, so iteare the task_blueprint id to fetch associated task_blueprint
         let taskblueprintsList = [];
         if(scheduleunit.task_blueprints_ids){
             for(const id of scheduleunit.task_blueprints_ids){
-               await this.getTaskBlueprintById(id).then(response =>{
+               await this.getTaskBlueprintById(id, loadTemplate).then(response =>{
                     let taskblueprint = response.data;
                     taskblueprint['tasktype'] = 'Blueprint';
                     taskblueprint['actionpath'] = '/task/view/blueprint/'+taskblueprint['id'];
                     taskblueprint['blueprint_draft'] = taskblueprint['draft'];
                     taskblueprint['relative_start_time'] = 0;
                     taskblueprint['relative_stop_time'] = 0;
-                    taskblueprint.duration = moment.utc(taskblueprint.duration*1000).format('HH:mm:ss'); 
+                    taskblueprint.duration = moment.utc((taskblueprint.duration || 0)*1000).format('HH:mm:ss');
                     taskblueprintsList.push(taskblueprint);
                 })
             }
@@ -86,9 +88,9 @@ const ScheduleService = {
         let scheduletasklist=[];
         // let taskblueprints = [];
         // Common keys for Task and Blueprint
-        let commonkeys = ['id','created_at','description','name','tags','updated_at','url','do_cancel','relative_start_time','relative_stop_time','start_time','stop_time','duration'];
+        let commonkeys = ['id','created_at','description','name','tags','updated_at','url','do_cancel','relative_start_time','relative_stop_time','start_time','stop_time','duration','status'];
         // await this.getTaskBlueprints().then( blueprints =>{
-        //     taskblueprints = blueprints.data.results;
+        //     taskblueprints = blueprints.data.results;
         // });
         await this.getTasksDraftBySchedulingUnitId(id)
         .then(async(response) =>{
@@ -97,6 +99,7 @@ const ScheduleService = {
                 scheduletask['tasktype'] = 'Draft';
                 scheduletask['actionpath'] = '/task/view/draft/'+task['id'];
                 scheduletask['blueprint_draft'] = task['task_blueprints'];
+                scheduletask['status'] = task['status'];
 
               
                 //fetch task draft details
@@ -106,7 +109,7 @@ const ScheduleService = {
                 scheduletask['created_at'] = moment(task['created_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss");
                 scheduletask['updated_at'] = moment(task['updated_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss");
                 
-                scheduletask.duration = moment.utc(scheduletask.duration*1000).format('HH:mm:ss'); 
+                scheduletask.duration = moment.utc((scheduletask.duration || 0)*1000).format('HH:mm:ss'); 
                 scheduletask.relative_start_time = moment.utc(scheduletask.relative_start_time*1000).format('HH:mm:ss'); 
                 scheduletask.relative_stop_time = moment.utc(scheduletask.relative_stop_time*1000).format('HH:mm:ss'); 
                //Fetch blueprint details for Task Draft
@@ -120,14 +123,14 @@ const ScheduleService = {
                     taskblueprint['tasktype'] = 'Blueprint';
                     taskblueprint['actionpath'] = '/task/view/blueprint/'+blueprint['id'];
                     taskblueprint['blueprint_draft'] = blueprint['draft'];
-
+                    taskblueprint['status'] = blueprint['status'];
                   
                     for(const key of commonkeys){
                         taskblueprint[key] = blueprint[key];
                     }
                     taskblueprint['created_at'] = moment(blueprint['created_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss");
                     taskblueprint['updated_at'] = moment(blueprint['updated_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss");
-                    taskblueprint.duration = moment.utc(taskblueprint.duration*1000).format('HH:mm:ss'); 
+                    taskblueprint.duration = moment.utc((taskblueprint.duration || 0)*1000).format('HH:mm:ss'); 
                     taskblueprint.relative_start_time = moment.utc(taskblueprint.relative_start_time*1000).format('HH:mm:ss'); 
                     taskblueprint.relative_stop_time = moment.utc(taskblueprint.relative_stop_time*1000).format('HH:mm:ss'); 
 
@@ -281,12 +284,12 @@ const ScheduleService = {
                     for(const suDraft of suDraftList){
                         suDraft['actionpath']='/schedulingunit/view/draft/'+suDraft.id;
                         suDraft['type'] = 'Draft';
-                        suDraft['duration'] = moment.utc(suDraft.duration*1000).format('HH:mm:ss');
+                        suDraft['duration'] = moment.utc((suDraft.duration || 0)*1000).format('HH:mm:ss');
                         schedulingunitlist = schedulingunitlist.concat(suDraft);
                         //Fetch SU Blue prints for the SU Draft
                         await this.getBlueprintsByschedulingUnitId(suDraft.id).then(suBlueprintList =>{
                             for(const suBlueprint of suBlueprintList.data.results){
-                                suBlueprint.duration = moment.utc(suBlueprint.duration*1000).format('HH:mm:ss'); 
+                                suBlueprint.duration = moment.utc((suBlueprint.duration || 0)*1000).format('HH:mm:ss'); 
                                 suBlueprint.type="Blueprint"; 
                                 suBlueprint['actionpath'] = '/schedulingunit/view/blueprint/'+suBlueprint.id;
                                 schedulingunitlist = schedulingunitlist.concat(suBlueprint);
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/task.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/task.service.js
index bf908a57c61c92a9cb56d897a026f5ef52269b3a..a6044b01419f50c99b67a577aaf486edf7abaf67 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/services/task.service.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/services/task.service.js
@@ -120,6 +120,14 @@ const TaskService = {
         console.error(error);
       }
     },
+    getTaskTemplateSchemaResolved: async function(templateId) {
+      try {
+        const response = await axios.get('/api/task_template/' + templateId + '/ref_resolved_schema' );
+        return response.data;
+      } catch (error) {
+        console.log(error);
+      }
+    },
     getDraftsTaskBlueprints: async function(id) {
       try {
         const url = `/api/task_draft/${id}/task_blueprint`;
@@ -137,8 +145,56 @@ const TaskService = {
       } catch (error) {
         console.error(error);
       }
-    }
+    },
+    getAllSubtaskStatusLogs: async function() {
+      try {
+        const count = (await axios.get('/api/subtask_state_log')).data.count;
+        const response = await axios.get(`/api/subtask_state_log?offset=0&limit=${count}`);
+        return response.data.results;
+      } catch(error) {
+        console.error(error);
+      }
+    },
+    getSubtaskStatusLogs: async function(subtaskId) {
+      try {
+        const response = await axios.get(`/api/subtask/${subtaskId}/state_log`);
+        return response.data;
+      } catch(error) {
+        console.error(error);
+      }
+    },
+    getTaskStatusLogs: async function(taskId) {
+      let statusLogs = [];
+      try {
+        let subtaskTemplates = {};
+        const taskDetails = (await axios.get(`/api/task_blueprint/${taskId}`)).data;
+        for (const subtaskId of taskDetails.subtasks_ids) {
+          const subtaskDetails = (await axios.get(`/api/subtask/${subtaskId}`)).data;
+          const subtaskLogs = await this.getSubtaskStatusLogs(subtaskId);
+          let template = subtaskTemplates[subtaskDetails.specifications_template_id];
+          if (!template) {
+            template = (await this.getSubtaskTemplate(subtaskDetails.specifications_template_id));
+            subtaskTemplates[subtaskDetails.specifications_template_id] = template;
+          }
+          for (let statusLog of subtaskLogs) {
+            statusLog.subtask_type = template?template.name:"";
+          }
+          statusLogs = statusLogs.concat(subtaskLogs);
+        }
+      } catch(error) {
+        console.error(error);
+      }
+      return statusLogs;
+    },
+    getSubtaskTemplate: async function(templateId) {
+      try {
+        const response = await axios.get(`/api/subtask_template/${templateId}`);
+        return response.data;
+      } catch(error) {
+        console.error(error);
+      }
+    },
     
 }
 
-export default TaskService;
\ No newline at end of file
+export default TaskService;
diff --git a/SAS/TMSS/src/remakemigrations.py b/SAS/TMSS/src/remakemigrations.py
index 6a4ee430ffd683388eb4c0ba5523dfc4d89d4c39..a80266cbb4acbff5cc59bbe34e590a2d4d555474 100755
--- a/SAS/TMSS/src/remakemigrations.py
+++ b/SAS/TMSS/src/remakemigrations.py
@@ -75,6 +75,16 @@ class Migration(migrations.Migration):
 
     # Start SubTask id with 2 000 000 to avoid overlap with 'old' (test/production) OTDB
     operations = [ migrations.RunSQL('ALTER SEQUENCE tmssapp_SubTask_id_seq RESTART WITH 2000000;'),
+                   migrations.RunSQL("DROP VIEW IF EXISTS tmssapp_taskblueprintsummary; "
+                                     "CREATE OR REPLACE VIEW tmssapp_taskblueprintsummary AS "
+                                     "SELECT tmssapp_taskblueprint.id AS taskblueprint_id, tmssapp_subtask.id AS subtask_id, tmssapp_subtask.state_id AS substate, tmssapp_subtasktemplate.type_id AS subtask_type"
+                                     " FROM tmssapp_subtask LEFT JOIN tmssapp_taskblueprint ON tmssapp_taskblueprint.id = tmssapp_subtask.task_blueprint_id"
+                                     " LEFT JOIN tmssapp_subtasktemplate ON tmssapp_subtasktemplate.id = tmssapp_subtask.specifications_template_id;"),
+                   migrations.RunSQL("DROP VIEW IF EXISTS tmssapp_schedulingunitblueprintsummary; "
+                                     "CREATE OR REPLACE VIEW tmssapp_schedulingunitblueprintsummary AS "
+                                     "SELECT row_number() OVER () AS id, tmssapp_schedulingunitblueprint.id AS sub_id, tmssapp_taskblueprint.id AS taskblueprint_id, tmssapp_tasktemplate.type_id AS task_type, 'unknown' AS derived_task_status"
+                                     " FROM tmssapp_taskblueprint LEFT JOIN tmssapp_schedulingunitblueprint ON tmssapp_schedulingunitblueprint.id = tmssapp_taskblueprint.scheduling_unit_blueprint_id"
+                                     " LEFT JOIN tmssapp_tasktemplate ON tmssapp_tasktemplate.id = tmssapp_taskblueprint.specifications_template_id;"),
                    migrations.RunPython(populate_choices),
                    migrations.RunPython(populate_settings),
                    migrations.RunPython(populate_misc),
diff --git a/SAS/TMSS/src/tmss/CMakeLists.txt b/SAS/TMSS/src/tmss/CMakeLists.txt
index a38c2b149ed20a69a4ae3376365d869db9c1990e..3e7754777f2f6d34a58352c9d78765303dd9cfa4 100644
--- a/SAS/TMSS/src/tmss/CMakeLists.txt
+++ b/SAS/TMSS/src/tmss/CMakeLists.txt
@@ -13,3 +13,4 @@ python_install(${_py_files}
     DESTINATION lofar/sas/tmss/tmss)
 
 add_subdirectory(tmssapp)
+add_subdirectory(workflowapp)
diff --git a/SAS/TMSS/src/tmss/settings.py b/SAS/TMSS/src/tmss/settings.py
index 3fcb6ea5e997dfabaa0357e8d62c9da6b4a54cac..84b03966e8e201c27685d55e7754a1c75afa8ef8 100644
--- a/SAS/TMSS/src/tmss/settings.py
+++ b/SAS/TMSS/src/tmss/settings.py
@@ -120,8 +120,17 @@ INSTALLED_APPS = [
     'material.frontend',
     'viewflow',
     'viewflow.frontend',
+    'lofar.sas.tmss.tmss.workflowapp',
+    'debug_toolbar',
 ]
 
+def show_debug_toolbar(*args, **kwargs):
+    return os.environ.get('SHOW_DJANGO_DEBUG_TOOLBAR', False)
+
+DEBUG_TOOLBAR_CONFIG = {
+    'SHOW_TOOLBAR_CALLBACK': show_debug_toolbar
+}
+
 MIDDLEWARE = [
     'django.middleware.gzip.GZipMiddleware',
     'django.middleware.security.SecurityMiddleware',
@@ -133,6 +142,7 @@ MIDDLEWARE = [
     'django.middleware.clickjacking.XFrameOptionsMiddleware'
 ]
 
+
 ROOT_URLCONF = 'lofar.sas.tmss.tmss.urls'
 
 TEMPLATES = [
@@ -337,3 +347,9 @@ SWAGGER_SETTINGS = {
     },
 
 }
+
+# TODO: Do we need to distinguish further between test and production environments?
+# Maybe use a local settings file in the development environment for test purposes.
+SCU = "http://scu199" if isDevelopmentEnvironment() or isTestEnvironment() else "http://scu001"
+PIPELINE_SUBTASK_LOG_URL = SCU + ".control.lofar:7412/tasks/%s/log.html"
+OBSERVATION_SUBTASK_LOG_URL = "https://proxy.lofar.eu/inspect/%s/rtcp-%s.errors"
diff --git a/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt b/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt
index e24af6998d0ad9240a454cd41fdb389a38cb4208..58c545f7ed434d8c05064e1fad48ebf0c93d821a 100644
--- a/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt
+++ b/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt
@@ -23,5 +23,3 @@ add_subdirectory(serializers)
 add_subdirectory(viewsets)
 add_subdirectory(adapters)
 add_subdirectory(schemas)
-add_subdirectory(workflows)
-
diff --git a/SAS/TMSS/src/tmss/tmssapp/adapters/feedback.py b/SAS/TMSS/src/tmss/tmssapp/adapters/feedback.py
index 55df3b79bd73c349cdac0b4681adcf92668f98f5..f87dd3ff615c89e037a7c0bb617f25853c7b23c4 100644
--- a/SAS/TMSS/src/tmss/tmssapp/adapters/feedback.py
+++ b/SAS/TMSS/src/tmss/tmssapp/adapters/feedback.py
@@ -18,6 +18,7 @@
 # with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
 
 from lofar.sas.tmss.tmss.tmssapp.models import *
+from lofar.sas.tmss.tmss.tmssapp.conversions import antennafields_for_antennaset_and_station
 import logging
 logger = logging.getLogger(__name__)
 
@@ -90,18 +91,7 @@ def process_subtask_feedback(subtask:Subtask):
                 antennatype = antennaset.split('_')[0]  # LBA or HBA
                 antennafields = []
                 for station in stationlist:
-                    if antennaset.startswith('LBA'):
-                        fields = ['LBA']
-                    elif antennaset.startswith('HBA') and not station.startswith('CS'):
-                        fields = ['HBA']
-                    elif antennaset.startswith('HBA_DUAL'):
-                        fields = ['HBA0', 'HBA1']
-                    elif antennaset.startswith('HBA_ZERO'):
-                        fields = ['HBA0']
-                    elif antennaset.startswith('HBA_ONE'):
-                        fields = ['HBA1']
-                    else:
-                        raise ValueError('Cannot determine antennafields for station=%s antennaset=%s' % (station, antennaset))
+                    fields = antennafields_for_antennaset_and_station(antennaset, station)
                     antennafields += [{"station": station, "field": field, "type": antennatype} for field in fields]
                 pointing = subtask.specifications_doc['stations']['digital_pointings'][int(feedback_dict[dpkey+'.SAP'])]['pointing']
             else:
diff --git a/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py b/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py
index 3de850a26251957507e23f9e1ac0f21d5b832fc7..49e555448b91d3587c7f39db9a1c7b8021dc5f90 100644
--- a/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py
+++ b/SAS/TMSS/src/tmss/tmssapp/adapters/sip.py
@@ -6,6 +6,7 @@ from lofar.lta.sip import siplib, ltasip, validator, constants
 import uuid
 import logging
 import isodate
+import datetime
 logger = logging.getLogger(__name__)
 
 mapping_antennaset_type_TMSS_2_SIP = {
@@ -51,14 +52,27 @@ def get_number_of_dataproducts_of_type(subtask, dataproduct_datatype):
 def get_siplib_stations_list(subtask):
     """
     Retrieve a list of siplib Stations objects which is extracted from the station_list and the antennaset
-    TODO Correct mapping of all different HBA/LBA antennaset flavours to antenna fieldtypes required for SIP
     :param subtask:
     :return:
+
+    Conversion logic see here: https://support.astron.nl/confluence/display/TMSS/Dataproduct+Provenance
     """
-    siplib_station_list =[]
+    siplib_station_list = []
     list_stations = subtask.specifications_doc['stations']['station_list']
-    antennafieldtypes = ["HBA"] if "HBA" in subtask.specifications_doc['stations']['antenna_set'] else ["LBA"]
+    antennaset = subtask.specifications_doc['stations']['antenna_set']
     for station in list_stations:
+        if antennaset.startswith('LBA'):
+            antennafieldtypes = ['LBA']
+        elif antennaset.startswith('HBA') and not station.startswith('CS'):
+            antennafieldtypes = ['HBA']
+        elif antennaset.startswith('HBA_DUAL'):
+            antennafieldtypes = ['HBA0', 'HBA1']
+        elif antennaset.startswith('HBA_ZERO'):
+            antennafieldtypes = ['HBA0']
+        elif antennaset.startswith('HBA_ONE'):
+            antennafieldtypes = ['HBA1']
+        else:
+            raise ValueError('Cannot determine antennafields for station=%s antennaset=%s' % (station, antennaset))
         siplib_station_list.append(siplib.Station.preconfigured(station, antennafieldtypes))
     return siplib_station_list
 
@@ -128,6 +142,7 @@ def create_sip_representation_for_subtask(subtask: Subtask):
 
     # determine subtask specific properties and add subtask representation to Sip object
     if subtask.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value:
+        subarraypointings=None  # todo, subtask.specifications_doc, probably more complex than it looks -> RGOE yes complex type for later -> JK: assuming this is done in TMSS-308?
         observation = siplib.Observation(observingmode=constants.OBSERVINGMODETYPE_BEAM_OBSERVATION,  # can be hardcoded for an observation
                                          instrumentfilter=mapping_filterset_type_TMSS_2_SIP[subtask.specifications_doc['stations']['filter']],
                                          clock_frequency="200",  # fixed,
@@ -147,7 +162,7 @@ def create_sip_representation_for_subtask(subtask: Subtask):
                                          channelwidth_frequencyunit=constants.FREQUENCYUNIT_HZ,  # fixed
                                          observationdescription=subtask.task_blueprint.description,
                                          channelspersubband=0,  # NA any more ('BlueGene compatibility' see comment in LTA-SIP.xsd)
-                                         subarraypointings=None, # todo, subtask.specifications_doc, probably more complex than it looks -> RGOE yes complex type for later
+                                         subarraypointings=subarraypointings,
                                          transientbufferboardevents=None  # fixed
         )
 
@@ -161,7 +176,7 @@ def create_sip_representation_for_subtask(subtask: Subtask):
 
         pipeline_map = siplib.PipelineMap(
                 name=subtask.task_blueprint.name,
-                version='unknown',  # todo subtask.specifications_doc, from feedback?
+                version='unknown',  # todo: from subtask.specifications_doc? from feedback (feedback has feedback/storagewriter versions, not the pipeline version)?
                 sourcedata_identifiers=sourcedata_identifiers,
                 process_map=process_map)
 
@@ -230,6 +245,11 @@ def create_sip_representation_for_dataproduct(dataproduct: Dataproduct):
     :param dataproduct:
     :return: One of the siplib dataproduct object flavors.
     """
+
+    # Make sure feedback is not empty
+    if 'percentage_written' not in dataproduct.feedback_doc:
+        raise ValueError("The feedback_doc of dataproduct id=%s is incomplete. Has feedback of the producing subtask been processed?" % dataproduct.id)
+
     # Note: this is for the type property present on all dataproduct flavors, dataproduct classes are
     #  differentiated in addition to that below
     type_map = {Datatype.Choices.VISIBILITIES.value: constants.DATAPRODUCTTYPE_CORRELATOR_DATA,
@@ -246,7 +266,8 @@ def create_sip_representation_for_dataproduct(dataproduct: Dataproduct):
                       Dataformat.Choices.QA_PLOTS.value: constants.FILEFORMATTYPE_UNDOCUMENTED}
 
     storage_writer_map = {"dysco": constants.STORAGEWRITERTYPE_DYSCOSTORAGEMANAGER,
-                          "unknown": constants.STORAGEWRITERTYPE_UNKNOWN}
+                          "unknown": constants.STORAGEWRITERTYPE_UNKNOWN,
+                          "standard": constants.STORAGEWRITERTYPE_LOFARSTORAGEMANAGER}
 
     try:
         dataproduct_type = type_map[dataproduct.datatype.value]
@@ -265,25 +286,24 @@ def create_sip_representation_for_dataproduct(dataproduct: Dataproduct):
                                             size=dataproduct.size if dataproduct.size else 0,
                                             filename=dataproduct.filename,
                                             fileformat=dataproduct_fileformat,
-                                            storage_writer=storage_writer_map[dataproduct.producer.subtask.task_blueprint.specifications_doc.get("storagemanager", 'unknown')], # note: default required because storagemanager is not required by all schemas!
-                                            storage_writer_version='Unknown',  # todo: not modeled? needs to come from feedback eventually.
+                                            storage_writer=storage_writer_map[dataproduct.feedback_doc["samples"]["writer"] if 'samples' in dataproduct.feedback_doc else 'unknown'], # todo: verify we can use the feedback_doc here and remove the old method | storage_writer_map[dataproduct.producer.subtask.task_blueprint.specifications_doc.get("storagemanager", 'unknown')],
+                                            storage_writer_version=dataproduct.feedback_doc["samples"]["writer_version"] if 'samples' in dataproduct.feedback_doc else 'unknown',
                                             process_identifier=create_fake_identifier_for_testing(unique_id=dataproduct.producer.subtask.id))
 
-    # next TODOs: TMSS-300
     if dataproduct.dataformat.value == Dataformat.Choices.MEASUREMENTSET.value:  # <- This is the only one we currently need for UC1
         sip_dataproduct = siplib.CorrelatedDataProduct(
             dataproduct_map,
             subarraypointing_identifier=create_fake_identifier_for_testing(), # todo, from dataproduct.specifications_doc, Jan David checks how to translate int -> Identifier object
-            subband="1",  # todo, from dataproduct.specifications_doc
-            starttime="1980-03-23T10:20:15",  # todo, from dataproduct.specifications_doc
-            duration="P6Y3M10DT15H",  # todo, from dataproduct.specifications_doc
-            integrationinterval=10,  # todo, double, from dataproduct.specifications_doc
+            subband=dataproduct.feedback_doc['frequency']['subbands'][0],
+            starttime=dataproduct.feedback_doc['time']['start_time'],
+            duration=isodate.duration_isoformat(datetime.timedelta(seconds=dataproduct.feedback_doc['time']['duration'])),
+            integrationinterval=dataproduct.feedback_doc['time']['sample_width'],
             integrationintervalunit="s",
-            central_frequency=160,  # todo, from dataproduct.specifications_doc
+            central_frequency=dataproduct.feedback_doc['frequency']['central_frequencies'][0],
             central_frequencyunit="Hz",
-            channelwidth_frequency=200,  # todo, from dataproduct.specifications_doc
+            channelwidth_frequency=dataproduct.feedback_doc['frequency']['channel_width'],
             channelwidth_frequencyunit="Hz",
-            channelspersubband=122,  # todo, from dataproduct.specifications_doc
+            channelspersubband=dataproduct.feedback_doc['frequency']['channels_per_subband'],
             stationsubband=0  # not correct ;)    (see metadata recipe CEP/Pipeline/recipes/sip/helpers/metadata.py)
         )
     # todo: distinguish and create other dataproduct types. Probably most of these can be filled in over time as needed,
diff --git a/SAS/TMSS/src/tmss/tmssapp/conversions.py b/SAS/TMSS/src/tmss/tmssapp/conversions.py
index e851ecbe396955955f1ae9dc1f32890cb819b53d..ee8f35b3770cfba1683ad3a2822949a6f6dabe60 100644
--- a/SAS/TMSS/src/tmss/tmssapp/conversions.py
+++ b/SAS/TMSS/src/tmss/tmssapp/conversions.py
@@ -39,3 +39,26 @@ def local_sidereal_time_for_utc_and_longitude(timestamp: datetime = None,
     t = Time(timestamp, format='datetime', scale='utc')
     return t.sidereal_time(kind=kind, longitude=longitude)
 
+
+def antennafields_for_antennaset_and_station(antennaset:str, station:str) -> list:
+    """
+    convert an antennaset to a list of antennafields
+    :param antennaset: A string identifier for an antennaset, like 'HBA_DUAL'
+    :param station: A string identifier for a station, like 'CS001'
+    :return: a list of antennafields that the station uses for the given antennaset ['HBA0', 'HBA1']
+    """
+    if antennaset.startswith('LBA'):
+        fields = ['LBA']
+    elif antennaset.startswith('HBA') and not station.startswith('CS'):
+        fields = ['HBA']
+    elif antennaset.startswith('HBA_DUAL'):
+        fields = ['HBA0', 'HBA1']
+    elif antennaset.startswith('HBA_ZERO'):
+        fields = ['HBA0']
+    elif antennaset.startswith('HBA_ONE'):
+        fields = ['HBA1']
+    else:
+        raise ValueError('Cannot determine antennafields for station=%s antennaset=%s' % (station, antennaset))
+
+    return fields
+
diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py
index daa63f9369488f5e160485fbfec01af9cdb5121b..e3ccbb0b1e8fd9aebceb4e3725b984be53f19bec 100644
--- a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py
+++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py
@@ -1,4 +1,4 @@
-# Generated by Django 3.0.9 on 2020-09-24 15:47
+# Generated by Django 3.0.9 on 2020-10-16 10:16
 
 from django.conf import settings
 import django.contrib.postgres.fields
@@ -14,10 +14,37 @@ class Migration(migrations.Migration):
 
     dependencies = [
         migrations.swappable_dependency(settings.AUTH_USER_MODEL),
-        ('viewflow', '0008_jsonfield_and_artifact'),
     ]
 
     operations = [
+        migrations.CreateModel(
+            name='SchedulingUnitBlueprintSummary',
+            fields=[
+                ('id', models.IntegerField(primary_key=True, serialize=False)),
+                ('sub_id', models.IntegerField()),
+                ('taskblueprint_id', models.IntegerField()),
+                ('task_type', models.CharField(max_length=128)),
+                ('derived_task_status', models.CharField(max_length=128)),
+            ],
+            options={
+                'db_table': 'tmssapp_schedulingunitblueprintsummary',
+                'managed': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='TaskBlueprintSummary',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('taskblueprint_id', models.IntegerField()),
+                ('subtask_id', models.IntegerField()),
+                ('substate', models.CharField(max_length=128)),
+                ('subtask_type', models.CharField(max_length=128)),
+            ],
+            options={
+                'db_table': 'tmssapp_taskblueprintsummary',
+                'managed': False,
+            },
+        ),
         migrations.CreateModel(
             name='Algorithm',
             fields=[
@@ -35,7 +62,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('rcus', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=128)),
                 ('inputs', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, size=128)),
             ],
@@ -51,7 +78,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('location', models.CharField(help_text='Human-readable location of the cluster.', max_length=128)),
                 ('archive_site', models.BooleanField(default=False, help_text='TRUE if this cluster is an archive site, FALSE if not (f.e. a local cluster, or user-owned cluster).')),
             ],
@@ -67,7 +94,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('version', models.IntegerField(editable=False, help_text='Version of this template (with respect to other templates of the same name)')),
                 ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
             ],
@@ -90,7 +117,7 @@ class Migration(migrations.Migration):
                 ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)),
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)),
                 ('start', models.DateTimeField(help_text='Moment at which the cycle starts, that is, when its projects can run.')),
                 ('stop', models.DateTimeField(help_text='Moment at which the cycle officially ends.')),
@@ -159,7 +186,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('version', models.IntegerField(editable=False, help_text='Version of this template (with respect to other templates of the same name)')),
                 ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
             ],
@@ -188,7 +215,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('version', models.IntegerField(editable=False, help_text='Version of this template (with respect to other templates of the same name)')),
                 ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
             ],
@@ -317,7 +344,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('capacity', models.BigIntegerField(help_text='Capacity in bytes')),
                 ('directory', models.CharField(help_text='Root directory under which we are allowed to write our data.', max_length=1024)),
             ],
@@ -342,7 +369,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('version', models.IntegerField(editable=False, help_text='Version of this template (with respect to other templates of the same name)')),
                 ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
                 ('create_function', models.CharField(help_text='Python function to call to execute the generator.', max_length=128)),
@@ -366,7 +393,7 @@ class Migration(migrations.Migration):
                 ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)),
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)),
                 ('priority_rank', models.FloatField(help_text='Priority of this project w.r.t. other projects. Projects can interrupt observations of lower-priority projects.')),
                 ('trigger_priority', models.IntegerField(default=1000, help_text='Priority of this project w.r.t. triggers.')),
@@ -411,7 +438,7 @@ class Migration(migrations.Migration):
                 ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)),
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)),
             ],
             options={
@@ -427,6 +454,35 @@ class Migration(migrations.Migration):
                 'abstract': False,
             },
         ),
+        migrations.CreateModel(
+            name='SAP',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)),
+                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
+                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
+                ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='SAP properties.')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
+        migrations.CreateModel(
+            name='SAPTemplate',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)),
+                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
+                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
+                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
+                ('version', models.IntegerField(editable=False, help_text='Version of this template (with respect to other templates of the same name)')),
+                ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
         migrations.CreateModel(
             name='ScheduleMethod',
             fields=[
@@ -444,7 +500,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('version', models.IntegerField(editable=False, help_text='Version of this template (with respect to other templates of the same name)')),
                 ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
             ],
@@ -469,7 +525,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('generator_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Parameters for the generator (NULLable).', null=True)),
             ],
             options={
@@ -484,7 +540,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('requirements_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Scheduling and/or quality requirements for this scheduling unit (IMMUTABLE).')),
                 ('do_cancel', models.BooleanField()),
             ],
@@ -492,26 +548,6 @@ class Migration(migrations.Migration):
                 'abstract': False,
             },
         ),
-        migrations.CreateModel(
-            name='SchedulingUnitDemo',
-            fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('name', models.CharField(max_length=50)),
-                ('state', models.IntegerField()),
-            ],
-        ),
-        migrations.CreateModel(
-            name='SchedulingUnitDemoProcess',
-            fields=[
-                ('process_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='viewflow.Process')),
-                ('text', models.CharField(max_length=150)),
-                ('approved', models.BooleanField(default=False)),
-            ],
-            options={
-                'abstract': False,
-            },
-            bases=('viewflow.process',),
-        ),
         migrations.CreateModel(
             name='SchedulingUnitDraft',
             fields=[
@@ -520,7 +556,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('requirements_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Scheduling and/or quality requirements for this run.')),
                 ('generator_instance_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Parameter value that generated this run draft (NULLable).', null=True)),
                 ('scheduling_constraints_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Scheduling Constraints for this run.', null=True)),
@@ -537,7 +573,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)),
                 ('template', django.contrib.postgres.fields.jsonb.JSONField(help_text='JSON-data compliant with the JSON-schema in the scheduling_unit_template. This observation strategy template like a predefined recipe with all the correct settings, and defines which parameters the user can alter.')),
             ],
@@ -553,7 +589,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('version', models.IntegerField(editable=False, help_text='Version of this template (with respect to other templates of the same name)')),
                 ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
             ],
@@ -643,7 +679,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('version', models.IntegerField(editable=False, help_text='Version of this template (with respect to other templates of the same name)')),
                 ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
                 ('queue', models.BooleanField(default=False)),
@@ -678,7 +714,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schedulings for this task (IMMUTABLE).')),
                 ('do_cancel', models.BooleanField(help_text='Cancel this task.')),
             ],
@@ -706,7 +742,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Specifications for this task.')),
             ],
             options={
@@ -747,7 +783,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('version', models.IntegerField(editable=False, help_text='Version of this template (with respect to other templates of the same name)')),
                 ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
             ],
@@ -764,19 +800,6 @@ class Migration(migrations.Migration):
                 'abstract': False,
             },
         ),
-        migrations.CreateModel(
-            name='HelloWorldProcess',
-            fields=[
-            ],
-            options={
-                'verbose_name': 'World Request',
-                'verbose_name_plural': 'World Requests',
-                'proxy': True,
-                'indexes': [],
-                'constraints': [],
-            },
-            bases=('viewflow.process',),
-        ),
         migrations.CreateModel(
             name='Setting',
             fields=[
@@ -798,7 +821,7 @@ class Migration(migrations.Migration):
                 ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
                 ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
                 ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
-                ('description', models.CharField(help_text='A longer description of this object.', max_length=255)),
+                ('description', models.CharField(blank=True, help_text='A longer description of this object.', max_length=255)),
                 ('version', models.IntegerField(editable=False, help_text='Version of this template (with respect to other templates of the same name)')),
                 ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')),
                 ('validation_code_js', models.CharField(blank=True, default='', help_text='JavaScript code for additional (complex) validation.', max_length=128)),
@@ -1093,11 +1116,6 @@ class Migration(migrations.Migration):
             name='scheduling_set',
             field=models.ForeignKey(help_text='Set to which this scheduling unit draft belongs.', on_delete=django.db.models.deletion.CASCADE, related_name='scheduling_unit_drafts', to='tmssapp.SchedulingSet'),
         ),
-        migrations.AddField(
-            model_name='schedulingunitdemoprocess',
-            name='su',
-            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='tmssapp.SchedulingUnitDemo'),
-        ),
         migrations.AddField(
             model_name='schedulingunitblueprint',
             name='draft',
@@ -1127,6 +1145,15 @@ class Migration(migrations.Migration):
             model_name='schedulingconstraintstemplate',
             constraint=models.UniqueConstraint(fields=('name', 'version'), name='schedulingconstraintstemplate_unique_name_version'),
         ),
+        migrations.AddConstraint(
+            model_name='saptemplate',
+            constraint=models.UniqueConstraint(fields=('name', 'version'), name='saptemplate_unique_name_version'),
+        ),
+        migrations.AddField(
+            model_name='sap',
+            name='specifications_template',
+            field=models.ForeignKey(help_text='Schema used for specifications_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.SAPTemplate'),
+        ),
         migrations.AddField(
             model_name='resourcetype',
             name='quantity',
@@ -1259,6 +1286,11 @@ class Migration(migrations.Migration):
             name='producer',
             field=models.ForeignKey(help_text='Subtask Output which generates this dataproduct.', on_delete=django.db.models.deletion.PROTECT, related_name='dataproducts', to='tmssapp.SubtaskOutput'),
         ),
+        migrations.AddField(
+            model_name='dataproduct',
+            name='sap',
+            field=models.ForeignKey(help_text='SAP this dataproduct was generated out of (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='SAP_dataproducts', to='tmssapp.SAP'),
+        ),
         migrations.AddField(
             model_name='dataproduct',
             name='specifications_template',
@@ -1331,6 +1363,10 @@ class Migration(migrations.Migration):
             model_name='setting',
             index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_set_tags_41a1ba_gin'),
         ),
+        migrations.AddIndex(
+            model_name='sap',
+            index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sap_tags_7451b0_gin'),
+        ),
         migrations.AddIndex(
             model_name='defaulttasktemplate',
             index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_c88200_gin'),
diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py
index 92baffd4c15a8c025d234eeffed61ae9f443fabf..023594b67ad9d5f700bb0a6976b5151bacd4fd49 100644
--- a/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py
+++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py
@@ -16,6 +16,16 @@ class Migration(migrations.Migration):
 
     # Start SubTask id with 2 000 000 to avoid overlap with 'old' (test/production) OTDB
     operations = [ migrations.RunSQL('ALTER SEQUENCE tmssapp_SubTask_id_seq RESTART WITH 2000000;'),
+                   migrations.RunSQL("DROP VIEW IF EXISTS tmssapp_taskblueprintsummary; "
+                                     "CREATE OR REPLACE VIEW tmssapp_taskblueprintsummary AS "
+                                     "SELECT tmssapp_taskblueprint.id AS taskblueprint_id, tmssapp_subtask.id AS subtask_id, tmssapp_subtask.state_id AS substate, tmssapp_subtasktemplate.type_id AS subtask_type"
+                                     " FROM tmssapp_subtask LEFT JOIN tmssapp_taskblueprint ON tmssapp_taskblueprint.id = tmssapp_subtask.task_blueprint_id"
+                                     " LEFT JOIN tmssapp_subtasktemplate ON tmssapp_subtasktemplate.id = tmssapp_subtask.specifications_template_id;"),
+                   migrations.RunSQL("DROP VIEW IF EXISTS tmssapp_schedulingunitblueprintsummary; "
+                                     "CREATE OR REPLACE VIEW tmssapp_schedulingunitblueprintsummary AS "
+                                     "SELECT row_number() OVER () AS id, tmssapp_schedulingunitblueprint.id AS sub_id, tmssapp_taskblueprint.id AS taskblueprint_id, tmssapp_tasktemplate.type_id AS task_type, 'unknown' AS derived_task_status"
+                                     " FROM tmssapp_taskblueprint LEFT JOIN tmssapp_schedulingunitblueprint ON tmssapp_schedulingunitblueprint.id = tmssapp_taskblueprint.scheduling_unit_blueprint_id"
+                                     " LEFT JOIN tmssapp_tasktemplate ON tmssapp_tasktemplate.id = tmssapp_taskblueprint.specifications_template_id;"),
                    migrations.RunPython(populate_choices),
                    migrations.RunPython(populate_settings),
                    migrations.RunPython(populate_misc),
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/CMakeLists.txt b/SAS/TMSS/src/tmss/tmssapp/models/CMakeLists.txt
index 2ac64b115ecf2f4bc700c614a3ba9572f3af6aa6..7598bc12c79161c19b95275e001a28adb92d3b56 100644
--- a/SAS/TMSS/src/tmss/tmssapp/models/CMakeLists.txt
+++ b/SAS/TMSS/src/tmss/tmssapp/models/CMakeLists.txt
@@ -5,8 +5,6 @@ set(_py_files
     __init__.py
     specification.py
     scheduling.py
-    helloworldflow.py
-    schedulingunitdemoflow.py
     )
 
 python_install(${_py_files}
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/__init__.py b/SAS/TMSS/src/tmss/tmssapp/models/__init__.py
index be7a174d740d60b255c47117cb8abfc657cc9bde..93f3c7e6d54f95c40d6d9484aad802b13f9991ba 100644
--- a/SAS/TMSS/src/tmss/tmssapp/models/__init__.py
+++ b/SAS/TMSS/src/tmss/tmssapp/models/__init__.py
@@ -1,4 +1,2 @@
 from .specification import *
-from .scheduling import *
-from .helloworldflow import *
-from .schedulingunitdemoflow import *
\ No newline at end of file
+from .scheduling import *
\ No newline at end of file
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
index a3ebd865de710e9df320248e1614f9ba4f5344da..a6932d705e358db231d0fe22f47496a970591b29 100644
--- a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
@@ -21,7 +21,8 @@ from lofar.messaging.messagebus import ToBus, DEFAULT_BROKER, DEFAULT_BUSNAME
 from lofar.messaging.messages import EventMessage
 from lofar.sas.tmss.client.tmssbuslistener import DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX
 from lofar.common.util import single_line_with_single_spaces
-
+from django.conf import settings
+from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC
 #
 # I/O
 #
@@ -120,8 +121,13 @@ class DefaultDataproductSpecificationsTemplate(BasicCommon):
 class DataproductFeedbackTemplate(Template):
     pass
 
+
+class SAPTemplate(Template):
+    pass
+
 # todo: do we need to specify a default?
 
+
 #
 # Instance Objects
 #
@@ -149,7 +155,7 @@ class Subtask(BasicCommon):
         super().__init__(*args, **kwargs)
 
         # keep original state for logging
-        self.__original_state = self.state
+        self.__original_state_id = self.state_id
 
     @staticmethod
     def _send_state_change_event_message(subtask_id:int, old_state: str, new_state: str):
@@ -189,7 +195,7 @@ class Subtask(BasicCommon):
 
         annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template')
 
-        if self.state.value == SubtaskState.Choices.SCHEDULED.value and self.__original_state.value == SubtaskState.Choices.SCHEDULING.value:
+        if self.state.value == SubtaskState.Choices.SCHEDULED.value and self.__original_state_id == SubtaskState.Choices.SCHEDULING.value:
             if self.start_time is None:
                 if self.predecessors.all().count() == 0:
                     raise SubtaskSchedulingException("Cannot schedule subtask id=%s when start time is 'None'." % (self.pk, ))
@@ -202,12 +208,12 @@ class Subtask(BasicCommon):
         super().save(force_insert, force_update, using, update_fields)
 
         # log if either state update or new entry:
-        if self.state != self.__original_state or creating == True:
+        if self.state_id != self.__original_state_id or creating == True:
             if self.created_or_updated_by_user is None:
                 identifier = None
             else:
                 identifier = self.created_or_updated_by_user.email
-            log_entry = SubtaskStateLog(subtask=self, old_state=self.__original_state, new_state=self.state,
+            log_entry = SubtaskStateLog(subtask=self, old_state=SubtaskState.objects.get(value=self.__original_state_id), new_state=self.state,
                                            user=self.created_or_updated_by_user, user_identifier=identifier)
             log_entry.save()
 
@@ -217,7 +223,33 @@ class Subtask(BasicCommon):
                 logger.error("Could not send state change to messagebus: %s", e)
 
             # update the previous state value
-            self.__original_state = self.state
+            self.__original_state_id = self.state_id
+
+    @property
+    def log_url(self):
+        """
+        Return the link to the pipeline log in case of pipeline or
+        link to COBALT error log in case of an observation
+        otherwise just an empty string
+        """
+        if self.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value:
+            url = settings.OBSERVATION_SUBTASK_LOG_URL % (self.id, self.id)
+        elif self.specifications_template.type.value == SubtaskType.Choices.PIPELINE.value:
+            # Get RADBID, subtask must be at least 'scheduled' to exist in radb
+            # If RA is not started don't wait longer than 10 seconds
+            with RADBRPC.create(timeout=10) as radbrpc:
+                try:
+                    radb_id = radbrpc.getTask(tmss_id=self.id)
+                except:
+                    radb_id = None
+            if radb_id is None:
+               url = "not available (missing radbid)"
+            else:
+               url = settings.PIPELINE_SUBTASK_LOG_URL % radb_id['id']
+        else:
+            url = ""
+        return url
+
 
 class SubtaskStateLog(BasicCommon):
     """
@@ -253,6 +285,15 @@ class SubtaskOutput(BasicCommon):
     subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, related_name='outputs', help_text='Subtask to which this output specification refers.')
 
 
+class SAP(BasicCommon):
+    specifications_doc = JSONField(help_text='SAP properties.')
+    specifications_template = ForeignKey('SAPTemplate', null=False, on_delete=CASCADE, help_text='Schema used for specifications_doc.')
+
+    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
+        annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template')
+
+        super().save(force_insert, force_update, using, update_fields)
+
 class Dataproduct(BasicCommon):
     """
     A data product represents an atomic dataset, produced and consumed by subtasks. The consumed dataproducts are those
@@ -274,6 +315,7 @@ class Dataproduct(BasicCommon):
     size = BigIntegerField(null=True, help_text='Dataproduct size, in bytes. Used for accounting purposes. NULL if size is (yet) unknown (NULLable).')
     feedback_doc = JSONField(help_text='Dataproduct properties, as reported by the producing process.')
     feedback_template = ForeignKey('DataproductFeedbackTemplate', on_delete=PROTECT, help_text='Schema used for feedback_doc.')
+    sap = ForeignKey('SAP', on_delete=PROTECT, null=True, related_name="dataproducts", help_text='SAP this dataproduct was generated out of (NULLable).')
 
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
         annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template')
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/src/tmss/tmssapp/models/specification.py
index 1a807b5fad0e2283fc811343aa49826f2e3cb2e6..984898dbddbc1175d38b46acb1dea5df766e3af5 100644
--- a/SAS/TMSS/src/tmss/tmssapp/models/specification.py
+++ b/SAS/TMSS/src/tmss/tmssapp/models/specification.py
@@ -18,6 +18,8 @@ import datetime
 import json
 import jsonschema
 from django.urls import reverse as revese_url
+from collections import Counter
+from django.utils.functional import cached_property
 
 #
 # Common
@@ -39,7 +41,7 @@ class BasicCommon(Model):
 
 class NamedCommon(BasicCommon):
     name = CharField(max_length=128, help_text='Human-readable name of this object.', null=False) # todo: check if we want to have this primary_key=True
-    description = CharField(max_length=255, help_text='A longer description of this object.')
+    description = CharField(max_length=255, help_text='A longer description of this object.', blank=True, default="")
 
     def __str__(self):
         return self.name
@@ -368,11 +370,40 @@ class DefaultTaskTemplate(BasicCommon):
 class TaskRelationSelectionTemplate(Template):
     pass
 
+
 class DefaultTaskRelationSelectionTemplate(BasicCommon):
     name = CharField(max_length=128, unique=True)
     template = ForeignKey("TaskRelationSelectionTemplate", on_delete=PROTECT)
 
 
+#
+# DatabaseView  objects
+#
+class TaskBlueprintSummary(Model):
+    taskblueprint_id = IntegerField()
+    subtask_id = IntegerField()
+    substate = CharField(max_length=128)
+    subtask_type = CharField(max_length=128)
+
+    class Meta:
+        managed = False
+        db_table = 'tmssapp_taskblueprintsummary'
+
+
+class SchedulingUnitBlueprintSummary(Model):
+    # Using in an id and ForeignKey is not common for a view BUT the id is a 'dummy' to be able to use in Django
+    # https://resources.rescale.com/using-database-views-in-django-orm/
+    # otherwise an exception will be thrown
+    id = IntegerField(primary_key=True)
+    sub_id = IntegerField()
+    taskblueprint_id = IntegerField()
+    task_type = CharField(max_length=128)
+    derived_task_status = CharField(max_length=128)
+
+    class Meta:
+        managed = False
+        db_table = 'tmssapp_schedulingunitblueprintsummary'
+
 #
 # Instance Objects
 #
@@ -381,7 +412,7 @@ class Cycle(NamedCommonPK):
     start = DateTimeField(help_text='Moment at which the cycle starts, that is, when its projects can run.')
     stop = DateTimeField(help_text='Moment at which the cycle officially ends.')
 
-    @property
+    @cached_property
     def duration(self) -> datetime.timedelta:
         '''the duration of the cycle (stop-start date)'''
         return self.stop - self.start
@@ -522,19 +553,19 @@ class SchedulingUnitDraft(NamedCommon):
                 validate_json_against_schema(self.observation_strategy_template.template, self.requirements_template.schema)
 
         if self.scheduling_constraints_doc is not None and self.scheduling_constraints_template_id and self.scheduling_constraints_template.schema is not None:
-                validate_json_against_schema(self.scheduling_constraints_doc, self.scheduling_constraints_template.schema)
+            validate_json_against_schema(self.scheduling_constraints_doc, self.scheduling_constraints_template.schema)
 
         annotate_validate_add_defaults_to_doc_using_template(self, 'requirements_doc', 'requirements_template')
         annotate_validate_add_defaults_to_doc_using_template(self, 'scheduling_constraints_doc', 'scheduling_constraints_template')
         super().save(force_insert, force_update, using, update_fields)
 
-    @property
+    @cached_property
     def duration(self) -> datetime.timedelta:
         '''return the overall duration of all tasks of this scheduling unit
         '''
         return self.relative_stop_time - self.relative_start_time
 
-    @property
+    @cached_property
     def relative_start_time(self) -> datetime.timedelta:
         '''return the earliest relative start time of all tasks of this scheduling unit
         '''
@@ -544,7 +575,7 @@ class SchedulingUnitDraft(NamedCommon):
         else:
             return datetime.timedelta(seconds=0)
 
-    @property
+    @cached_property
     def relative_stop_time(self) -> datetime.timedelta:
         '''return the latest relative stop time of all tasks of this scheduling unit
         '''
@@ -566,7 +597,7 @@ class SchedulingUnitBlueprint(NamedCommon):
 
         super().save(force_insert, force_update, using, update_fields)
 
-    @property
+    @cached_property
     def duration(self) -> datetime.timedelta:
         '''return the overall duration of all tasks of this scheduling unit
         '''
@@ -575,7 +606,7 @@ class SchedulingUnitBlueprint(NamedCommon):
         else:
             return self.stop_time - self.start_time   # <- todo: do we ever want this?
 
-    @property
+    @cached_property
     def relative_start_time(self) -> datetime.timedelta:
         '''return the earliest relative start time of all tasks of this scheduling unit
         '''
@@ -585,7 +616,7 @@ class SchedulingUnitBlueprint(NamedCommon):
         else:
             return datetime.timedelta(seconds=0)
 
-    @property
+    @cached_property
     def relative_stop_time(self) -> datetime.timedelta:
         '''return the latest relative stop time of all tasks of this scheduling unit
         '''
@@ -595,7 +626,7 @@ class SchedulingUnitBlueprint(NamedCommon):
         else:
             return datetime.timedelta(seconds=0)
 
-    @property
+    @cached_property
     def start_time(self) -> datetime or None:
         '''return the earliest start time of all tasks of this scheduling unit
         '''
@@ -605,7 +636,7 @@ class SchedulingUnitBlueprint(NamedCommon):
         else:
             return None
 
-    @property
+    @cached_property
     def stop_time(self) -> datetime or None:
         '''return the latest stop time of all tasks of this scheduling unit
         '''
@@ -615,6 +646,106 @@ class SchedulingUnitBlueprint(NamedCommon):
         else:
             return None
 
+    @property
+    def status(self):
+        """
+        Return the schedulingunit blueprint status which is derived from the taskblueprint status (which is derived
+        from the subtasks states)
+        See https://support.astron.nl/confluence/display/TMSS/Specification+Flow#SpecificationFlow-TaskBlueprints
+        The status is displayed as extra field in rest api of the schedulingunit
+        """
+        logger.debug("Status of SUB with id %d" % self.id)
+        logger.debug("total_nbr_observation_tasks=%d, total_nbr_processing_tasks=%d, total_nbr_ingest_tasks=%d"
+                     % (self._get_total_nbr_observation_tasks(), self._get_total_nbr_processing_tasks(), self._get_total_nbr_ingest_tasks()))
+
+        # Get the taskblueprint_ids per task_type
+        taskblueprints_per_type_dict = {"observation": [], "pipeline": [], "ingest": []}
+        for task_type in taskblueprints_per_type_dict:
+            queryset = SchedulingUnitBlueprintSummary.objects.filter(sub_id=self.id, task_type=task_type)
+            taskblueprints_per_type_dict[task_type].extend([item.taskblueprint_id for item in queryset])
+
+        # Determine status per task_type (unfortunately did not manage with updatable view)
+        status_overview_counter = Counter()
+        status_overview_counter_per_type = {"observation": Counter(), "pipeline": Counter(), "ingest": Counter() }
+        for tb in TaskBlueprint.objects.filter(scheduling_unit_blueprint_id=self.id):
+            status_overview_counter[(tb.status)]+=1
+            for task_type in taskblueprints_per_type_dict:
+                if tb.id in taskblueprints_per_type_dict[task_type]:
+                    status_overview_counter_per_type[task_type][(tb.status)] += 1
+
+        # The actual determination of the SchedulingunitBlueprint status
+        if not self._task_graph_instantiated():
+            status = "defined"
+        elif self._all_task_finished(status_overview_counter):
+            status = "finished"
+        elif self._any_task_cancelled(status_overview_counter):
+            status = "cancelled"
+        elif self._any_task_error(status_overview_counter):
+            status = "error"
+        elif self._any_task_started_observed_finished(status_overview_counter):
+            if not self._all_observation_task_observed_finished(status_overview_counter_per_type):
+                status = "observing"
+            elif not self._any_processing_task_started_or_finished(status_overview_counter_per_type):
+                status = "observed"
+            elif not self._all_processing_tasks_and_observation_tasks_finished(status_overview_counter_per_type):
+                status = "processing"
+            elif not self._any_ingest_task_started(status_overview_counter_per_type):
+                status = "processed"
+            else:
+                status = "ingesting"
+        elif self._any_task_scheduled(status_overview_counter):
+            status = "scheduled"
+        else:
+            status = "schedulable"
+        return status
+
+    def _task_graph_instantiated(self):
+        return self._get_total_nbr_tasks() > 0
+
+    def _all_task_finished(self, status_overview_counter):
+        return status_overview_counter["finished"] == self._get_total_nbr_tasks()
+
+    def _any_task_cancelled(self, status_overview_counter):
+        return status_overview_counter["cancelled"] > 0
+
+    def _any_task_error(self, status_overview_counter):
+        return status_overview_counter["error"] > 0
+
+    def _any_task_started_observed_finished(self, status_overview_counter):
+        return (status_overview_counter["started"] + status_overview_counter["observed"] + status_overview_counter["finished"]) > 0
+
+    def _any_task_scheduled(self, status_overview_counter):
+        return status_overview_counter["scheduled"] > 0
+
+    def _all_observation_task_observed_finished(self, status_overview_counter_per_type):
+        total_nbr_observation_tasks = self._get_total_nbr_observation_tasks()
+        return (status_overview_counter_per_type["observation"]["observed"] +
+                status_overview_counter_per_type["observation"]["finished"]) == total_nbr_observation_tasks
+
+    def _any_processing_task_started_or_finished(self, status_overview_counter_per_type):
+        return status_overview_counter_per_type["pipeline"]["started"] + status_overview_counter_per_type["pipeline"]["finished"] > 0
+
+    def _all_processing_tasks_and_observation_tasks_finished(self, status_overview_counter_per_type):
+        total_nbr_observation_tasks = self._get_total_nbr_observation_tasks()
+        total_nbr_processing_tasks = self._get_total_nbr_processing_tasks()
+        return (status_overview_counter_per_type["pipeline"]["finished"] == total_nbr_processing_tasks and
+                status_overview_counter_per_type["observation"]["finished"] == total_nbr_observation_tasks)
+
+    def _any_ingest_task_started(self, status_overview_counter_per_type):
+        return status_overview_counter_per_type["ingest"]["started"] > 0
+
+    def _get_total_nbr_tasks(self):
+        return self.task_blueprints.all().count()
+
+    def _get_total_nbr_observation_tasks(self):
+        return SchedulingUnitBlueprintSummary.objects.filter(sub_id=self.id, task_type='observation').count()
+
+    def _get_total_nbr_processing_tasks(self):
+        return SchedulingUnitBlueprintSummary.objects.filter(sub_id=self.id, task_type='pipeline').count()
+
+    def _get_total_nbr_ingest_tasks(self):
+        return SchedulingUnitBlueprintSummary.objects.filter(sub_id=self.id, task_type='ingest').count()
+
 
 class TaskDraft(NamedCommon):
     specifications_doc = JSONField(help_text='Specifications for this task.')
@@ -627,7 +758,7 @@ class TaskDraft(NamedCommon):
         annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template')
         super().save(force_insert, force_update, using, update_fields)
 
-    @property
+    @cached_property
     def successors(self) -> QuerySet:
         '''return the connect successor taskdraft(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets)
            If you want the result, add .all() like so: my_taskdraft.successors.all()
@@ -637,7 +768,7 @@ class TaskDraft(NamedCommon):
                                                       "INNER JOIN tmssapp_taskrelationdraft as task_rel on task_rel.consumer_id = successor_task.id\n"
                                                       "WHERE task_rel.producer_id = %s", params=[self.id]))
 
-    @property
+    @cached_property
     def predecessors(self) -> QuerySet:
         '''return the connect predecessor taskdraft(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets)
         If you want the result, add .all() like so: my_taskdraft.predecessors.all()
@@ -647,26 +778,26 @@ class TaskDraft(NamedCommon):
                                                       "INNER JOIN tmssapp_taskrelationdraft as task_rel on task_rel.producer_id = successor_task.id\n"
                                                       "WHERE task_rel.consumer_id = %s", params=[self.id]))
 
-    @property
+    @cached_property
     def duration(self) -> datetime.timedelta:
          '''returns the overall duration of this task
          '''
          return self.relative_stop_time - self.relative_start_time
 
-    @property
+    @cached_property
     def relative_start_time(self) -> datetime.timedelta:
         '''return the earliest relative start time of all subtasks of this task
         '''
         scheduling_relations = list(self.first_to_connect.all()) + list(self.second_to_connect.all())
         for scheduling_relation in scheduling_relations:
-            if scheduling_relation.first.id == self.id and scheduling_relation.placement.value == "after":
+            if scheduling_relation.first.id == self._id and scheduling_relation.placement_id == "after":
                 previous_related_task_draft = TaskDraft.objects.get(id=scheduling_relation.second.id)
                 time_offset = scheduling_relation.time_offset
                 # todo: max of several relations
                 if previous_related_task_draft.relative_stop_time:
                     return previous_related_task_draft.relative_stop_time + datetime.timedelta(seconds=time_offset)
 
-            if scheduling_relation.second.id == self.id and scheduling_relation.placement.value == "before":
+            if scheduling_relation.second.id == self._id and scheduling_relation.placement_id == "before":
                 previous_related_task_draft = TaskDraft.objects.get(id=scheduling_relation.first.id)
                 time_offset = scheduling_relation.time_offset
                 # todo: max of several relations
@@ -674,7 +805,7 @@ class TaskDraft(NamedCommon):
                     return previous_related_task_draft.relative_stop_time + datetime.timedelta(seconds=time_offset)
         return datetime.timedelta(seconds=0)
 
-    @property
+    @cached_property
     def relative_stop_time(self) -> datetime.timedelta:
         '''return the latest relative stop time of all subtasks of this task
         '''
@@ -694,7 +825,7 @@ class TaskDraft(NamedCommon):
     #  Only on the blueprints, we also aggregate start_stop times as they are in the system
     #  I'll leave these code bits here for now, until we made up our minds about this, but this can probably be removed
     #
-    # @property
+    # @cached_property
     # def duration(self) -> datetime.timedelta:
     #     '''returns the overall duration in seconds of all blueprints of this task
     #     # todo: is this the wanted behavior? Do you want to consider all the blueprints created from your draft or do you want to preview a new blueprint?
@@ -705,7 +836,7 @@ class TaskDraft(NamedCommon):
     #     else:
     #         return self.stop_time - self.start_time
     #
-    # @property
+    # @cached_property
     # def start_time(self) -> datetime or None:
     #     '''return the earliest start time of all blueprints of this task
     #     # todo: is this the wanted behavior? Do you want to consider all the blueprints created from your draft or do you want to preview a new blueprint?
@@ -717,7 +848,7 @@ class TaskDraft(NamedCommon):
     #         # todo: calculate?
     #         return None
     #
-    # @property
+    # @cached_property
     # def stop_time(self) -> datetime or None:
     #     '''return the latest stop time of all blueprints of this task
     #     # todo: is this the wanted behavior? Do you want to consider all the blueprints created from your draft or do you want to preview a new blueprint?
@@ -741,7 +872,7 @@ class TaskBlueprint(NamedCommon):
         annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template')
         super().save(force_insert, force_update, using, update_fields)
 
-    @property
+    @cached_property
     def successors(self) -> QuerySet:
         '''return the connect successor taskblueprint(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets)
            If you want the result, add .all() like so: my_taskblueprint.successors.all()
@@ -751,7 +882,7 @@ class TaskBlueprint(NamedCommon):
                                                           "INNER JOIN tmssapp_taskrelationblueprint as task_rel on task_rel.consumer_id = successor_task.id\n"
                                                           "WHERE task_rel.producer_id = %s", params=[self.id]))
 
-    @property
+    @cached_property
     def predecessors(self) -> QuerySet:
         '''return the connect predecessor taskblueprint(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets)
         If you want the result, add .all() like so: my_taskblueprint.predecessors.all()
@@ -761,7 +892,7 @@ class TaskBlueprint(NamedCommon):
                                                           "INNER JOIN tmssapp_taskrelationblueprint as task_rel on task_rel.producer_id = predecessor_task.id\n"
                                                           "WHERE task_rel.consumer_id = %s", params=[self.id]))
 
-    @property
+    @cached_property
     def duration(self) -> datetime.timedelta:
         '''return the overall duration of this task
         '''
@@ -770,20 +901,20 @@ class TaskBlueprint(NamedCommon):
         else:
             return self.stop_time - self.start_time
 
-    @property
+    @cached_property
     def relative_start_time(self) -> datetime.timedelta:
         '''return the earliest relative start time of all subtasks of this task
         '''
         scheduling_relations = list(self.first_to_connect.all()) + list(self.second_to_connect.all())
         for scheduling_relation in scheduling_relations:
-                if scheduling_relation.first.id == self.id and scheduling_relation.placement.value == "after":
+            if scheduling_relation.first.id == self._id and scheduling_relation.placement_id == "after":   # self.id and placement.value will hit the db, this does not
                     previous_related_task_blueprint = TaskBlueprint.objects.get(id=scheduling_relation.second.id)
                     time_offset = scheduling_relation.time_offset
                     # todo: max of several relations
                     if previous_related_task_blueprint.relative_stop_time:
                         return previous_related_task_blueprint.relative_stop_time + datetime.timedelta(seconds=time_offset)
 
-                if scheduling_relation.second.id == self.id and scheduling_relation.placement.value == "before":
+            if scheduling_relation.second.id == self._id and scheduling_relation.placement_id == "before":   # self.id and placement.value will hit the db, this does not
                     previous_related_task_blueprint = TaskBlueprint.objects.get(id=scheduling_relation.first.id)
                     time_offset = scheduling_relation.time_offset
                     # todo: max of several relations
@@ -791,7 +922,7 @@ class TaskBlueprint(NamedCommon):
                         return previous_related_task_blueprint.relative_stop_time + datetime.timedelta(seconds=time_offset)
         return datetime.timedelta(seconds=666660)
 
-    @property
+    @cached_property
     def relative_stop_time(self) -> datetime.timedelta:
         '''return the latest relative stop time of all subtasks of this task
         '''
@@ -803,7 +934,7 @@ class TaskBlueprint(NamedCommon):
             pass
         return self.relative_start_time
 
-    @property
+    @cached_property
     def start_time(self) -> datetime or None:
         '''return the earliest start time of all subtasks of this task
         '''
@@ -813,7 +944,7 @@ class TaskBlueprint(NamedCommon):
         else:
             return None
 
-    @property
+    @cached_property
     def stop_time(self) -> datetime or None:
         '''return the latest stop time of all subtasks of this task
         '''
@@ -823,6 +954,58 @@ class TaskBlueprint(NamedCommon):
         else:
             return None
 
+    @property
+    def status(self):
+        """
+        Return the taskblueprint status which is derived from the subtasks status
+        See https://support.astron.nl/confluence/display/TMSS/Specification+Flow#SpecificationFlow-TaskBlueprints
+        The status is displayed as an extra field in the REST API of the taskblueprint
+        """
+        if self._subtask_graph_not_instantiated():
+            status = "defined"
+        elif self._all_subtask_finished():
+            status = "finished"
+        elif self._any_subtask_cancelled():
+            status = "cancelled"
+        elif self._any_subtask_error():
+            status = "error"
+        elif self._all_observation_subtasks_finishing_finished():
+            status = "observed"
+        elif self._any_subtask_between_started_finished():
+            status = "started"
+        elif self._any_subtask_scheduled():
+            status = "scheduled"
+        else:
+            status = "schedulable"
+        return status
+
+    def _subtask_graph_not_instantiated(self):
+        total_nbr_subtasks = self.subtasks.all().count()
+        return (total_nbr_subtasks == 0 or
+                TaskBlueprintSummary.objects.filter(taskblueprint_id=self.id, substate='defining').count() > 0)
+
+    def _all_subtask_finished(self):
+        total_nbr_subtasks = self.subtasks.all().count()
+        return (TaskBlueprintSummary.objects.filter(taskblueprint_id=self.id, substate='finished').count() == total_nbr_subtasks)
+
+    def _any_subtask_cancelled(self):
+        return (TaskBlueprintSummary.objects.filter(taskblueprint_id=self.id, substate__in=('cancelling', 'cancelled')).count() > 0)
+
+    def _any_subtask_error(self):
+        return (TaskBlueprintSummary.objects.filter(taskblueprint_id=self.id, substate='error').count() > 0)
+
+    def _all_observation_subtasks_finishing_finished(self):
+        total_nbr_observation_subtasks = TaskBlueprintSummary.objects.filter(taskblueprint_id=self.id,
+                                                                             subtask_type='observation').count()
+        return (TaskBlueprintSummary.objects.filter(taskblueprint_id=self.id, substate__in=('finishing','finished'), subtask_type='observation').count()
+                == total_nbr_observation_subtasks and total_nbr_observation_subtasks > 0)
+
+    def _any_subtask_between_started_finished(self):
+        return (TaskBlueprintSummary.objects.filter(taskblueprint_id=self.id, substate__in=('starting','started','queueing','queued','finishing','finished')).count() > 0)
+
+    def _any_subtask_scheduled(self):
+        return (TaskBlueprintSummary.objects.filter(taskblueprint_id=self.id, substate='scheduled').count() > 0)
+
 
 class TaskRelationDraft(BasicCommon):
     selection_doc = JSONField(help_text='Filter for selecting dataproducts from the output role.')
diff --git a/SAS/TMSS/src/tmss/tmssapp/populate.py b/SAS/TMSS/src/tmss/tmssapp/populate.py
index 3916b8df40b61249b54d8bb4f35b8061de96addf..b786248f34773046434364d3ddc887ecd6d59e3a 100644
--- a/SAS/TMSS/src/tmss/tmssapp/populate.py
+++ b/SAS/TMSS/src/tmss/tmssapp/populate.py
@@ -55,6 +55,12 @@ def populate_test_data():
             from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingSet_test_data, SchedulingUnitDraft_test_data
             from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprints_and_subtasks_from_scheduling_unit_draft, create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_draft
             from lofar.sas.tmss.tmss.tmssapp.subtasks import schedule_subtask
+            from lofar.common.json_utils import get_default_json_object_for_schema
+
+            constraints_template = models.SchedulingConstraintsTemplate.objects.get(name="constraints")
+            constraints_spec = get_default_json_object_for_schema(constraints_template.schema)
+
+            strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines")
 
             # create a Test Scheduling Set UC1 under project TMSS-Commissioning
             tmss_project = models.Project.objects.get(name="TMSS-Commissioning")
@@ -67,8 +73,6 @@ def populate_test_data():
                 logger.info('created test scheduling_set: %s', scheduling_set.name)
 
                 for unit_nr in range(5):
-                    strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines")
-
 
                     # the 'template' in the strategy_template is a predefined json-data blob which validates against the given scheduling_unit_template
                     # a user might 'upload' a partial json-data blob, so add all the known defaults
@@ -79,7 +83,9 @@ def populate_test_data():
                                                                                       scheduling_set=scheduling_set,
                                                                                       requirements_template=strategy_template.scheduling_unit_template,
                                                                                       requirements_doc=scheduling_unit_spec,
-                                                                                      observation_strategy_template=strategy_template)
+                                                                                      observation_strategy_template=strategy_template,
+                                                                                      scheduling_constraints_doc=constraints_spec,
+                                                                                      scheduling_constraints_template=constraints_template)
                     scheduling_unit_draft.tags = ["TEST", "UC1"]
                     scheduling_unit_draft.save()
 
@@ -91,9 +97,6 @@ def populate_test_data():
                             scheduled_subtasks = models.Subtask.objects.filter(task_blueprint__scheduling_unit_blueprint=scheduling_unit_blueprint, task_blueprint__name='Calibrator Observation 1', specifications_template__type='observation').all()
                             for subtask in scheduled_subtasks:
                                 schedule_subtask(subtask)
-                                for state in [SubtaskState.Choices.QUEUEING, SubtaskState.Choices.QUEUED, SubtaskState.Choices.STARTING, SubtaskState.Choices.STARTED, SubtaskState.Choices.FINISHING, SubtaskState.Choices.FINISHED]:
-                                    subtask.state = SubtaskState.objects.get(value=state.value)
-                                    subtask.save()
                         else:
                             create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
 
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json b/SAS/TMSS/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json
index 05b2946b839c5e1e0929d1f3622d849de8e3cb10..b7ba95dffbfe3d81a8d6830181b6b03796ac3190 100644
--- a/SAS/TMSS/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json
@@ -62,21 +62,34 @@
         },
         "antenna_set": "HBA_DUAL_INNER",
         "filter": "HBA_110_190",
-        "stations":["CS001"],
+        "station_groups": [
+          {
+            "stations": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"],
+            "max_nr_missing": 4
+          },
+          {
+            "stations": ["DE601", "DE602", "DE603", "DE604", "DE605", "DE609", "FR606", "SE607", "UK608", "PL610", "PL611", "PL612", "IE613", "LV614"],
+            "max_nr_missing": 2
+          },
+          {
+            "stations": ["DE601", "DE605"],
+            "max_nr_missing": 1
+          }
+        ],
         "tile_beam": {
           "direction_type": "J2000",
-          "angle1": 42,
-          "angle2": 42,
-          "angle3": 42
+          "angle1": 0.42,
+          "angle2": 0.43,
+          "angle3": 0.44
         },
         "SAPs": [
           {
             "name": "target0",
             "digital_pointing": {
               "direction_type": "J2000",
-              "angle1": 24,
-              "angle2": 24,
-              "angle3": 24
+              "angle1": 0.24,
+              "angle2": 0.25,
+              "angle3": 0.26
             },
             "subbands": [
               349,
@@ -87,9 +100,9 @@
             "name": "target1",
             "digital_pointing": {
               "direction_type": "J2000",
-              "angle1": 24,
-              "angle2": 24,
-              "angle3": 24
+              "angle1": 0.27,
+              "angle2": 0.28,
+              "angle3": 0.29
             },
             "subbands": [
               349,
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json
index 256c576f84a5bf38f14de663abce45395a5ae325..1e6ef2fb974154228595d046c99c2b9a67934888 100644
--- a/SAS/TMSS/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json
@@ -6,17 +6,11 @@
   "version":"1",
   "type":"object",
   "definitions":{
-    "station_list":{
-      "title":"fixed station list",
-      "default":[
-        "CS001"
-      ],
-      "type":"array",
-      "additionalItems":false,
-      "additionalProperties":false,
-      "items":{
-        "type":"string",
-        "enum":[
+    "station":{
+      "type":"string",
+      "title":"Station",
+      "description":"These are the LOFAR stations",
+      "enum":[
           "CS001",
           "CS002",
           "CS003",
@@ -71,66 +65,203 @@
           "PL612",
           "IE613",
           "LV614"
-        ],
-        "title":"Station",
-        "description":""
+        ]
+      },
+    "station_list":{
+      "default":[
+        "CS001"
+      ],
+      "type":"array",
+      "additionalItems":false,
+      "additionalProperties":false,
+      "items":{
+        "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station"
       },
       "minItems":1,
       "uniqueItems":true
     },
-    "station_set":{
-      "title":"dynamic station set",
+    "max_number_of_missing_stations": {
+      "type":"integer",
+      "title":"Maximum number of stations to omit",
+      "description":"Maximum number of stations that can be omitted from a group (due to maintenance for example)",
+      "minimum":0
+    },
+    "station_group":{
       "type":"object",
+      "title": "Station group",
+      "description": "A set of predefined list of stations, and a constraint on how many stations are allowed to be missing (due to maintenance for example)",
       "default":{},
-      "additionalItems":false,
-      "items":{
-        "type":"object",
-        "title":"Station set",
-        "headerTemplate":"{{ self.group }}",
-        "additionalProperties":false,
-        "properties":{
-          "group":{
-            "type":"string",
-            "title":"Group/station",
-            "description":"Which (group of) station(s) to select from",
-            "default":"ALL",
-            "enum":[
-              "ALL",
-              "SUPERTERP",
-              "CORE",
-              "REMOTE",
-              "DUTCH",
-              "INTERNATIONAL"
-            ]
+      "anyOf": [
+        {
+          "title":"Superterp",
+          "description": "The group of all stations on the Superterp",
+          "type": "object",
+          "properties":{
+            "stations":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
+              "enum": [["CS002", "CS003", "CS004", "CS005", "CS006", "CS007"]],
+              "default": ["CS002", "CS003", "CS004", "CS005", "CS006", "CS007"],
+              "uniqueItems": false
+            },
+            "max_nr_missing":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
+              "default": 0
+            }
           },
-          "min_stations":{
-            "type":"integer",
-            "title":"Minimum nr of stations",
-            "description":"Number of stations to use within group/station",
-            "default":1,
-            "minimum":0
-          }
+          "required": ["stations", "max_nr_missing"],
+          "additionalProperties": false
         },
-        "required":[
-          "group",
-          "min_stations"
-        ]
-      }
-    },
-    "stations": {
-      "title":"stations",
-      "description":"Use either the fixed station list, or one of the dynamic station sets.",
-      "oneOf": [ {
-          "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list"
+        {
+          "title":"Core",
+          "description": "The group of all Core stations",
+          "type": "object",
+          "properties":{
+            "stations":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
+              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501"]],
+              "default": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501"],
+              "uniqueItems": false
+            },
+            "max_nr_missing":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
+              "default": 4
+            }
+          },
+          "required": ["stations", "max_nr_missing"],
+          "additionalProperties": false
         },
         {
-          "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_set"
+          "title":"Remote",
+          "description": "The group of all Dutch remote stations",
+          "type": "object",
+          "properties":{
+            "stations":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
+              "enum": [["RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"]],
+              "default": ["RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"],
+              "uniqueItems": false
+            },
+            "max_nr_missing":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
+              "default": 4
+            }
+          },
+          "required": ["stations", "max_nr_missing"],
+          "additionalProperties": false
+        },
+        {
+          "title":"Dutch",
+          "description": "The group of all Dutch (Core + Remote) stations",
+          "type": "object",
+          "properties":{
+            "stations":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
+              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"]],
+              "default": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"],
+              "uniqueItems": false
+            },
+            "max_nr_missing":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
+              "default": 4
+            }
+          },
+          "required": ["stations", "max_nr_missing"],
+          "additionalProperties": false
+        },
+        {
+          "title":"International",
+          "description": "The group of all international stations",
+          "type": "object",
+          "properties":{
+            "stations":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
+              "enum": [["DE601", "DE602", "DE603", "DE604", "DE605", "DE609", "FR606", "SE607", "UK608", "PL610", "PL611", "PL612", "IE613", "LV614"]],
+              "default": ["DE601", "DE602", "DE603", "DE604", "DE605", "DE609", "FR606", "SE607", "UK608", "PL610", "PL611", "PL612", "IE613", "LV614"],
+              "uniqueItems": false
+            },
+            "max_nr_missing":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
+              "default": 2
+            }
+          },
+          "required": ["stations", "max_nr_missing"],
+          "additionalProperties": false
+        },
+        {
+          "title":"International required",
+          "description": "A subgroup of the international stations which are required when doing observation with international stations",
+          "type": "object",
+          "properties":{
+            "stations":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
+              "enum": [["DE601", "DE605"]],
+              "default": ["DE601", "DE605"],
+              "uniqueItems": false
+            },
+            "max_nr_missing":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
+              "default": 1
+            }
+          },
+          "required": ["stations", "max_nr_missing"],
+          "additionalProperties": false
+        },
+        {
+          "title":"All",
+          "description": "The group of all (Core + Remote + International) stations",
+          "type": "object",
+          "properties":{
+            "stations":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
+              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509", "DE601", "DE602", "DE603", "DE604", "DE605", "DE609", "FR606", "SE607", "UK608", "PL610", "PL611", "PL612", "IE613", "LV614"]],
+              "default": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509", "DE601", "DE602", "DE603", "DE604", "DE605", "DE609", "FR606", "SE607", "UK608", "PL610", "PL611", "PL612", "IE613", "LV614"],
+              "uniqueItems": false
+            },
+            "max_nr_missing":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
+              "default": 6
+            }
+          },
+          "required": ["stations", "max_nr_missing"],
+          "additionalProperties": false
+        },
+        {
+          "title":"Custom",
+          "description": "A custom group of stations which can be defined by the user",
+          "type": "object",
+          "properties":{
+            "stations":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
+              "default": ["CS001"],
+              "minItems": 1,
+              "additionalItems": false,
+              "additionalProperties": false,
+              "uniqueItems": true
+            },
+            "max_nr_missing":{
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
+              "default": 0
+            }
+          },
+          "required": ["stations", "max_nr_missing"],
+          "additionalProperties": false
         }
-      ],
-      "default": {
-        "group": "ALL",
-        "min_stations": 1
-      }
+        ]
+    },
+    "station_groups": {
+      "title":"Station groups",
+      "description": "One or more predefined or custom groups of stations",
+      "type":"array",
+      "additionalItems":false,
+      "additionalProperties":false,
+      "items":{
+        "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_group"
+      },
+      "minItems":1,
+      "default": [ {
+        "stations": ["CS002", "CS003", "CS004", "CS005", "CS006", "CS007"],
+        "max_nr_missing": 1
+      } ]
     },
     "antenna_set":{
       "type":"string",
@@ -164,6 +295,55 @@
         "HBA_110_190",
         "HBA_210_250"
       ]
+    },
+    "antennas": {
+      "title": "Antennas",
+      "type": "object",
+      "description":"Structure to describe a set of specific antennafields",
+      "properties": {
+        "set": {
+          "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1/#/definitions/antenna_set",
+          "default": "HBA_DUAL"
+        },
+        "fields": {
+          "title": "Fields",
+          "type": "array",
+          "default": [],
+          "items": {
+            "title": "Field",
+            "type": "object",
+            "properties": {
+              "station": {
+                "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1/#/definitions/station"
+              },
+              "field": {
+                "title": "Field",
+                "type": "string",
+                "default": "HBA",
+                "enum": [
+                  "LBA",
+                  "HBA",
+                  "HBA0",
+                  "HBA1"
+                ]
+              },
+              "type": {
+                "title": "Type",
+                "type": "string",
+                "default": "HBA",
+                "enum": [
+                  "LBA",
+                  "HBA"
+                ]
+              }
+            },
+            "required": [ "station", "field", "type" ]
+          }
+        }
+      },
+      "required": [ "fields" ]
     }
   }
-}
\ No newline at end of file
+}
+
+
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/dataproduct_feedback_template-feedback-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/dataproduct_feedback_template-feedback-1.json
index 4c851e2e22e891d32cc34baf0d764ed0eff97f79..55611877d14f4742b4db08a752356f7cff89bc4d 100644
--- a/SAS/TMSS/src/tmss/tmssapp/schemas/dataproduct_feedback_template-feedback-1.json
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/dataproduct_feedback_template-feedback-1.json
@@ -73,119 +73,7 @@
       "required": [ "start_time", "duration", "sample_width" ]
     },
     "antennas": {
-      "title": "Antennas",
-      "type": "object",
-      "properties": {
-        "set": {
-          "title": "Antenna set",
-          "type": "string",
-          "default": "HBA_DUAL",
-          "enum": [
-            "HBA_DUAL",
-            "HBA_DUAL_INNER",
-            "HBA_ONE",
-            "HBA_ONE_INNER",
-            "HBA_ZERO",
-            "HBA_ZERO_INNER",
-            "LBA_INNER",
-            "LBA_OUTER",
-            "LBA_SPARSE_EVEN",
-            "LBA_SPARSE_ODD",
-            "LBA_ALL"
-          ]
-        },
-        "fields": {
-          "title": "Fields",
-          "type": "array",
-          "default": [],
-          "items": {
-            "title": "Field",
-            "type": "object",
-            "properties": {
-              "station": {
-                "title": "Station",
-                "type": "string",
-                "enum": [
-                  "CS001",
-                  "CS002",
-                  "CS003",
-                  "CS004",
-                  "CS005",
-                  "CS006",
-                  "CS007",
-                  "CS011",
-                  "CS013",
-                  "CS017",
-                  "CS021",
-                  "CS024",
-                  "CS026",
-                  "CS028",
-                  "CS030",
-                  "CS031",
-                  "CS032",
-                  "CS101",
-                  "CS103",
-                  "CS201",
-                  "CS301",
-                  "CS302",
-                  "CS401",
-                  "CS501",
-                  "RS106",
-                  "RS205",
-                  "RS208",
-                  "RS210",
-                  "RS305",
-                  "RS306",
-                  "RS307",
-                  "RS310",
-                  "RS406",
-                  "RS407",
-                  "RS409",
-                  "RS503",
-                  "RS508",
-                  "RS509",
-                  "DE601",
-                  "DE602",
-                  "DE603",
-                  "DE604",
-                  "DE605",
-                  "FR606",
-                  "SE607",
-                  "UK608",
-                  "DE609",
-                  "PL610",
-                  "PL611",
-                  "PL612",
-                  "IE613",
-                  "LV614"
-                ]
-              },
-              "field": {
-                "title": "Field",
-                "type": "string",
-                "default": "HBA",
-                "enum": [
-                  "LBA",
-                  "HBA",
-                  "HBA0",
-                  "HBA1"
-                ]
-              },
-              "type": {
-                "title": "Type",
-                "type": "string",
-                "default": "HBA",
-                "enum": [
-                  "LBA",
-                  "HBA"
-                ]
-              }
-            },
-            "required": [ "station", "field", "type" ]
-          }
-        }
-      },
-      "required": [ "fields" ]
+      "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1/#/definitions/antennas"
     },
     "target": {
       "title": "Target",
@@ -193,42 +81,7 @@
       "properties": {
         "pointing": {
           "title": "Pointing",
-          "type": "object",
-          "required": [
-            "direction_type",
-            "angle1",
-            "angle2"
-          ],
-          "properties": {
-            "direction_type": {
-              "title": "Direction type",
-              "type": "string",
-              "default": "J2000",
-              "enum": [
-                "J2000",
-                "SUN",
-                "MOON",
-                "MERCURY",
-                "VENUS",
-                "MARS",
-                "JUPITER",
-                "SATURN",
-                "URANUS",
-                "NEPTUNE",
-                "PLUTO"
-              ]
-            },
-            "angle1": {
-              "title": "Angle 1",
-              "type": "number",
-              "default": 0.0
-            },
-            "angle2": {
-              "title": "Angle 2",
-              "type": "number",
-              "default": 0.0
-            }
-          }
+          "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/pointing/1/#/definitions/pointing"
         }
       },
       "required": [ "pointing" ]
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/sap_template-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/sap_template-1.json
new file mode 100644
index 0000000000000000000000000000000000000000..b4f6eb64f1e422da284e0f7b7c6c3c37a9d7bd58
--- /dev/null
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/sap_template-1.json
@@ -0,0 +1,68 @@
+{
+  "$id":"http://tmss.lofar.org/api/schemas/saptemplate/sap/1#",
+  "$schema": "http://json-schema.org/draft-06/schema#",
+  "title": "SAP",
+  "type": "object",
+  "default": {},
+  "properties": {
+    "identifiers": {
+      "type": "object",
+      "properties": {
+        "SIP": {
+          "type": "string",
+          "default": ""
+        }
+      },
+      "additionalProperties": false,
+      "default": {}
+    },
+    "measurement_type": {
+      "type": "string",
+      "enum": ["calibrator", "target"],
+      "default": "target"
+    },
+    "name": {
+      "type": "string",
+      "default": ""
+    },
+    "target": {
+      "type": "string",
+      "default": ""
+    },
+    "pointing": {
+          "title": "Pointing",
+          "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/pointing/1/#/definitions/pointing",
+          "default": {}
+        },
+    "time": {
+      "type": "object",
+      "properties": {
+        "start_time": {
+          "type": "string",
+          "default": ""
+        },
+        "duration": {
+          "type": "number",
+          "default": 0
+        }
+      },
+      "additionalProperties": false,
+      "default": {},
+      "required": [
+        "start_time",
+        "duration"
+      ]
+    },
+    "antennas": {
+      "default": {},
+      "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1/#/definitions/antennas"
+    }
+  },
+  "required": [
+    "identifiers",
+    "name",
+    "pointing",
+    "time",
+    "antennas"
+  ]
+}
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json
index 04bb208f0b4deff2d4a7d0491ef4108afe335922..77a916705c8df50c069f5929e11fc03d5586acf7 100644
--- a/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json
@@ -1,13 +1,32 @@
 {
+  "$id":"http://tmss.lofar.org/api/schemas/schedulingconstraintstemplate/constraints/1#",
   "$schema": "http://json-schema.org/draft-06/schema#",
-  "title": "Constraints",
-  "description": "This schema defines the constraints for a scheduling unit",
+  "title": "constraints",
+  "description": "This schema defines the scheduling constraints for a scheduling unit",
   "version": 1,
   "definitions": {
     "timestamp": {
+      "description": "A timestamp defined in UTC",
       "type": "string",
-      "pattern": "\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d\\.\\d+Z",
-      "format": "datetime"
+      "pattern": "\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d(\\.\\d+)?Z",
+      "format": "date-time"
+    },
+    "timewindow": {
+      "type": "object",
+      "description": "A timewindow interval: [from, to)",
+      "properties": {
+        "from": {
+          "$ref": "#/definitions/timestamp"
+        },
+        "to": {
+          "$ref": "#/definitions/timestamp"
+        }
+      },
+      "additionalProperties": false,
+      "required": [
+        "from",
+        "to"
+      ]
     },
     "distance_on_sky": {
       "type": "number",
@@ -24,7 +43,7 @@
   "properties": {
     "scheduler": {
       "name": "Scheduler",
-      "description": "Which scheduling system wiil schedule this",
+      "description": "Which scheduling system will schedule this",
       "type": "string",
       "enum": [
         "manual",
@@ -34,6 +53,7 @@
     },
     "time": {
       "type": "object",
+      "default": {},
       "properties": {
         "at": {
           "description": "Start at this moment",
@@ -51,41 +71,28 @@
           "description": "Run within one of these time windows",
           "type": "array",
           "items": {
-            "from": {
-              "$ref": "#/definitions/timestamp"
-            },
-            "to": {
-              "$ref": "#/definitions/timestamp"
-            },
-            "required": [
-              "from",
-              "to"
-            ]
+            "$ref": "#/definitions/timewindow"
           },
-          "additionalItems": false
+          "minItems":0,
+          "uniqueItems":true,
+          "default": []
         },
         "not_between": {
-          "description": "NOT run within one of these time windows",
+          "description": "Do NOT run within any of these time windows",
           "type": "array",
           "items": {
-            "from": {
-              "$ref": "#/definitions/timestamp"
-            },
-            "to": {
-              "$ref": "#/definitions/timestamp"
-            },
-            "required": [
-              "from",
-              "to"
-            ]
+            "$ref": "#/definitions/timewindow"
           },
-          "additionalItems": false
+          "minItems":0,
+          "uniqueItems":true,
+          "default": []
         }
       },
       "additionalProperties": false
     },
     "daily": {
       "type": "object",
+      "default": {},
       "properties": {
         "require_night": {
           "description": "Must run at night",
@@ -107,6 +114,7 @@
     },
     "sky": {
       "type": "object",
+      "default": {},
       "properties": {
         "min_calibrator_elevation": {
           "description": "Minimum elevation for all calibrator sources",
@@ -123,14 +131,14 @@
           "type": "object",
           "properties": {
             "from": {
-              "type": "integer",
-              "minimum": -43200,
-              "maximum": 43200
+              "type": "number",
+              "minimum": -0.20943951,
+              "maximum": 0.20943951
             },
             "to": {
-              "type": "integer",
-              "minimum": -43200,
-              "maximum": 43200
+              "type": "number",
+              "minimum": -0.20943951,
+              "maximum": 0.20943951
             }
           },
           "additionalProperties": false
@@ -157,7 +165,6 @@
       "additionalProperties": false
     }
   },
-  "additionalProperties": false,
   "required": [
     "scheduler"
   ]
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/task_template-calibrator_observation-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/task_template-calibrator_observation-1.json
index 0e32bb1da081fbee61a559f8a07364787282bdb7..cd606bf6794ef166c491a80cff583e0838d8d788 100644
--- a/SAS/TMSS/src/tmss/tmssapp/schemas/task_template-calibrator_observation-1.json
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/task_template-calibrator_observation-1.json
@@ -7,6 +7,7 @@
   "type": "object",
   "properties": {
     "duration": {
+      "$id": "#duration",
       "type": "number",
       "title": "Duration (seconds)",
       "description": "Duration of this observation",
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/task_template-target_observation-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/task_template-target_observation-1.json
index e987d0d2fc1d0628be24ef009833f712601cf05f..5777e4b1b98f9a13f63eacc8c8545438a62deff7 100644
--- a/SAS/TMSS/src/tmss/tmssapp/schemas/task_template-target_observation-1.json
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/task_template-target_observation-1.json
@@ -6,9 +6,12 @@
   "version": 1,
   "type": "object",
   "properties": {
-    "stations": {
-      "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/stations",
-      "default": ["CS001"]
+    "station_groups": {
+      "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_groups",
+      "default": [ {
+        "stations": ["CS002", "CS003", "CS004", "CS005", "CS006", "CS007"],
+        "max_nr_missing": 1
+      } ]
     },
     "antenna_set": {
       "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1/#/definitions/antenna_set",
@@ -46,6 +49,7 @@
             "default": ""
           },
           "digital_pointing": {
+            "$id": "#target_pointing",
             "title": "Digital pointing",
             "default": {},
             "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/pointing/1/#/definitions/pointing"
@@ -70,6 +74,7 @@
       }
     },
     "duration": {
+      "$id": "#duration",
       "type": "number",
       "title": "Duration (seconds)",
       "description": "Duration of this observation",
@@ -129,7 +134,7 @@
     }
   },
   "required": [
-    "stations",
+    "station_groups",
     "antenna_set",
     "filter",
     "SAPs",
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json b/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json
index 09698f2cd23c880f8f6638b35640d5fc9b6c3917..b181f8ea1edb710d50264007309a63775962316a 100644
--- a/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json
@@ -108,5 +108,9 @@
   {
     "file_name": "scheduling_constraints_template-constraints-1.json",
     "template": "scheduling_constraints_template"
+  },
+  {
+    "file_name": "sap_template-1.json",
+    "template": "sap_template"
   }
 ]
\ No newline at end of file
diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
index 889ecdefb101cd9e175a125f065e22f74f36cda7..091a2352b42ec6cea116152462769e62cdd624c7 100644
--- a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
@@ -85,7 +85,7 @@ class SubtaskSerializer(RelationalHyperlinkedModelSerializer):
     class Meta:
         model = models.Subtask
         fields = '__all__'
-        extra_fields = ['cluster_value']
+        extra_fields = ['cluster_value', 'log_url']
 
 
 class SubtaskInputSerializer(RelationalHyperlinkedModelSerializer):
@@ -148,3 +148,15 @@ class DataproductHashSerializer(RelationalHyperlinkedModelSerializer):
         fields = '__all__'
 
 
+class SAPSerializer(RelationalHyperlinkedModelSerializer):
+    specifications_doc = JSONEditorField(schema_source='specifications_template.schema')
+
+    class Meta:
+        model = models.SAP
+        fields = '__all__'
+
+
+class SAPTemplateSerializer(AbstractTemplateSerializer):
+    class Meta:
+        model = models.SAPTemplate
+        fields = '__all__'
diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py
index 6f31450473362b76bd9ee1cb74258474a3f0a58d..bf250c5a51a2781970924e9ec30eb415d147b9fe 100644
--- a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py
+++ b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py
@@ -288,7 +288,8 @@ class SchedulingSetSerializer(RelationalHyperlinkedModelSerializer):
 
 class SchedulingUnitDraftSerializer(RelationalHyperlinkedModelSerializer):
     requirements_doc = JSONEditorField(schema_source="requirements_template.schema")
-    duration = FloatDurationField(required=False)
+    scheduling_constraints_doc = JSONEditorField(schema_source="scheduling_constraints_template.schema")
+    duration = FloatDurationField(read_only=True)
 
     class Meta:
         model = models.SchedulingUnitDraft
@@ -308,12 +309,12 @@ class SchedulingUnitDraftCopyFromSchedulingSetSerializer(SchedulingUnitDraftSeri
 
 class SchedulingUnitBlueprintSerializer(RelationalHyperlinkedModelSerializer):
     requirements_doc = JSONEditorField(schema_source="requirements_template.schema")
-    duration = FloatDurationField(required=False)
+    duration = FloatDurationField(read_only=True)
 
     class Meta:
         model = models.SchedulingUnitBlueprint
         fields = '__all__'
-        extra_fields = ['task_blueprints', 'duration', 'start_time', 'stop_time']
+        extra_fields = ['task_blueprints', 'duration', 'start_time', 'stop_time', 'status']
 
 class SchedulingUnitBlueprintCopyToSchedulingUnitDraftSerializer(SchedulingUnitBlueprintSerializer):
     class Meta(SchedulingUnitDraftSerializer.Meta):
@@ -325,9 +326,9 @@ class SchedulingUnitBlueprintCopyToSchedulingUnitDraftSerializer(SchedulingUnitB
 
 class TaskDraftSerializer(RelationalHyperlinkedModelSerializer):
 
-    duration = FloatDurationField(required=False)
-    relative_start_time = FloatDurationField(required=False)
-    relative_stop_time = FloatDurationField(required=False)
+    duration = FloatDurationField(read_only=True)
+    relative_start_time = FloatDurationField(read_only=True)
+    relative_stop_time = FloatDurationField(read_only=True)
     specifications_doc = JSONEditorField(schema_source='specifications_template.schema')
 
     class Meta:
@@ -338,15 +339,16 @@ class TaskDraftSerializer(RelationalHyperlinkedModelSerializer):
 
 class TaskBlueprintSerializer(RelationalHyperlinkedModelSerializer):
 
-    duration = FloatDurationField(required=False)
-    relative_start_time = FloatDurationField(required=False)
-    relative_stop_time = FloatDurationField(required=False)
+    duration = FloatDurationField(read_only=True)
+    relative_start_time = FloatDurationField(read_only=True)
+    relative_stop_time = FloatDurationField(read_only=True)
     specifications_doc = JSONEditorField(schema_source='specifications_template.schema')
 
     class Meta:
         model = models.TaskBlueprint
         fields = '__all__'
-        extra_fields = ['subtasks', 'produced_by', 'consumed_by', 'first_to_connect', 'second_to_connect', 'duration', 'start_time', 'stop_time', 'relative_start_time', 'relative_stop_time']
+        extra_fields = ['subtasks', 'produced_by', 'consumed_by', 'first_to_connect', 'second_to_connect', 'duration',
+                        'start_time', 'stop_time', 'relative_start_time', 'relative_stop_time', 'status']
 
 
 class TaskRelationDraftSerializer(RelationalHyperlinkedModelSerializer):
diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/widgets.py b/SAS/TMSS/src/tmss/tmssapp/serializers/widgets.py
index f19ee6f1913f297bed9f97fcfc920ebb150954b7..cedabc794bf1c6b104c737b23a0d2f4344bf5eb5 100644
--- a/SAS/TMSS/src/tmss/tmssapp/serializers/widgets.py
+++ b/SAS/TMSS/src/tmss/tmssapp/serializers/widgets.py
@@ -44,10 +44,11 @@ class JSONEditorField(serializers.JSONField):
     def to_representation(self, value):
         '''create representation of the json-schema-value,
         with all common json schema $ref's pointing to the correct host,
-        and inject the josdejong_jsoneditor_widget.html in the render style based on the requests accepted_media_type'''
+        and inject the josdejong_jsoneditor_widget.html in the render style based on the requests accepted_media_type for single instances'''
         self.style = {}
 
-        if self.parent.context['request'].accepted_media_type == 'text/html':
+        if self.parent.context['request'].accepted_media_type == 'text/html' and \
+                not (self.parent.parent is not None and self.parent.parent.many):
             # get the used schema...
             schema = self.get_schema(value)
 
@@ -58,6 +59,12 @@ class JSONEditorField(serializers.JSONField):
                 # so, let's do the resolving here and feed the resolved schema to the josdejong_jsoneditor_widget
                 schema = json_utils.resolved_refs(schema)
 
+                # the editor already fetched and cached common meta schemas from json-schema.org
+                # and raises an error if we supply it as well
+                # so, set schema to None for those
+                if 'json-schema.org' in value.get('$schema', ''):
+                    schema = None
+
                 self.style = {'template': 'josdejong_jsoneditor_widget.html',
                               'schema': json.dumps(schema)}
 
diff --git a/SAS/TMSS/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/src/tmss/tmssapp/subtasks.py
index 87189cc5cb1307e30073ba8d47309b583719d6f0..8dc5a528a9c0be66010cd8e80dc146a8c0f85d73 100644
--- a/SAS/TMSS/src/tmss/tmssapp/subtasks.py
+++ b/SAS/TMSS/src/tmss/tmssapp/subtasks.py
@@ -20,6 +20,8 @@ from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC
 from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset_dict
 from lofar.sas.resourceassignment.taskprescheduler.cobaltblocksize import CorrelatorSettings, BlockConstraints, BlockSize
 
+from lofar.sas.tmss.tmss.tmssapp.conversions import antennafields_for_antennaset_and_station
+
 # ==== various create* methods to convert/create a TaskBlueprint into one or more Subtasks ====
 
 def check_prerequities_for_subtask_creation(task_blueprint: TaskBlueprint) -> bool:
@@ -128,17 +130,20 @@ def create_observation_subtask_specifications_from_observation_task_blueprint(ta
     subtask_spec['stations']["antenna_set"] = task_spec["antenna_set"]
     subtask_spec['stations']["filter"] = task_spec["filter"]
 
-    if "stations" in task_spec:
-        if "group" in task_spec["stations"]:
-            try:
-                # retrieve stations in group from RADB virtual instrument
-                station_group_name = task_spec["stations"]["group"]
-                subtask_spec['stations']['station_list'] = get_stations_in_group(station_group_name)
-            except Exception as e:
-                raise SubtaskCreationException("Could not determine stations in group '%s' for task_blueprint id=%s. Error: %s" % (
-                    station_group_name, task_blueprint.id, e))
-        else:
-            subtask_spec['stations']['station_list'] = task_spec["stations"]
+    # At this moment of subtask creation we know which stations we *want* from the task_spec
+    # But we do not know yet which stations are available at the moment of observing.
+    # So, we decided that we set the subtask station_list as the union of all stations in all specified groups.
+    # This way, the user can see which stations are likely to be used.
+    # At the moment of scheduling of this subtask, the station_list is re-evaluated, and the max_nr_missing per group is validated.
+    subtask_spec['stations']['station_list'] = []
+    if "station_groups" in task_spec:
+        for station_group in task_spec["station_groups"]:
+            subtask_spec['stations']['station_list'].extend(station_group["stations"])
+        # make list have unique items
+        subtask_spec['stations']['station_list'] = sorted(list(set(subtask_spec['stations']['station_list'])))
+
+    if not subtask_spec['stations']['station_list']:
+        raise SubtaskCreationException("Cannot create observation subtask specifications for task_blueprint id=%s. No stations are defined." % (task_blueprint.id,))
 
     if 'calibrator' not in task_blueprint.specifications_template.name.lower():
         # copy/convert the analoge/digital_pointings only for non-calibrator observations (the calibrator has its own pointing)
@@ -173,6 +178,16 @@ def create_observation_subtask_specifications_from_observation_task_blueprint(ta
 def get_stations_in_group(station_group_name: str) -> []:
     '''Get a list of station names in the given station_group.
     A lookup is performed in the RADB, in the virtual instrument table'''
+
+    # TODO Make names RA and TMSS spec equal: 'NL' or 'DUTCH'?
+    if station_group_name == "DUTCH":
+        station_group_name = "NL"
+
+    # INTERNATIONAL_REQUIRED is by definition DE601 or DE605; take DE601 for now.
+    # TODO check with RA the availability of both stations
+    if station_group_name == "INTERNATIONAL_REQUIRED":
+        return ["DE601"]
+
     with RADBRPC.create() as radbrpc:
         resource_group_memberships = radbrpc.getResourceGroupMemberships()['groups']
         station_resource_group = next(rg for rg in resource_group_memberships.values()
@@ -184,6 +199,10 @@ def get_stations_in_group(station_group_name: str) -> []:
         if 'RS408' in station_names:
             station_names.remove('RS408')
 
+        # HACK remove TEST1 from station list otherwise validate will fail
+        if 'TEST1' in station_names:
+            station_names.remove('TEST1')
+
         return sorted(list(station_names))
 
 
@@ -551,7 +570,8 @@ def schedule_qafile_subtask(qafile_subtask: Subtask):
                                                                 specifications_doc=get_default_json_object_for_schema(DataproductSpecificationsTemplate.objects.get(name="empty").schema),
                                                                 specifications_template=DataproductSpecificationsTemplate.objects.get(name="empty"),
                                                                 feedback_doc=get_default_json_object_for_schema(DataproductFeedbackTemplate.objects.get(name="empty").schema),
-                                                                feedback_template=DataproductFeedbackTemplate.objects.get(name="empty")
+                                                                feedback_template=DataproductFeedbackTemplate.objects.get(name="empty"),
+                                                                sap=None  # todo: do we need to point to a SAP here? Of which dataproduct then?
                                                                 )
 
     # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it)
@@ -602,7 +622,8 @@ def schedule_qaplots_subtask(qaplots_subtask: Subtask):
                                                              specifications_doc=get_default_json_object_for_schema(DataproductSpecificationsTemplate.objects.get(name="empty").schema),
                                                              specifications_template=DataproductSpecificationsTemplate.objects.get(name="empty"),
                                                              feedback_doc=get_default_json_object_for_schema(DataproductFeedbackTemplate.objects.get(name="empty").schema),
-                                                             feedback_template=DataproductFeedbackTemplate.objects.get(name="empty")
+                                                             feedback_template=DataproductFeedbackTemplate.objects.get(name="empty"),
+                                                             sap=None  # todo: do we need to point to a SAP here? Of which dataproduct then?
                                                              )
 
     # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it)
@@ -664,7 +685,7 @@ def calculate_start_time(observation_subtask: Subtask):
         previous_subtask_obs = lst_previous_subtasks_obs[0]
         logger.info("The previous observation subtask is id=%s", previous_subtask_obs.pk)
         if previous_subtask_obs.start_time is None:
-            logger.info("Oeps the previous start time is unknown so I can not calculate it")
+            raise SubtaskSchedulingException("Cannot compute start_time for subtask id=%s because its predecessor id=%s has no start_time" %(observation_subtask.id, previous_subtask_obs.id))
         next_start_time = previous_subtask_obs.start_time + timedelta(seconds=duration_in_sec+time_offset)
     return next_start_time
 
@@ -712,7 +733,26 @@ def schedule_observation_subtask(observation_subtask: Subtask):
     directory = "/data/%s/%s/L%s/uv" % ("projects" if isProductionEnvironment() else "test-projects",
                                         observation_subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project.name,
                                         observation_subtask.id)
+
     for sap_nr, pointing in enumerate(specifications_doc['stations']['digital_pointings']):
+        antennaset = specifications_doc['stations']['antenna_set']
+        antennafields = []
+        for station in specifications_doc['stations']['station_list']:
+            fields = antennafields_for_antennaset_and_station(antennaset, station)
+            antennafields += [{"station": station, "field": field, "type": antennaset.split('_')[0]} for field in fields]
+
+        sap = SAP.objects.create(specifications_doc={ "name": "%s_%s" % (observation_subtask.id, pointing['name']),
+                                                      "identifiers": {},  # todo: TMSS-324
+                                                      "pointing": pointing['pointing'],
+                                                      "time": {"start_time": observation_subtask.start_time.isoformat(),
+                                                               "duration": (observation_subtask.stop_time - observation_subtask.start_time).total_seconds()},
+                                                      "antennas": {
+                                                      "antenna_set": antennaset,
+                                                      "fields": antennafields
+                                                      }
+                                                    },
+                                 specifications_template=SAPTemplate.objects.get(name="SAP"))
+
         for sb_nr in pointing['subbands']:
             Dataproduct.objects.create(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr),
                                        directory=directory,
@@ -724,11 +764,14 @@ def schedule_observation_subtask(observation_subtask: Subtask):
                                        feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
                                        feedback_template=dataproduct_feedback_template,
                                        size=0 if sb_nr%10==0 else 1024*1024*1024*sb_nr,
-                                       expected_size=1024*1024*1024*sb_nr)
+                                       expected_size=1024*1024*1024*sb_nr,
+                                       sap=sap)
 
     # step 4: resource assigner (if possible)
     _assign_resources(observation_subtask)
 
+    # TODO: TMSS-382: evaluate the scheduled stations and see if the requirements given in the subtask.task_blueprint.specifications_doc are met for the station_groups and max_nr_missing.
+
     # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it)
     observation_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value)
     observation_subtask.save()
@@ -802,8 +845,9 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask):
                                                    producer=pipeline_subtask_output,
                                                    specifications_doc={},
                                                    specifications_template=dataproduct_specifications_template,
-                                                   feedback_doc="",
-                                                   feedback_template=dataproduct_feedback_template)
+                                                   feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
+                                                   feedback_template=dataproduct_feedback_template,
+                                                   sap=input_dp.sap)
             DataproductTransform.objects.create(input=input_dp, output=output_dp, identity=False)
             output_dps.append(output_dp)
 
@@ -908,6 +952,8 @@ def specifications_doc_meets_selection_doc(specifications_doc, selection_doc):
     """
     meets_criteria = True
     for k, v in selection_doc.items():
+        if k.startswith('$'):  # ignore stuff like $schema
+            continue
         if k not in specifications_doc.keys():
             meets_criteria = False
         else:
diff --git a/SAS/TMSS/src/tmss/tmssapp/views.py b/SAS/TMSS/src/tmss/tmssapp/views.py
index 3b163882b38e08b032d7700efe3145b8d70c02f2..58a389fd6e332c7fea88e113fda8fe8e0d734217 100644
--- a/SAS/TMSS/src/tmss/tmssapp/views.py
+++ b/SAS/TMSS/src/tmss/tmssapp/views.py
@@ -1,6 +1,6 @@
 import os
 
-from django.http import HttpResponse, JsonResponse
+from django.http import HttpResponse, JsonResponse, Http404
 from django.shortcuts import get_object_or_404, render
 from lofar.sas.tmss.tmss.tmssapp import models
 from lofar.common.json_utils import get_default_json_object_for_schema
@@ -60,6 +60,30 @@ def get_template_json_schema(request, template:str, name:str, version:str):
     return response
 
 
+# Allow everybody to GET our publicly available station group lookups
+@permission_classes([AllowAny])
+@authentication_classes([AllowAny])
+@swagger_auto_schema(responses={200: 'A JSON object with two properties: group:<the_group_name>, stations:<the_list_of_stations>',
+                                404: 'No such group or template available'},
+                     operation_description="Get a JSON list of stations for the given <station_group> name using the group definitions in the common_schema_template given by <template_name> and <template_version>")
+def get_stations_in_group(request, template_name:str, template_version:str, station_group:str):
+    station_schema_template = get_object_or_404(models.CommonSchemaTemplate, name=template_name, version=template_version)
+    station_schema = station_schema_template.schema
+
+    if 'station_group' not in station_schema.get('definitions', {}):
+        raise Http404('The JSON schema in template %s version %s has no station_group definitions' % (template_name, template_version))
+
+    groups = station_schema['definitions']['station_group']['anyOf']
+    try:
+        selected_group = next(g for g in groups if g['title'].lower() == station_group.lower())
+    except StopIteration:
+        raise Http404('No station_group with name "%s" found in the JSON schema. template=%s version=%s' % (station_group, template_name, template_version))
+
+    stations = selected_group['properties']['stations']['enum'][0]
+    return JsonResponse({'group': station_group,
+                         'stations': stations})
+
+
 def utc(request):
     return HttpResponse(datetime.utcnow().isoformat(), content_type='text/plain')
 
diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/CMakeLists.txt b/SAS/TMSS/src/tmss/tmssapp/viewsets/CMakeLists.txt
index 445e0bbe4672e5cdad3a5a41be8575dbf2169ff0..fc0325a523508e371b2456d96b3467274dae748d 100644
--- a/SAS/TMSS/src/tmss/tmssapp/viewsets/CMakeLists.txt
+++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/CMakeLists.txt
@@ -6,8 +6,6 @@ set(_py_files
     lofar_viewset.py
     specification.py
     scheduling.py
-    helloworldflow.py
-    schedulingunitdemoflow.py
     )
 
 python_install(${_py_files}
diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/__init__.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/__init__.py
index 882458975ee4be50507620471ed1026433ddf589..93f3c7e6d54f95c40d6d9484aad802b13f9991ba 100644
--- a/SAS/TMSS/src/tmss/tmssapp/viewsets/__init__.py
+++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/__init__.py
@@ -1,3 +1,2 @@
 from .specification import *
-from .scheduling import *
-from .schedulingunitdemoflow import *
\ No newline at end of file
+from .scheduling import *
\ No newline at end of file
diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
index af49948d1615bac654f06ea03f55f8b09f679d6a..601321cf92d003f606c7d9d32afb5c32020b970e 100644
--- a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
@@ -144,6 +144,8 @@ class SubtaskViewSet(LOFARViewSet):
     filter_class = SubTaskFilter
     ordering = ('start_time',)
 
+    queryset = queryset.prefetch_related('state')
+
     @swagger_auto_schema(auto_schema=TextPlainAutoSchema,
                          responses={200: 'A LOFAR parset for this subtask (as plain text)',
                                     403: 'forbidden',
@@ -313,3 +315,19 @@ class DataproductHashViewSet(LOFARViewSet):
     serializer_class = serializers.DataproductHashSerializer
 
 
+class SAPViewSet(LOFARViewSet):
+    queryset = models.SAP.objects.all()
+    serializer_class = serializers.SAPSerializer
+
+    @swagger_auto_schema(responses={200: 'The dataproducts in this SAP',
+                                    403: 'forbidden'},
+                         operation_description="The dataproducts in this SAP.")
+    @action(methods=['get'], detail=True, url_name='dataproducts')
+    def dataproducts(self, request, pk=None):
+        sap = get_object_or_404(models.SAP, pk=pk)
+        serializer = serializers.DataproductSerializer(sap.dataproducts, many=True, context={'request': request})
+        return RestResponse(serializer.data)
+
+class SAPTemplateViewSet(AbstractTemplateViewSet):
+    queryset = models.SAPTemplate.objects.all()
+    serializer_class = serializers.SAPTemplateSerializer
diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py
index ce3fa163142398bbaf6ae6bf7e197b33b6311cd2..6e292b61afa714df6356cf528da69ebc18a555f3 100644
--- a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py
+++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py
@@ -21,6 +21,7 @@ from drf_yasg.openapi import Parameter
 from lofar.sas.tmss.tmss.tmssapp.viewsets.lofar_viewset import LOFARViewSet, LOFARNestedViewSet, AbstractTemplateViewSet, LOFARCopyViewSet
 from lofar.sas.tmss.tmss.tmssapp import models
 from lofar.sas.tmss.tmss.tmssapp import serializers
+from django.http import JsonResponse
 
 from datetime import datetime
 from lofar.common.json_utils import get_default_json_object_for_schema
@@ -307,6 +308,16 @@ class SchedulingUnitDraftViewSet(LOFARViewSet):
     queryset = models.SchedulingUnitDraft.objects.all()
     serializer_class = serializers.SchedulingUnitDraftSerializer
 
+    # prefetch all reverse related references from other models on their related_name to avoid a ton of duplicate queries
+    queryset = queryset.prefetch_related('copied_from') \
+                       .prefetch_related('scheduling_unit_blueprints')\
+                       .prefetch_related('task_drafts')
+
+    # preselect all references to other models to avoid even more duplicate queries
+    queryset = queryset.select_related('copies') \
+                       .select_related('copy_reason') \
+                       .select_related('scheduling_set')
+
     @swagger_auto_schema(responses={201: 'The Created SchedulingUnitBlueprint, see Location in Response header',
                                     403: 'forbidden'},
                          operation_description="Carve SchedulingUnitDraft in stone, and make an (uneditable) blueprint out of it.")
@@ -580,6 +591,9 @@ class SchedulingUnitBlueprintViewSet(LOFARViewSet):
     queryset = models.SchedulingUnitBlueprint.objects.all()
     serializer_class = serializers.SchedulingUnitBlueprintSerializer
 
+    # prefetch all reverse related references from other models on their related_name to avoid a ton of duplicate queries
+    queryset = queryset.prefetch_related('task_blueprints')
+
     @swagger_auto_schema(responses={201: "This SchedulingUnitBlueprint, with references to its created TaskBlueprints and (scheduled) Subtasks.",
                                     403: 'forbidden'},
                          operation_description="Create TaskBlueprint(s) for this scheduling unit, create subtasks, and schedule the ones that are not dependend on predecessors.")
@@ -616,6 +630,20 @@ class SchedulingUnitBlueprintViewSet(LOFARViewSet):
         return Response(serializers.SchedulingUnitBlueprintSerializer(scheduling_unit_blueprint, context={'request':request}).data,
                         status=status.HTTP_201_CREATED)
 
+    @swagger_auto_schema(responses={200: 'The available logging urls for all Subtasks of this SchedulingUnitBlueprint.',
+                                    403: 'forbidden'},
+                         operation_description="Get the subtask logging urls of this schedulingunit blueprint.")
+    @action(methods=['get'], detail=True, url_name='get_all_subtasks_log_urls')
+    def get_all_subtasks_log_urls(self, request, pk=None):
+        subtasks = models.Subtask.objects.filter(task_blueprint__scheduling_unit_blueprint_id=pk)
+        result = []
+        for subtask in subtasks:
+            if subtask.log_url != "":
+                result.append({"subtaskid": subtask.id, "type": subtask.specifications_template.type.value, "log_url": subtask.log_url})
+        # JsonResponse only serializes dicts by default; result is a list of dicts,
+        # so pass safe=False to allow it (otherwise Django raises a TypeError).
+        return JsonResponse(result, safe=False)
+
 
 class SchedulingUnitBlueprintNestedViewSet(LOFARNestedViewSet):
     queryset = models.SchedulingUnitBlueprint.objects.all()
@@ -633,6 +661,22 @@ class TaskDraftViewSet(LOFARViewSet):
     queryset = models.TaskDraft.objects.all()
     serializer_class = serializers.TaskDraftSerializer
 
+    # prefetch all reverse related references from other models on their related_name to avoid a ton of duplicate queries
+    queryset = queryset.prefetch_related('first_to_connect') \
+                       .prefetch_related('second_to_connect')\
+                       .prefetch_related('produced_by')\
+                       .prefetch_related('consumed_by')\
+                       .prefetch_related('task_blueprints')\
+                       .prefetch_related('copied_from')
+
+    # prefetch nested references in reverse models to avoid duplicate lookup queries
+    queryset = queryset.prefetch_related('first_to_connect__placement') \
+                       .prefetch_related('second_to_connect__placement')
+
+    # select all references to other models to avoid even more duplicate queries
+    queryset = queryset.select_related('copies') \
+                       .select_related('copy_reason')
+
     @swagger_auto_schema(responses={201: 'The created task blueprint, see Location in Response header',
                                     403: 'forbidden'},
                          operation_description="Carve this draft task specification in stone, and make an (uneditable) blueprint out of it.")
@@ -726,6 +770,17 @@ class TaskBlueprintViewSet(LOFARViewSet):
     queryset = models.TaskBlueprint.objects.all()
     serializer_class = serializers.TaskBlueprintSerializer
 
+    # prefetch all reverse related references from other models on their related_name to avoid a ton of duplicate queries
+    queryset = queryset.prefetch_related('first_to_connect')\
+                       .prefetch_related('second_to_connect')\
+                       .prefetch_related('produced_by')\
+                       .prefetch_related('consumed_by')\
+                       .prefetch_related('subtasks')
+
+    # prefetch nested references in reverse models to avoid duplicate lookup queries
+    queryset = queryset.prefetch_related('first_to_connect__placement') \
+                       .prefetch_related('second_to_connect__placement')
+
     @swagger_auto_schema(responses={201: "This TaskBlueprint, with it is created subtasks",
                                     403: 'forbidden'},
                          operation_description="Create subtasks.")
diff --git a/SAS/TMSS/src/tmss/urls.py b/SAS/TMSS/src/tmss/urls.py
index 37d9d081a3a3ecbee61e876ea3ee365b7c111ace..c6c812bc3c5ee97115ef0c2664bcf56f524bfbc3 100644
--- a/SAS/TMSS/src/tmss/urls.py
+++ b/SAS/TMSS/src/tmss/urls.py
@@ -23,7 +23,7 @@ from django.views.generic.base import TemplateView, RedirectView
 
 from collections import OrderedDict
 from rest_framework import routers, permissions
-from .tmssapp import viewsets, models, serializers, views, workflows
+from .tmssapp import viewsets, models, serializers, views
 from rest_framework.documentation import include_docs_urls
 from drf_yasg.views import get_schema_view
 from drf_yasg import openapi
@@ -33,6 +33,8 @@ from datetime import datetime
 from material.frontend import urls as frontend_urls
 from viewflow.flow.viewset import FlowViewSet
 
+import debug_toolbar
+
 #
 # Django style patterns
 #
@@ -62,8 +64,11 @@ urlpatterns = [
     path('redoc/', swagger_schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
     path('schemas/<str:template>/<str:name>/<str:version>', views.get_template_json_schema, name='get_template_json_schema'), #TODO: how to make trailing slash optional?
     path('schemas/<str:template>/<str:name>/<str:version>/', views.get_template_json_schema, name='get_template_json_schema'),
+    path('station_groups/<str:template_name>/<str:template_version>/<str:station_group>', views.get_stations_in_group, name='get_stations_in_group'), #TODO: how to make trailing slash optional?
+    path('station_groups/<str:template_name>/<str:template_version>/<str:station_group>/', views.get_stations_in_group, name='get_stations_in_group'),
     path(r'util/utc', views.utc, name="system-utc"),
-    path(r'util/lst', views.lst, name="conversion-lst")
+    path(r'util/lst', views.lst, name="conversion-lst"),
+    path('__debug__/', include(debug_toolbar.urls)),
 ]
 
 
@@ -176,6 +181,7 @@ router.register(r'dataproduct_specifications_template', viewsets.DataproductSpec
 router.register(r'default_subtask_template', viewsets.DefaultSubtaskTemplateViewSet)
 router.register(r'default_dataproduct_specifications_template', viewsets.DefaultDataproductSpecificationsTemplateViewSet)
 router.register(r'dataproduct_feedback_template', viewsets.DataproductFeedbackTemplateViewSet)
+router.register(r'sap_template', viewsets.SAPTemplateViewSet)
 
 # instances
 router.register(r'subtask', viewsets.SubtaskViewSet)
@@ -190,6 +196,7 @@ router.register(r'dataproduct_archive_info', viewsets.DataproductArchiveInfoView
 router.register(r'dataproduct_hash', viewsets.DataproductHashViewSet)
 router.register(r'subtask_state_log', viewsets.SubtaskStateLogViewSet)
 router.register(r'user', viewsets.UserViewSet)
+router.register(r'sap', viewsets.SAPViewSet)
 
 # ---
 
diff --git a/SAS/TMSS/src/tmss/workflowapp/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..94b72e83e35a77ab9b16f84b7647f8ab0c8af94a
--- /dev/null
+++ b/SAS/TMSS/src/tmss/workflowapp/CMakeLists.txt
@@ -0,0 +1,17 @@
+
+include(PythonInstall)
+
+set(_py_files
+    __init__.py
+    admin.py
+    apps.py
+    tests.py
+    )
+
+python_install(${_py_files}
+    DESTINATION lofar/sas/tmss/tmss/workflowapp)
+
+add_subdirectory(migrations)
+add_subdirectory(models)
+add_subdirectory(flows)
+add_subdirectory(viewsets)
diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/helloworldflow.py b/SAS/TMSS/src/tmss/workflowapp/__init__.py
similarity index 100%
rename from SAS/TMSS/src/tmss/tmssapp/viewsets/helloworldflow.py
rename to SAS/TMSS/src/tmss/workflowapp/__init__.py
diff --git a/SAS/TMSS/src/tmss/workflowapp/admin.py b/SAS/TMSS/src/tmss/workflowapp/admin.py
new file mode 100644
index 0000000000000000000000000000000000000000..8c38f3f3dad51e4585f3984282c2a4bec5349c1e
--- /dev/null
+++ b/SAS/TMSS/src/tmss/workflowapp/admin.py
@@ -0,0 +1,3 @@
+from django.contrib import admin
+
+# Register your models here.
diff --git a/SAS/TMSS/src/tmss/workflowapp/apps.py b/SAS/TMSS/src/tmss/workflowapp/apps.py
new file mode 100644
index 0000000000000000000000000000000000000000..d70dc7921a32145aa2a76285c3362041e091a358
--- /dev/null
+++ b/SAS/TMSS/src/tmss/workflowapp/apps.py
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class WorkflowappConfig(AppConfig):
+    name = 'workflowapp'
diff --git a/SAS/TMSS/src/tmss/tmssapp/workflows/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/flows/CMakeLists.txt
similarity index 72%
rename from SAS/TMSS/src/tmss/tmssapp/workflows/CMakeLists.txt
rename to SAS/TMSS/src/tmss/workflowapp/flows/CMakeLists.txt
index 474aada33041160e598ac2b1a126d68971d75afd..769f922e4781a912f1c0488c3655f6ab61363d3a 100644
--- a/SAS/TMSS/src/tmss/tmssapp/workflows/CMakeLists.txt
+++ b/SAS/TMSS/src/tmss/workflowapp/flows/CMakeLists.txt
@@ -8,4 +8,4 @@ set(_py_files
     )
 
 python_install(${_py_files}
-    DESTINATION lofar/sas/tmss/tmss/tmssapp/workflows)
+    DESTINATION lofar/sas/tmss/tmss/workflowapp/flows)
diff --git a/SAS/TMSS/src/tmss/tmssapp/workflows/__init__.py b/SAS/TMSS/src/tmss/workflowapp/flows/__init__.py
similarity index 100%
rename from SAS/TMSS/src/tmss/tmssapp/workflows/__init__.py
rename to SAS/TMSS/src/tmss/workflowapp/flows/__init__.py
diff --git a/SAS/TMSS/src/tmss/tmssapp/workflows/helloworldflow.py b/SAS/TMSS/src/tmss/workflowapp/flows/helloworldflow.py
similarity index 97%
rename from SAS/TMSS/src/tmss/tmssapp/workflows/helloworldflow.py
rename to SAS/TMSS/src/tmss/workflowapp/flows/helloworldflow.py
index d3307efe5f773359de58c89bea4a8728fa809c05..cd7ee660823074d4a00e5dca9e87e240098442c9 100644
--- a/SAS/TMSS/src/tmss/tmssapp/workflows/helloworldflow.py
+++ b/SAS/TMSS/src/tmss/workflowapp/flows/helloworldflow.py
@@ -5,9 +5,7 @@ from viewflow.base import this, Flow
 from viewflow.compat import _
 from viewflow.flow import views as flow_views
 
-
-from lofar.sas.tmss.tmss.tmssapp import models
-
+from .. import models
 
 
 @frontend.register
diff --git a/SAS/TMSS/src/tmss/tmssapp/workflows/schedulingunitdemoflow.py b/SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitdemoflow.py
similarity index 99%
rename from SAS/TMSS/src/tmss/tmssapp/workflows/schedulingunitdemoflow.py
rename to SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitdemoflow.py
index a35c72db8e9430b929e9ada4f424bbf6a58527c9..0a2882d7a4550ef3ff8e60b190c4074f60356795 100644
--- a/SAS/TMSS/src/tmss/tmssapp/workflows/schedulingunitdemoflow.py
+++ b/SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitdemoflow.py
@@ -8,7 +8,7 @@ from viewflow.activation import FuncActivation, ViewActivation
 from viewflow.flow.nodes import Signal
 from viewflow import mixins
 
-from lofar.sas.tmss.tmss.tmssapp import models
+from .. import models
 
 from viewflow import frontend, ThisObject
 from viewflow.activation import STATUS
diff --git a/SAS/TMSS/src/tmss/workflowapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/workflowapp/migrations/0001_initial.py
new file mode 100644
index 0000000000000000000000000000000000000000..2e95b97379265e5eb14cfd44e85357218eb63948
--- /dev/null
+++ b/SAS/TMSS/src/tmss/workflowapp/migrations/0001_initial.py
@@ -0,0 +1,50 @@
+# Generated by Django 3.0.9 on 2020-10-01 12:30
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    initial = True
+
+    dependencies = [
+        ('viewflow', '0008_jsonfield_and_artifact'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='SchedulingUnitDemo',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('name', models.CharField(max_length=50)),
+                ('state', models.IntegerField()),
+            ],
+        ),
+        migrations.CreateModel(
+            name='HelloWorldProcess',
+            fields=[
+            ],
+            options={
+                'verbose_name': 'World Request',
+                'verbose_name_plural': 'World Requests',
+                'proxy': True,
+                'indexes': [],
+                'constraints': [],
+            },
+            bases=('viewflow.process',),
+        ),
+        migrations.CreateModel(
+            name='SchedulingUnitDemoProcess',
+            fields=[
+                ('process_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='viewflow.Process')),
+                ('text', models.CharField(max_length=150)),
+                ('approved', models.BooleanField(default=False)),
+                ('su', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflowapp.SchedulingUnitDemo')),
+            ],
+            options={
+                'abstract': False,
+            },
+            bases=('viewflow.process',),
+        ),
+    ]
diff --git a/SAS/TMSS/src/tmss/workflowapp/migrations/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/migrations/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..158ea7946445fcee8e52b00447df80a873e98ec2
--- /dev/null
+++ b/SAS/TMSS/src/tmss/workflowapp/migrations/CMakeLists.txt
@@ -0,0 +1,8 @@
+
+include(PythonInstall)
+
+
+FILE(GLOB _py_files RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.py)
+
+python_install(${_py_files}
+    DESTINATION lofar/sas/tmss/tmss/workflowapp/migrations)
\ No newline at end of file
diff --git a/SAS/TMSS/src/tmss/workflowapp/migrations/__init__.py b/SAS/TMSS/src/tmss/workflowapp/migrations/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/SAS/TMSS/src/tmss/workflowapp/models/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/models/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..1c94f0a15d5ade684111945ce5bb79dfe25f7a91
--- /dev/null
+++ b/SAS/TMSS/src/tmss/workflowapp/models/CMakeLists.txt
@@ -0,0 +1,11 @@
+
+include(PythonInstall)
+
+set(_py_files
+    __init__.py
+    helloworldflow.py
+    schedulingunitdemoflow.py
+    )
+
+python_install(${_py_files}
+    DESTINATION lofar/sas/tmss/tmss/workflowapp/models)
diff --git a/SAS/TMSS/src/tmss/workflowapp/models/__init__.py b/SAS/TMSS/src/tmss/workflowapp/models/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..45516795a25730483ebfa40c1fbdb5f533df8ebe
--- /dev/null
+++ b/SAS/TMSS/src/tmss/workflowapp/models/__init__.py
@@ -0,0 +1,2 @@
+from .helloworldflow import *
+from .schedulingunitdemoflow import *
\ No newline at end of file
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/helloworldflow.py b/SAS/TMSS/src/tmss/workflowapp/models/helloworldflow.py
similarity index 100%
rename from SAS/TMSS/src/tmss/tmssapp/models/helloworldflow.py
rename to SAS/TMSS/src/tmss/workflowapp/models/helloworldflow.py
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/schedulingunitdemoflow.py b/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitdemoflow.py
similarity index 100%
rename from SAS/TMSS/src/tmss/tmssapp/models/schedulingunitdemoflow.py
rename to SAS/TMSS/src/tmss/workflowapp/models/schedulingunitdemoflow.py
diff --git a/SAS/TMSS/src/tmss/workflowapp/tests.py b/SAS/TMSS/src/tmss/workflowapp/tests.py
new file mode 100644
index 0000000000000000000000000000000000000000..7ce503c2dd97ba78597f6ff6e4393132753573f6
--- /dev/null
+++ b/SAS/TMSS/src/tmss/workflowapp/tests.py
@@ -0,0 +1,3 @@
+from django.test import TestCase
+
+# Create your tests here.
diff --git a/SAS/TMSS/src/tmss/workflowapp/viewsets/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/viewsets/CMakeLists.txt
new file mode 100644
index 0000000000000000000000000000000000000000..7adc12fcf7a85912784409d17f37177986c94298
--- /dev/null
+++ b/SAS/TMSS/src/tmss/workflowapp/viewsets/CMakeLists.txt
@@ -0,0 +1,10 @@
+
+include(PythonInstall)
+
+set(_py_files
+    __init__.py
+    schedulingunitdemoflow.py
+    )
+
+python_install(${_py_files}
+    DESTINATION lofar/sas/tmss/tmss/workflowapp/viewsets)
diff --git a/SAS/TMSS/src/tmss/workflowapp/viewsets/__init__.py b/SAS/TMSS/src/tmss/workflowapp/viewsets/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..b77c70aeb959e9d4f63c395fd1079cfbbe3bc078
--- /dev/null
+++ b/SAS/TMSS/src/tmss/workflowapp/viewsets/__init__.py
@@ -0,0 +1 @@
+from .schedulingunitdemoflow import *
\ No newline at end of file
diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/schedulingunitdemoflow.py b/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitdemoflow.py
similarity index 92%
rename from SAS/TMSS/src/tmss/tmssapp/viewsets/schedulingunitdemoflow.py
rename to SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitdemoflow.py
index ea117c0f9c27a4324fe76c77fe1256e1b1eca446..da3dc24e15ff6f3bd93da9037101a718f4ebed66 100644
--- a/SAS/TMSS/src/tmss/tmssapp/viewsets/schedulingunitdemoflow.py
+++ b/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitdemoflow.py
@@ -3,7 +3,7 @@ from rest_framework import viewsets
 from rest_framework.response import Response
 from rest_framework.decorators import action
 from rest_framework.serializers import ModelSerializer
-from lofar.sas.tmss.tmss.tmssapp import models
+from lofar.sas.tmss.tmss.workflowapp import models
 
 # Create your views here.
 
diff --git a/SAS/TMSS/test/CMakeLists.txt b/SAS/TMSS/test/CMakeLists.txt
index 769fce231ac3bc18470ae3c974456d4ec089ff68..716469c7ca9294350badc60448fc92870eb6be8e 100644
--- a/SAS/TMSS/test/CMakeLists.txt
+++ b/SAS/TMSS/test/CMakeLists.txt
@@ -38,5 +38,5 @@ if(BUILD_TESTING)
     file(COPY testdata DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
 
     set_tests_properties(t_tmssapp_scheduling_REST_API PROPERTIES TIMEOUT 300)
-    set_tests_properties(t_tmssapp_specification_REST_API PROPERTIES TIMEOUT 300)
+    set_tests_properties(t_tmssapp_specification_REST_API PROPERTIES TIMEOUT 360)
 endif()
diff --git a/SAS/TMSS/test/t_adapter.py b/SAS/TMSS/test/t_adapter.py
index 4a7428ce1a224630289cc99aed89501458719bb9..379f3a37a4b53165882be3579af3eef08a8b40fe 100755
--- a/SAS/TMSS/test/t_adapter.py
+++ b/SAS/TMSS/test/t_adapter.py
@@ -44,6 +44,7 @@ from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset
 from lofar.common.json_utils import get_default_json_object_for_schema
 from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduct
 from lofar.sas.tmss.tmss.tmssapp.adapters.feedback import generate_dataproduct_feedback_from_subtask_feedback_and_set_finished
+from lofar.lta.sip import constants
 
 class ParsetAdapterTest(unittest.TestCase):
     def test_01(self):
@@ -63,16 +64,24 @@ class SIPdapterTest(unittest.TestCase):
     def test_simple_sip_generate_from_dataproduct(self):
         subtask_template = models.SubtaskTemplate.objects.get(name='observation control')
         specifications_doc = get_default_json_object_for_schema(subtask_template.schema)
+        specifications_doc['stations']['filter'] = "HBA_210_250"
+        feedback_template = models.DataproductFeedbackTemplate.objects.get(name='feedback')
+        # feedback_doc = get_default_json_object_for_schema(feedback_template.schema)  # todo <- fix the default generator, for some reason it does not produce valid json here...
+        feedback_doc = {'percentage_written': 100, 'frequency': {'subbands': [156], 'central_frequencies': [33593750.0], 'channel_width': 6103.515625, 'channels_per_subband': 32}, 'time': {'start_time': '2013-02-16T17:00:00', 'duration': 5.02732992172, 'sample_width': 2.00278016}, 'antennas': {'set': 'HBA_DUAL', 'fields': [{'type': 'HBA', 'field': 'HBA0', 'station': 'CS001'}, {'type': 'HBA', 'field': 'HBA1', 'station': 'CS001'}]}, 'target': {'pointing': {'angle1': 0, 'angle2': 0, 'angle3': 0, 'direction_type': 'J2000'}}, 'samples': {'polarisations': ['XX', 'XY', 'YX', 'YY'], 'type': 'float', 'bits': 32, 'writer': 'standard', 'writer_version': '2.2.0', 'complex': True}, '$schema': 'http://127.0.0.1:8001/api/schemas/dataproductfeedbacktemplate/feedback/1#'}
         for dp in specifications_doc['stations']['digital_pointings']:
             dp['subbands'] = list(range(8))
         subtask_data = Subtask_test_data(subtask_template=subtask_template, specifications_doc=specifications_doc)
         subtask:models.Subtask = models.Subtask.objects.create(**subtask_data)
         subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask))
-        dataproduct:models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=subtask_output))
+        dataproduct:models.Dataproduct = models.Dataproduct.objects.create(**Dataproduct_test_data(feedback_doc=feedback_doc, producer=subtask_output))
 
+        # make sure we can create a valid SIP
         sip = generate_sip_for_dataproduct(dataproduct)
-        # TODO: Although validate succeed at this step, would be interesting to check some xml values
-        logger.info(sip.get_prettyxml())
+
+        # double-check that SIP contains values from feedback and specifications docs
+        self.assertIn(str(feedback_doc['frequency']['channel_width']), sip.get_prettyxml())
+        self.assertIn(str(feedback_doc['time']['start_time']), sip.get_prettyxml())
+        self.assertIn(constants.FILTERSELECTIONTYPE_210_250_MHZ, sip.get_prettyxml()) # specifications_doc: "HBA_210_250"
 
 
 class FeedbackAdapterTest(unittest.TestCase):
diff --git a/SAS/TMSS/test/t_scheduling.py b/SAS/TMSS/test/t_scheduling.py
index fa64b627ef404c7eed2f48cc6ac8c43fd450415c..1f3d24c819fcb2099eebac85931e7348bf2799ba 100755
--- a/SAS/TMSS/test/t_scheduling.py
+++ b/SAS/TMSS/test/t_scheduling.py
@@ -21,6 +21,7 @@
 
 import os
 import unittest
+from unittest import mock
 
 import logging
 logger = logging.getLogger(__name__)
@@ -63,6 +64,7 @@ from datetime import datetime, timedelta
 from lofar.sas.resourceassignment.resourceassigner.rarpc import RARPC
 from lofar.sas.tmss.tmss.tmssapp import models
 from lofar.sas.tmss.tmss.tmssapp.subtasks import *
+from lofar.sas.tmss.tmss.tmssapp.tasks import *
 
 
 def create_subtask_object_for_testing(subtask_type_value, subtask_state_value):
@@ -77,7 +79,6 @@ def create_subtask_object_for_testing(subtask_type_value, subtask_state_value):
     return models.Subtask.objects.create(**subtask_data)
 
 
-
 class SchedulingTest(unittest.TestCase):
     def setUp(self):
         # clean all specs/tasks/claims in RADB (cascading delete)
@@ -257,7 +258,8 @@ class SubtaskInputOutputTest(unittest.TestCase):
         setting.value = True
         setting.save()
 
-    def test_schedule_pipeline_subtask_filters_predecessor_output_dataproducts_for_input(self):
+    @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks._assign_resources")
+    def test_schedule_pipeline_subtask_filters_predecessor_output_dataproducts_for_input(self, assign_resources_mock):
         # setup:
         #   create observation subtask and outputs and dataproducts
         obs_st = create_subtask_object_for_testing('observation', 'finished')
@@ -278,15 +280,132 @@ class SubtaskInputOutputTest(unittest.TestCase):
         dp2_1 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out2, specifications_doc={'sap': [0]}))
         dp2_2 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out2, specifications_doc={'sap': [1]}))
 
-        # uncomment when RA scheduler works
-        # # trigger:
-        # #   schedule pipeline, which should attach the correct subset of dataproducts to the pipeline inputs
-        # schedule_pipeline_subtask(pipe_st)
-        #
-        # # assert:
-        # #   check correct input filtering
-        # self.assertEqual(set(pipe_in1.dataproducts.all()), {dp1_1, dp1_3})
-        # self.assertEqual(set(pipe_in2.dataproducts.all()), {dp2_2})
+        # trigger:
+        #   schedule pipeline, which should attach the correct subset of dataproducts to the pipeline inputs
+        schedule_pipeline_subtask(pipe_st)
+
+        # assert:
+        #   check correct input filtering
+        self.assertEqual(set(pipe_in1.dataproducts.all()), {dp1_1, dp1_3})
+        self.assertEqual(set(pipe_in2.dataproducts.all()), {dp2_2})
+
+
+class SAPTest(unittest.TestCase):
+    """
+    SAP test
+    These testcases are located in the t_scheduling module, because the SAP entries are created/assigned during scheduling
+    """
+
+    def setUp(self) -> None:
+        # make sure we're allowed to schedule
+        setting = Setting.objects.get(name='allow_scheduling_observations')
+        setting.value = True
+        setting.save()
+
+    def test_schedule_observation_subtask_creates_sap_with_correct_pointing(self):
+        with tmss_test_env.create_tmss_client() as client:
+            subtask_template = client.get_subtask_template("observation control")
+            spec = get_default_json_object_for_schema(subtask_template['schema'])
+            spec['stations']['digital_pointings'][0]['subbands'] = [0]
+            cluster_url = client.get_path_as_json_object('/cluster/1')['url']
+            pointing = {"angle1": 7.6, "angle2": 5.4, "direction_type": "J2000"}
+            spec['stations']['digital_pointings'][0]['pointing'] = pointing
+
+            subtask_data = test_data_creator.Subtask(specifications_template_url=subtask_template['url'],
+                                                     specifications_doc=spec,
+                                                     cluster_url = cluster_url,
+                                                     start_time=datetime.utcnow() + timedelta(minutes=5),
+                                                     stop_time=datetime.utcnow() + timedelta(minutes=15))
+            subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/')
+            subtask_id = subtask['id']
+            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']),
+                                                    '/subtask_output/')
+
+            sap_count_before_scheduling = models.SAP.objects.count()
+            client.set_subtask_status(subtask_id, 'defined')
+            subtask = client.schedule_subtask(subtask_id)
+
+            self.assertGreater(models.SAP.objects.count(), sap_count_before_scheduling)
+            self.assertEqual(models.SAP.objects.first().specifications_doc['pointing']['angle1'], pointing['angle1'])
+            self.assertEqual(models.SAP.objects.first().specifications_doc['pointing']['angle2'], pointing['angle2'])
+
+    @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks._assign_resources")
+    def test_schedule_pipeline_subtask_copies_sap_from_input_to_output(self, assign_resources_mock):
+        # setup:
+        #   create observation subtask and outputs and dataproducts
+        obs_st = create_subtask_object_for_testing('observation', 'finished')
+        obs_out = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=obs_st))
+
+        #   create connected pipeline subtask and inputs, specify input filtering
+        pipe_st = create_subtask_object_for_testing('pipeline', 'defined')
+        pipe_out = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=pipe_st)) # required by scheduling function
+        pipe_in = models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=pipe_st, producer=obs_out))
+
+        #   create obs output dataproducts
+        dp1_in = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out))
+        dp2_in = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out))
+
+        #   schedule pipeline, which should copy the SAP
+        schedule_pipeline_subtask(pipe_st)
+
+        # determine the newly created pipeline dataproducts
+        dp1_out = DataproductTransform.objects.get(input=dp1_in).output
+        dp2_out = DataproductTransform.objects.get(input=dp2_in).output
+
+        # assert:
+        self.assertEqual(dp1_in.sap, dp1_out.sap)
+        self.assertEqual(dp2_in.sap, dp2_out.sap)
+
+
+
+class CreationFromSchedulingUnitDraft(unittest.TestCase):
+    """
+    From scheduling_unit_draft test:
+     create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> models.SchedulingUnitBlueprint:
+    This requires the Resource Assigner test environment to be alive
+    """
+
+    def test_create_task_blueprints_and_subtasks_from_scheduling_unit_draft_with_UC1_requirements(self):
+        """
+        Create Scheduling Unit Draft with requirements_doc (read from file)
+        Create Task Blueprints and Subtasks
+        Check if tasks (7) are created:
+           Calibration 1     : 1 Observation and 1 Pipeline task
+           Target Observation: 1 Observation and 2 Pipeline tasks
+           Calibration 2     : 1 Observation and 1 Pipeline task
+        Check if subtasks (13) are created:
+           Every Observation Task: 3 subtasks (1 control, 2 QA)
+           Every Pipeline Task:    1 subtask (1 control)
+           makes 3x3 + 4x1 = 13
+        """
+        strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines")
+
+        scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(
+                                   name="Test Scheduling Unit UC1",
+                                   requirements_doc=strategy_template.template,
+                                   requirements_template=strategy_template.scheduling_unit_template,
+                                   observation_strategy_template=strategy_template,
+                                   copy_reason=models.CopyReason.objects.get(value='template'),
+                                   generator_instance_doc="para",
+                                   copies=None,
+                                   scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data()))
+
+        create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
+
+        scheduling_unit_draft.refresh_from_db()
+        task_drafts = scheduling_unit_draft.task_drafts.all()
+        self.assertEqual(7, len(task_drafts))
+
+        scheduling_unit_blueprints = scheduling_unit_draft.scheduling_unit_blueprints.all()
+        self.assertEqual(1, len(scheduling_unit_blueprints))
+
+        scheduling_unit_blueprint = scheduling_unit_blueprints[0]
+        task_blueprints = scheduling_unit_blueprint.task_blueprints.all()
+        self.assertEqual(7, len(task_blueprints))
+        total_subtasks = 0
+        for task_blueprint in task_blueprints:
+            total_subtasks += task_blueprint.subtasks.count()
+        self.assertEqual(13, total_subtasks)
 
 
 if __name__ == "__main__":
diff --git a/SAS/TMSS/test/t_subtasks.py b/SAS/TMSS/test/t_subtasks.py
index 17210063f2e24e31a19a3a1f05edee0375c409d7..7e024d6b143a6483c1bfcdaedceb428cd44ae281 100755
--- a/SAS/TMSS/test/t_subtasks.py
+++ b/SAS/TMSS/test/t_subtasks.py
@@ -115,6 +115,8 @@ def create_scheduling_relation_task_blueprint_for_testing(first_task_blueprint,
                                  placement=models.SchedulingRelationPlacement.objects.get(value='before'),
                                  time_offset=60)
     return task_scheduling_rel_obj
+
+
 class SubTasksCreationFromSubTask(unittest.TestCase):
 
     def test_create_qafile_subtask_from_observation_subtask_failed(self):
@@ -255,15 +257,15 @@ class SubTasksCreationFromTaskBluePrintCalibrator(unittest.TestCase):
             create_observation_control_subtask_from_task_blueprint(cal_task_blueprint)
 
         cal_task_blueprint.specifications_doc['autoselect'] = False
-        cal_task_blueprint.specifications_doc['pointing']['angle1'] = 11.11
-        cal_task_blueprint.specifications_doc['pointing']['angle2'] = 22.22
+        cal_task_blueprint.specifications_doc['pointing']['angle1'] = 1.111
+        cal_task_blueprint.specifications_doc['pointing']['angle2'] = 2.222
         subtask = create_observation_control_subtask_from_task_blueprint(cal_task_blueprint)
         self.assertEqual("defined", str(subtask.state))
         self.assertEqual("observation control", str(subtask.specifications_template.name))
         self.assertEqual("observation", str(subtask.specifications_template.type))
         self.assertEqual('J2000', subtask.specifications_doc['stations']['analog_pointing']['direction_type'])
-        self.assertEqual(11.11, subtask.specifications_doc['stations']['analog_pointing']['angle1'])
-        self.assertEqual(22.22, subtask.specifications_doc['stations']['analog_pointing']['angle2'])
+        self.assertEqual(1.111, subtask.specifications_doc['stations']['analog_pointing']['angle1'])
+        self.assertEqual(2.222, subtask.specifications_doc['stations']['analog_pointing']['angle2'])
 
 
 class SubtaskInputSelectionFilteringTest(unittest.TestCase):
@@ -323,6 +325,23 @@ class SettingTest(unittest.TestCase):
         with self.assertRaises(SubtaskSchedulingException):
             schedule_observation_subtask(obs_st)
 
+    def test_links_to_log_files(self):
+        """
+        Test if the links to logging of a subtask are correct:
+        For an observation the subtaskid is in the logging url
+        For a pipeline the radbid of the subtaskid is in the link, BUT because RA is not started it should
+        return "not available"
+        All other subtask types (like qa) should have an empty string (no logging)
+        """
+        subtask_pipeline = create_subtask_object_for_testing("pipeline", "defined")
+        subtask_qa_plots = create_subtask_object_for_testing("qa_plots", "defined")
+        subtask_observation = create_subtask_object_for_testing("observation", "defined")
+
+        self.assertIn("proxy.lofar.eu", subtask_observation.log_url)
+        self.assertIn("rtcp-%s.errors" % subtask_observation.id, subtask_observation.log_url)
+        self.assertIn("not available", subtask_pipeline.log_url)
+        self.assertEqual("", subtask_qa_plots.log_url)
+
 
 if __name__ == "__main__":
     os.environ['TZ'] = 'UTC'
diff --git a/SAS/TMSS/test/t_tasks.py b/SAS/TMSS/test/t_tasks.py
index ae878f68ad6712aab49ab8d974d4aa8a1416712f..1b4aefc9e56a1dccccb7fdbf92abac85068ace38 100755
--- a/SAS/TMSS/test/t_tasks.py
+++ b/SAS/TMSS/test/t_tasks.py
@@ -123,51 +123,9 @@ class CreationFromSchedulingUnitDraft(unittest.TestCase):
         task_drafts = scheduling_unit_draft.task_drafts.all()
         self.assertEqual(7, len(task_drafts))
 
-    def test_create_task_blueprints_and_subtasks_from_scheduling_unit_draft_with_UC1_requirements(self):
-        """
-        Create Scheduling Unit Draft with requirements_doc (read from file)
-        Create Task Blueprints and Subtasks
-        Check if tasks (7) are created:
-           Calibration 1     : 1 Observation and 1 Pipeline task
-           Target Observation: 1 Observation and 2 Pipeline tasks
-           Calibration 2     : 1 Observation and 1 Pipeline task
-        Check if subtasks (13) are created:
-           Every Observation Task: 3 subtasks (1 control, 2 QA)
-           Every Pipeline Task:    1 subtasks (1 control)
-           makes 3x3 + 4x1 = 13
-        """
-        strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines")
-
-        scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(
-                                   name="Test Scheduling Unit UC1",
-                                   requirements_doc=strategy_template.template,
-                                   requirements_template=strategy_template.scheduling_unit_template,
-                                   observation_strategy_template=strategy_template,
-                                   copy_reason=models.CopyReason.objects.get(value='template'),
-                                   generator_instance_doc="para",
-                                   copies=None,
-                                   scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data()))
-
-        create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
-
-        scheduling_unit_draft.refresh_from_db()
-        task_drafts = scheduling_unit_draft.task_drafts.all()
-        self.assertEqual(7, len(task_drafts))
-
-        scheduling_unit_blueprints = scheduling_unit_draft.scheduling_unit_blueprints.all()
-        self.assertEqual(1, len(scheduling_unit_blueprints))
-
-        scheduling_unit_blueprint = scheduling_unit_blueprints[0]
-        task_blueprints = scheduling_unit_blueprint.task_blueprints.all()
-        self.assertEqual(7, len(task_blueprints))
-        total_subtasks = 0
-        for task_blueprint in task_blueprints:
-            total_subtasks += task_blueprint.subtasks.count()
-        self.assertEqual(13, total_subtasks)
-
     def test_create_task_blueprints_and_subtasks_from_scheduling_unit_draft(self):
         """
-        Create Scheduling Unit Draft
+        Create Scheduling Unit Draft with empty task specification
         Check if the name draft (specified) is equal to name blueprint (created)
         Check with REST-call if NO tasks are created
         """
@@ -255,6 +213,386 @@ class CreationFromTaskDraft(unittest.TestCase):
         self.assertEqual(0, task_blueprint.subtasks.count())
 
 
+class TaskBlueprintStateTest(unittest.TestCase):
+    """
+    Test the Task Blueprint State which is derived from the SubTask states.
+    The result of each possible combination of these states will be checked
+    See https://support.astron.nl/confluence/display/TMSS/Specification+Flow#SpecificationFlow-TaskBlueprints
+    """
+
+    def test_state_with_no_subtasks(self):
+        """
+        Test the taskblueprint state when subtasks are not instantiated.
+        The expected state should be 'defined'
+        """
+        task_blueprint_data = TaskBlueprint_test_data(name="Task Blueprint No Subtasks")
+        task_blueprint = models.TaskBlueprint.objects.create(**task_blueprint_data)
+        self.assertEqual("defined", task_blueprint.status)
+
+    def test_states_with_one_subtask(self):
+        """
+        Test the taskblueprint state when only one subtask is instantiated, a pipeline
+        See next table where every row represents:
+            Substate(Pipeline), Expected TaskBlueprint State
+        """
+        test_table = [
+            ("defining",    "defined"),
+            ("defining",    "defined"),
+            ("defined",     "schedulable"),
+            ("scheduling",  "schedulable"),
+            ("scheduled",   "scheduled"),
+            ("starting",    "started"),
+            ("started",     "started"),
+            ("queueing",    "started"),
+            ("queued",      "started"),
+            ("finishing",   "started"),
+            ("finished",    "finished"),
+            ("cancelling",  "cancelled"),
+            ("cancelled",   "cancelled"),
+            ("error",       "error")
+        ]
+        # Create taskblueprint
+        task_blueprint_data = TaskBlueprint_test_data(name="Task Blueprint With One Subtask")
+        task_blueprint = models.TaskBlueprint.objects.create(**task_blueprint_data)
+        # Create pipeline subtask related to taskblueprint
+        subtask_data = Subtask_test_data(task_blueprint, state=models.SubtaskState.objects.get(value="defined"),
+                                         subtask_template=models.SubtaskTemplate.objects.get(name='pipeline control'))
+        subtask_pipe = models.Subtask.objects.create(**subtask_data)
+
+        # Do the actual test
+        for test_item in test_table:
+            state_pipe, expected_task_state = test_item
+            logger.info("Expected test result of substate pipeline='%s' should be '%s'" % (state_pipe, expected_task_state))
+            subtask_pipe.state = models.SubtaskState.objects.get(value=state_pipe)
+            subtask_pipe.save()
+            self.assertEqual(expected_task_state, task_blueprint.status)
+
+    def test_states_with_observation_and_qa_subtask(self):
+        """
+        Test the taskblueprint state when two subtasks are instantiated, an observation and a QA.
+        See next table where every row represents:
+            Substate(Obs), Substate(QA), Expected TaskBlueprint State
+        """
+        test_table = [
+            ("defining",    "defining",   "defined"),
+            ("defining",    "defined",    "defined"),
+            ("defined",     "defined",    "schedulable"),
+            ("scheduling",  "defined",    "schedulable"),
+            ("scheduled",   "defined",    "scheduled"),
+            ("starting",    "defined",    "started"),
+            ("started",     "defined",    "started"),
+            ("queueing",    "defined",    "started"),
+            ("queued",      "defined",    "started"),
+            ("finishing",   "defined",    "observed"),
+            ("finished",    "defined",    "observed"),
+            ("finished",    "finished",   "finished"),
+            ("cancelling",  "defined",    "cancelled"),
+            ("cancelled",   "defined",    "cancelled"),
+            ("error",       "defined",    "error"),
+            # qa finishing/finished should be not observed
+            ("defined",     "finishing",  "started"),
+            ("defined",     "finished",   "started"),
+            ("scheduled",   "finishing",  "started"),
+            ("scheduled",   "finished",   "started"),
+            # error and cancelled/ing
+            ("scheduled",   "error",      "error"),
+            ("scheduled",   "cancelling", "cancelled"),
+            ("scheduled",   "cancelled",  "cancelled"),
+            ("started",     "error",      "error"),
+            ("started",     "cancelling", "cancelled"),
+            ("started",     "cancelled",  "cancelled"),
+            ("finished",    "error",      "error"),
+            ("finished",    "cancelling", "cancelled"),
+            ("finished",    "cancelled",  "cancelled"),
+            # cancelled over error
+            ("cancelling",  "error",      "cancelled"),
+            ("cancelled",   "error",      "cancelled"),
+            ("error",       "cancelling", "cancelled"),
+            ("error",       "cancelling", "cancelled"),
+            # qa scheduled
+            ("starting",    "scheduled",  "started"),
+            ("started",     "scheduled",  "started"),
+            ("queueing",    "scheduled",  "started"),
+            ("queued",      "scheduled",  "started"),
+            ("finishing",   "scheduled",  "observed"),
+            ("finished",    "scheduled",  "observed"),
+            ("cancelling",  "scheduled", "cancelled"),
+            ("cancelled",   "scheduled", "cancelled"),
+            ("error",       "scheduled", "error"),
+        ]
+        # Create taskblueprint
+        task_blueprint_data = TaskBlueprint_test_data(name="Task Blueprint With Subtasks")
+        task_blueprint = models.TaskBlueprint.objects.create(**task_blueprint_data)
+        # Create observation and qa subtask related to taskblueprint
+        subtask_data = Subtask_test_data(task_blueprint, state=models.SubtaskState.objects.get(value="defined"),
+                                         subtask_template=models.SubtaskTemplate.objects.get(name='observation control'))
+        subtask_obs = models.Subtask.objects.create(**subtask_data)
+        subtask_data = Subtask_test_data(task_blueprint, state=models.SubtaskState.objects.get(value="defined"),
+                                         subtask_template=models.SubtaskTemplate.objects.get(name='QA file conversion'))
+        subtask_qa = models.Subtask.objects.create(**subtask_data)
+
+        # Do the actual test
+        for test_item in test_table:
+            state_obs, state_qa, expected_task_state = test_item
+            logger.info("Expected test result of substates observation='%s' and qa='%s' should be '%s'" % (state_obs, state_qa, expected_task_state))
+            subtask_obs.state = models.SubtaskState.objects.get(value=state_obs)
+            subtask_obs.save()
+            subtask_qa.state = models.SubtaskState.objects.get(value=state_qa)
+            subtask_qa.save()
+            self.assertEqual(expected_task_state, task_blueprint.status)
+
+    def test_states_with_two_observation_and_two_qa_subtasks(self):
+        """
+        Test the taskblueprint state when four subtasks are instantiated, two observation and two QA.
+        See next table where every row represents:
+            Substate(Obs1), Substate(Obs2), Substate(QA1), Substate(QA2), Expected TaskBlueprint State
+        """
+        test_table = [
+            ("finishing",   "defined",    "defined",    "defined",    "started"),
+            ("finished",    "defined",    "defined",    "defined",    "started"),
+            ("finishing",   "started",    "defined",    "defined",    "started"),
+            ("finished",    "started",    "defined",    "defined",    "started"),
+            ("finishing",   "finishing",  "defined",    "defined",    "observed"),
+            ("finished",    "finished",   "defined",    "defined",    "observed"),
+            ("finished",    "finished",   "scheduled",  "defined",    "observed"),
+            ("finished",    "finished",   "finished",   "scheduled",  "observed"),
+            ("finished",    "finished",   "finished",   "finished",   "finished"),
+            ("finished",    "finished",   "finished",   "cancelled",  "cancelled"),
+            ("finished",    "finished",   "finished",   "error",      "error"),
+            ("error",       "finished",   "finished",   "cancelled",  "cancelled"),
+        ]
+        # Create taskblueprint
+        task_blueprint_data = TaskBlueprint_test_data(name="Task Blueprint With Subtasks")
+        task_blueprint = models.TaskBlueprint.objects.create(**task_blueprint_data)
+        # Create observation and qa subtasks related to taskblueprint
+        subtask_data = Subtask_test_data(task_blueprint, state=models.SubtaskState.objects.get(value="defined"),
+                                         subtask_template=models.SubtaskTemplate.objects.get(name='observation control'))
+        subtask_obs1 = models.Subtask.objects.create(**subtask_data)
+        subtask_obs2 = models.Subtask.objects.create(**subtask_data)
+        subtask_data = Subtask_test_data(task_blueprint, state=models.SubtaskState.objects.get(value="defined"),
+                                         subtask_template=models.SubtaskTemplate.objects.get(name='QA file conversion'))
+        subtask_qa1 = models.Subtask.objects.create(**subtask_data)
+        subtask_qa2 = models.Subtask.objects.create(**subtask_data)
+
+        # Do the actual test
+        for test_item in test_table:
+            state_obs1, state_obs2, state_qa1, state_qa2, expected_task_state = test_item
+            logger.info("Expected test result of substates observation='%s','%s' and qa='%s','%s' should be '%s'" %
+                        (state_obs1, state_obs1, state_qa1, state_qa2, expected_task_state))
+            subtask_obs1.state = models.SubtaskState.objects.get(value=state_obs1)
+            subtask_obs1.save()
+            subtask_obs2.state = models.SubtaskState.objects.get(value=state_obs2)
+            subtask_obs2.save()
+            subtask_qa1.state = models.SubtaskState.objects.get(value=state_qa1)
+            subtask_qa1.save()
+            subtask_qa2.state = models.SubtaskState.objects.get(value=state_qa2)
+            subtask_qa2.save()
+            self.assertEqual(expected_task_state, task_blueprint.status)
+
+
+class SchedulingUnitBlueprintStateTest(unittest.TestCase):
+    """
+    Test the Scheduling Blueprint State which is derived from the TaskBlueprint states.
+    The result of each possible combination of these states will be checked
+    See https://support.astron.nl/confluence/display/TMSS/Specification+Flow#SpecificationFlow-SchedulingBlueprints
+    """
+
+    def create_tasks_and_subtasks(self, schedulingunit_blueprint, skip_create_subtask=[]):
+        """
+        Create three taskblueprint related to the schedulingunit_blueprint.
+        These tasks are an observation, a pipeline and an ingest task.
+        Also per task one subtask is instantiated (so makes three total) which is required to be able to set
+        the task status which is a read-only property and is derived from the subtask states
+        :param schedulingunit_blueprint:
+        :return: dictionary with task and subtask objects
+        """
+        # Create observation task
+        task_data = TaskBlueprint_test_data(name="Task Observation", scheduling_unit_blueprint=schedulingunit_blueprint)
+        task_obs = models.TaskBlueprint.objects.create(**task_data)
+        subtask_data = Subtask_test_data(task_obs, state=models.SubtaskState.objects.get(value="defined"),
+                                               subtask_template=models.SubtaskTemplate.objects.get(name='observation control'))
+        if "observation" in skip_create_subtask:
+            subtask_obs = None
+        else:
+            subtask_obs = models.Subtask.objects.create(**subtask_data)
+
+        # Create pipeline task
+        task_data = TaskBlueprint_test_data(name="Task Pipeline", scheduling_unit_blueprint=schedulingunit_blueprint)
+        task_pipe = models.TaskBlueprint.objects.create(**task_data)
+        # Need to change the default template type (observation) to pipeline
+        task_pipe.specifications_template = models.TaskTemplate.objects.get(type=models.TaskType.Choices.PIPELINE.value)
+        task_pipe.save()
+        subtask_data = Subtask_test_data(task_pipe,
+                                         state=models.SubtaskState.objects.get(value="defined"),
+                                         subtask_template=models.SubtaskTemplate.objects.get(name='pipeline control'))
+        if "pipeline" in skip_create_subtask:
+            subtask_pipe = None
+        else:
+            subtask_pipe = models.Subtask.objects.create(**subtask_data)
+
+        # Create ingest task
+        # Because there is no taskTemplate object for ingest by default I have to create one
+        test_data = TaskTemplate_test_data(name="task_template_for_ingest", task_type_value="ingest")
+        my_test_template = models.TaskTemplate.objects.create(**test_data)
+        task_data = TaskBlueprint_test_data(name="Task Ingest", scheduling_unit_blueprint=schedulingunit_blueprint)
+        task_ingest = models.TaskBlueprint.objects.create(**task_data)
+        task_ingest.specifications_template = my_test_template
+        task_ingest.save()
+        # There is no template defined for ingest yet ...but I can use pipeline control, only the template type matters
+        # ....should become other thing in future but for this test does not matter
+        subtask_data = Subtask_test_data(task_ingest,
+                                         state=models.SubtaskState.objects.get(value="defined"),
+                                         subtask_template=models.SubtaskTemplate.objects.get(name='pipeline control'))
+        if "ingest" in skip_create_subtask:
+            subtask_ingest = None
+        else:
+            subtask_ingest = models.Subtask.objects.create(**subtask_data)
+
+        return {"observation": {"task": task_obs, "subtask": subtask_obs},
+                "pipeline": {"task": task_pipe, "subtask": subtask_pipe},
+                "ingest": {"task": task_ingest, "subtask": subtask_ingest}}
+
+    def set_task_state(self, task_state, task_type, task, subtask):
+        """
+        Set the taskblueprint state for given task_type
+        State of task can only be set by setting the subtask state
+        Do not set subtask state if subtask is None
+        :param task_state: Task state to be set
+        :param task_type: observation, pipeline or ingest
+        :param task: TaskBlueprint object
+        :param subtask: SubTask object
+        """
+        # Translate task state to subtask state, mostly one-to-one but with two exceptions
+        if task_state == "observed":
+            subtask_state = "finishing"
+        elif task_state == "schedulable":
+            subtask_state = "scheduling"
+        else:
+            subtask_state = task_state
+
+        if subtask is not None:
+            subtask.state = models.SubtaskState.objects.get(value=subtask_state)
+            subtask.save()
+        # Check task.status as precondition
+        self.assertEqual(task_state, task.status,
+                         "PRECONDITION IS NOT MET. Expect %s task status to be equal to %s (but it is %s)" % (
+                         task_type, task_state, task.status))
+
+    def test_state_with_no_tasks(self):
+        """
+        Test the schedulingunitblueprint state when tasks are not instantiated.
+        the expected state should be 'defined'
+        """
+        schedulingunit_data = SchedulingUnitBlueprint_test_data(name="Scheduling Blueprint No Tasks")
+        schedulingunit_blueprint = models.SchedulingUnitBlueprint.objects.create(**schedulingunit_data)
+        self.assertEqual("defined", schedulingunit_blueprint.status)
+
+    def test_states_with_observation_pipeline_ingest_tasks_subtasks(self):
+        """
+        Test the schedulingunitblueprint state when the observation, pipeline and ingest tasks are instantiated
+        Subtasks are also instantiated so the minimal task state is 'schedulable'!
+        See next table where every row represents:
+            Taskstate(obs),  Taskstate(pipeline), Taskstate(ingest), Expected SchedulingUnitBlueprint Status
+        """
+        test_table = [
+            # normal behaviour
+            ("error",       "schedulable", "schedulable",  "error"),
+            ("cancelled",   "schedulable", "schedulable",  "cancelled"),
+            ("schedulable", "schedulable", "schedulable",  "schedulable"),
+            ("scheduled",   "schedulable", "schedulable",  "scheduled"),
+            ("started",     "schedulable", "schedulable",  "observing"),
+            ("observed",    "schedulable", "schedulable",  "observed"),
+            ("observed",    "scheduled",   "schedulable",  "observed"),
+            ("observed",    "started",     "schedulable",  "processing"),
+            ("observed",    "finished",    "schedulable",  "processing"),
+            ("observed",    "finished",    "scheduled",    "processing"),
+            ("observed",    "finished",    "started",      "processing"),
+            ("observed",    "finished",    "finished",     "processing"),
+            ("finished",    "schedulable", "schedulable",  "observed"),
+            ("finished",    "scheduled",   "schedulable",  "observed"),
+            ("finished",    "started",     "schedulable",  "processing"),
+            ("finished",    "finished",    "schedulable",  "processed"),
+            ("finished",    "finished",    "scheduled",    "processed"),
+            ("finished",    "finished",    "started",      "ingesting"),
+            ("finished",    "finished",    "finished",     "finished"),
+            # any cancelled
+            ("observed",    "cancelled",   "schedulable",  "cancelled"),
+            ("observed",    "schedulable", "cancelled",    "cancelled"),
+            ("observed",    "scheduled",   "cancelled",    "cancelled"),
+            ("observed",    "started",     "cancelled",    "cancelled"),
+            ("observed",    "cancelled",   "schedulable",  "cancelled"),
+            ("observed",    "cancelled",   "scheduled",    "cancelled"),
+            ("observed",    "cancelled",   "started",      "cancelled"),
+            ("observed",    "cancelled",   "finished",     "cancelled"),
+            ("finished",    "cancelled",   "schedulable",  "cancelled"),
+            # any error
+            ("observed",    "error",       "schedulable",  "error"),
+            ("observed",    "schedulable", "error",        "error"),
+            ("observed",    "scheduled",   "error",        "error"),
+            ("observed",    "started",     "error",        "error"),
+            ("observed",    "error",       "schedulable",  "error"),
+            ("observed",    "error",       "scheduled",    "error"),
+            ("observed",    "error",       "started",      "error"),
+            ("observed",    "error",       "finished",     "error"),
+            # cancelled over error
+            ("error",       "error",       "cancelled",    "cancelled")
+        ]
+        # Create schedulingblueprint
+        schedulingunit_data = SchedulingUnitBlueprint_test_data(name="Task Blueprint With Three Tasks")
+        schedulingunit_blueprint = models.SchedulingUnitBlueprint.objects.create(**schedulingunit_data)
+        # Create related task and subtasks
+        tasks_and_subtasks_dict = self.create_tasks_and_subtasks(schedulingunit_blueprint)
+        # Do the actual test
+        task_state_dict = {}
+        for test_item in test_table:
+            task_state_dict["observation"], task_state_dict["pipeline"], task_state_dict["ingest"], expected_schedulingunit_status = test_item
+            info_msg = "Test with states observation='%s',pipeline='%s',ingest='%s' should result in schedulingunit_blueprint.status '%s'" \
+                        % (task_state_dict["observation"], task_state_dict["pipeline"], task_state_dict["ingest"], expected_schedulingunit_status)
+            logger.info(info_msg)
+            for key in tasks_and_subtasks_dict:
+                self.set_task_state(task_state_dict[key], key, tasks_and_subtasks_dict[key]["task"], tasks_and_subtasks_dict[key]["subtask"])
+            # Check result
+            self.assertEqual(expected_schedulingunit_status, schedulingunit_blueprint.status, info_msg)
+
+    def test_states_with_observation_pipeline_ingest_tasks_no_ingest_subtask(self):
+        """
+        Test the schedulingunitblueprint state when the tasks, observation, pipeline and ingest are instantiated
+        The subtask of the ingest task is missing, which implicitly makes that task state 'defined'!
+        See next table where every row represents:
+            Taskstate(obs),  Taskstate(pipeline), Taskstate(ingest), Expected SchedulingUnitBlueprint Status
+        """
+        test_table = [
+            # normal behaviour
+            ("error",       "schedulable", "defined",  "error"),
+            ("cancelled",   "schedulable", "defined",  "cancelled"),
+            ("schedulable", "schedulable", "defined",  "schedulable"),
+            ("scheduled",   "schedulable", "defined",  "scheduled"),
+            ("started",     "schedulable", "defined",  "observing"),
+            ("observed",    "schedulable", "defined",  "observed"),
+            ("observed",    "scheduled",   "defined",  "observed"),
+            ("observed",    "started",     "defined",  "processing"),
+            ("observed",    "finished",    "defined",  "processing"),
+            ("finished",    "schedulable", "defined",  "observed"),
+        ]
+        # Create schedulingblueprint
+        schedulingunit_data = SchedulingUnitBlueprint_test_data(name="Task Blueprint With Three Tasks No Ingest Subtask")
+        schedulingunit_blueprint = models.SchedulingUnitBlueprint.objects.create(**schedulingunit_data)
+        # Create related task and subtasks (skip creation of ingest subtask)
+        tasks_and_subtasks_dict = self.create_tasks_and_subtasks(schedulingunit_blueprint, ["ingest"])
+        # Do the actual test
+        task_state_dict = {}
+        for test_item in test_table:
+            task_state_dict["observation"], task_state_dict["pipeline"], task_state_dict["ingest"], expected_schedulingunit_status = test_item
+            info_msg = "Test with states observation='%s',pipeline='%s',ingest='%s' should result in schedulingunit_blueprint.status '%s'" \
+                        % (task_state_dict["observation"], task_state_dict["pipeline"], task_state_dict["ingest"], expected_schedulingunit_status)
+            logger.info(info_msg)
+            for key in tasks_and_subtasks_dict:
+                self.set_task_state(task_state_dict[key], key, tasks_and_subtasks_dict[key]["task"], tasks_and_subtasks_dict[key]["subtask"])
+            # Check result
+            self.assertEqual(expected_schedulingunit_status, schedulingunit_blueprint.status, info_msg)
+
+
+
 if __name__ == "__main__":
     os.environ['TZ'] = 'UTC'
     unittest.main()
diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py b/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py
index ec2a1bb407b065247fa6a087618968ac0606bdfc..f5207337417bb58a4b825451ff3fe63437f9a9ea 100755
--- a/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py
+++ b/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py
@@ -472,6 +472,69 @@ class DataproductHashTest(unittest.TestCase):
         self.assertGreater(after, entry.updated_at)
 
 
+class SAPTemplateTest(unittest.TestCase):
+    def test_SAPTemplate_gets_created_with_correct_creation_timestamp(self):
+
+        # setup
+        before = datetime.utcnow()
+        entry = models.SAPTemplate.objects.create(**SAPTemplate_test_data())
+
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.created_at)
+        self.assertGreater(after, entry.created_at)
+
+    def test_SAPTemplate_update_timestamp_gets_changed_correctly(self):
+
+        # setup
+        entry = models.SAPTemplate.objects.create(**SAPTemplate_test_data())
+        before = datetime.utcnow()
+        entry.save()
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.updated_at)
+        self.assertGreater(after, entry.updated_at)
+
+
+class SAPTest(unittest.TestCase):
+    def test_SAP_gets_created_with_correct_creation_timestamp(self):
+
+        # setup
+        before = datetime.utcnow()
+        entry = models.SAP.objects.create(**SAP_test_data())
+
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.created_at)
+        self.assertGreater(after, entry.created_at)
+
+    def test_SAP_update_timestamp_gets_changed_correctly(self):
+
+        # setup
+        entry = models.SAP.objects.create(**SAP_test_data())
+        before = datetime.utcnow()
+        entry.save()
+        after = datetime.utcnow()
+
+        # assert
+        self.assertLess(before, entry.updated_at)
+        self.assertGreater(after, entry.updated_at)
+
+
+    def test_SAP_prevents_missing_specifications_template(self):
+
+        # setup
+        test_data = dict(SAP_test_data())
+        test_data['specifications_template'] = None
+
+        # assert
+        with self.assertRaises(IntegrityError):
+            models.SAP.objects.create(**test_data)
+
+
 if __name__ == "__main__":
     os.environ['TZ'] = 'UTC'
     unittest.main()
diff --git a/SAS/TMSS/test/t_tmssapp_specification_REST_API.py b/SAS/TMSS/test/t_tmssapp_specification_REST_API.py
index 5bb9e175d4324f9ecdaa40176243dde8fa0da040..018c985f4b69f7b626564bda91f076dcc49591b9 100755
--- a/SAS/TMSS/test/t_tmssapp_specification_REST_API.py
+++ b/SAS/TMSS/test/t_tmssapp_specification_REST_API.py
@@ -1335,11 +1335,10 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
         GET_OK_and_assert_equal_expected_response(self, url, test_data)
 
     def test_GET_SchedulingUnitDraft_list_view_shows_entry(self):
-
         test_data_1 = SchedulingUnitDraft_test_data("scheduler unit draft one")
-        models.SchedulingUnitDraft.objects.create(**test_data_1)
+        obj = models.SchedulingUnitDraft.objects.create(**test_data_1)
         nbr_results = models.SchedulingUnitDraft.objects.count()
-        GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_unit_draft/', test_data_1, nbr_results)
+        GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_unit_draft/', test_data_1, nbr_results, obj.id)
 
     def test_GET_SchedulingUnitDraft_view_returns_correct_entry(self):
 
@@ -1385,8 +1384,8 @@ class SchedulingUnitDraftTestCase(unittest.TestCase):
 
         # setup
         test_data_1 = SchedulingUnitDraft_test_data("scheduler unit draft one")
-        tdt_test_data_1 = TaskDraft_test_data("task draft one")
-        tdt_test_data_2 = TaskDraft_test_data("task draft two")
+        tdt_test_data_1 = TaskDraft_test_data("task draft one of su1")
+        tdt_test_data_2 = TaskDraft_test_data("task draft two of su2 ")
         scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(**test_data_1)
         task_draft_1 = models.TaskDraft.objects.create(**tdt_test_data_1)
         task_draft_1.scheduling_unit_draft = scheduling_unit_draft
@@ -1522,9 +1521,9 @@ class TaskDraftTestCase(unittest.TestCase):
     def test_GET_TaskDraft_list_view_shows_entry(self):
 
         test_data_1 = TaskDraft_test_data("task draft")
-        models.TaskDraft.objects.create(**test_data_1)
+        obj = models.TaskDraft.objects.create(**test_data_1)
         nbr_results = models.TaskDraft.objects.count()
-        GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_draft/', test_data_1, nbr_results)
+        GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_draft/', test_data_1, nbr_results, obj.id)
 
     def test_GET_TaskDraft_view_returns_correct_entry(self):
 
@@ -1540,7 +1539,7 @@ class TaskDraftTestCase(unittest.TestCase):
     def test_nested_TaskDraft_are_filtered_according_to_SchedulingUnitDraft(self):
 
         # setup
-        test_data_1 = TaskDraft_test_data("task draft one")
+        test_data_1 = TaskDraft_test_data("task draft three")
         sudt_test_data_1 = SchedulingUnitDraft_test_data("scheduling unit draft one")
         scheduling_unit_draft_1 = models.SchedulingUnitDraft.objects.create(**sudt_test_data_1)
         test_data_1 = dict(test_data_1)
@@ -1552,7 +1551,7 @@ class TaskDraftTestCase(unittest.TestCase):
     def test_TaskDraft_contains_list_of_related_TaskBlueprint(self):
 
         # setup
-        test_data_1 = TaskDraft_test_data("task draft one")
+        test_data_1 = TaskDraft_test_data("task draft four")
         tbt_test_data_1 = TaskBlueprint_test_data()
         tbt_test_data_2 = TaskBlueprint_test_data()
         task_draft = models.TaskDraft.objects.create(**test_data_1)
@@ -1569,7 +1568,7 @@ class TaskDraftTestCase(unittest.TestCase):
     def test_TaskDraft_contains_lists_of_related_TaskRelationDraft(self):
 
         # setup
-        test_data_1 = TaskDraft_test_data("task draft one")
+        test_data_1 = TaskDraft_test_data("task draft five")
         task_draft = models.TaskDraft.objects.create(**test_data_1)
 
         trdt_test_data_1 = TaskRelationDraft_test_data()
diff --git a/SAS/TMSS/test/test_utils.py b/SAS/TMSS/test/test_utils.py
index 52e18d0a8a10191285b7daaf6266fdd00768a4bc..2edeaae66b24887a9491527b23bbe6518f4456ae 100644
--- a/SAS/TMSS/test/test_utils.py
+++ b/SAS/TMSS/test/test_utils.py
@@ -214,7 +214,7 @@ class TMSSDjangoServerInstance():
 
         # wait for server to be up and running....
         # or exit via TimeoutError
-        self.check_running_server(timeout=30)
+        self.check_running_server(timeout=60)
 
     def stop(self):
         '''
diff --git a/SAS/TMSS/test/testdata/example_UC1_scheduling_unit.json b/SAS/TMSS/test/testdata/example_UC1_scheduling_unit.json
index 639ad9535ae620604a82c8bdb9752c3a253d5618..38dc23b9cc5e09253801bbce32c50273cc05b8af 100644
--- a/SAS/TMSS/test/testdata/example_UC1_scheduling_unit.json
+++ b/SAS/TMSS/test/testdata/example_UC1_scheduling_unit.json
@@ -69,16 +69,16 @@
         "stations": ["CS001","CS002","CS003"],
         "tile_beam": {
           "direction_type": "J2000",
-          "angle1": 42,
-          "angle2": 42
+          "angle1": 0.42,
+          "angle2": 0.43
         },
         "SAPs": [
           {
             "name": "target0",
             "digital_pointing": {
               "direction_type": "J2000",
-              "angle1": 24,
-              "angle2": 24
+              "angle1": 0.24,
+              "angle2": 0.25
             },
             "subbands": [
               349,
diff --git a/SAS/TMSS/test/tmss_test_data_django_models.py b/SAS/TMSS/test/tmss_test_data_django_models.py
index 4aed6d9eebc53f9d76c5f24f6270d69e518d906f..f58583962a1887ddc6e3e6e136351ede386ba255 100644
--- a/SAS/TMSS/test/tmss_test_data_django_models.py
+++ b/SAS/TMSS/test/tmss_test_data_django_models.py
@@ -84,11 +84,14 @@ def SchedulingUnitObservingStrategyTemplate_test_data(name="my_SchedulingUnitObs
             "scheduling_unit_template": scheduling_unit_template,
             "tags": ["TMSS", "TESTING"]}
 
-def TaskTemplate_test_data(name="my TaskTemplate", description:str=None, schema:dict=None) -> dict:
+def TaskTemplate_test_data(name="my TaskTemplate", description:str=None, schema:dict=None, task_type_value:str=None) -> dict:
     if schema is None:
         schema = minimal_json_schema(properties={"mykey": {}})
 
-    return {"type": models.TaskType.objects.get(value='observation'),
+    if task_type_value is None:
+        task_type_value = 'observation'
+
+    return {"type": models.TaskType.objects.get(value=task_type_value),
             "validation_code_js":"",
             "name": name,
             "description": description or "<no description>",
@@ -463,3 +466,24 @@ def DataproductHash_test_data() -> dict:
             "algorithm": models.Algorithm.objects.get(value='md5'),
             "hash": "myhash_1",
             "tags": ['tmss', 'testing']}
+
+
+def SAP_test_data(specifications_template=None, specifications_doc=None) -> dict:
+
+    if specifications_template is None:
+        specifications_template = models.SAPTemplate.objects.create(**SAPTemplate_test_data())
+
+    if specifications_doc is None:
+        specifications_doc = get_default_json_object_for_schema(specifications_template.schema)
+
+    return {"specifications_doc": specifications_doc,
+            "specifications_template": specifications_template,
+            "tags": ['tmss', 'testing']}
+
+
+def SAPTemplate_test_data() -> dict:
+    return {"name": "my_sap_template" + str(uuid.uuid4()),
+            "description": 'My SAP test template',
+            "schema": minimal_json_schema(),
+            "tags": ["TMSS", "TESTING"]}
+
diff --git a/SAS/TMSS/test/tmss_test_data_rest.py b/SAS/TMSS/test/tmss_test_data_rest.py
index 76f23608e242dd7efed4380290729249c999c476..e0e121edb5c823c74e8619fd8e59c131bebc11de 100644
--- a/SAS/TMSS/test/tmss_test_data_rest.py
+++ b/SAS/TMSS/test/tmss_test_data_rest.py
@@ -534,7 +534,9 @@ class TMSSRESTTestDataCreator():
                     specifications_doc=None, specifications_template_url=None,
                     subtask_output_url=None,
                     dataproduct_feedback_doc=None, dataproduct_feedback_template_url=None,
-                    dataformat="MeasurementSet", datatype="visibilities"):
+                    dataformat="MeasurementSet", datatype="visibilities",
+                    sap_url=None):
+
         if specifications_template_url is None:
             specifications_template_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/dataproduct_specifications_template/')
     
@@ -550,6 +552,9 @@ class TMSSRESTTestDataCreator():
         if dataproduct_feedback_doc is None:
             dataproduct_feedback_doc = self.get_response_as_json_object(dataproduct_feedback_template_url+'/default')
 
+        if sap_url is None:
+            sap_url = self.post_data_and_get_url(self.SAP(), '/sap/')
+
         return {"filename": filename,
                 "directory": directory,
                 "dataformat": "%s/dataformat/%s" % (self.django_api_url, dataformat),
@@ -564,7 +569,8 @@ class TMSSRESTTestDataCreator():
                 "expected_size": 1234,
                 "size": 123,
                 "feedback_doc": dataproduct_feedback_doc,
-                "feedback_template": dataproduct_feedback_template_url
+                "feedback_template": dataproduct_feedback_template_url,
+                "SAP": sap_url
                 }
     
     def AntennaSet(self, name="antennaset1"):
@@ -648,4 +654,21 @@ class TMSSRESTTestDataCreator():
                 "cluster": cluster_url,
                 "directory": '/',
                 "tags": ['tmss', 'testing']}
-    
+
+    def SAPTemplate(self):
+        return {"name": "my_sap_template" + str(uuid.uuid4()),
+                "description": 'My SAP test template',
+                "schema": minimal_json_schema(),
+                "tags": ["TMSS", "TESTING"]}
+
+    def SAP(self, specifications_template_url=None, specifications_doc=None):
+
+        if specifications_template_url is None:
+            specifications_template_url = self.post_data_and_get_url(self.SAPTemplate(), '/sap_template/')
+
+        if specifications_doc is None:
+            specifications_doc = self.get_response_as_json_object(specifications_template_url + '/default')
+
+        return {"specifications_doc": specifications_doc,
+                "specifications_template": specifications_template_url,
+                "tags": ['tmss', 'testing']}
\ No newline at end of file
diff --git a/SAS/TMSS/test/tmss_test_environment_unittest_setup.py b/SAS/TMSS/test/tmss_test_environment_unittest_setup.py
index dc4f72644bf2b40058a6eb6571218f7cf6fd3d89..04b9882454838c474e06523d8017ebc9320aca07 100644
--- a/SAS/TMSS/test/tmss_test_environment_unittest_setup.py
+++ b/SAS/TMSS/test/tmss_test_environment_unittest_setup.py
@@ -134,7 +134,7 @@ def GET_and_assert_equal_expected_code(test_instance, url, expected_code):
     return r_dict
 
 
-def GET_and_assert_in_expected_response_result_list(test_instance, url, expected_content, expected_nbr_results):
+def GET_and_assert_in_expected_response_result_list(test_instance, url, expected_content, expected_nbr_results, expected_id=None):
     """
     GET from url and assert the expected code is returned and the expected content is in the response content
     Use this check when multiple results (list) are returned
@@ -159,7 +159,17 @@ def GET_and_assert_in_expected_response_result_list(test_instance, url, expected
             test_instance.assertIn(key, item.keys())
 
     if url_check:
-        assertDataWithUrls(test_instance, r_dict['results'][expected_nbr_results-1], expected_content)
+        # Find the expected id in the result list if the parameter is given (of course for just one result it does not make sense)
+        # There was an 'old' assumption that the last one should be taken, but that is not reliable
+        # this is the 'old' assumption that the last object added will also be the last one in the result dict;
+        # it is also the fallback in case expected_id is not given or (unexpectedly) not found in the results
+        expected_idx = expected_nbr_results-1
+        if expected_id is not None:
+            for idx in range(0, expected_nbr_results):
+                if r_dict['results'][idx]['id'] == expected_id:
+                    expected_idx = idx
+                    break
+        assertDataWithUrls(test_instance, r_dict['results'][expected_idx], expected_content)
     return r_dict