diff --git a/.gitignore b/.gitignore
index fe3291cd80a1f65ccfec7caa6f9426e7d9974d99..93d22e6d47aebc616cb6afb76369b01ba9dd04f4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,3 +22,4 @@ SAS/OTB/jRSP/configure.in
 **/.idea
 SAS/TMSS/frontend/tmss_webapp/package-lock.json
 SAS/TMSS/frontend/tmss_webapp/node_modules/
+SAS/TMSS/frontend/tmss_webapp/debug.log
diff --git a/Docker/lofar-ci/Dockerfile_ci_sas b/Docker/lofar-ci/Dockerfile_ci_sas
index a79d7fa701a8bb79b9ab5236fa8b24b5cc1d6e38..8c03dbfab9bd3e22c96eb7513a3511f7f131b5c6 100644
--- a/Docker/lofar-ci/Dockerfile_ci_sas
+++ b/Docker/lofar-ci/Dockerfile_ci_sas
@@ -16,7 +16,7 @@ RUN yum erase -y postgresql postgresql-server postgresql-devel && \
     cd /bin && ln -s /usr/pgsql-9.6/bin/initdb && ln -s /usr/pgsql-9.6/bin/postgres
 ENV PATH /usr/pgsql-9.6/bin:$PATH 
 
-RUN pip3 install cython kombu lxml requests pygcn xmljson mysql-connector-python python-dateutil Django==3.0.9 djangorestframework==3.11.1 djangorestframework-xml ldap==1.0.2 flask fabric coverage python-qpid-proton PyGreSQL numpy h5py psycopg2 testing.postgresql Flask-Testing scipy Markdown django-filter python-ldap python-ldap-test ldap3 django-jsonforms django-json-widget django-jsoneditor drf-yasg flex swagger-spec-validator django-auth-ldap mozilla-django-oidc jsonschema comet pyxb==1.2.5 graphviz isodate astropy astroplan packaging django-debug-toolbar
+RUN pip3 install cython kombu lxml requests pygcn xmljson mysql-connector-python python-dateutil Django==3.0.9 djangorestframework==3.11.1 djangorestframework-xml ldap==1.0.2 flask fabric coverage python-qpid-proton PyGreSQL numpy h5py psycopg2 testing.postgresql Flask-Testing scipy Markdown django-filter python-ldap python-ldap-test ldap3 django-jsonforms django-json-widget django-jsoneditor drf-yasg flex swagger-spec-validator django-auth-ldap mozilla-django-oidc jsonschema comet pyxb==1.2.5 graphviz isodate astropy astroplan packaging django-debug-toolbar pymysql
 
 #Viewflow package 
 RUN pip3 install django-material django-viewflow
diff --git a/SAS/Scheduler/src/taskdialog.cpp b/SAS/Scheduler/src/taskdialog.cpp
index 49b1ef01849dce5aa4a9f523eefb3f96eb4167db..339ef7a54573d003848e4e0ad566d0ba4d3c814c 100644
--- a/SAS/Scheduler/src/taskdialog.cpp
+++ b/SAS/Scheduler/src/taskdialog.cpp
@@ -1655,6 +1655,8 @@ void TaskDialog::apply(bool close) {
 		close = false;
 	}
 	if (close) {
+		// SW-933
+		clearMultiTasks();
 		this->close();
 	}
 	else {
@@ -4647,6 +4649,9 @@ void TaskDialog::showMultiEdit(std::vector<Task *> &tasks) {
 	isMultiTasks = true;
 	enableTabs();
 
+	// SW-933
+	clearMultiTasks();
+
 	itsDataSlotDialog.clear();
 
     ui.lineEditCreationDate->clear();
diff --git a/SAS/TMSS/client/lib/tmss_http_rest_client.py b/SAS/TMSS/client/lib/tmss_http_rest_client.py
index 5e218c26b8d5f8c901e84af320e14cc4d9b22baf..f7d3fee1f76251001eb29337d4952f7ad8517511 100644
--- a/SAS/TMSS/client/lib/tmss_http_rest_client.py
+++ b/SAS/TMSS/client/lib/tmss_http_rest_client.py
@@ -156,10 +156,13 @@ class TMSSsession(object):
 
         return self.get_path_as_json_object("subtask", clauses)
 
+    def get_full_url_for_path(self, path: str) -> str:
+        '''get the full URL for the given path'''
+        return '%s/%s' % (self.base_url, path.strip('/'))
+
     def get_path_as_json_object(self, path: str, params={}) -> object:
         '''get resource at the given path, interpret it as json, and return it as as native object (usually a dict or a list of dicts)'''
-        full_url = '%s/%s' % (self.base_url, path.strip('/'))
-        return self.get_url_as_json_object(full_url, params=params)
+        return self.get_url_as_json_object(self.get_full_url_for_path(path=path), params=params)
 
     def get_url_as_json_object(self, full_url: str, params={}) -> object:
         '''get resource at the given full url (including http://<base_url>, interpret it as json, and return it as as native object (usually a dict or a list of dicts)'''
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js b/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js
index 9b4a1a0b03870df47a160564b882e3bbe15865b4..a045bc5f47a2b7559902a5ba1f0b9f996bafff9a 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js
@@ -572,7 +572,7 @@ export class CalendarTimeline extends Component {
         if (this.state.viewType===UIConstants.timeline.types.NORMAL) {
             const startTime = moment().utc().add(-24, 'hours');
             const endTime = moment().utc().add(24, 'hours');
-            let result = this.props.dateRangeCallback(startTime, endTime);
+            let result = await this.props.dateRangeCallback(startTime, endTime);
             let group = DEFAULT_GROUP.concat(result.group);
             this.setState({defaultStartTime: startTime, defaultEndTime: endTime, 
                             zoomLevel: DEFAULT_ZOOM_LEVEL, dayHeaderVisible: true, 
@@ -626,12 +626,7 @@ export class CalendarTimeline extends Component {
                 }
             }
             this.loadLSTDateHeaderMap(startTime, endTime, 'hour');
-            let result = {};
-            if (this.state.viewType===UIConstants.timeline.types.WEEKVIEW) {
-                result = await this.props.dateRangeCallback(startTime, endTime);
-            }   else {
-                result = this.props.dateRangeCallback(startTime, endTime);
-            }
+            let result = await this.props.dateRangeCallback(startTime, endTime);
             let group = DEFAULT_GROUP.concat(result.group);
             this.setState({zoomLevel: zoomLevel, defaultStartTime: startTime, defaultEndTime: endTime, 
                             isTimelineZoom: isTimelineZoom, zoomRange: null, 
@@ -650,17 +645,12 @@ export class CalendarTimeline extends Component {
         let secondsToMove = visibleTimeDiff / 1000 / 10 ;
         let newVisibleTimeStart = visibleTimeStart.clone().add(-1 * secondsToMove, 'seconds');
         let newVisibleTimeEnd = visibleTimeEnd.clone().add(-1 * secondsToMove, 'seconds');
-        let result = {};
         if (this.state.viewType === UIConstants.timeline.types.WEEKVIEW &&
             newVisibleTimeStart.isBefore(this.state.timelineStartDate)) {
             newVisibleTimeStart = this.state.timelineStartDate.clone().hours(0).minutes(0).seconds(0);
             newVisibleTimeEnd = newVisibleTimeStart.clone().add(visibleTimeDiff/1000, 'seconds');
         }
-        if (this.state.viewType === UIConstants.timeline.types.WEEKVIEW) {
-            result = await this.props.dateRangeCallback(newVisibleTimeStart, newVisibleTimeEnd);
-        }   else {
-            result = this.props.dateRangeCallback(newVisibleTimeStart, newVisibleTimeEnd);
-        }
+        let result = await this.props.dateRangeCallback(newVisibleTimeStart, newVisibleTimeEnd);
         this.loadLSTDateHeaderMap(newVisibleTimeStart, newVisibleTimeEnd, 'hour');
         let group = DEFAULT_GROUP.concat(result.group);
         this.setState({defaultStartTime: newVisibleTimeStart,
@@ -678,17 +668,12 @@ export class CalendarTimeline extends Component {
         const secondsToMove = visibleTimeDiff / 1000 / 10 ;
         let newVisibleTimeStart = visibleTimeStart.clone().add(1 * secondsToMove, 'seconds');
         let newVisibleTimeEnd = visibleTimeEnd.clone().add(1 * secondsToMove, 'seconds');
-        let result = {};
         if (this.state.viewType === UIConstants.timeline.types.WEEKVIEW &&
             newVisibleTimeEnd.isAfter(this.state.timelineEndDate)) {
             newVisibleTimeEnd = this.state.timelineEndDate.clone().hours(23).minutes(59).minutes(59);
             newVisibleTimeStart = newVisibleTimeEnd.clone().add((-1 * visibleTimeDiff/1000), 'seconds');
         }
-        if (this.state.viewType === UIConstants.timeline.types.WEEKVIEW) {
-           result = await this.props.dateRangeCallback(visibleTimeStart, visibleTimeEnd);
-        }   else {
-            result = this.props.dateRangeCallback(visibleTimeStart, visibleTimeEnd);
-        }
+        let result = await this.props.dateRangeCallback(visibleTimeStart, visibleTimeEnd);
         this.loadLSTDateHeaderMap(newVisibleTimeStart, newVisibleTimeEnd, 'hour');
         let group = DEFAULT_GROUP.concat(result.group);
         this.setState({defaultStartTime: newVisibleTimeStart,
@@ -726,13 +711,13 @@ export class CalendarTimeline extends Component {
      * calls back parent to get updated group and item records, LST date header values
      * @param {array} value - array of moment object
      */
-    setZoomRange(value){
+    async setZoomRange(value){
         let startDate, endDate = null;
         if (value) {
             // Set all values only when both range values available in the array else just set the value to reflect in the date selection component
             if (value[1]!==null) {
-                startDate = moment.utc(moment(value[0]).format("DD-MMM-YYYY"));
-                endDate = moment.utc(moment(value[1]).format("DD-MMM-YYYY 23:59:59"));
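+                // Use ISO-style YYYY-MM-DD strings so moment.utc() parses the selected range reliably (the previous DD-MMM-YYYY form relies on moment's deprecated non-ISO parsing)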
+                startDate = moment.utc(moment(value[0]).format("YYYY-MM-DD"));
+                endDate = moment.utc(moment(value[1]).format("YYYY-MM-DD 23:59:59"));
                 let dayHeaderVisible = this.state.dayHeaderVisible;
                 let weekHeaderVisible = this.state.weekHeaderVisible;
                 let lstDateHeaderUnit = this.state.lstDateHeaderUnit;
@@ -746,7 +731,7 @@ export class CalendarTimeline extends Component {
                                 dayHeaderVisible: dayHeaderVisible, weekHeaderVisible: weekHeaderVisible, 
                                 lstDateHeaderUnit: lstDateHeaderUnit
                                 });
-                const result = this.props.dateRangeCallback(startDate, endDate);
+                const result = await this.props.dateRangeCallback(startDate, endDate);
                 let group = DEFAULT_GROUP.concat(result.group);
                 this.setState({group: group, items: result.items});
                 this.loadLSTDateHeaderMap(startDate, endDate, lstDateHeaderUnit);
@@ -795,7 +780,7 @@ export class CalendarTimeline extends Component {
         return (
             <React.Fragment>
                 {/* Toolbar for the timeline */}
-                <div className="p-fluid p-grid timeline-toolbar">
+                <div className={`p-fluid p-grid timeline-toolbar ${this.props.className?this.props.className:''}`}>
                     {/* Clock Display */}
                     <div className="p-col-2" style={{padding: '0px 0px 0px 10px'}}>
                         <div style={{marginTop: "0px"}}>
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss
index 7161a159b4277ff85a1b7b7e1b107b0d97dca3e1..1c5e635be11f8ff590511f5b9407cbc7242a250a 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss
+++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss
@@ -6,6 +6,20 @@
     background-color: #f0f0f0;
 }
 
+.timeline-view-toolbar {
+    margin-left: 10px;
+}
+
+.timeline-view-toolbar label {
+    margin-bottom: 0px;
+    vertical-align: top;
+    margin-right: 10px;
+}
+
+.timeline-toolbar-margin-top-0 {
+    margin-top: 0px !important;
+}
+
 .timeline-toolbar {
     margin-top: 25px;
     margin-bottom: 2px;
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/view.js
index 1dce9c77522247d114a91f6181ce3f05a00e1861..d2dfd4708c542b56153824199dc5a9ebc02e9f17 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/view.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/view.js
@@ -179,7 +179,7 @@ export class ProjectView extends Component {
                             <div className="p-field p-grid resource-input-grid">
                                 <ResourceDisplayList projectQuota={this.state.projectQuota}  unitMap={this.resourceUnitMap} />
                             </div>
-                            {/* Show Schedule Unit blongest to Project */}
+                            {/* Show Scheduling Units belonging to this Project */}
                             <div className="p-fluid">
                                 <div className="p-field p-grid">
                                     <div className="col-lg-3 col-md-3 col-sm-12">
@@ -187,7 +187,7 @@ export class ProjectView extends Component {
                                     </div>
                                 </div>
                             </div>
-                            <SchedulingUnitList project={this.state.project.name}/>
+                            <SchedulingUnitList project={this.state.project.name} hideProjectColumn/>
                         </div>
                     </React.Fragment>
                 }
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js
index 9bb43e3f0c7355dbe0c1c0fdf825715090e9ad2a..570ca6388bd7f10954ccb6fe2731e0b424f92435 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js
@@ -9,37 +9,42 @@ import ScheduleService from '../../services/schedule.service';
 class SchedulingUnitList extends Component{
      
     constructor(props){
-        super(props)
+        super(props)
+        const defaultcolumns = {
+            type: {
+                name: "Type",
+                filter: "select"
+            },
+            name: "Name",
+            description: "Description",
+            project: "Project",
+            created_at: {
+                name: "Created At",
+                filter: "date"
+            },
+            updated_at: {
+                name: "Updated At",
+                filter: "date"
+            },
+            requirements_template_id: {
+                name: "Template",
+                filter: "select"
+            },
+            start_time: "Start Time",
+            stop_time: "End time",
+            duration: "Duration (HH:mm:ss)",
+            status: "Status"
+        };
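+        // When rendered inside the Project view, the parent passes hideProjectColumn so the redundant Project column is dropped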
+        if (props.hideProjectColumn) {
+            delete defaultcolumns['project'];
+        }
         this.state = {
             scheduleunit: [],
             paths: [{
                 "View": "/schedulingunit/view",
             }],
             isLoading: true,
-            defaultcolumns: [ {
-                type:{
-                    name:"Type",
-                    filter:"select"
-                },
-                name:"Name",
-                description:"Description",
-                created_at:{
-                    name:"Created At",
-                    filter: "date"
-                },
-                updated_at:{
-                    name:"Updated At",
-                    filter: "date"
-                },
-                requirements_template_id:{
-                    name: "Template",
-                    filter: "select"
-                },
-                start_time:"Start Time",
-                stop_time:"End time",
-                duration:"Duration (HH:mm:ss)",
-                status:"Status"
-                }],
+            defaultcolumns: [defaultcolumns],
             optionalcolumns:  [{
                 actionpath:"actionpath",
             }],
@@ -57,25 +62,30 @@ class SchedulingUnitList extends Component{
         //Get SU Draft/Blueprints for the Project ID. This request is coming from view Project page. Otherwise it will show all SU
         let project = this.props.project;
         if(project){
-            let scheduleunits = await ScheduleService.getSchedulingListByProject(project);
-            if(scheduleunits){
+            let scheduleunits = await ScheduleService.getSchedulingListByProject(project);
+            if(scheduleunits){
                 this.setState({
                     scheduleunit: scheduleunits, isLoading: false
                 });
             }
-        }else{
+        }else{ 
+            const schedulingSet = await ScheduleService.getSchedulingSets();
+            const projects = await ScheduleService.getProjectList();
             const bluePrint = await ScheduleService.getSchedulingUnitBlueprint();
             ScheduleService.getSchedulingUnitDraft().then(scheduleunit =>{
                 const output = [];
                 var scheduleunits = scheduleunit.data.results;
                 for( const scheduleunit  of scheduleunits){
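+                    // Look up the scheduling set of this SU and, via the set, its project so the Project column can be filled in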
+                    const suSet = schedulingSet.find((set) => scheduleunit.scheduling_set_id === set.id);
+                    const project = projects.find((proj) => suSet.project_id === proj.name);
                     const blueprintdata = bluePrint.data.results.filter(i => i.draft_id === scheduleunit.id);
                     blueprintdata.map(blueP => { 
-                        blueP.duration = moment.utc((blueP.duration || 0)*1000).format('HH:mm:ss'); 
+                        blueP.duration = moment.utc((blueP.duration || 0)*1000).format('HH:mm:ss');
                         blueP.type="Blueprint"; 
                         blueP['actionpath'] ='/schedulingunit/view/blueprint/'+blueP.id;
                         blueP['created_at'] = moment(blueP['created_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss");
                         blueP['updated_at'] = moment(blueP['updated_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss");
+                        blueP.project = project.name;
                         return blueP; 
                     });
                     output.push(...blueprintdata);
@@ -84,6 +94,7 @@ class SchedulingUnitList extends Component{
                     scheduleunit['duration'] = moment.utc((scheduleunit.duration || 0)*1000).format('HH:mm:ss');
                     scheduleunit['created_at'] = moment(scheduleunit['created_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss");
                     scheduleunit['updated_at'] = moment(scheduleunit['updated_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss");
+                    scheduleunit.project = project.name;
                     output.push(scheduleunit);
                 }
                 this.setState({
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js
index bb5635a0b0bb2e7fc15ad9ae16d62bcd555a8bb1..16524edc0f57b879a3302545d2ba588c2fcc087e 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js
@@ -4,6 +4,7 @@ import moment from 'moment';
 import _ from 'lodash';
 
 // import SplitPane, { Pane }  from 'react-split-pane';
+import {InputSwitch} from 'primereact/inputswitch';
 
 import AppLoader from '../../layout/components/AppLoader';
 import PageHeader from '../../layout/components/PageHeader';
@@ -44,6 +45,7 @@ export class TimelineView extends Component {
             suTaskList:[],
             isSummaryLoading: false
         }
+        this.STATUS_BEFORE_SCHEDULED = ['defining', 'defined', 'schedulable'];  // Statuses before 'scheduled'; for these, stations are read from the task's station_groups instead of from subtasks
 
         this.onItemClick = this.onItemClick.bind(this);
         this.closeSUDets = this.closeSUDets.bind(this);
@@ -59,11 +61,11 @@ export class TimelineView extends Component {
                             ScheduleService.getSchedulingUnitDraft(),
                             ScheduleService.getSchedulingSets(),
                             UtilService.getUTC()] ;
-        Promise.all(promises).then(responses => {
+        Promise.all(promises).then(async(responses) => {
             const projects = responses[0];
             const suBlueprints = _.sortBy(responses[1].data.results, 'name');
             const suDrafts = responses[2].data.results;
-            const suSets = responses[3]
+            const suSets = responses[3];
             const group = [], items = [];
             const currentUTC = moment.utc(responses[4]);
             const defaultStartTime = currentUTC.clone().add(-24, 'hours');      // Default start time, this should be updated if default view is changed.
@@ -81,15 +83,24 @@ export class TimelineView extends Component {
                         suBlueprint.suSet = suSet;
                         suBlueprint.durationInSec = suBlueprint.duration;
                         suBlueprint.duration = UnitConverter.getSecsToHHmmss(suBlueprint.duration);
+                        // Load subtasks (needed to read stations from them) only when the SU status is 'scheduled' or later
+                        const loadSubtasks = this.STATUS_BEFORE_SCHEDULED.indexOf(suBlueprint.status.toLowerCase()) < 0;
                         // Select only blueprints with start_time and stop_time in the default time limit
                         if (suBlueprint.start_time && 
                             (moment.utc(suBlueprint.start_time).isBetween(defaultStartTime, defaultEndTime) ||
                              moment.utc(suBlueprint.stop_time).isBetween(defaultStartTime, defaultEndTime))) {
+                            suBlueprint.tasks = await ScheduleService.getTaskBlueprintsBySchedulingUnit(suBlueprint, true, loadSubtasks);
                             items.push(this.getTimelineItem(suBlueprint));
                             if (!_.find(group, {'id': suDraft.id})) {
                                 group.push({'id': suDraft.id, title: suDraft.name});
                             }
                             suList.push(suBlueprint);
+                        }   else if (suBlueprint.start_time) {  // For other SUs with start_time load details asynchronously
+                            ScheduleService.getTaskBlueprintsBySchedulingUnit(suBlueprint, true, loadSubtasks)
+                                .then(tasks => {
+                                    suBlueprint.tasks = tasks;
+                            })
                         }
                     }
                 }
@@ -108,7 +119,7 @@ export class TimelineView extends Component {
     getTimelineItem(suBlueprint) {
         // Temporary for testing
         const diffOfCurrAndStart = moment().diff(moment(suBlueprint.stop_time), 'seconds');
-        suBlueprint.status = diffOfCurrAndStart>=0?"FINISHED":"DEFINED";
+        // suBlueprint.status = diffOfCurrAndStart>=0?"FINISHED":"DEFINED";
         let item = { id: suBlueprint.id, 
             group: suBlueprint.suDraft.id,
             title: `${suBlueprint.project.name} - ${suBlueprint.suDraft.name} - ${(suBlueprint.durationInSec/3600).toFixed(2)}Hrs`,
@@ -136,7 +147,7 @@ export class TimelineView extends Component {
                 suTaskList: !fetchDetails?this.state.suTaskList:[],
                 canExtendSUList: false, canShrinkSUList:false});
             if (fetchDetails) {
-                const suBlueprint = _.find(this.state.suBlueprints, {id: item.id});
+                const suBlueprint = _.find(this.state.suBlueprints, {id: (this.state.stationView?parseInt(item.id.split('-')[0]):item.id)});
                 ScheduleService.getTaskBlueprintsBySchedulingUnit(suBlueprint, true)
                     .then(taskList => {
                         for (let task of taskList) {
@@ -163,17 +174,24 @@ export class TimelineView extends Component {
      * @param {moment} startTime 
      * @param {moment} endTime 
      */
-    dateRangeCallback(startTime, endTime) {
+    async dateRangeCallback(startTime, endTime) {
         let suBlueprintList = [], group=[], items = [];
         if (startTime && endTime) {
             for (const suBlueprint of this.state.suBlueprints) {
                 if (moment.utc(suBlueprint.start_time).isBetween(startTime, endTime) 
                         || moment.utc(suBlueprint.stop_time).isBetween(startTime, endTime)) {
-                    suBlueprintList.push(suBlueprint);
-                    items.push(this.getTimelineItem(suBlueprint));
-                    if (!_.find(group, {'id': suBlueprint.suDraft.id})) {
-                        group.push({'id': suBlueprint.suDraft.id, title: suBlueprint.suDraft.name});
+                    let timelineItem = this.getTimelineItem(suBlueprint);
+                    if (this.state.stationView) {
+                        const loadSubtasks = this.STATUS_BEFORE_SCHEDULED.indexOf(suBlueprint.status.toLowerCase()) < 0 ;
+                        suBlueprint.tasks = await ScheduleService.getTaskBlueprintsBySchedulingUnit(suBlueprint, true, loadSubtasks);
+                        this.getStationItemGroups(suBlueprint, timelineItem, group, items);
+                    }   else {
+                        items.push(timelineItem);
+                        if (!_.find(group, {'id': suBlueprint.suDraft.id})) {
+                            group.push({'id': suBlueprint.suDraft.id, title: suBlueprint.suDraft.name});
+                        }
                     }
+                    suBlueprintList.push(suBlueprint);
                 } 
             }
         }   else {
@@ -184,7 +202,48 @@ export class TimelineView extends Component {
         this.setState({suBlueprintList: _.filter(suBlueprintList, (suBlueprint) => {return suBlueprint.start_time!=null})});
         // On range change close the Details pane
         // this.closeSUDets();
-        return {group: group, items: items};
+        return {group: _.sortBy(group,'id'), items: items};
+    }
+
+    /**
+     * To get items and groups for station view
+     * @param {Object} suBlueprint 
+     * @param {Object} timelineItem 
+     * @param {Array} group 
+     * @param {Array} items 
+     */
+    getStationItemGroups(suBlueprint, timelineItem, group, items) {
+        /** Get all observation tasks */
+        const observationTasks = _.filter(suBlueprint.tasks, (task) => { return task.template.type_value.toLowerCase() === "observation"});
+        let stations = [];
+        for (const observationTask of observationTasks) {
+            /** If the status of the SU is before 'scheduled', get all stations from the station_groups in the task specifications_doc */
+            if (this.STATUS_BEFORE_SCHEDULED.indexOf(suBlueprint.status.toLowerCase()) >= 0
+                && observationTask.specifications_doc.station_groups) {
+                for (const grpStations of _.map(observationTask.specifications_doc.station_groups, "stations")) {
+                    stations = _.concat(stations, grpStations);
+                }
+            }   else if (this.STATUS_BEFORE_SCHEDULED.indexOf(suBlueprint.status.toLowerCase()) < 0
+                            && observationTask.subTasks) {
+                /** If the status of the SU is 'scheduled' or later, get the stations from the subtask specifications */
+                for (const subtask of observationTask.subTasks) {
+                    if (subtask.specifications_doc.stations) {
+                        stations = _.concat(stations, subtask.specifications_doc.stations.station_list);
+                    }
+                }
+            }
+        }
+        stations = _.uniq(stations);
+        /** Group the items by station */
+        for (const station of stations) {
+            let stationItem = _.cloneDeep(timelineItem);
+            stationItem.id = `${stationItem.id}-${station}`;
+            stationItem.group = station;
+            items.push(stationItem);
+            if (!_.find(group, {'id': station})) {
+                group.push({'id': station, title: station});
+            }
+        }
     }
 
     /**
@@ -215,13 +274,18 @@ export class TimelineView extends Component {
         const suBlueprints = this.state.suBlueprints;
         for (const data of filteredData) {
             const suBlueprint = _.find(suBlueprints, {actionpath: data.actionpath});
-            items.push(this.getTimelineItem(suBlueprint));
-            if (!_.find(group, {'id': suBlueprint.suDraft.id})) {
-                group.push({'id': suBlueprint.suDraft.id, title: suBlueprint.suDraft.name});
+            let timelineItem = this.getTimelineItem(suBlueprint);
+            if (this.state.stationView) {
+                this.getStationItemGroups(suBlueprint, timelineItem, group, items);
+            }   else {
+                items.push(timelineItem);
+                if (!_.find(group, {'id': suBlueprint.suDraft.id})) {
+                    group.push({'id': suBlueprint.suDraft.id, title: suBlueprint.suDraft.name});
+                }
             }
         }
         if (this.timeline) {
-            this.timeline.updateTimeline({group: group, items: items});
+            this.timeline.updateTimeline({group: _.sortBy(group,"id"), items: items});
         }
     }
 
@@ -234,7 +298,7 @@ export class TimelineView extends Component {
         const canShrinkSUList = this.state.canShrinkSUList;
         let suBlueprint = null;
         if (isSUDetsVisible) {
-            suBlueprint = _.find(this.state.suBlueprints, {id: this.state.selectedItem.id});
+            suBlueprint = _.find(this.state.suBlueprints, {id:  this.state.stationView?parseInt(this.state.selectedItem.id.split('-')[0]):this.state.selectedItem.id});
         }
         return (
             <React.Fragment>
@@ -274,12 +338,17 @@ export class TimelineView extends Component {
                                         <i className="pi pi-step-forward"></i>
                                     </button>
                                 </div> 
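+                                {/* Station View toggle: when enabled, timeline items are grouped per station instead of per scheduling unit */}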
+                                <div className="timeline-view-toolbar">
+                                    <label>Station View</label>
+                                    <InputSwitch checked={this.state.stationView} onChange={(e) => {this.closeSUDets();this.setState({stationView: e.value})}} />
+                                </div>
                                 <Timeline ref={(tl)=>{this.timeline=tl}} 
                                         group={this.state.group} 
                                         items={this.state.items}
                                         currentUTC={this.state.currentUTC}
                                         rowHeight={30} itemClickCallback={this.onItemClick}
-                                        dateRangeCallback={this.dateRangeCallback}></Timeline>
+                                        dateRangeCallback={this.dateRangeCallback}
+                                        className="timeline-toolbar-margin-top-0"></Timeline>
                             </div>
                             {/* Details Panel */}
                             {this.state.isSUDetsVisible &&
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js
index a2a13b18f9dd4c090bf5e5f9c5239ba219aa6ec3..4e11e19eaca14e596b889f35c882306b0352210d 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js
@@ -85,7 +85,7 @@ export class WeekTimelineView extends Component {
                         const suBlueprint = _.find(suBlueprints, {'id': suBlueprintId});
                         suBlueprint['actionpath'] = `/schedulingunit/view/blueprint/${suBlueprintId}`;
                         suBlueprint.suDraft = suDraft;
-                        suBlueprint.project = project;
+                        suBlueprint.project = project.name;
                         suBlueprint.suSet = suSet;
                         suBlueprint.durationInSec = suBlueprint.duration;
                         suBlueprint.duration = UnitConverter.getSecsToHHmmss(suBlueprint.duration);
@@ -313,7 +313,7 @@ export class WeekTimelineView extends Component {
                                     data={this.state.suBlueprintList} 
                                     defaultcolumns={[{name: "Name",
                                                         start_time:"Start Time", stop_time:"End Time"}]}
-                                    optionalcolumns={[{description: "Description", duration:"Duration (HH:mm:ss)", actionpath: "actionpath"}]}
+                                    optionalcolumns={[{project: "Project", description: "Description", duration: "Duration (HH:mm:ss)", actionpath: "actionpath"}]}
                                     columnclassname={[{"Name":"filter-input-100", "Start Time":"filter-input-50", "End Time":"filter-input-50",
                                                         "Duration (HH:mm:ss)" : "filter-input-50",}]}
                                     defaultSortColumn= {[{id: "Start Time", desc: false}]}
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js
index 33bae7c040bc44e5934f26ff9b141aafd8aff360..0b77e10fc80469dba49272d93eec6e6bc720459e 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js
@@ -53,24 +53,31 @@ const ScheduleService = {
             return null;
         }
     },
-    getTaskBlueprintById: async function(id, loadTemplate){
+    getTaskBlueprintById: async function(id, loadTemplate, loadSubtasks){
         let result;
         try {
             result = await axios.get('/api/task_blueprint/'+id);
             if (result.data && loadTemplate) {
                 result.data.template = await TaskService.getTaskTemplate(result.data.specifications_template_id);
             }
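+            // Optionally fetch the details of every subtask of this task blueprint (used, for example, to read station lists in the timeline views)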
+            if (result.data && loadSubtasks) {
+                let subTasks = [];
+                for (const subtaskId of result.data.subtasks_ids) {
+                    subTasks.push((await TaskService.getSubtaskDetails(subtaskId)));
+                }
+                result.data.subTasks = subTasks;
+            }
         }   catch(error) {
             console.error('[schedule.services.getTaskBlueprintById]',error);
         }
         return result;
     },
-    getTaskBlueprintsBySchedulingUnit: async function(scheduleunit, loadTemplate){
+    getTaskBlueprintsBySchedulingUnit: async function(scheduleunit, loadTemplate, loadSubtasks){
         // there no single api to fetch associated task_blueprint, so iteare the task_blueprint id to fetch associated task_blueprint
         let taskblueprintsList = [];
         if(scheduleunit.task_blueprints_ids){
             for(const id of scheduleunit.task_blueprints_ids){
-               await this.getTaskBlueprintById(id, loadTemplate).then(response =>{
+               await this.getTaskBlueprintById(id, loadTemplate, loadSubtasks).then(response =>{
                     let taskblueprint = response.data;
                     taskblueprint['tasktype'] = 'Blueprint';
                     taskblueprint['actionpath'] = '/task/view/blueprint/'+taskblueprint['id'];
@@ -324,6 +331,14 @@ const ScheduleService = {
         } catch (error) {
           console.error('[project.services.getSchedulingUnitBySet]',error);
         }
+      },
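+      // Fetch the list of all projects (used by the scheduling unit list to map scheduling sets to project names)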
+      getProjectList: async function() {
+        try {
+          const response = await axios.get('/api/project/');
+          return response.data.results;
+        } catch (error) {
+          console.error('[project.services.getProjectList]',error);
+        }
       }
 }
 
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/task.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/task.service.js
index a6044b01419f50c99b67a577aaf486edf7abaf67..34a1e75b3d052010bf1deb74540d3c7761835ae4 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/services/task.service.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/services/task.service.js
@@ -169,7 +169,7 @@ const TaskService = {
         let subtaskTemplates = {};
         const taskDetails = (await axios.get(`/api/task_blueprint/${taskId}`)).data;
         for (const subtaskId of taskDetails.subtasks_ids) {
-          const subtaskDetails = (await axios.get(`/api/subtask/${subtaskId}`)).data;
+          const subtaskDetails = await this.getSubtaskDetails(subtaskId);
           const subtaskLogs = await this.getSubtaskStatusLogs(subtaskId);
           let template = subtaskTemplates[subtaskDetails.specifications_template_id];
           if (!template) {
@@ -194,7 +194,13 @@ const TaskService = {
         console.error(error);
       }
     },
-    
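+    // Fetch the raw details of a single subtask by id (shared helper, also used by schedule.service)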
+    getSubtaskDetails: async function(subtaskId) {
+      try {
+        return (await axios.get(`/api/subtask/${subtaskId}`)).data;
+      } catch(error) {
+        console.error(error);
+      }
+    }
 }
 
 export default TaskService;
diff --git a/SAS/TMSS/src/migrate_momdb_to_tmss.py b/SAS/TMSS/src/migrate_momdb_to_tmss.py
index 07fa5d2ffccbcf7298d5e273e6f688acc5d3149a..13efa43bbc7759f453875c51cdbfb3f9b5734fb9 100755
--- a/SAS/TMSS/src/migrate_momdb_to_tmss.py
+++ b/SAS/TMSS/src/migrate_momdb_to_tmss.py
@@ -1,6 +1,4 @@
 #!/usr/bin/env python3
-from tmss.wsgi import application  # required to set up django, even though not explicitly used
-from tmss.tmssapp import models
 
 from lofar.common import dbcredentials
 
@@ -8,8 +6,24 @@ import logging
 import datetime
 import pymysql
 from optparse import OptionParser
+import os
+import django
+import sys
+import re
 
 logger = logging.getLogger(__file__)
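+# Log progress to stdout so the migration run can be followed interactively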
+handler = logging.StreamHandler(sys.stdout)
+handler.setLevel(logging.DEBUG)
+formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+handler.setFormatter(formatter)
+logger.addHandler(handler)
+
+
+# 'mom2id' -> 'tmss object' mapping
+# (so we know what has been created already and refer to that)
+mom2id_to_tmss_representation = {}
+stats = {"projects_skipped": 0, "projects_updated": 0, "projects_created":0,
+         "subtasks_skipped": 0, "subtasks_updated": 0, "subtasks_created": 0}
 
 def _execute_query(query, data=None):
     try:
@@ -27,74 +41,77 @@ def query_project_details_from_momdb():
     Queries MoM database for project details and returns the list of results
     :return: list of details as dict
     """
-    logger.info("Querying MoM database for projects")
+    logger.info("...querying MoM database for projects")
     query = """SELECT project.priority,
                       project.allowtriggers,
                       mom2object.name,
                       mom2object.description,
-                      mom2object.mom2id
+                      mom2object.mom2id,
+                      resourcetype.name AS resourcetypename,
+                      resource.projectpath
                 FROM project
                 JOIN mom2object ON project.mom2objectid=mom2object.id
+                LEFT JOIN resource ON projectid=project.id AND resource.resourcetypeid IN (2,3,4,5,6,7,8,9,10,12)
+                LEFT JOIN resourcetype ON resource.resourcetypeid=resourcetype.id
                 ORDER BY mom2id;
                 """
 
     results = _execute_query(query)
 
-    # dummy data:
-    # results = [{"mom2id": 42,
-    #             "name": "dummyproject",
-    #             "description": "fake description",
-    #             "priority": 1234,
-    #             "allowtriggers": True}]
+    # MoM resourcetypes
+    #
+    # mysql> SELECT * FROM lofar_mom_test_lsmr.resourcetype;
+    # +----+----------------------------------+--------------------------------------------------------------------------------+----------------+
+    # | id | name                             | hosturi                                                                        | type           |
+    # +----+----------------------------------+--------------------------------------------------------------------------------+----------------+
+    # |  1 | Lofar Observing Time             | NULL                                                                           | OBSERVING_TIME |
+    # |  2 | Lofar Storage (SARA)             | srm://srm.grid.sara.nl:8443/pnfs/grid.sara.nl/data/lofar/ops/projects/         | LTA_STORAGE    |
+    # |  3 | Lofar Test Storage (SARA)        | srm://srm.grid.sara.nl:8443/pnfs/grid.sara.nl/data/lofar/ops/test/projects/    | LTA_STORAGE    |
+    # |  4 | Lofar Storage (TARGET) old       | gsiftp://lotar1.staging.lofar/target/gpfs2/lofar/home/lofarops/ops/projects/   | LTA_STORAGE    |
+    # |  5 | Lofar Storage (Jülich)           | srm://lofar-srm.fz-juelich.de:8443/pnfs/fz-juelich.de/data/lofar/ops/projects/ | LTA_STORAGE    |
+    # |  6 | Lofar User Disk Storage (SARA)   | srm://srm.grid.sara.nl/pnfs/grid.sara.nl/data/lofar/user/disk/projects/        | LTA_STORAGE    |
+    # |  7 | Lofar Tape Storage (Target)      | srm://srm.target.rug.nl:8444/lofar/ops/projects/                               | LTA_STORAGE    |
+    # |  8 | Lofar Tape Test Storage (Target) | srm://srm.target.rug.nl:8444/lofar/ops/test/projects/                          | LTA_STORAGE    |
+    # |  9 | Lofar Disk Storage (Target)      | srm://srm.target.rug.nl:8444/lofar/ops/disk/projects/                          | LTA_STORAGE    |
+    # | 10 | Lofar Disk Test Storage (Target) | srm://srm.target.rug.nl:8444/lofar/ops/disk/test/projects/                     | LTA_STORAGE    |
+    # | 11 | Lofar Processing Time            | NULL                                                                           | OBSERVING_TIME |
+    # | 12 | Lofar Storage (Poznan)           | srm://lta-head.lofar.psnc.pl:8443/lofar/ops/projects/                          | LTA_STORAGE    |
+    # | 13 | Lofar Triggers                   | NULL                                                                           | LOFAR_TRIGGERS |
+    # +----+----------------------------------+--------------------------------------------------------------------------------+----------------+
 
     return results
 
 
-def get_project_details_from_momdb():
+def query_subtask_details_for_project_from_momdb(project_mom2id):
     """
-    Obtains project details from MoM database and translates it into details as understood by the tmss data model.
-    :return: dict mom2id -> project details as dict
+    Obtains details of observations and pipelines from the MoM database.
+    Note: Some of these details still need to be converted to sth. that is understood by the tmss data model for subtasks.
+    :param project_mom2id:
+    :return:
     """
-    logger.info("Getting project details from MoM database")
-    mom_results = query_project_details_from_momdb()
-    results = {}
-
-    for mom_details in mom_results:
-
-        # create new tmss details based on MoM details
-        details = {"name": mom_details['name'],
-                   "description": mom_details['description'],
-                   "tags": ["migrated_from_MoM"],
-                   "priority": mom_details['priority'],
-                   "can_trigger": mom_details['allowtriggers'],
-                   "private_data": True  # todo: check project.releasedate and compare to now or how to determine???
-                   }
-
-        # alterations to comply with constraints:
-        if details['description'] is None:
-            details['description'] = ''
-
-        # add to return dict
-        results[mom_details['mom2id']] = details
-
-    return results
-
-
-def query_subtask_details_for_project_from_momdb(project_mom2id):
-    logger.info("Querying MoM database for tasks of project %s" % project_mom2id)
+    logger.info("...querying MoM database for subtasks of project %s" % project_mom2id)
     # todo: double-check the correct use of ids. What refers to a mom2id and what refers to a database entry pk does not seem systematic and is very comfusing.
+    # todo: clarify: Measurements correspond to subtask and Observations correspond to task level?
+    #  We have lofar_observation and lofar_pipeline tables, but lofar_observation seems to refer to a 'task level mom2object'
+    #  with measurement and pipeline children, where the latter are further described on the lofar_pipeline table.
+    #  So I'm assuming that type '%%MEASUREMENT%%' here correspond to observation subtasks, which also means that some
+    #  info is apparently squashed together with that of other subtasks of the same task in the lofar_observation entry
+    #  of the parent/'task'.
+    # todo: clarify: Is there info on template/start/stop on Measurement level somewhere? Using the parent task/observation now.
     query = '''SELECT mom2object.mom2id, mom2object.name, mom2object.description, mom2object.mom2objecttype, status.code,
-                      lofar_pipeline.template, lofar_observation.default_template, lofar_pipeline.starttime, lofar_pipeline.endtime,
-                      lofar_observation_specification.starttime AS obs_starttime, lofar_observation_specification.endtime AS obs_endtime
+                      lofar_pipeline.template AS template, lofar_observation.default_template as obs_template, lofar_pipeline.starttime, lofar_pipeline.endtime,
+                      lofar_observation_specification.starttime AS obs_starttime, lofar_observation_specification.endtime AS obs_endtime,
+                      parent_mom2object.mom2id as parent_mom2id
                       FROM mom2object
                       INNER JOIN mom2object AS ownerproject_mom2object ON mom2object.ownerprojectid = ownerproject_mom2object.id
                       INNER JOIN mom2objectstatus ON mom2object.currentstatusid = mom2objectstatus.id
                       INNER JOIN status ON mom2objectstatus.statusid = status.id
+                      LEFT JOIN mom2object AS parent_mom2object ON mom2object.parentid = parent_mom2object.id
                       LEFT JOIN lofar_pipeline ON mom2object.id = lofar_pipeline.mom2objectid
-                      LEFT JOIN lofar_observation ON mom2object.id = lofar_observation.mom2objectid
+                      LEFT JOIN lofar_observation ON mom2object.parentid = lofar_observation.mom2objectid
                       LEFT JOIN lofar_observation_specification ON lofar_observation.user_specification_id = lofar_observation_specification.id
                       WHERE ownerproject_mom2object.mom2id = %s
-                      AND (mom2object.mom2objecttype = 'LOFAR_OBSERVATION' OR mom2object.mom2objecttype LIKE '%%PIPELINE%%');
+                      AND (mom2object.mom2objecttype LIKE '%%MEASUREMENT%%' OR mom2object.mom2objecttype LIKE '%%PIPELINE%%');  
                       '''
 
     parameters = (project_mom2id,)
@@ -104,242 +121,497 @@ def query_subtask_details_for_project_from_momdb(project_mom2id):
     return results
 
 
-def _dummy_subtask_template():
-    try:
-        return models.SubtaskTemplate.objects.get(name='dummy')
-    except:
-        dummy_template_details = {"name": "dummy",
-                                  "description": 'Dummy Template',
-                                  "version": '1',
-                                  "schema": {},
-                                  "realtime": False,
-                                  "queue": False,
-                                  "tags": ["DUMMY"]}
+def query_task_details_for_subtask_from_momdb(subtask_mom2id):
+    """
+    Obtains details of the parent task (observation) of the given subtask from the MoM database.
+    Note: Some of these details still need to be converted to sth. that is understood by the tmss data model.
+    :param subtask_mom2id: mom2id of the subtask
+    :return: list of details as dict
+    """
+    logger.info("...querying MoM database for parent task of subtask mom2id=%s" % subtask_mom2id)
+    query = '''SELECT parent_mom2object.mom2id, parent_mom2object.name, parent_mom2object.description, parent_mom2object.mom2objecttype, status.code,
+                      lofar_observation.template, lofar_observation_specification.starttime AS starttime, lofar_observation_specification.endtime AS endtime
+                      FROM mom2object
+                      INNER JOIN mom2object AS parent_mom2object ON mom2object.parentid = parent_mom2object.id
+                      INNER JOIN mom2objectstatus ON parent_mom2object.currentstatusid = mom2objectstatus.id
+                      INNER JOIN status ON mom2objectstatus.statusid = status.id
+                      LEFT JOIN lofar_observation ON parent_mom2object.id = lofar_observation.mom2objectid
+                      LEFT JOIN lofar_observation_specification ON lofar_observation.user_specification_id = lofar_observation_specification.id
+                      WHERE mom2object.mom2id = %s                      
+                      '''
+    parameters = (subtask_mom2id,)
 
-        return models.SubtaskTemplate.objects.create(**dummy_template_details)
+    results = _execute_query(query, parameters)
 
+    return results
 
-def _dummy_scheduling_set(project):
-    dummy_scheduling_set_details = {"name": 'dummy',
-                            "description": "Dummy scheduling unit set",
-                            "tags": ["DUMMY"],
-                            "generator_doc": "{}",
-                            "project": project,
-                            "generator_template": None}
+def query_related_tasks_from_momdb(task_mom2id):
+    """
+    Obtains details of tasks (observations) that share the same parent as the given task from the MoM database,
+    including the time differences between their specified start/end times.
+    :param task_mom2id: mom2id of the task
+    :return: list of details as dict
+    """
+    logger.info("...querying MoM database for tasks related to task mom2id=%s" % task_mom2id)
+    query = '''SELECT TIMEDIFF(related_lofar_observation_specification.starttime, lofar_observation_specification.endtime) AS startdiff, 
+               TIMEDIFF(lofar_observation_specification.starttime, related_lofar_observation_specification.endtime) AS enddiff, 
+               related_mom2object.mom2id, related_mom2object.name, related_mom2object.description, related_mom2object.mom2objecttype, status.code,
+               related_lofar_observation.template, related_lofar_observation_specification.starttime AS starttime, related_lofar_observation_specification.endtime AS endtime
+               FROM mom2object
+               INNER JOIN mom2object AS related_mom2object ON mom2object.parentid = related_mom2object.parentid
+               INNER JOIN mom2objectstatus ON related_mom2object.currentstatusid = mom2objectstatus.id
+               INNER JOIN status ON mom2objectstatus.statusid = status.id
+               LEFT JOIN lofar_observation AS related_lofar_observation ON related_mom2object.id = related_lofar_observation.mom2objectid
+               LEFT JOIN lofar_observation_specification AS related_lofar_observation_specification ON related_lofar_observation.user_specification_id = related_lofar_observation_specification.id 
+               LEFT JOIN lofar_observation ON mom2object.id = lofar_observation.mom2objectid
+               LEFT JOIN lofar_observation_specification ON lofar_observation.user_specification_id = lofar_observation_specification.id
+               WHERE mom2object.mom2id = %s
+               '''
+
+    parameters = (task_mom2id,)
+
+    results = _execute_query(query, parameters)
 
+    return results
 
-    return models.SchedulingSet.objects.create(**dummy_scheduling_set_details)
 
+def get_project_details_from_momdb():
+    """
+    Obtains project details from MoM database and translates it into details as understood by the tmss data model.
+    :return: dict mom2id -> project details as dict
+    """
+    logger.info("Getting project details from MoM database")
+    mom_results = query_project_details_from_momdb()
+    results = {}
 
-def _dummy_scheduling_unit_template():
-    dummy_scheduling_unit_template_details = {"name": "dummy",
-                                  "description": 'Dummy scheduling unit template',
-                                  "version": 'v0.314159265359',
-                                  "schema": {"mykey": "my value"},
-                                  "tags": ["DUMMY"]}
+    for mom_details in mom_results:
+
+        # derive values for TMSS:
 
-    return models.RunTemplate.objects.create(**dummy_scheduling_unit_template_details)
+        #   filesystem   todo: how to deal with Target locations?
+        if mom_details['resourcetypename']:
+            try:
+                archive_location = models.Filesystem.objects.get(name=mom_details['resourcetypename'])
+            except:
+                logger.error("No Filesystem matching '%(resourcetypename)s' in tmss database! Skipping project name=%(name)s" % mom_details)
+                continue
+        else:
+            logger.warning("Missing archive info in MoM details, using None! name=%(name)s" % mom_details)
+            archive_location = None
+            mom_details['projectpath'] = ""
 
+        # create new tmss details
+        details = {"name": mom_details['name'],
+                   "description":  "" if not mom_details['description'] else mom_details['description'],
+                   "tags": ["migrated_from_MoM", "migration_incomplete"],
+                   "priority_rank": mom_details['priority'],
+                   "trigger_priority": 1000,
+                   "can_trigger": mom_details['allowtriggers'],
+                   "private_data": True,  # todo: check project.releasedate and compare to now or how to determine???
+                   "archive_subdirectory": mom_details['projectpath'],
+                   # optional:
+                   # "project_category":,
+                   # "period_category":,
+                   "archive_location": archive_location
+                   }
 
-def _dummy_scheduling_unit_draft(scheduling_set, template):
-    dummy_scheduling_unit_draft_details = {"name": 'dummy',
-                            "description": "Dummy scheduling_unit draft",
-                            "tags": ["DUMMY"],
-                            "requirements_doc": "{}",
-                            "copy_reason": models.CopyReason.objects.get(value='template'),
-                            "generator_instance_doc": "para",
-                            "copies": None,
-                            "scheduling_set": scheduling_set,
-                            "generator_source": None,
-                            "template": template}
+        # add to return dict
+        results[mom_details['mom2id']] = details
 
-    return models.RunDraft.objects.create(**dummy_scheduling_unit_draft_details)
+    return results
 
 
-def _dummy_scheduling_unit_blueprint(draft, template):
+def get_or_create_scheduling_set_for_project(project):
+    """
+    Returns the common scheduling set for all scheduling units of the given project or creates a new one if not found in TMSS.
+    """
+    try:
+        scheduling_set = models.SchedulingSet.objects.get(name=project.name)
+    except:
+        dummy_scheduling_set_details = {"name": project.name,
+                            "description": "Common scheduling set for all scheduling units in this project (created during MoM migration)",
+                            "tags": ["migrated_from_MoM", "migration_incomplete"],
+                            "generator_doc": {},
+                            "project": project,
+                            "generator_template": None}
+        scheduling_set = models.SchedulingSet.objects.create(**dummy_scheduling_set_details)
 
-    dummy_scheduling_unit_blueprint_details = {"name": 'dummy',
-                                   "description": "Dummy scheduling_unit blueprint",
-                                   "tags": ["DUMMY"],
-                                   "requirements_doc": "{}",
-                                   "do_cancel": False,
-                                   "draft": draft,
-                                   "template": template}
+    return scheduling_set
 
 
-    return  models.RunBlueprint.objects.create(**dummy_scheduling_unit_blueprint_details)
+def get_or_create_scheduling_unit_for_subtask(scheduling_set, scheduling_unit_template, task_mom2id, max_time_distance=900):
+    """
+    Returns a scheduling unit (draft and blueprint) for the given task mom2id. It is either newly created or the existing
+    scheduling unit of a related task. Tasks are considered related when they are in the same folder and one task does not
+    start more than max_time_distance seconds before or after the other task.
 
+    # todo: we have groups/topologies as well, need some guidance here...
+    # todo: where to get the specification time from?
 
-def _dummy_task_template():
+    :returns tuple(scheduling_unit_draft, scheduling_unit_blueprint)
+    """
 
-    dummy_task_template_details = {"name": "dummy",
-                                           "description": 'Dummy work request template',
-                                           "validation_code_js": "",
-                                           "version": 'v0.314159265359',
-                                           "schema": {"mykey": "my value"},
-                                           "tags": ["DUMMY"]}
+    related_task_details = query_related_tasks_from_momdb(task_mom2id)
+    if related_task_details:
+        for details in related_task_details:
+            if details["mom2id"] in mom2id_to_tmss_representation.keys():
+                related_task = mom2id_to_tmss_representation[details["mom2id"]]  # we kept a blueprint reference
+                if details['startdiff'] and details['enddiff']:
+                    time_distance = min(abs(details['startdiff'].total_seconds()), abs(details['enddiff'].total_seconds()))
+                    if time_distance < max_time_distance:
+                        blueprint = related_task.scheduling_unit_blueprint
+                        draft = blueprint.draft
+                        logger.info("...using scheduling unit draft_id=%s blueprint_id=%s from related task mom2id=%s for task mom2id=%s" % (draft.id, blueprint.id, details["mom2id"], task_mom2id))
+                        return draft, blueprint
+                    else:
+                        logger.info("...related task mom2id=%s starts too far apart (seconds=%s threshold=%s)" % (details["mom2id"], time_distance, max_time_distance))
+                        continue
+                else:
+                    logger.warning("Cannot compare times, assuming task mom2id=%s is not related to %s" % (task_mom2id, details))  # todo: Investigate... is this because tasks are sometimes user-specified and sometimes system-specified?
+                    continue
+
+    scheduling_unit_draft_details = {"name": 'dummy',
+                                     "description": "Scheduling unit draft (created during MoM migration for task mom2id=%s)" % task_mom2id,
+                                     "tags": ["migrated_from_MoM", "migration_incomplete"],
+                                     "requirements_doc": {},
+                                     "scheduling_set": scheduling_set,
+                                     "requirements_template": scheduling_unit_template
+                                     # optional:
+                                     # "copy_reason": models.CopyReason.objects.get(value='template'),
+                                     # "copies": None,
+                                     # "generator_instance_doc" : {},
+                                     # "scheduling_constraints_doc": {},
+                                     # "scheduling_constraints_template": None,
+                                     # "observation_strategy_template": None,
+                                     }
+
+    draft = models.SchedulingUnitDraft.objects.create(**scheduling_unit_draft_details)
+
+    scheduling_unit_blueprint_details = {"name": 'dummy',
+                                         "description": "Scheduling unit blueprint (created during MoM migration for task mom2id=%s)" % task_mom2id,
+                                         "tags": ["migrated_from_MoM", "migration_incomplete"],
+                                         "requirements_doc": {},
+                                         "do_cancel": False,
+                                         "draft": draft,
+                                         "requirements_template": scheduling_unit_template}
+
+    blueprint = models.SchedulingUnitBlueprint.objects.create(**scheduling_unit_blueprint_details)
+    logger.info("Created new scheduling unit draft_id=%s blueprint_id=%s for task mom2id=%s" % (draft.id, blueprint.id, task_mom2id))
+    return draft, blueprint
+
+
+def get_or_create_task_for_subtask(scheduling_unit_draft, scheduling_unit_blueprint, subtask_mom2id):
+    """
+    Returns a TMSS task for the given subtask.
+    It is either newly created or an existing task of related subtasks. Subtasks are considered related when they have
+    the same parentid in MoM database.
+    :returns tuple(task_draft, task_blueprint)
+    """
 
-    return models.TaskTemplate.objects.create(**dummy_task_template_details)
+    task_details = query_task_details_for_subtask_from_momdb(subtask_mom2id)
+    if task_details:
+        for details in task_details:   # there should be exactly one, actually
+            if details["mom2id"] in mom2id_to_tmss_representation.keys():
+                blueprint = mom2id_to_tmss_representation[details["mom2id"]]
+                draft = blueprint.draft
+                logger.info("...using existing task draft_id=%s blueprint_id=%s for subtask mom2id=%s" % (draft.id, blueprint.id, subtask_mom2id))
+                return draft, blueprint
+            else:
+                try:
+                    task_template = models.TaskTemplate.objects.get(name=details['default_template'])
+                except models.TaskTemplate.DoesNotExist:
+                    task_template = _dummy_task_template(details['template'])
+
+                task_draft_details = {"name": details["name"],
+                                            "description": "" if not details['description'] else details['description'],
+                                            "tags": ["migrated_from_MoM", "migration_incomplete"],
+                                            "specifications_doc": {},
+                                            # "copy_reason": models.CopyReason.objects.get(value='template'),
+                                            # "copies": None,
+                                            "scheduling_unit_draft": scheduling_unit_draft,
+                                            "specifications_template": task_template}
+
+                task_draft = models.TaskDraft.objects.create(**task_draft_details)
+
+                task_blueprint_details = {"name": details["name"],
+                                                "description": "" if not details['description'] else details['description'],
+                                                "tags": ["migrated_from_MoM", "migration_incomplete"],
+                                                "specifications_doc": {},
+                                                "do_cancel": False,
+                                                "draft": task_draft,
+                                                "specifications_template": task_template,
+                                                "scheduling_unit_blueprint": scheduling_unit_blueprint}
+
+                task_blueprint = models.TaskBlueprint.objects.create(**task_blueprint_details)
+
+                mom2id_to_tmss_representation[details["mom2id"]] = task_blueprint
+                logger.info("...created new task draft_id=%s blueprint_id=%s for subtask mom2id=%s" % (task_draft.id, task_blueprint.id, subtask_mom2id))
+                return task_draft, task_blueprint
+
+
+def _dummy_subtask_template(name):
+    template_name = "%s_dummy" % name
+    try:
+        return models.SubtaskTemplate.objects.get(name=template_name)
+    except models.SubtaskTemplate.DoesNotExist:
+        dummy_template_details = {"name": template_name,
+                                  "description": "Dummy subtask template for MoM migration, when no matching template in TMSS",
+                                  "version": '1',
+                                  "schema": {"$id":"http://tmss.lofar.org/api/schemas/subtasktemplate/empty/1#",
+                                             "$schema": "http://json-schema.org/draft-06/schema#"},
+                                  "realtime": False,
+                                  "queue": False,
+                                  "tags": ["DUMMY"],
+                                  "type": models.SubtaskType.objects.get(value='other')}
 
+        return models.SubtaskTemplate.objects.create(**dummy_template_details)
 
-def _dummy_task_draft(scheduling_unit_draft, template):
 
-    dummy_task_draft_details = {"name": 'dummy',
-                                        "description": "Dummy work request draft",
-                                        "tags": ["DUMMY"],
-                                        "requirements_doc": "{}",
-                                        "copy_reason": models.CopyReason.objects.get(value='template'),
-                                        "copies": None,
-                                        "scheduling_unit_draft": scheduling_unit_draft,
-                                        "template": template}
+def _dummy_scheduling_unit_template(name):
+    template_name = "%s_dummy" % name
+    try:
+        return models.SchedulingUnitTemplate.objects.get(name=template_name)
+    except models.SchedulingUnitTemplate.DoesNotExist:
+        dummy_scheduling_unit_template_details = {"name": template_name,
+                                  "description": "Dummy scheduling unit template for MoM migration, when no matching template in TMSS",
+                                  "version": 'v0.314159265359',
+                                  "schema": {"$id":"http://tmss.lofar.org/api/schemas/schedulingunittemplate/empty/1#",
+                                             "$schema": "http://json-schema.org/draft-06/schema#"},
+                                  "tags": ["DUMMY"]}
 
-    return models.TaskDraft.objects.create(**dummy_task_draft_details)
+        return models.SchedulingUnitTemplate.objects.create(**dummy_scheduling_unit_template_details)
 
 
-def _dummy_task_blueprint(draft, template, scheduling_unit_blueprint):
+def _dummy_task_template(name):
+    template_name = "%s_dummy" % name
+    try:
+        return models.TaskTemplate.objects.get(name=template_name)
+    except models.TaskTemplate.DoesNotExist:
+        dummy_task_template_details = {"name": template_name,
+                                       "description": 'Dummy task template for MoM migration, when no matching template in TMSS',
+                                       "validation_code_js": "",
+                                       "version": 'v0.314159265359',
+                                       "schema": {"$id":"http://tmss.lofar.org/api/schemas/tasktemplate/empty/1#",
+                                                  "$schema": "http://json-schema.org/draft-06/schema#"},
+                                       "tags": ["DUMMY"],
+                                       "type": models.TaskType.objects.get(value='other')}
 
-    dummy_task_blueprint_details = {"name": 'dummy',
-                                            "description": "Dummy work request blueprint",
-                                            "tags": ["DUMMY"],
-                                            "requirements_doc": "{}",
-                                            "do_cancel": False,
-                                            "draft": draft,
-                                            "template": template,
-                                            "scheduling_unit_blueprint": scheduling_unit_blueprint}
+        return models.TaskTemplate.objects.create(**dummy_task_template_details)
 
-    return models.TaskBlueprint.objects.create(**dummy_task_blueprint_details)
 
+def create_subtask_trees_for_project_in_momdb(project_mom2id, project):
+    """
+    Migrates all observations and pipelines that belong to the given project as Subtasks to TMSS.
+    This also creates associated Task and SchedulingUnit drafts and blueprints in order to link the subtask to its project.
+    :param project_mom2id: The mom id of the project to migrate
+    :param project: The TMSS project object to refer to
+    """
 
-def get_subtask_details_from_momdb(project_mom2id, project):
+    global stats
+    global mom2id_to_tmss_representation
 
     logger.info("Getting subtask details from MoM database")
     mom_results = query_subtask_details_for_project_from_momdb(project_mom2id)
-    results = {}
+    logger.info("There are %s subtasks to migrate in project name=%s" % (len(mom_results), project.name))
 
     for mom_details in mom_results:
 
-        # different types have some info in different spots, so they end up in different columns.
-        # put same information in same spot to keep following code same for all tasks.
-        # (maybe we want to instead separate these into different queries instead or union them in SQL?)
+        logger.info("...now migrating subtask mom2id=%s mom2objecttype=%s" % (mom_details['mom2id'], mom_details['mom2objecttype']))
+
+        # derive values for TMSS
+
+        #   type and start/end times
 
-        if 'OBSERVATION' in mom_details['mom2objecttype']:
-            type = models.SubtaskType.objects.get(value='observation')
-            template_name = mom_details['default_template']
+        if 'MEASUREMENT' in mom_details['mom2objecttype']:
+            template_name = mom_details['obs_template']
             start_time = mom_details['obs_starttime']
-            end_time = mom_details['obs_endtime']
+            stop_time = mom_details['obs_endtime']
         elif 'PIPELINE' in mom_details['mom2objecttype']:
-            type = models.SubtaskType.objects.get(value='pipeline')
             template_name = mom_details['template']
             start_time = mom_details['starttime']
-            end_time = mom_details['endtime']
+            stop_time = mom_details['endtime']
         else:
-            logger.warning('Unknown type %(mom2objecttype)s' % mom_details)
-            logger.warning('Skipping %s' % mom_details)
+            logger.error('Unknown type %(mom2objecttype)s - Skipping subtask mom2id=%(mom2id)s' % mom_details)
+            stats['subtasks_skipped'] += 1
             continue
 
-        # create new tmss details (leave out stuff that might go wrong)
+        #   timestamps
 
-        details = {"type": type,
-                   "start_time": None,              # mandatory
-                   "stop_time": None,               # mandatory
-                   "state": None,                   # mandatory
-                   "requested_state": None,         # mandatory
-                   "specification": "{}",
-                   "task_blueprint": None,  # optional, but required for project reference
-                   "template": None,                # mandatory
-                   "tags": ["migrated_from_MoM"]}
+        if start_time is None:
+            start_time = datetime.datetime.utcfromtimestamp(0).isoformat()  # not-null constraint
 
-        # timestamps
+        if stop_time is None:
+            stop_time = datetime.datetime.utcfromtimestamp(0).isoformat()  # not-null constraint
 
-        if start_time is not None:
-            details['start_time'] = start_time
-        else:
-            details['start_time'] = datetime.datetime.utcfromtimestamp(0).isoformat() # not-null constraint
-
-        if end_time is not None:
-            details['stop_time'] = end_time
-        else:
-            details['stop_time'] = datetime.datetime.utcfromtimestamp(0).isoformat() # not-null constraint
+        #   state
 
-        # state
+        #   todo: check mapping is correct and complete.
+        #    This currently only covers the status codes encountered during testing, mapped by best guess (i.e. possibly wrong).
+        #   Note: status codes with a verbatim counterpart in TMSS do not need to be mapped here.
+        #   Valid TMSS values are: "defining", "defined", "scheduling", "scheduled", "queueing", "queued", "starting", "started", "finishing", "finished", "cancelling", "cancelled", "error"
+        mom_state_to_subtask_state = {"opened": models.SubtaskState.objects.get(value="defining"),
+                                      "described": models.SubtaskState.objects.get(value="defined"),
+                                      "suspended": models.SubtaskState.objects.get(value="cancelled"),
+                                      "prepared": models.SubtaskState.objects.get(value="scheduling"),
+                                      "aborted": models.SubtaskState.objects.get(value="cancelled"),
+                                      "hold": models.SubtaskState.objects.get(value="cancelled"),
+                                      "approved": models.SubtaskState.objects.get(value="queued"),
+                                      "failed": models.SubtaskState.objects.get(value="error"),
+                                      "successful": models.SubtaskState.objects.get(value="finished"),}
 
-        try:
-            state = models.SubtaskState.objects.get(value=mom_details['code'])
-            details['state'] = state
-            details['requested_state'] = state
-        except Exception as e:
-            logger.error("No state choice matching '%s' in tmss database! %s" % (mom_details['code'], e))
-            logger.warning('Skipping %s' % mom_details)
-            continue
+        if mom_details['code'] in mom_state_to_subtask_state:
+            state = mom_state_to_subtask_state[mom_details['code']]
+        else:
+            try:
+                state = models.SubtaskState.objects.get(value=mom_details['code'])
+            except models.SubtaskState.DoesNotExist:
+                logger.error("No SubtaskState choice matching '%(code)s' in tmss database! - Skipping subtask mom2id=%(mom2id)s" % mom_details)
+                stats['subtasks_skipped'] += 1
+                continue
 
-        # template
+        #   template
 
         if template_name is not None:
             try:
-                details['template'] = models.SubtaskTemplate.objects.get(name=template_name)
-            except Exception as e:
-                logger.warning("No task template matching '%s' in tmss database! Using dummy instead! %s" % (template_name, e))
-
+                specifications_template = models.SubtaskTemplate.objects.get(name=template_name)
+                logger.info('...found SubtaskTemplate id=%s for subtask mom2id=%s templatename=%s' % (specifications_template.id, mom_details["mom2id"], template_name))
+            except models.SubtaskTemplate.DoesNotExist:
                 # todo: create a lot of templates to reflect what was used for the actual task?
-                # todo: raise Exception (or continue) once we have proper templates for everything.
-                details["template"] = _dummy_subtask_template()
+                #  Then raise Exception once we have proper templates for everything?
+                specifications_template = _dummy_subtask_template(template_name)
+                logger.warning("No SubtaskTemplate matching '%s' in tmss database! Using dummy id=%s instead for subtask mom2id=%s" % (template_name, specifications_template.id, mom_details['mom2id']))
 
         else:
-            logger.warning('Missing template name in MoM details!')
-            logger.warning('Skipping %s' % mom_details)
+            logger.error('Missing template name in MoM details! - Skipping subtask mom2id=%(mom2id)s' % mom_details)
+            stats['subtasks_skipped'] += 1
             continue
 
-        # ----------------
-        # todo: the following entries are needed to relate a task to it's project.
-        # todo: we should substitute the dummy items by items that reflect the actual task details
-
         # scheduling set
-        scheduling_set = _dummy_scheduling_set(project)
+        scheduling_set = get_or_create_scheduling_set_for_project(project)
 
         # scheduling unit template
-        scheduling_unit_template = _dummy_scheduling_unit_template()
+        try:
+            scheduling_unit_template = models.SchedulingUnitTemplate.objects.get(name=template_name)
+            logger.info('...found SchedulingUnitTemplate id=%s for subtask mom2id=%s templatename=%s' % (scheduling_unit_template.id, mom_details["mom2id"], template_name))
+        except models.SchedulingUnitTemplate.DoesNotExist:
+            scheduling_unit_template = _dummy_scheduling_unit_template(template_name)
+            logger.warning('No SchedulingUnitTemplate was found for subtask mom2id=%s templatename=%s. Using dummy template id=%s' % (mom_details["mom2id"], template_name, scheduling_unit_template.id))
+
+        # scheduling unit draft + blueprint
+        scheduling_unit_draft, scheduling_unit_blueprint = get_or_create_scheduling_unit_for_subtask(scheduling_set, scheduling_unit_template, mom_details["parent_mom2id"])
+
+        # task draft + blueprint
+        task_draft, task_blueprint = get_or_create_task_for_subtask(scheduling_unit_draft, scheduling_unit_blueprint, mom_details["mom2id"])
+
+        details = {"id": mom_details['mom2id'],
+                   "state": state,
+                   "specifications_doc": {},   # todo: where from? We have user_specification_id (for task?) and system_specification_id (for subtask?) on lofar_observation (I guess referring to lofar_observation_specification). Shall we piece things together from that, or is there a text blob to use? Also: pipeline info lives in obs_spec too?
+                   "task_blueprint": task_blueprint,
+                   "specifications_template": specifications_template,
+                   "tags": ["migrated_from_MoM", "migration_incomplete"],   # todo: set complete once it is verified that all info is present
+                   "priority": project.priority_rank,  # todo: correct to derive from project?
+                   # optional:
+                   "start_time": start_time,
+                   "stop_time": stop_time,
+                   "schedule_method": models.ScheduleMethod.objects.get(value="manual"),  # todo: correct? Or leave None?
+                   # "created_or_updated_by_user" = None,
+                   # "raw_feedback" = None,
+                   # "do_cancel": None,
+                   #"cluster": None  # I guess from lofar_observation.storage_cluster_id
+                   }
 
-        # scheduling unit draft
-        scheduling_unit_draft = _dummy_scheduling_unit_draft(scheduling_set, scheduling_unit_template)
+        subtask_qs = models.Subtask.objects.filter(id=details["id"])
+        if subtask_qs.count():
+            # todo: this will update the subtask, but other TMSS objects do not share id with MoM and get recreated with every migration run. Can we clean this up somehow?
+            subtask_qs.update(**details)
+            subtask = subtask_qs.first()
+            logger.info("...updated existing subtask tmss id=%s" % subtask.id)
+            stats['subtasks_updated'] += 1
+        else:
+            subtask = models.Subtask.objects.create(**details)
+            logger.info("...created new subtask tmss id=%s" % subtask.id)
+            stats['subtasks_created'] += 1
 
-        # scheduling unit blueprint
-        scheduling_unit_blueprint = _dummy_scheduling_unit_blueprint(scheduling_unit_draft, scheduling_unit_template)
+        mom2id_to_tmss_representation[mom_details['mom2id']] = subtask
 
-        # work request template
-        task_template = _dummy_task_template()
+        logger.info("...handled %s TMSS objects so far | %s" % (len(mom2id_to_tmss_representation), stats))
 
-        # work request draft
-        task_draft = _dummy_task_draft(scheduling_unit_draft, task_template)
 
-        # work request blueprint
-        details['task_blueprint'] = _dummy_task_blueprint(task_draft,
-                                                                          task_template,
-                                                                          scheduling_unit_blueprint)
-        # ----------------
+def get_or_create_cycle_for_project(project):
+    """
+    Returns a cycle for a given project. Since cycles don't seem to be a thing in MoM, the cycle is derived from the
+    project name. Returns None if that fails.
+    """
+    name = project.name
+    if name.lower().startswith('lc'):
+        cycle_no = re.search(r'\d+', name.lower()).group()
+        cycle_name = "Cycle %02d" % int(cycle_no)
+        try:
+            cycle = models.Cycle.objects.get(name=cycle_name)
+            logger.info("...found existing cycle name=%s for project name=%s" % (cycle.name, project.name))
+            return cycle
+        except models.Cycle.DoesNotExist:
+            details = {"name": cycle_name,
+                       "description": "Cycle %s (created during MoM migration)" % cycle_no,
+                       "tags": ["migrated_from_MoM", "migration_incomplete"],
+                       "start": "1970-01-01T00:00:00",  # todo, where from?
+                       "stop": "1970-01-01T00:00:00",   # todo, where from?
+                       }
+            cycle = models.Cycle.objects.create(**details)
+            logger.info("...created new cycle name=% for project name=%s" % (cycle.name, project.name))
+            return cycle
 
-        # add task mom2id and its details to return dict
-        results[mom_details['mom2id']] = details
+    logger.warning("Could not determine cycle for project name=%s. Using None." % (project.name))
 
-    return results
 
 
 def main():
+    """
+    Migrates data from a MoM database to a TMSS database.
+    Existing TMSS objects with the same name or id are updated; otherwise new objects are created.
+    """
+
+    global mom2id_to_tmss_representation
+    global stats
+
+    # query details of all projects in MoM database
     project_details = get_project_details_from_momdb()
+    logger.info("There are %s projects to migrate" % len(project_details))
 
+    # iterate projects
     for p_id, p_details in project_details.items():
 
-        logger.info("---\nNow migrating project %s..." % p_details['name'])
-        project = models.Project.objects.create(**p_details)
-        logger.info("...created new project with tmss id %s" % project.id)
+        try:
+            logger.info("Now migrating project mom_name=%s mom2id=%s" % (p_details['name'], p_id))
+
+            # create or update project
+            project_qs = models.Project.objects.filter(name=p_details["name"])
+            if project_qs.count():
+                project_qs.update(**p_details)
+                project = project_qs.first()
+                logger.info("...updated existing project tmss_name=%s" % project.name)
+                stats["projects_updated"] += 1
+            else:
+                project = models.Project.objects.create(**p_details)
+                logger.info("...created new project tmss_name=%s" % project.name)
+                stats["projects_created"] += 1
 
-        task_details = get_subtask_details_from_momdb(p_id, project)
-        for t_id, t_details in task_details.items():
+            # create all subtasks and related objects for the project
+            create_subtask_trees_for_project_in_momdb(p_id, project)
 
-            logger.info("...creating new task mom2id %s" % t_id)
-            task = models.Subtask.objects.create(**t_details)
-            logger.info("...created new task with tmss id %s" % task.id)
+            # add project to existing or new cycle
+            cycle = get_or_create_cycle_for_project(project)
+            if cycle:
+                project.cycles.set([cycle])
 
-        logger.info("...done migrating project %s." % p_details['name'])
+            logger.info("...done migrating project mom_name=%s mom2id=%s tmss_name=%s." % (p_details['name'], p_id, project.name))
+
+        except Exception as ex:
+            logger.error(ex, exc_info=True)
+            logger.error("Skipping migration of project mom_name=%s mom2id=%s details=%s." % (p_details['name'], p_id, p_details))
+            stats["projects_skipped"] += 1
+
+    logger.info("Done. Handled %s TMSS objects in total | %s" % (len(mom2id_to_tmss_representation), stats))
 
 
 if __name__ == "__main__":
@@ -354,7 +626,7 @@ if __name__ == "__main__":
 
     global dbcreds
     dbcreds = dbcredentials.parse_options(options)
-    logger.info("Using dbcreds: %s", dbcreds.stringWithHiddenPassword())
+    logger.info("Using MoM dbcreds: %s", dbcreds.stringWithHiddenPassword())
 
     # note: this requires a config file .lofar/dbcredentials/mom.ini, with contents as such (adapt as needed):
     #
@@ -366,5 +638,14 @@ if __name__ == "__main__":
     # password = mompass
     # database = lofar_mom_test_tmss
 
+    # set up Django
+    creds_name = os.environ.get('TMSS_DBCREDENTIALS', 'tmss')
+    os.environ['TMSS_DBCREDENTIALS'] = creds_name
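+    # e.g. export TMSS_DBCREDENTIALS=tmss_migration (hypothetical name) to read
+    # ~/.lofar/dbcredentials/tmss_migration.ini instead of the default tmss.ini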
+    tmss_dbcreds = dbcredentials.DBCredentials().get(creds_name)
+    logger.info("Using TMSS dbcreds: %s", tmss_dbcreds.stringWithHiddenPassword())
+
+    os.environ.setdefault("DJANGO_SETTINGS_MODULE", 'lofar.sas.tmss.tmss.settings')
+    django.setup()
+    from lofar.sas.tmss.tmss.tmssapp import models  # has to happen after Django setup
 
     main()
diff --git a/SAS/TMSS/src/tmss/settings.py b/SAS/TMSS/src/tmss/settings.py
index f22c76093073d7c26f2f80c4e2c6150a75690186..ec4f811934c976a158763404d6d039b867ccb4e2 100644
--- a/SAS/TMSS/src/tmss/settings.py
+++ b/SAS/TMSS/src/tmss/settings.py
@@ -97,6 +97,7 @@ INSTALLED_APPS = [
 
 MIDDLEWARE = [
     'django.middleware.gzip.GZipMiddleware',
+    'debug_toolbar.middleware.DebugToolbarMiddleware',
     'django.middleware.security.SecurityMiddleware',
     'django.contrib.sessions.middleware.SessionMiddleware',
     'django.middleware.common.CommonMiddleware',
@@ -124,7 +125,7 @@ ROOT_URLCONF = 'lofar.sas.tmss.tmss.urls'
 TEMPLATES = [
     {
         'BACKEND': 'django.template.backends.django.DjangoTemplates',
-        'DIRS': [BASE_DIR, os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR, '../frontend','tmss_webapp')],
+        'DIRS': [BASE_DIR, os.path.join(BASE_DIR, 'templates'), os.path.join(os.environ.get('LOFARROOT'), 'SAS/TMSS/frontend','tmss_webapp')],
         'APP_DIRS': True,
         'OPTIONS': {
             'context_processors': [
@@ -138,7 +139,7 @@ TEMPLATES = [
 ]
 
 STATICFILES_DIRS = [
-    os.path.join(BASE_DIR, '../frontend','tmss_webapp/build/static')
+    os.path.join(os.environ.get('LOFARROOT'), 'SAS/TMSS/frontend','tmss_webapp/build/static')
 ]
 
 WSGI_APPLICATION = 'lofar.sas.tmss.tmss.wsgi.application'
@@ -323,9 +324,3 @@ SWAGGER_SETTINGS = {
     },
 
 }
-
-# TODO Do I need distinguish more between Test and Production Environment??
-# maybe a local file in Development environment for test purposes
-SCU = "http://scu199" if isDevelopmentEnvironment() or isTestEnvironment() else "http://scu001"
-PIPELINE_SUBTASK_LOG_URL = SCU + ".control.lofar:7412/tasks/%s/log.html"
-OBSERVATION_SUBTASK_LOG_URL = "https://proxy.lofar.eu/inspect/%s/rtcp-%s.errors"
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
index 6f8f6b3615183203828350bcaa678eb520d2dfce..76886db52fd294699ada00094d7269e222916002 100644
--- a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py
@@ -273,31 +273,6 @@ class Subtask(BasicCommon):
             # update the previous state value
             self.__original_state_id = self.state_id
 
-    @property
-    def log_url(self):
-        """
-        Return the link to the pipeline log in case of pipeline or
-        link to COBALT error log in case of an observation
-        otherwise just an empty string
-        """
-        if self.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value:
-            url = settings.OBSERVATION_SUBTASK_LOG_URL % (self.id, self.id)
-        elif self.specifications_template.type.value == SubtaskType.Choices.PIPELINE.value:
-            # Get RADBID, subtask must be at least 'scheduled' to exist in radb
-            # If RA is not started don't wait longer than 10 seconds
-            with RADBRPC.create(timeout=10) as radbrpc:
-                try:
-                    radb_id = radbrpc.getTask(tmss_id=self.id)
-                except:
-                    radb_id = None
-            if radb_id is None:
-               url = "not available (missing radbid)"
-            else:
-               url = settings.PIPELINE_SUBTASK_LOG_URL % radb_id['id']
-        else:
-            url = ""
-        return url
-
 
 class SubtaskStateLog(BasicCommon):
     """
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/src/tmss/tmssapp/models/specification.py
index b0d044eec09d658ad3afba4056f644b0cee8b288..aebea11ec61a25af00d0249380bc7c782861287a 100644
--- a/SAS/TMSS/src/tmss/tmssapp/models/specification.py
+++ b/SAS/TMSS/src/tmss/tmssapp/models/specification.py
@@ -631,6 +631,21 @@ class SchedulingUnitBlueprint(NamedCommon):
         else:
             return None
 
+    @property
+    def observed_end_time(self) -> datetime or None:
+        """
+        return the latest stop time of all (observation) tasks of this scheduling unit with the status observed/finished
+        """
+        observed_tasks = []
+        for task in self.task_blueprints.all():
+            if task.specifications_template.type.value == TaskType.Choices.OBSERVATION.value and\
+                    (task.status == "observed" or task.status == "finished") and task.stop_time is not None:
+                observed_tasks.append(task)
+        if observed_tasks:
+            return max(observed_tasks, key=lambda x: x.stop_time).stop_time
+        else:
+            return None
+
     @property
     def status(self):
         """
@@ -771,14 +786,19 @@ class TaskDraft(NamedCommon):
         '''
         scheduling_relations = list(self.first_scheduling_relation.all()) + list(self.second_scheduling_relation.all())
         for scheduling_relation in scheduling_relations:
-            if scheduling_relation.first.id == self.id and scheduling_relation.placement_id == "after":
+            # self._id does not always exist; fall back to self.id in that case to avoid an exception
+            if hasattr(self, '_id'):
+                id = self._id
+            else:
+                id = self.id
+            if scheduling_relation.first.id == id and scheduling_relation.placement_id == "after":
                 previous_related_task_draft = TaskDraft.objects.get(id=scheduling_relation.second.id)
                 time_offset = scheduling_relation.time_offset
                 # todo: max of several relations
                 if previous_related_task_draft.relative_stop_time:
                     return previous_related_task_draft.relative_stop_time + datetime.timedelta(seconds=time_offset)
 
-            if scheduling_relation.second.id == self.id and scheduling_relation.placement_id == "before":
+            if scheduling_relation.second.id == id and scheduling_relation.placement_id == "before":
                 previous_related_task_draft = TaskDraft.objects.get(id=scheduling_relation.first.id)
                 time_offset = scheduling_relation.time_offset
                 # todo: max of several relations
@@ -843,6 +863,7 @@ class TaskDraft(NamedCommon):
 
 
 class TaskBlueprint(NamedCommon):
+
     specifications_doc = JSONField(help_text='Schedulings for this task (IMMUTABLE).')
     do_cancel = BooleanField(help_text='Cancel this task.')
     specifications_template = ForeignKey('TaskTemplate', on_delete=CASCADE, help_text='Schema used for specifications_doc (IMMUTABLE).')
@@ -890,14 +911,19 @@ class TaskBlueprint(NamedCommon):
         '''
         scheduling_relations = list(self.first_scheduling_relation.all()) + list(self.second_scheduling_relation.all())
         for scheduling_relation in scheduling_relations:
-            if scheduling_relation.first.id == self.id and scheduling_relation.placement_id == "after":   # self.id and placement.value will hit the db, this does not
+            # self._id does not always exist; fall back to self.id in that case to avoid an exception
+            if hasattr(self, '_id'):
+                id = self._id
+            else:
+                id = self.id
+            if scheduling_relation.first.id == id and scheduling_relation.placement_id == "after":   # self.id and placement.value will hit the db, this does not
                     previous_related_task_blueprint = TaskBlueprint.objects.get(id=scheduling_relation.second.id)
                     time_offset = scheduling_relation.time_offset
                     # todo: max of several relations
                     if previous_related_task_blueprint.relative_stop_time:
                         return previous_related_task_blueprint.relative_stop_time + datetime.timedelta(seconds=time_offset)
 
-            if scheduling_relation.second.id == self.id and scheduling_relation.placement_id == "before":   # self.id and placement.value will hit the db, this does not
+            if scheduling_relation.second.id == id and scheduling_relation.placement_id == "before":   # self.id and placement.value will hit the db, this does not
                     previous_related_task_blueprint = TaskBlueprint.objects.get(id=scheduling_relation.first.id)
                     time_offset = scheduling_relation.time_offset
                     # todo: max of several relations
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/subtask_template-ingest-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/subtask_template-ingest-1.json
new file mode 100644
index 0000000000000000000000000000000000000000..d2727dbeaa138caa4e953d5d2caf11ac8a0554bc
--- /dev/null
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/subtask_template-ingest-1.json
@@ -0,0 +1,12 @@
+{
+  "$id":"http://tmss.lofar.org/api/schemas/subtasktemplate/ingest control/1#",
+  "$schema": "http://json-schema.org/draft-06/schema#",
+  "title":"ingest control",
+  "description":"This schema defines the parameters to setup and control an ingest subtask.",
+  "version":1,
+  "type": "object",
+  "properties": {
+  },
+  "required": [
+  ]
+}
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/task_template-ingest-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/task_template-ingest-1.json
new file mode 100644
index 0000000000000000000000000000000000000000..9877e438a728ce036b8619b6d849106863b93a9d
--- /dev/null
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/task_template-ingest-1.json
@@ -0,0 +1,12 @@
+{
+  "$id": "http://tmss.lofar.org/api/schemas/tasktemplate/ingest/1#",
+  "$schema": "http://json-schema.org/draft-06/schema#",
+  "title": "ingest",
+  "description": "This schema defines the parameters to setup an ingest task.",
+  "version": 1,
+  "type": "object",
+  "properties": {
+  },
+  "required": [
+  ]
+}
diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json b/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json
index be02aad2db6566ec71dad3e7730e614066a2a0f9..d9ff8dfc4cdc7b601d0741db3bf6f60083ec258e 100644
--- a/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json
+++ b/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json
@@ -121,5 +121,15 @@
   {
     "file_name": "sap_template-1.json",
     "template": "sap_template"
+  },
+  {
+    "file_name": "subtask_template-ingest-1.json",
+    "template": "subtask_template",
+    "type": "copy"
+  },
+  {
+    "file_name": "task_template-ingest-1.json",
+    "template": "task_template",
+    "type": "ingest"
   }
 ]
\ No newline at end of file
diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
index a2a10449daef88deacd7eadf8ae421e0e8891bf9..85d7bd21c54ca2ad78badd911131847c11fb3375 100644
--- a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py
@@ -85,7 +85,7 @@ class SubtaskSerializer(RelationalHyperlinkedModelSerializer):
     class Meta:
         model = models.Subtask
         fields = '__all__'
-        extra_fields = ['cluster_value', 'log_url']
+        extra_fields = ['cluster_value']
 
 
 class SubtaskInputSerializer(RelationalHyperlinkedModelSerializer):
diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py
index a82aae88bdd1ba43eec58561450f1d4c0aa2a0a7..e9bdd077090614ed1453674b73da6ff8dcb9d655 100644
--- a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py
+++ b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py
@@ -314,7 +314,7 @@ class SchedulingUnitBlueprintSerializer(RelationalHyperlinkedModelSerializer):
     class Meta:
         model = models.SchedulingUnitBlueprint
         fields = '__all__'
-        extra_fields = ['task_blueprints', 'duration', 'start_time', 'stop_time', 'status']
+        extra_fields = ['task_blueprints', 'duration', 'start_time', 'stop_time', 'status', 'observed_end_time']
 
 class SchedulingUnitBlueprintCopyToSchedulingUnitDraftSerializer(SchedulingUnitBlueprintSerializer):
     class Meta(SchedulingUnitDraftSerializer.Meta):
diff --git a/SAS/TMSS/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/src/tmss/tmssapp/subtasks.py
index 663f04a8ba2550cc432fb0daa2beb6e1e6cb2452..dcc07a38b11940e4cb6d07c566b2ffb7302e9d16 100644
--- a/SAS/TMSS/src/tmss/tmssapp/subtasks.py
+++ b/SAS/TMSS/src/tmss/tmssapp/subtasks.py
@@ -49,7 +49,8 @@ def create_subtasks_from_task_blueprint(task_blueprint: TaskBlueprint) -> [Subta
     generators_mapping = {'target observation': [create_observation_control_subtask_from_task_blueprint,
                                                  create_qafile_subtask_from_task_blueprint,
                                                  create_qaplots_subtask_from_task_blueprint],
-                          'preprocessing pipeline': [create_preprocessing_subtask_from_task_blueprint]}
+                          'preprocessing pipeline': [create_preprocessing_subtask_from_task_blueprint],
+                          'ingest': [create_ingest_subtask_from_task_blueprint]}
     generators_mapping['calibrator observation'] = generators_mapping['target observation']
 
     template_name = task_blueprint.specifications_template.name
@@ -449,6 +450,50 @@ def create_preprocessing_subtask_from_task_blueprint(task_blueprint: TaskBluepri
     return subtask
 
 
+def create_ingest_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> Subtask:
+    ''' Create a subtask for an ingest job.
+    This method implements "Instantiate subtasks" step from the "Specification Flow"
+    https://support.astron.nl/confluence/display/TMSS/Specification+Flow
+    '''
+    # step 0: check pre-requisites
+    check_prerequities_for_subtask_creation(task_blueprint)
+
+    # step 1: create subtask in defining state, with filled-in subtask_template
+    subtask_template = SubtaskTemplate.objects.get(name='ingest control')
+    default_subtask_specs = get_default_json_object_for_schema(subtask_template.schema)
+    subtask_specs = default_subtask_specs  # todo: translate specs from task to subtask once we have non-empty templates
+    cluster_name = task_blueprint.specifications_doc.get("storage_cluster", "CEP4")
+    subtask_data = {"start_time": None,
+                    "stop_time": None,
+                    "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value),
+                    "task_blueprint": task_blueprint,
+                    "specifications_template": subtask_template,
+                    "specifications_doc": subtask_specs,
+                    "priority": 1,
+                    "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value),
+                    "cluster": Cluster.objects.get(name=cluster_name)}
+    subtask = Subtask.objects.create(**subtask_data)
+
+    # step 2: create and link subtask input
+    for task_relation_blueprint in task_blueprint.produced_by.all():
+        producing_task_blueprint = task_relation_blueprint.producer
+
+        predecessor_subtasks = [st for st in producing_task_blueprint.subtasks.order_by('id').all()]
+        for predecessor_subtask in predecessor_subtasks:
+            for predecessor_subtask_output in predecessor_subtask.outputs.all():
+                SubtaskInput.objects.create(subtask=subtask,
+                                            producer=predecessor_subtask_output,
+                                            selection_doc=task_relation_blueprint.selection_doc,
+                                            selection_template=task_relation_blueprint.selection_template)
+
+    # step 3: set state to DEFINED
+    subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value)
+    subtask.save()
+
+    # done, now return the subtask, and allow the system to wait for the predecessors to be finished before we schedule this ingest
+    return subtask
+
+
 # ==== various schedule* methods to schedule a Subtasks (if possible) ====
 
 def schedule_subtask(subtask: Subtask) -> Subtask:
@@ -468,6 +513,9 @@ def schedule_subtask(subtask: Subtask) -> Subtask:
         if subtask.specifications_template.type.value == SubtaskType.Choices.QA_PLOTS.value:
             return schedule_qaplots_subtask(subtask)
 
+        if subtask.specifications_template.type.value == SubtaskType.Choices.COPY.value:
+            return schedule_copy_subtask(subtask)
+
         raise SubtaskSchedulingException("Cannot schedule subtask id=%d because there is no schedule-method known for this subtasktype=%s." %
                                          (subtask.pk, subtask.specifications_template.type.value))
     except Exception as e:
@@ -924,6 +972,59 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask):
 
     return pipeline_subtask
 
+def schedule_copy_subtask(copy_subtask: Subtask):
+    ''' Schedule the given copy_subtask
+    This method should typically be called upon the event of a predecessor (pipeline or observation) subtask being finished.
+    This method implements "Scheduling subtasks" step from the "Specification Flow"
+    https://support.astron.nl/confluence/display/TMSS/Specification+Flow
+    '''
+    # step 0: check pre-requisites
+    check_prerequities_for_scheduling(copy_subtask)
+
+    if copy_subtask.specifications_template.type.value != SubtaskType.Choices.COPY.value:
+        raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s but type should be %s" % (copy_subtask.pk,
+                                                                                                          copy_subtask.specifications_template.type,
+                                                                                                          SubtaskType.Choices.COPY.value))
+
+    # step 1: set state to SCHEDULING
+    copy_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULING.value)
+    copy_subtask.save()
+
+    # step 1a: check start/stop times
+    # not very relevant for tmss/dynamic scheduling, but the resource assigner demands it.
+    if copy_subtask.start_time is None:
+        now = datetime.utcnow()
+        logger.info("copy id=%s has no starttime. assigned default: %s", copy_subtask.pk, formatDatetime(now))
+        copy_subtask.start_time = now
+
+    if copy_subtask.stop_time is None:
+        stop_time = copy_subtask.start_time + timedelta(hours=1)
+        logger.info("copy id=%s has no stop_time. assigned default: %s", copy_subtask.pk, formatDatetime(stop_time))
+        copy_subtask.stop_time = stop_time
+
+    # step 2: link input dataproducts
+    if copy_subtask.inputs.count() == 0:
+        raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s because it has no input(s)" % (copy_subtask.pk,
+                                                                                                               copy_subtask.specifications_template.type))
+
+    # iterate over all inputs
+    for copy_subtask_input in copy_subtask.inputs.all():
+
+        # select and set input dataproducts that meet the filter defined in selection_doc
+        dataproducts = [dataproduct for dataproduct in copy_subtask_input.producer.dataproducts.all()
+                        if specifications_doc_meets_selection_doc(dataproduct.specifications_doc, copy_subtask_input.selection_doc)]
+        copy_subtask_input.dataproducts.set(dataproducts)
+
+    # todo: I assume that there is no RA involvement here? If there is, what does a copy parset look like?
+    # step 4: resource assigner (if possible)
+    #_assign_resources(copy_subtask)
+
+    # step 5: set state to SCHEDULED (so that the responsible service can pick this subtask up and run it)
+    copy_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value)
+    copy_subtask.save()
+
+    return copy_subtask
+
 # === Misc ===
 
 def create_and_schedule_subtasks_from_task_blueprint(task_blueprint: TaskBlueprint) -> [Subtask]:
diff --git a/SAS/TMSS/src/tmss/tmssapp/views.py b/SAS/TMSS/src/tmss/tmssapp/views.py
index f6ee305bd237b1e0ed695c394e554c538cddbb76..9d50d716fc12066c56f8b16055c20c7c761bb8fa 100644
--- a/SAS/TMSS/src/tmss/tmssapp/views.py
+++ b/SAS/TMSS/src/tmss/tmssapp/views.py
@@ -34,7 +34,7 @@ def subtask_parset(request, subtask_pk:int):
 
 
 def index(request):
-    return render(request, os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), '../../frontend','tmss_webapp/build/index.html'))
+    return render(request, os.path.join(os.environ.get('LOFARROOT'), 'SAS/TMSS/frontend','tmss_webapp/build/index.html'))
     #return render(request, "../../../frontend/frontend_poc/build/index.html")
 
 
diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
index feb52d5dbfc4fe8773b1d4c7634c8c9488b72351..a4b8efc16e262703604531826107d21a093ed558 100644
--- a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
+++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py
@@ -16,9 +16,10 @@ from drf_yasg.utils import swagger_auto_schema
 from drf_yasg.inspectors import SwaggerAutoSchema
 
 from rest_framework.decorators import action
-from django.http import HttpResponse, JsonResponse
+from django.http import HttpResponse, JsonResponse, HttpResponseRedirect, HttpResponseNotFound
 from rest_framework.response import Response as RestResponse
 
+from lofar.common import isProductionEnvironment, isTestEnvironment
 from lofar.sas.tmss.tmss.tmssapp.viewsets.lofar_viewset import LOFARViewSet
 from lofar.sas.tmss.tmss.tmssapp import models
 from lofar.sas.tmss.tmss.tmssapp import serializers
@@ -210,6 +211,49 @@ class SubtaskViewSet(LOFARViewSet):
         return RestResponse(serializer.data)
 
 
+    @swagger_auto_schema(responses={302: 'A redirect url to the task log for this Subtask.',
+                                    403: 'forbidden'},
+                         operation_description="Get the task log for this Subtask.")
+    @action(methods=['get'], detail=True)
+    def task_log(self, request, pk=None):
+        """
+        Return a redirect to the pipeline log in case of a pipeline, or to the
+        COBALT error log in case of an observation.
+        """
+        subtask = get_object_or_404(models.Subtask, pk=pk)
+
+        # redirect to cobalt log served at proxy.lofar.eu
+        if subtask.specifications_template.type.value == models.SubtaskType.Choices.OBSERVATION.value:
+            url = "https://proxy.lofar.eu/inspect/%s/rtcp-%s.errors" % (subtask.id, subtask.id)
+            return HttpResponseRedirect(redirect_to=url)
+
+        # redirect to pipeline log served via webscheduler
+        if subtask.specifications_template.type.value == models.SubtaskType.Choices.PIPELINE.value:
+            # import here and not at top of module to "loosen" dependency on external packages, such as in this case the RADB RPC.
+            from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC
+            # Get RADBID, subtask must be at least 'scheduled' to exist in radb
+            try:
+                with RADBRPC.create(timeout=2) as radbrpc:
+                    radb_id = radbrpc.getTask(tmss_id=subtask.id)
+
+                    if radb_id is None:
+                        return HttpResponseNotFound(
+                            content='No RADB task found for subtask id=%s type="%s" status=%s. Cannot redirect to pipeline log.' % (
+                                subtask.id, subtask.specifications_template.type.value, subtask.state))
+
+                    WEBSCHEDULER_URL = "http://scu001.control.lofar:7412" if isProductionEnvironment() else \
+                                       "http://scu199.control.lofar:7412" if isTestEnvironment() else \
+                                       "http://localhost:7412"
+
+                    url = "%s/tasks/%s/log.html" % (WEBSCHEDULER_URL, radb_id)
+                    return HttpResponseRedirect(redirect_to=url)
+            except Exception as e:
+                return HttpResponseNotFound(content='No RADB task found for subtask id=%s type="%s". Cannot redirect to pipeline log.' % (subtask.id, subtask.specifications_template.type.value))
+
+        # unknown log
+        return HttpResponseNotFound(content='No log (url) available for subtask id=%s type="%s"' % (subtask.id, subtask.specifications_template.type.value) )
+
+
     @swagger_auto_schema(responses={200: 'The input dataproducts of this subtask.',
                                     403: 'forbidden'},
                          operation_description="Get the input dataproducts of this subtask.")
diff --git a/SAS/TMSS/test/t_scheduling.py b/SAS/TMSS/test/t_scheduling.py
index 9c9379197a7370c75c5cd2e7120235163710ce8f..c06100247e32d6a77367b894ce23f11b2704290d 100755
--- a/SAS/TMSS/test/t_scheduling.py
+++ b/SAS/TMSS/test/t_scheduling.py
@@ -196,6 +196,49 @@ class SchedulingTest(unittest.TestCase):
             self.assertEqual('scheduled', subtask['state_value'])
             self.assertEqual('scheduled', tmss_test_env.ra_test_environment.radb.getTask(tmss_id=pipe_subtask['id'])['status'])
 
+    def test_schedule_ingest_subtask(self):
+        with tmss_test_env.create_tmss_client() as client:
+            cluster_url = client.get_path_as_json_object('/cluster/1')['url']
+
+            # setup: first create an observation, so the ingest can have input.
+            obs_subtask_template = client.get_subtask_template("observation control")
+            obs_spec = get_default_json_object_for_schema(obs_subtask_template['schema'])
+            obs_spec['stations']['digital_pointings'][0]['subbands'] = [0]
+
+            obs_subtask_data = test_data_creator.Subtask(specifications_template_url=obs_subtask_template['url'],
+                                                         specifications_doc=obs_spec,
+                                                         cluster_url=cluster_url,
+                                                         task_blueprint_url=test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/'))
+            obs_subtask = test_data_creator.post_data_and_get_response_as_json_object(obs_subtask_data, '/subtask/')
+            obs_subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url']), '/subtask_output/')
+            test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(filename="L%s_SB000.MS"%obs_subtask['id'],
+                                                    subtask_output_url=obs_subtask_output_url), '/dataproduct/')
+
+            # now create the ingest...
+            ingest_subtask_template = client.get_subtask_template("ingest control")
+            ingest_spec = get_default_json_object_for_schema(ingest_subtask_template['schema'])
+
+            ingest_subtask_data = test_data_creator.Subtask(specifications_template_url=ingest_subtask_template['url'],
+                                                          specifications_doc=ingest_spec,
+                                                          task_blueprint_url=obs_subtask['task_blueprint'],
+                                                          cluster_url=cluster_url)
+            ingest_subtask = test_data_creator.post_data_and_get_response_as_json_object(ingest_subtask_data, '/subtask/')
+
+            # ...and connect it to the observation
+            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskInput(subtask_url=ingest_subtask['url'], subtask_output_url=obs_subtask_output_url), '/subtask_input/')
+            test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=ingest_subtask['url']), '/subtask_output/')
+
+            for predecessor in client.get_subtask_predecessors(ingest_subtask['id']):
+                client.set_subtask_status(predecessor['id'], 'finished')
+            client.set_subtask_status(ingest_subtask['id'], 'defined')
+
+            # trigger
+            subtask = client.schedule_subtask(ingest_subtask['id'])
+
+            # assert
+            self.assertEqual('scheduled', subtask['state_value'])
+            self.assertEqual(models.Subtask.objects.get(id=ingest_subtask['id']).inputs.first().dataproducts.count(), 1)
+
 
     def test_schedule_schedulingunit_enough_resources_available(self):
         '''similar test as test_schedule_pipeline_subtask_with_enough_resources_available, but now created from a scheduling_unit'''
@@ -371,27 +414,37 @@ class SAPTest(unittest.TestCase):
         self.assertEqual(dp2_in.sap, dp2_out.sap)
 
 
-
-class CreationFromSchedulingUnitDraft(unittest.TestCase):
+class TestWithUC1Specifications(unittest.TestCase):
     """
-    From scheduling_unit_draft test:
-     create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> models.SchedulingUnitBlueprint:
-    This requires Resource Assigner testenvironment being alive
+    The setup creates a Scheduling Unit Draft from the UC1 strategy template and calls
+    'create_task_blueprints_and_subtasks_from_scheduling_unit_draft', which is thereby
+    implicitly tested.
+    Create Task Blueprints and Subtasks:
+        Observation Task 'Calibrator Observation 1'
+            SubTask Observation Control
+            SubTask QA File
+            SubTask QA Plots
+        Pipeline Task 'Pipeline 1'
+            SubTask Pipeline Control
+        Observation Task 'Target Observation'
+            SubTask Observation Control
+            SubTask QA File
+            SubTask QA Plots
+        Pipeline Task 'Pipeline target1'
+            SubTask Pipeline Control
+        Pipeline Task 'Pipeline target2'
+            SubTask Pipeline Control
+        Observation Task 'Calibrator Observation 2'
+            SubTask Observation Control
+            SubTask QA File
+            SubTask QA Plots
+        Pipeline Task 'Pipeline 2'
+            SubTask Pipeline Control
+
+    Note that this test requires the Resource Assigner test environment to be alive
     """
-
-    def test_create_task_blueprints_and_subtasks_from_scheduling_unit_draft_with_UC1_requirements(self):
-        """
-        Create Scheduling Unit Draft with requirements_doc (read from file)
-        Create Task Blueprints and Subtasks
-        Check if tasks (7) are created:
-           Calibration 1     : 1 Observation and 1 Pipeline task
-           Target Observation: 1 Observation and 2 Pipeline tasks
-           Calibration 2     : 1 Observation and 1 Pipeline task
-        Check if subtasks (13) are created:
-           Every Observation Task: 3 subtasks (1 control, 2 QA)
-           Every Pipeline Task:    1 subtasks (1 control)
-           makes 3x3 + 4x1 = 13
-        """
+    @classmethod
+    def setUpClass(cls) -> None:
         strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines")
 
         scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(
@@ -407,20 +460,81 @@ class CreationFromSchedulingUnitDraft(unittest.TestCase):
         create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
 
         scheduling_unit_draft.refresh_from_db()
-        task_drafts = scheduling_unit_draft.task_drafts.all()
-        self.assertEqual(7, len(task_drafts))
-
-        scheduling_unit_blueprints = scheduling_unit_draft.scheduling_unit_blueprints.all()
-        self.assertEqual(1, len(scheduling_unit_blueprints))
+        cls.task_drafts = scheduling_unit_draft.task_drafts.all()
+        cls.scheduling_unit_blueprints = scheduling_unit_draft.scheduling_unit_blueprints.all()
+        cls.scheduling_unit_blueprint = cls.scheduling_unit_blueprints[0]
+        cls.task_blueprints = cls.scheduling_unit_blueprint.task_blueprints.all()
 
-        scheduling_unit_blueprint = scheduling_unit_blueprints[0]
-        task_blueprints = scheduling_unit_blueprint.task_blueprints.all()
-        self.assertEqual(7, len(task_blueprints))
+    def test_create_task_blueprints_and_subtasks_from_scheduling_unit_draft(self):
+        """
+        Create Task Blueprints and Subtasks (class setup)
+        Check if tasks (7) are created:
+           Calibration 1     : 1 Observation and 1 Pipeline task
+           Target Observation: 1 Observation and 2 Pipeline tasks
+           Calibration 2     : 1 Observation and 1 Pipeline task
+        Check if subtasks (13) are created:
+           Every Observation Task: 3 subtasks (1 control, 2 QA)
+           Every Pipeline Task:    1 subtask (1 control)
+           makes 3x3 + 4x1 = 13
+        """
+        self.assertEqual(7, len(self.task_drafts))
+        self.assertEqual(1, len(self.scheduling_unit_blueprints))
+        self.assertEqual(7, len(self.task_blueprints))
         total_subtasks = 0
-        for task_blueprint in task_blueprints:
+        for task_blueprint in self.task_blueprints:
             total_subtasks += task_blueprint.subtasks.count()
         self.assertEqual(13, total_subtasks)
 
+    def test_relative_times(self):
+        """
+        Create Task Blueprints and Subtasks (class setup)
+        Set the start and stop times of every subtask to the times scheduled for its taskBlueprint
+        Set all subtask states to 'finished'
+        Check the observed_end_time of the SchedulingUnitBlueprint
+        Check the relative_start/stop_time of the SchedulingUnitBlueprint
+           start = 0
+           stop = 8 hours (Target) + 2x10min (calibrators) + 2x1min (offset between observations) = 8h22min
+        """
+        DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
+        test_timeschedule = {
+            # name of taskBlueprint       start_time             stop_time
+            "Calibrator Observation 1": ["2020-11-01 08:00:00", "2020-11-01 08:10:00"],
+            "Pipeline 1":               ["2020-11-01 08:20:00", "2020-11-01 08:22:00"],
+            "Target Observation":       ["2020-11-01 08:30:00", "2020-11-01 18:00:00"],
+            "Pipeline target1":         ["2020-11-01 18:30:00", "2020-11-01 18:35:00"],
+            "Pipeline target2":         ["2020-11-01 18:40:00", "2020-11-01 18:45:00"],
+            "Calibrator Observation 2": ["2020-11-01 19:00:00", "2020-11-01 19:20:00"],
+            "Pipeline 2":               ["2020-11-01 19:30:00", "2020-11-01 19:40:00"]
+        }
+        # Apply the time schedule: set the start/stop times and mark all subtasks as finished
+        for name, times in test_timeschedule.items():
+            task_blueprint = next(tb for tb in self.task_blueprints if tb.name == name)
+            for subtask in task_blueprint.subtasks.all():
+                subtask.state = models.SubtaskState.objects.get(value="finished")
+                subtask.stop_time = datetime.strptime(times[1], DATETIME_FORMAT)
+                subtask.start_time = datetime.strptime(times[0], DATETIME_FORMAT)
+                subtask.save()
+
+        # Check times
+        self.assertEqual("2020-11-01 19:20:00", self.scheduling_unit_blueprint.observed_end_time.strftime("%Y-%m-%d %H:%M:%S"))
+        self.assertEqual(timedelta(0), self.scheduling_unit_blueprint.relative_start_time)
+        self.assertEqual(timedelta(hours=8, minutes=22), self.scheduling_unit_blueprint.relative_stop_time)
+
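+        # note: the expected relative times below span 8h22m, not the 11h20m wall-clock span set above,
+        # since they presumably follow the durations specified in the UC1 template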
+        for task_blueprint in self.task_blueprints:
+            if task_blueprint.name == "Calibrator Observation 1":
+                self.assertEqual(timedelta(0), task_blueprint.relative_start_time)
+                self.assertEqual(timedelta(minutes=10), task_blueprint.relative_stop_time)
+            elif task_blueprint.name == "Target Observation":
+                self.assertEqual(timedelta(minutes=11), task_blueprint.relative_start_time)
+                self.assertEqual(timedelta(hours=8, minutes=11), task_blueprint.relative_stop_time)
+            elif task_blueprint.name == "Calibrator Observation 2":
+                self.assertEqual(timedelta(hours=8, minutes=12), task_blueprint.relative_start_time)
+                self.assertEqual(timedelta(hours=8, minutes=22), task_blueprint.relative_stop_time)
+            else:
+                self.assertEqual(timedelta(0), task_blueprint.relative_start_time)
+                self.assertEqual(timedelta(0), task_blueprint.relative_stop_time)
+
 
 if __name__ == "__main__":
     os.environ['TZ'] = 'UTC'
diff --git a/SAS/TMSS/test/t_subtasks.py b/SAS/TMSS/test/t_subtasks.py
index cc14049e65a0bc1832812fbd0954b5fc42dd962e..b9021a86f94d25f5fcccd620daf7705c07c8d88e 100755
--- a/SAS/TMSS/test/t_subtasks.py
+++ b/SAS/TMSS/test/t_subtasks.py
@@ -34,12 +34,7 @@ tmss_test_env.populate_schemas()
 
 
 from lofar.sas.tmss.test.tmss_test_data_django_models import *
-
-# import and setup rest test data creator
-from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
-
 from lofar.sas.tmss.tmss.tmssapp import models
-
 from lofar.sas.tmss.tmss.tmssapp.subtasks import *
 
 
@@ -283,6 +278,25 @@ class SubTasksCreationFromTaskBluePrintCalibrator(unittest.TestCase):
         self.assertEqual(2.222, subtask.specifications_doc['stations']['analog_pointing']['angle2'])
 
 
+class SubTaskCreationFromTaskBlueprintIngest(unittest.TestCase):
+
+    def test_create_subtask_from_task_blueprint_ingest(self):
+        """
+        Test that an ingest task blueprint can be turned into an ingest control subtask
+        """
+
+        # setup
+        ingest_task_blueprint = create_task_blueprint_object_for_testing(task_template_name="ingest")
+
+        # trigger
+        subtask = create_ingest_subtask_from_task_blueprint(ingest_task_blueprint)
+
+        # assert
+        self.assertEqual("defined", str(subtask.state))
+        self.assertEqual("ingest control", str(subtask.specifications_template.name))
+        self.assertEqual("copy", str(subtask.specifications_template.type))
+
+
 class SubtaskInputSelectionFilteringTest(unittest.TestCase):
 
     def setUp(self) -> None:
@@ -328,6 +342,31 @@ class SubtaskInputSelectionFilteringTest(unittest.TestCase):
         selection = {'sap': ['target0'], 'is_relevant': True}
         self.assertFalse(specifications_doc_meets_selection_doc(specs, selection))
 
+    def test_links_to_log_files(self):
+        """
+        Test redirect urls to subtask logfiles.
+        """
+
+        # the link to log files is a 'view' on the subtask, and NOT part of the subtask model.
+        # the link is served as an action on the REST API, redirecting to externally served log files.
+        # check/test the redirect urls.
+        with tmss_test_env.create_tmss_client() as client:
+            # observation
+            subtask_observation = create_subtask_object_for_testing("observation", "defined")
+            response = client.session.get(url=client.get_full_url_for_path('/subtask/%s/task_log' % (subtask_observation.id,)), allow_redirects=False)
+            self.assertTrue(response.is_redirect)
+            self.assertIn("proxy.lofar.eu", response.headers['Location'])
+            self.assertIn("rtcp-%s.errors" % subtask_observation.id, response.headers['Location'])
+
+            # pipeline
+            subtask_pipeline = create_subtask_object_for_testing("pipeline", "defined")
+            response = client.session.get(url=client.get_full_url_for_path('/subtask/%s/task_log' % (subtask_pipeline.id,)), allow_redirects=False)
+            self.assertEqual(404, response.status_code) # no log (yet) for unscheduled pipeline
+
+            # other (qa_plots)
+            subtask_qa_plots = create_subtask_object_for_testing("qa_plots", "defined")
+            response = client.session.get(url=client.get_full_url_for_path('/subtask/%s/task_log' % (subtask_qa_plots.id,)), allow_redirects=False)
+            self.assertEqual(404, response.status_code) # no log for other subtask types
+
 
 class SettingTest(unittest.TestCase):
 
@@ -340,22 +379,6 @@ class SettingTest(unittest.TestCase):
         with self.assertRaises(SubtaskSchedulingException):
             schedule_observation_subtask(obs_st)
 
-    def test_links_to_log_files(self):
-        """
-        Test if the links to logging of a subtasks is correct:
-        For an observation the subtaskid is in the logging url
-        For a pipeline the radbid of the subtaskid is in the link, BUT because RA is not started is should
-        return "not available"
-        All other subtask types (like qa) should have an empty string (no logging)
-        """
-        subtask_pipeline = create_subtask_object_for_testing("pipeline", "defined")
-        subtask_qa_plots = create_subtask_object_for_testing("qa_plots", "defined")
-        subtask_observation = create_subtask_object_for_testing("observation", "defined")
-
-        self.assertIn("proxy.lofar.eu", subtask_observation.log_url)
-        self.assertIn("rtcp-%s.errors" % subtask_observation.id, subtask_observation.log_url)
-        self.assertIn("not available", subtask_pipeline.log_url)
-        self.assertEqual("", subtask_qa_plots.log_url)
 
 
 if __name__ == "__main__":