diff --git a/CMake/LofarPackageList.cmake b/CMake/LofarPackageList.cmake index d4f6966e12814caac01dda87311fdbea2535433f..e9c1c4bc0f8d36043bf178fbdbe02f3884f5fc69 100644 --- a/CMake/LofarPackageList.cmake +++ b/CMake/LofarPackageList.cmake @@ -1,7 +1,7 @@ # - Create for each LOFAR package a variable containing the absolute path to # its source directory. # -# Generated by gen_LofarPackageList_cmake.sh at do 28 mei 2020 11:22:44 CEST +# Generated by gen_LofarPackageList_cmake.sh at vr 27 nov 2020 16:08:48 CET # # ---- DO NOT EDIT ---- # @@ -210,6 +210,7 @@ if(NOT DEFINED LOFAR_PACKAGE_LIST_INCLUDED) set(TMSSSchedulingService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/scheduling) set(TMSSFeedbackHandlingService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/feedback_handling) set(TMSSPostgresListenerService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/tmss_postgres_listener) + set(TMSSWorkflowService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/workflow_service) set(TriggerEmailServiceCommon_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TriggerEmailService/Common) set(TriggerEmailServiceServer_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TriggerEmailService/Server) set(CCU_MAC_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SubSystems/CCU_MAC) diff --git a/SAS/TMSS/client/lib/populate.py b/SAS/TMSS/client/lib/populate.py index bb39f6967617e077aa4c8d00f425534cbfc4d95c..63a148b2eeaa2cb59b7f2a77ecc9b9405d67283e 100644 --- a/SAS/TMSS/client/lib/populate.py +++ b/SAS/TMSS/client/lib/populate.py @@ -72,7 +72,7 @@ def populate_schemas(schema_dir: str=None, templates_filename: str=None): else: template['schema'] = json_schema - logger.info("Uploading template name='%s' version='%s'", name, version) + logger.info("Uploading template with name='%s' version='%s' template='%s' ", name, version, template) client.post_template(template_path=template_name, name=name, diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/CustomPageSpinner.js b/SAS/TMSS/frontend/tmss_webapp/src/components/CustomPageSpinner.js new file mode 100644 index 0000000000000000000000000000000000000000..cc794fe8ee4f3178c5d61a281cb6357abdde97ca --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/CustomPageSpinner.js @@ -0,0 +1,20 @@ +import React, {Component} from 'react'; +import { ProgressSpinner } from 'primereact/progressspinner'; + +/** + * Custom spinner component for the whole page. 
+ */ +export class CustomPageSpinner extends Component { + render() { + return ( + <> + {this.props.visible && + <div style={{position: 'fixed', left:0, top: 0, width:'100%', height:'100%', backgroundColor: 'grey', zIndex: 9999, opacity: '0.5'}}> + <span style={{position: 'absolute', top: '50%', left:'50%', '-ms-transform': 'translateY(-50%)', transform: 'translateY(-50%)', backgroundColor:'white' }}> + <ProgressSpinner /></span> + </div> + } + </> + ); + } +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js b/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js index 1a4f0d4290cde1e5be6ce84333dad9622678693a..7cc46ca9851a7529b85dd822b454b4164aa53c20 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js @@ -40,17 +40,25 @@ function Jeditor(props) { if (property["$ref"] && !property["$ref"].startsWith("#")) { // 1st level reference of the object const refUrl = property["$ref"]; let newRef = refUrl.substring(refUrl.indexOf("#")); - if (refUrl.endsWith("/pointing")) { // For type pointing - schema.definitions["pointing"] = (await $RefParser.resolve(refUrl)).get(newRef); - property["$ref"] = newRef; - } else { // General object to resolve if any reference in child level - property = await resolveSchema((await $RefParser.resolve(refUrl)).get(newRef)); - } + //>>>>>> TODO if pointin works fine, remove these commented lines + // if (refUrl.endsWith("/pointing")) { // For type pointing + // schema.definitions["pointing"] = (await $RefParser.resolve(refUrl)).get(newRef); + // property["$ref"] = newRef; + // } else { // General object to resolve if any reference in child level + // property = await resolveSchema((await $RefParser.resolve(refUrl)).get(newRef)); + // } + let defKey = refUrl.substring(refUrl.lastIndexOf("/")+1); + schema.definitions[defKey] = (await $RefParser.resolve(refUrl)).get(newRef); + property["$ref"] = newRef; } else if(property["type"] === "array") { // reference in array items definition let resolvedItems = await resolveSchema(property["items"]); schema.definitions = {...schema.definitions, ...resolvedItems.definitions}; delete resolvedItems['definitions']; property["items"] = resolvedItems; + } else if(property["type"] === "object" && property.properties) { + property = await resolveSchema(property); + schema.definitions = {...schema.definitions, ...property.definitions}; + delete property['definitions']; } properties[propertyKey] = property; } @@ -64,19 +72,34 @@ function Jeditor(props) { } else if (schema["$ref"] && !schema["$ref"].startsWith("#")) { //reference in oneOf list item const refUrl = schema["$ref"]; let newRef = refUrl.substring(refUrl.indexOf("#")); - if (refUrl.endsWith("/pointing")) { - schema.definitions["pointing"] = (await $RefParser.resolve(refUrl)).get(newRef); - schema["$ref"] = newRef; - } else { - schema = await resolveSchema((await $RefParser.resolve(refUrl)).get(newRef)); + //>>>>>> TODO: If pointing works fine, remove these commented lines + // if (refUrl.endsWith("/pointing")) { + // schema.definitions["pointing"] = (await $RefParser.resolve(refUrl)).get(newRef); + // schema["$ref"] = newRef; + // } else { + // schema = await resolveSchema((await $RefParser.resolve(refUrl)).get(newRef)); + // } + let defKey = refUrl.substring(refUrl.lastIndexOf("/")+1); + schema.definitions[defKey] = (await $RefParser.resolve(refUrl)).get(newRef); + if (schema.definitions[defKey].properties) { + 
let property = await resolveSchema(schema.definitions[defKey]); + schema.definitions = {...schema.definitions, ...property.definitions}; + delete property['definitions']; + schema.definitions[defKey] = property; } + schema["$ref"] = newRef; } return schema; } const init = async () => { - const element = document.getElementById('editor_holder'); + const element = document.getElementById(props.id?props.id:'editor_holder'); let schema = await resolveExternalRef(); + /** If any formatting is done at the parent/implementation component pass the resolved schema + and get the formatted schema like adding validation type, field ordering, etc.,*/ + if (props.defintionFormatter) { + props.defintionFormatter(schema); + } pointingProps = []; // Customize the pointing property to capture angle1 and angle2 to specified format for (const definitionKey in schema.definitions) { @@ -541,7 +564,7 @@ function Jeditor(props) { return ( <React.Fragment> - <div id='editor_holder'></div> + <div id={props.id?props.id:'editor_holder'}></div> </React.Fragment> ); }; diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js b/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js index 00f02fc49b31ac94ad2bae9182a4cc2a5291df50..3428a67afc894027d9fb81d49d12ededd102db17 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js @@ -1,5 +1,5 @@ import React, {useRef, useState } from "react"; -import { useSortBy, useTable, useFilters, useGlobalFilter, useAsyncDebounce, usePagination } from 'react-table' +import { useSortBy, useTable, useFilters, useGlobalFilter, useAsyncDebounce, usePagination, useRowSelect } from 'react-table' import matchSorter from 'match-sorter' import _ from 'lodash'; import moment from 'moment'; @@ -15,13 +15,15 @@ import { Button } from "react-bootstrap"; import { InputNumber } from "primereact/inputnumber"; let tbldata =[], filteredData = [] ; +let selectedRows = []; let isunittest = false; let showTopTotal = true; let showGlobalFilter = true; let showColumnFilter = true; let allowColumnSelection = true; +let allowRowSelection = false; let columnclassname =[]; -let parentCallbackFunction; +let parentCallbackFunction, parentCBonSelection; // Define a default UI for filtering function GlobalFilter({ @@ -406,6 +408,7 @@ const defaultColumn = React.useMemo( setHiddenColumns, gotoPage, setPageSize, + selectedFlatRows, } = useTable( { columns, @@ -419,7 +422,8 @@ const defaultColumn = React.useMemo( useFilters, useGlobalFilter, useSortBy, - usePagination + usePagination, + useRowSelect ); React.useEffect(() => { setHiddenColumns( @@ -478,6 +482,15 @@ const defaultColumn = React.useMemo( if (parentCallbackFunction) { parentCallbackFunction(filteredData); } + + /* Select only rows than can be selected. 
This is required when ALL is selected */ + selectedRows = _.filter(selectedFlatRows, selectedRow => { return (selectedRow.original.canSelect===undefined || selectedRow.original.canSelect)}); + /* Take only the original values passed to the component */ + selectedRows = _.map(selectedRows, 'original'); + /* Callback the parent function if available to pass the selected records on selection */ + if (parentCBonSelection) { + parentCBonSelection(selectedRows) + } return ( <> @@ -524,10 +537,12 @@ const defaultColumn = React.useMemo( setGlobalFilter={setGlobalFilter} /> } - </div> - { showTopTotal && + </div> + { showTopTotal && filteredData.length === data.length && <div className="total_records_top_label"> <label >Total records ({data.length})</label></div> } + { showTopTotal && filteredData.length < data.length && + <div className="total_records_top_label" ><label >Filtered {filteredData.length} from {data.length}</label></div>} </div> <div className="tmss-table table_container"> @@ -575,7 +590,10 @@ const defaultColumn = React.useMemo( </table> </div> <div className="pagination p-grid" > - <div className="total_records_bottom_label" ><label >Total records ({data.length})</label></div> + {filteredData.length === data.length && + <div className="total_records_bottom_label" ><label >Total records ({data.length})</label></div>} + {filteredData.length < data.length && + <div className="total_records_bottom_label" ><label >Filtered {filteredData.length} from {data.length}</label></div>} <div> <Paginator rowsPerPageOptions={[10,25,50,100]} first={currentpage} rows={currentrows} totalRecords={rows.length} onPageChange={onPagination}></Paginator> </div> @@ -612,12 +630,14 @@ function ViewTable(props) { // Data to show in table tbldata = props.data; parentCallbackFunction = props.filterCallback; + parentCBonSelection = props.onRowSelection; isunittest = props.unittest; columnclassname = props.columnclassname; showTopTotal = props.showTopTotal===undefined?true:props.showTopTotal; showGlobalFilter = props.showGlobalFilter===undefined?true:props.showGlobalFilter; showColumnFilter = props.showColumnFilter===undefined?true:props.showColumnFilter; allowColumnSelection = props.allowColumnSelection===undefined?true:props.allowColumnSelection; + allowRowSelection = props.allowRowSelection===undefined?false:props.allowRowSelection; // Default Header to show in table and other columns header will not show until user action on UI let defaultheader = props.defaultcolumns; let optionalheader = props.optionalcolumns; @@ -631,6 +651,33 @@ function ViewTable(props) { let columns = []; let defaultdataheader = Object.keys(defaultheader[0]); let optionaldataheader = Object.keys(optionalheader[0]); + + /* If allowRowSelection property is true for the component, add checkbox column as 1st column. 
+ If the record has property to select, enable the checkbox */ + if (allowRowSelection) { + columns.push({ + Header: ({ getToggleAllRowsSelectedProps }) => { return ( + <div> + <IndeterminateCheckbox {...getToggleAllRowsSelectedProps()} style={{width:'15px', height:'15px'}}/> + </div> + )}, + id:'Select', + accessor: props.keyaccessor, + Cell: ({ row }) => { return ( + <div> + {(row.original.canSelect===undefined || row.original.canSelect) && + <IndeterminateCheckbox {...row.getToggleRowSelectedProps()} style={{width:'15px', height:'15px'}}/> + } + {row.original.canSelect===false && + <input type="checkbox" checked={false} disabled style={{width:'15px', height:'15px'}}></input> + } + </div> + )}, + disableFilters: true, + disableSortBy: true, + isVisible: defaultdataheader.includes(props.keyaccessor), + }); + } if(props.showaction === 'true') { columns.push({ diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/components/CustomDialog.js b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/CustomDialog.js new file mode 100644 index 0000000000000000000000000000000000000000..ea013dca232d1dd5a0cc4e1dcda11542f79af1ce --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/CustomDialog.js @@ -0,0 +1,63 @@ +import React, {Component} from 'react'; +import { Button } from 'primereact/button'; +import { Dialog } from 'primereact/dialog'; + +/** + * Custom Dialog component to get user input before doing something. It can be warning information or confirmation or error message and based on the + * user's input will proceed to next step. + */ +export class CustomDialog extends Component { + + constructor(props) { + super(props); + this.state = { + visible: props.visible===undefined?true:props.visible + } + } + + render() { + const isConfirm = this.props.type.toLowerCase()==='confirmation'; + const isWarning = this.props.type.toLowerCase()==='warning'; + const isSuccess = this.props.type.toLowerCase()==='success'; + // const isError = this.props.type.toLowerCase()==='error'; + let iconClass = isConfirm?"pi-question-circle pi-warning":(isWarning?"pi-info-circle pi-warning": (isSuccess?"pi-check-circle pi-success":"pi-times-circle pi-danger")); + return ( + <div className="p-grid" data-testid="confirm_dialog"> + <Dialog header={this.props.header} visible={this.props.visible} style={{width: this.props.width?this.props.width:'25vw'}} + inputId="confirm_dialog" + modal={true} onHide={this.props.onClose} + footer={<div> + {/* Action buttons based on 'type' props. 
If 'actions' passed as props, then type is ignored */} + {!this.props.actions && + <> + {isConfirm && + <Button key="back" onClick={this.props.onCancel} label="No" /> + } + <Button key="submit" type="primary" onClick={this.props.onSubmit?this.props.onSubmit:this.props.onClose} label={isConfirm?'Yes':'Ok'} /> + </> + } + {/* Action button based on the 'actions' props */} + {this.props.actions && this.props.actions.map((action, index) => { + return ( + <Button key={action.id} label={action.title} onClick={action.callback} />); + })} + </div> + } > + <div className="p-grid"> + <div className="col-lg-2 col-md-2 col-sm-2"> + <span style={{position: 'absolute', top: '50%', '-ms-transform': 'translateY(-50%)', transform: 'translateY(-50%)'}}> + <i className={`pi pi-large ${iconClass}`}></i> + </span> + </div> + <div className="col-lg-10 col-md-10 col-sm-10"> + {/* Display message passed */} + {this.props.message?this.props.message:""} + {/* Render subcomponent passed as function */} + {this.props.content?this.props.content():""} + </div> + </div> + </Dialog> + </div> + ); + } +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/components/PageHeader.js b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/PageHeader.js index fb95ec75a094fc8a2d86bdf74ba78fab8c885a39..02de326d2c7ab3829d12003304e28da3f77fa090 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/layout/components/PageHeader.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/PageHeader.js @@ -42,7 +42,7 @@ export default ({ title, subTitle, actions, ...props}) => { {(actions || []).map((action, index) =>{ if (action.type === 'button') { return ( - <button className="p-link" key={index}> + <button className="p-link" key={index} title={action.title || ''}> <i className={`fa ${action.icon}`} onMouseOver={(e) => onButtonMouseOver(e, action)} onClick={(e) => onButtonClick(e, action)} /> diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js index 1bcbcefcbd4a7cd6a5bbb2c2cf96eb7c4b7c2800..fb4232f12ba8e52cbbb9851ef37f428012dba601 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js @@ -117,20 +117,31 @@ export default (props) => { list.push('disable-field'); } ref.editors['root.time.at'].container.className = list.join(' '); - Array.prototype.slice.call(ref.editors['root.time.at'].control.getElementsByTagName('input')).forEach(input => input.disabled = true); - Array.prototype.slice.call(ref.editors['root.time.at'].control.getElementsByTagName('button')).forEach(button => button.disabled = true); + if (ref.editors['root.time.at'].control) { + Array.prototype.slice.call(ref.editors['root.time.at'].control.getElementsByTagName('input')).forEach(input => input.disabled = true); + Array.prototype.slice.call(ref.editors['root.time.at'].control.getElementsByTagName('button')).forEach(button => button.disabled = true); + } } else { ref.editors['root.time.at'].container.className = ref.editors['root.time.at'].container.className.replace('disable-field', ''); - Array.prototype.slice.call(ref.editors['root.time.at'].control.getElementsByTagName('input')).forEach(input => input.disabled = false); - Array.prototype.slice.call(ref.editors['root.time.at'].control.getElementsByTagName('button')).forEach(button => button.disabled = false); + if (ref.editors['root.time.at'].control) { + 
Array.prototype.slice.call(ref.editors['root.time.at'].control.getElementsByTagName('input')).forEach(input => input.disabled = false); + Array.prototype.slice.call(ref.editors['root.time.at'].control.getElementsByTagName('button')).forEach(button => button.disabled = false); + } } if (props.callback) { + // Remove 'time' fields if it is empty + for (const key of _.keys(jsonOutput.time)) { + if (!jsonOutput.time[key]) { + delete jsonOutput.time[key]; + } + } props.callback(jsonOutput, errors); } } const constraintStrategy = () => { - const constraintTemplate = { ...props.constraintTemplate } + // const constraintTemplate = { ...props.constraintTemplate } + const constraintTemplate = _.cloneDeep(props.constraintTemplate); if (constraintTemplate.schema) { configureProperties(constraintTemplate.schema.properties); configureDefinitions(constraintTemplate.schema); @@ -183,13 +194,15 @@ export default (props) => { return ( <> {constraintSchema && React.createElement(Jeditor, { + id: "constraint_editor", title: "Scheduling Constraints specification", schema: constraintSchema.schema, callback: onEditForm, initValue: initialValue, disabled: props.disable, formatOutput: props.formatOutput, - parentFunction: parentFunction + parentFunction: parentFunction, + defintionFormatter: configureDefinitions })} </> ); diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js index 570ca6388bd7f10954ccb6fe2731e0b424f92435..f64b1133eb4b63b6787e5913e8e4a2f75f48452f 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/SchedulingUnitList.js @@ -56,6 +56,8 @@ class SchedulingUnitList extends Component{ }], defaultSortColumn: [{id: "Name", desc: false}], } + this.onRowSelection = this.onRowSelection.bind(this); + this.reloadData = this.reloadData.bind(this); } async getSchedulingUnitList () { @@ -86,6 +88,7 @@ class SchedulingUnitList extends Component{ blueP['created_at'] = moment(blueP['created_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss"); blueP['updated_at'] = moment(blueP['updated_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss"); blueP.project = project.name; + blueP.canSelect = false; return blueP; }); output.push(...blueprintdata); @@ -95,11 +98,13 @@ class SchedulingUnitList extends Component{ scheduleunit['created_at'] = moment(scheduleunit['created_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss"); scheduleunit['updated_at'] = moment(scheduleunit['updated_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss"); scheduleunit.project = project.name; + scheduleunit.canSelect = true; output.push(scheduleunit); } this.setState({ scheduleunit: output, isLoading: false }); + this.selectedRows = []; }) } } @@ -109,6 +114,22 @@ class SchedulingUnitList extends Component{ } + /** + * Callback function passed to ViewTable component to pass back the selected rows. + * @param {Array} selectedRows - Subset of data passed to the ViewTable component based on selection. + */ + onRowSelection(selectedRows) { + this.selectedRows = selectedRows; + } + + /** + * Funtion to reload data. This function can be called from the implementing component. 
+ */ + reloadData() { + this.setState({isLoading: true}); + this.getSchedulingUnitList(); + } + render(){ if (this.state.isLoading) { return <AppLoader/> @@ -139,6 +160,8 @@ class SchedulingUnitList extends Component{ paths={this.state.paths} unittest={this.state.unittest} tablename="scheduleunit_list" + allowRowSelection={this.props.allowRowSelection} + onRowSelection = {this.onRowSelection} /> :<div>No scheduling unit found </div> } diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js index a76501c5c154cadef3782037eb877ff3db06c865..5a25a62050f2652843210ebe61992ffb126ac661 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js @@ -14,6 +14,10 @@ import SchedulingConstraint from './Scheduling.Constraints'; import { Dialog } from 'primereact/dialog'; import TaskStatusLogs from '../Task/state_logs'; import Stations from './Stations'; +import { Redirect } from 'react-router-dom'; +import { CustomDialog } from '../../layout/components/CustomDialog'; +import { CustomPageSpinner } from '../../components/CustomPageSpinner'; +import { Growl } from 'primereact/components/growl/Growl'; class ViewSchedulingUnit extends Component{ constructor(props){ @@ -34,6 +38,7 @@ class ViewSchedulingUnit extends Component{ filter:"select" }, id: "ID", + subTaskID: 'Control ID', name:"Name", description:"Description", created_at:{ @@ -65,6 +70,7 @@ class ViewSchedulingUnit extends Component{ "Status Logs": "filter-input-0", "Type":"filter-input-75", "ID":"filter-input-50", + "Control ID":"filter-input-75", "Cancelled":"filter-input-50", "Duration (HH:mm:ss)":"filter-input-75", "Template ID":"filter-input-50", @@ -73,42 +79,47 @@ class ViewSchedulingUnit extends Component{ "Relative End Time (HH:mm:ss)": "filter-input-75", "Status":"filter-input-100" }], - stationGroup: [] + stationGroup: [], + dialog: {header: 'Confirm', detail: 'Do you want to create a Scheduling Unit Blueprint?'}, + dialogVisible: false } - this.actions = [ - {icon: 'fa-window-close',title:'Click to Close Scheduling Unit View', link: this.props.history.goBack} - ]; + this.actions = []; this.stations = []; this.constraintTemplates = []; - if (this.props.match.params.type === 'draft') { - this.actions.unshift({icon: 'fa-edit', title: 'Click to edit', props : { pathname:`/schedulingunit/edit/${ this.props.match.params.id}`} - }); - } else { - this.actions.unshift({icon: 'fa-sitemap',title :'View Workflow',props :{pathname:`/schedulingunit/${this.props.match.params.id}/workflow`}}); - this.actions.unshift({icon: 'fa-lock', title: 'Cannot edit blueprint'}); - } - if (this.props.match.params.id) { - this.state.scheduleunitId = this.props.match.params.id; - } - if (this.props.match.params.type) { - this.state.scheduleunitType = this.props.match.params.type; - } + this.checkAndCreateBlueprint = this.checkAndCreateBlueprint.bind(this); + this.createBlueprintTree = this.createBlueprintTree.bind(this); + this.closeDialog = this.closeDialog.bind(this); + + } + + componentDidUpdate(prevProps, prevState) { + if (this.state.scheduleunit && this.props.match.params && + (this.state.scheduleunitId !== this.props.match.params.id || + this.state.scheduleunitType !== this.props.match.params.type)) { + this.getSchedulingUnitDetails(this.props.match.params.type, this.props.match.params.id); } + } async componentDidMount(){ - let schedule_id = 
this.state.scheduleunitId; - let schedule_type = this.state.scheduleunitType; + let schedule_id = this.props.match.params.id; + let schedule_type = this.props.match.params.type; if (schedule_type && schedule_id) { - const subtaskComponent = (task)=> { - return ( - <button className="p-link" onClick={(e) => {this.setState({showStatusLogs: true, task: task})}}> - <i className="fa fa-history"></i> - </button> - ); - }; this.stations = await ScheduleService.getStationGroup(); this.setState({stationOptions: this.stations}); - this.getScheduleUnit(schedule_type, schedule_id) + this.getSchedulingUnitDetails(schedule_type, schedule_id); + } + } + + subtaskComponent = (task)=> { + return ( + <button className="p-link" onClick={(e) => {this.setState({showStatusLogs: true, task: task})}}> + <i className="fa fa-history"></i> + </button> + ); + }; + + getSchedulingUnitDetails(schedule_type, schedule_id) { + this.getScheduleUnit(schedule_type, schedule_id) .then(schedulingUnit =>{ if (schedulingUnit) { ScheduleService.getSchedulingConstraintTemplates().then((response) => { @@ -118,15 +129,22 @@ class ViewSchedulingUnit extends Component{ this.getScheduleUnitTasks(schedule_type, schedulingUnit) .then(tasks =>{ tasks.map(task => { - task.status_logs = task.tasktype === "Blueprint"?subtaskComponent(task):""; + task.status_logs = task.tasktype === "Blueprint"?this.subtaskComponent(task):""; + //Displaying SubTask ID of the 'control' Task + const subTaskIds = task.subTasks?task.subTasks.filter(sTask => sTask.subTaskTemplate.name.indexOf('control') > 1):[]; + task.subTaskID = subTaskIds.length ? subTaskIds[0].id : ''; return task; }); const targetObservation = _.find(tasks, (task)=> {return task.template.type_value==='observation' && task.tasktype.toLowerCase()===schedule_type && task.specifications_doc.station_groups}); this.setState({ + scheduleunitId: schedule_id, scheduleunit : schedulingUnit, + scheduleunitType: schedule_type, schedule_unit_task : tasks, isLoading: false, - stationGroup: targetObservation?targetObservation.specifications_doc.station_groups:[] + stationGroup: targetObservation?targetObservation.specifications_doc.station_groups:[], + redirect: null, + dialogVisible: false }, this.getAllStations); }); } else { @@ -135,41 +153,76 @@ class ViewSchedulingUnit extends Component{ }); } }); - } + this.actions = [ + {icon: 'fa-window-close',title:'Click to Close Scheduling Unit View', link: this.props.history.goBack} + ]; + if (this.props.match.params.type === 'draft') { + this.actions.unshift({icon: 'fa-edit', title: 'Click to edit', props : { pathname:`/schedulingunit/edit/${ this.props.match.params.id}`} + }); + this.actions.unshift({icon:'fa-stamp', title: 'Create Blueprint', type:'button', + actOn:'click', props : { callback: this.checkAndCreateBlueprint}, + }); + } else { + this.actions.unshift({icon: 'fa-sitemap',title :'View Workflow',props :{pathname:`/schedulingunit/${this.props.match.params.id}/workflow`}}); + this.actions.unshift({icon: 'fa-lock', title: 'Cannot edit blueprint'}); + } } getScheduleUnitTasks(type, scheduleunit){ if(type === 'draft') - return ScheduleService.getTasksBySchedulingUnit(scheduleunit.id, true); + return ScheduleService.getTasksBySchedulingUnit(scheduleunit.id, true, true, true); else - return ScheduleService.getTaskBlueprintsBySchedulingUnit(scheduleunit, true); + return ScheduleService.getTaskBPWithSubtaskTemplateOfSU(scheduleunit); } + getScheduleUnit(type, id){ if(type === 'draft') return ScheduleService.getSchedulingUnitDraftById(id) else return 
ScheduleService.getSchedulingUnitBlueprintById(id)
     }
+
+    /**
+     * Checks whether blueprints already exist for the draft scheduling unit and alerts the user. On confirmation, creates the blueprint.
+     */
+    checkAndCreateBlueprint() {
+        if (this.state.scheduleunit) {
+            let dialog = this.state.dialog;
+            if (this.state.scheduleunit.scheduling_unit_blueprints.length>0) {
+                dialog.detail = "Blueprint(s) already exist for this Scheduling Unit. Do you want to create another one?";
+            }
+            dialog.actions = [{id: 'yes', title: 'Yes', callback: this.createBlueprintTree},
+                                {id: 'no', title: 'No', callback: this.closeDialog}];
+            this.setState({dialogVisible: true, dialog: dialog});
+        }
+    }
+
+    /**
+     * Function called to create the blueprint on confirmation.
+     */
+    createBlueprintTree() {
+        this.setState({dialogVisible: false, showSpinner: true});
+        ScheduleService.createSchedulingUnitBlueprintTree(this.state.scheduleunit.id)
+        .then(blueprint => {
+            this.growl.show({severity: 'success', summary: 'Success', detail: 'Blueprint created successfully!'});
+            this.setState({showSpinner: false, redirect: `/schedulingunit/view/blueprint/${blueprint.id}`, isLoading: true});
+        });
+    }
+
+    /**
+     * Callback function to close the prompted dialog.
+     */
+    closeDialog() {
+        this.setState({dialogVisible: false});
+    }
 
-    render(){
+    render(){
+        if (this.state.redirect) {
+            return <Redirect to={ {pathname: this.state.redirect} }></Redirect>
+        }
         return(
            <>
-           {/*} <div className="p-grid">
-            <div className="p-col-10">
-              <h2>Scheduling Unit - Details </h2>
-            </div>
-            <div className="p-col-2">
-                <Link to={{ pathname: '/schedulingunit'}} title="Close"
-                        style={{float:'right'}}>
-                    <i className="fa fa-times" style={{marginTop: "10px", marginLeft: '5px'}}></i>
-                </Link>
-                <Link to={{ pathname: '/schedulingunit/edit', state: {id: this.state.scheduleunit?this.state.scheduleunit.id:''}}} title="Edit"
-                        style={{float:'right'}}>
-                    <i className="fa fa-edit" style={{marginTop: "10px"}}></i>
-                </Link>
-            </div>
-        </div> */
-        /*TMSS-363 Blueprint icon changes */}
+            <Growl ref={(el) => this.growl = el} />
            <PageHeader location={this.props.location} title={'Scheduling Unit - Details'}
                        actions={this.actions}/>
            { this.state.isLoading ?
<AppLoader/> :this.state.scheduleunit && @@ -256,6 +309,12 @@ class ViewSchedulingUnit extends Component{ <TaskStatusLogs taskId={this.state.task.id}></TaskStatusLogs> </Dialog> } + {/* Dialog component to show messages and get confirmation */} + <CustomDialog type="confirmation" visible={this.state.dialogVisible} + header={this.state.dialog.header} message={this.state.dialog.detail} actions={this.state.dialog.actions} + onClose={this.closeDialog} onCancel={this.closeDialog} onSubmit={this.createBlueprintTree}></CustomDialog> + {/* Show spinner during backend API call */} + <CustomPageSpinner visible={this.state.showSpinner} /> </> ) } diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js index 4dba59e7cc582d9ff47663d3130e2dd9efdea3ab..02547c1bc763eb13b960a1f5582a5b6b18b69073 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js @@ -200,7 +200,7 @@ export class SchedulingUnitCreate extends Component { setConstraintsEditorOutput(jsonOutput, errors) { let err = [ ...errors ]; - if (jsonOutput.scheduler === 'online') { + if (jsonOutput.scheduler === 'online' || jsonOutput.scheduler === 'dynamic') { err = err.filter(e => e.path !== 'root.time.at'); } this.constraintParamsOutput = jsonOutput; @@ -299,7 +299,7 @@ export class SchedulingUnitCreate extends Component { async saveSchedulingUnit() { const constStrategy = _.cloneDeep(this.state.constraintParamsOutput); for (let type in constStrategy.time) { - if (constStrategy.scheduler === 'online') { + if (constStrategy.scheduler === 'online' || constStrategy.scheduler === 'dynamic') { delete constStrategy.time.at; } if (!constStrategy.time.after) { diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/index.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/index.js index 8ff0d98a82fecec96b172ba1b4d85ec20d8466fc..3c4005621301b56437fac0f8ac0389dfb232510b 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/index.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/index.js @@ -1,7 +1,13 @@ import React, {Component} from 'react'; +import _ from 'lodash'; + import SchedulingUnitList from './SchedulingUnitList'; import PageHeader from '../../layout/components/PageHeader'; import { TieredMenu } from 'primereact/tieredmenu'; +import { CustomDialog } from '../../layout/components/CustomDialog'; +import { CustomPageSpinner } from '../../components/CustomPageSpinner'; +import ScheduleService from '../../services/schedule.service'; +import { Growl } from 'primereact/components/growl/Growl'; export class Scheduling extends Component { constructor(props){ @@ -10,7 +16,8 @@ export class Scheduling extends Component { scheduleunit: [], schedule_unit_task: [] , isLoading:false, - redirect: '' + redirect: '', + dialog: {header: 'Confirm', detail: 'Do you want to create blueprints for the selected drafts?'}, }; this.optionsMenu = React.createRef(); @@ -18,6 +25,11 @@ export class Scheduling extends Component { this.showOptionMenu = this.showOptionMenu.bind(this); this.selectOptionMenu = this.selectOptionMenu.bind(this); + this.checkAndCreateBlueprint = this.checkAndCreateBlueprint.bind(this); + this.createBlueprintTree = this.createBlueprintTree.bind(this); + this.createBlueprintTreeNewOnly = this.createBlueprintTreeNewOnly.bind(this); + this.warningContent = this.warningContent.bind(this); + this.closeDialog = this.closeDialog.bind(this); } 
showOptionMenu(event) {
@@ -36,20 +48,133 @@ export class Scheduling extends Component {
         }
     }
 
+    /**
+     * Subcomponent to display in the confirmation dialog.
+     */
+    warningContent() {
+        const suListWithBlueprint = this.state.schedulingUnitsWithBlueprint;
+        const suListWithoutBlueprint = _.difference(this.suList.selectedRows, suListWithBlueprint);
+        return (
+            <>
+                {suListWithBlueprint && suListWithBlueprint.length>0 &&
+                    <div>
+                        <hr></hr>
+                        <span>Blueprint(s) already exist for the following Scheduling Units. If you want to create a blueprint for all of them click “yes”. If you want to create a blueprint for a subset click “no” to change your selection.</span>
+                        <div className="p-grid" key={`dlg-msg-head`} style={{marginTop: '10px'}}>
+                            <label className="col-lg-3">ID</label>
+                            <label className="col-lg-9">Name</label>
+                        </div>
+                        {suListWithBlueprint.map((schedulingUnit, index) => (
+                            <div className="p-grid" key={`dlg-msg-${index}`} style={{marginBottom: "5px"}}>
+                                <span className="col-lg-3">{schedulingUnit.id}</span>
+                                <span className="col-lg-9">{schedulingUnit.name}</span>
+                            </div>
+                        ))}
+                    </div>
+                }
+                {suListWithoutBlueprint && suListWithoutBlueprint.length>0 &&
+                    <div>
+                        <hr></hr>
+                        <span>Selected Scheduling Unit drafts without blueprint are listed below.</span>
+                        <div className="p-grid" key={`dlg-msg-head`} style={{marginTop: '10px'}}>
+                            <label className="col-lg-3">ID</label>
+                            <label className="col-lg-9">Name</label>
+                        </div>
+                        {suListWithoutBlueprint.map((schedulingUnit, index) => (
+                            <div className="p-grid" key={`dlg-msg-${index}`} style={{marginBottom: "5px"}}>
+                                <span className="col-lg-3">{schedulingUnit.id}</span>
+                                <span className="col-lg-9">{schedulingUnit.name}</span>
+                            </div>
+                        ))}
+                        {suListWithBlueprint && suListWithBlueprint.length>0 &&
+                            <span>If you want to create blueprints for only drafts without blueprints, click 'Create Only New'</span>
+                        }
+                    </div>
+                }
+
+            </>
+        );
+    }
+
+    /**
+     * Function to check whether blueprints already exist for the selected Scheduling Units and prompt a confirmation dialog.
+     * When confirmed, creates new blueprints for the selected Scheduling Units.
+     */
+    checkAndCreateBlueprint() {
+        if (this.suList.selectedRows && this.suList.selectedRows.length>0) {
+            let dialog = this.state.dialog;
+            dialog.content = this.warningContent;
+            const schedulingUnitsWithBlueprint = _.filter(this.suList.selectedRows, schedulingUnit=> { return schedulingUnit.scheduling_unit_blueprints.length>0});
+            dialog.actions = [ {id:"yes", title: 'Yes', callback: this.createBlueprintTree},
+                                {id:"no", title: 'No', callback: this.closeDialog} ]
+            /* Add this action only when both new and old drafts are selected */
+            if (schedulingUnitsWithBlueprint.length > 0 && this.suList.selectedRows.length>schedulingUnitsWithBlueprint.length) {
+                dialog.actions.unshift({id:"newOnly", title: 'Create Only New', callback: this.createBlueprintTreeNewOnly});
+            }
+            this.setState({dialogVisible: true, dialog: dialog, schedulingUnitsWithBlueprint: _.sortBy(schedulingUnitsWithBlueprint,['id'])});
+        } else {
+            this.growl.show({severity: 'info', summary: 'Select Row', detail: 'Please select one or more Scheduling Unit Draft(s)'});
+        }
+    }
+
+    /**
+     * Callback function from dialog to create blueprints for only new drafts without blueprints.
+ * @param {Event} event + */ + createBlueprintTreeNewOnly(event){ + this.createBlueprintTree(event, true); + } + + /** + * Function to create actual blueprints for the selected drafts + * @param {Event} event + * @param {Boolean} excludeOld + */ + async createBlueprintTree(event, excludeOld) { + this.setState({dialogVisible: false, showSpinner: true}); + let selectedRows = this.suList.selectedRows; + // Remove old drafts from selected rows + if (excludeOld) { + selectedRows = _.difference(selectedRows, this.state.schedulingUnitsWithBlueprint); + } + for (const schedulingUnit of selectedRows) { + await ScheduleService.createSchedulingUnitBlueprintTree(schedulingUnit.id); + } + this.setState({showSpinner: false, schedulingUnitsWithBlueprint:null}); + this.growl.show({severity: 'success', summary: 'Success', detail: 'Blueprint(s) created successfully!'}); + this.suList.reloadData(); + } + + /** + * Callback function to close the dialog. + */ + closeDialog() { + this.setState({dialogVisible: false}); + } + render() { return ( <> - <TieredMenu className="app-header-menu" model={this.menuOptions} popup ref={el => this.optionsMenu = el} /> - <PageHeader location={this.props.location} title={'Scheduling Unit - List'} + <Growl ref={(el) => this.growl = el} style={{paddingTop:"50px"}} /> + <TieredMenu className="app-header-menu" model={this.menuOptions} popup ref={el => this.optionsMenu = el} /> + <PageHeader location={this.props.location} title={'Scheduling Unit - List'} actions={[ - + {icon:'fa-stamp', title: 'Create Blueprint', type:'button', + actOn:'click', props : { callback: this.checkAndCreateBlueprint}}, {icon: 'fa fa-plus-square', title: 'Add New Scheduling Unit', props: {pathname: '/schedulingunit/create'}}, {icon: 'fa fa-table', title: 'Add Scheduling Set', props: {pathname: '/schedulingset/schedulingunit/create'}}]} /> {this.state.scheduleunit && - <SchedulingUnitList /> } + <SchedulingUnitList allowRowSelection={true} ref={suList => {this.suList = suList}} /> } + {/* Dialog component to show messages and get confirmation */} + <CustomDialog type="confirmation" visible={this.state.dialogVisible} width="40vw" + header={this.state.dialog.header} message={this.state.dialog.detail} content={this.state.dialog.content} + onClose={this.closeDialog} onCancel={this.closeDialog} onSubmit={this.createBlueprintTree} + actions={this.state.dialog.actions}></CustomDialog> + {/* Show spinner during backend API call */} + <CustomPageSpinner visible={this.state.showSpinner} /> </> ); } diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/summary.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/summary.js index 4b2ef70081a130dd75f9567e8c1ec2616b186c90..bb42ed07d8457ef4ea82b9364532acee22d68055 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/summary.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/summary.js @@ -156,10 +156,10 @@ export class SchedulingUnitSummary extends Component { <label>Tasks:</label> <ViewTable data={suTaskList} - defaultcolumns={[{id: "ID", start_time:"Start Time", stop_time:"End Time", status: "Status", + defaultcolumns={[{id: "ID", subTaskID: 'Control ID', start_time:"Start Time", stop_time:"End Time", status: "Status", antenna_set: "Antenna Set", band: 'Band'}]} optionalcolumns={[{actionpath: "actionpath"}]} - columnclassname={[{"ID": "filter-input-50", "Start Time": "filter-input-75", "End Time": "filter-input-75", + columnclassname={[{"ID": "filter-input-50","Control ID":"filter-input-75", "Start Time": "filter-input-75", "End 
Time": "filter-input-75", "Status": "filter-input-75", "Antenna Set": "filter-input-75", "Band": "filter-input-75"}]} defaultSortColumn= {[{id: "ID", desc: false}]} showaction="false" diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js index 2fd2e272fc587b7ed335de24b48ed3a6c31a4352..9570af1752b60c4a2e16b717eb3a48228fb82879 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js @@ -149,9 +149,12 @@ export class TimelineView extends Component { canExtendSUList: false, canShrinkSUList:false}); if (fetchDetails) { const suBlueprint = _.find(this.state.suBlueprints, {id: (this.state.stationView?parseInt(item.id.split('-')[0]):item.id)}); - ScheduleService.getTaskBlueprintsBySchedulingUnit(suBlueprint, true) + ScheduleService.getTaskSubTaskBlueprintsBySchedulingUnit(suBlueprint, true) .then(taskList => { for (let task of taskList) { + //Control Task Id + const subTaskIds = (task.subTasks || []).filter(sTask => sTask.subTaskTemplate.name.indexOf('control') > 1); + task. subTaskID = subTaskIds.length ? subTaskIds[0].id : ''; if (task.template.type_value.toLowerCase() === "observation") { task.antenna_set = task.specifications_doc.antenna_set; task.band = task.specifications_doc.filter; diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js index b4b3ae65ec5a753e2b52bff6c251f59c3d60f71e..452f57ec512e57c69e141ec18336838d34d85982 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js @@ -166,9 +166,12 @@ export class WeekTimelineView extends Component { canExtendSUList: false, canShrinkSUList:false}); if (fetchDetails) { const suBlueprint = _.find(this.state.suBlueprints, {id: parseInt(item.id.split('-')[0])}); - ScheduleService.getTaskBlueprintsBySchedulingUnit(suBlueprint, true) + ScheduleService.getTaskSubTaskBlueprintsBySchedulingUnit(suBlueprint) .then(taskList => { for (let task of taskList) { + //Control Task ID + const subTaskIds = (task.subTasks || []).filter(sTask => sTask.subTaskTemplate.name.indexOf('control') > 1); + task. subTaskID = subTaskIds.length ? 
subTaskIds[0].id : ''; if (task.template.type_value.toLowerCase() === "observation") { task.antenna_set = task.specifications_doc.antenna_set; task.band = task.specifications_doc.filter; diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js index 1ace4022edceedeffc7a6916b1749edaf81cc4fa..6e65a0019fe412d16ca82b46e114a9cbc2dd6c92 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js @@ -8,24 +8,58 @@ axios.defaults.headers.common['Authorization'] = 'Basic dGVzdDp0ZXN0'; const ScheduleService = { getSchedulingUnitDraft: async function (){ let res = []; - await axios.get('/api/scheduling_unit_draft/?ordering=id') - .then(response => { - res= response; - }).catch(function(error) { + try { + res = await axios.get('/api/scheduling_unit_draft/?limit=1'); + if (res.data.count > res.data.results.length) { + res = await axios.get(`/api/scheduling_unit_draft/?ordering=id&limit=${res.data.count}&offset=0`); + } + } catch(error) { console.error('[schedule.services.getSchedulingUnitDraft]',error); - }); + } return res; }, getSchedulingUnitBlueprint: async function (){ let res = []; - await axios.get('/api/scheduling_unit_blueprint/?ordering=id') - .then(response => { - res= response; - }).catch(function(error) { + try { + res = await axios.get('/api/scheduling_unit_blueprint/?limit=1'); + if (res.data.count > res.data.results.length) { + res = await axios.get(`/api/scheduling_unit_blueprint/?ordering=id&limit=${res.data.count}&offset=0`); + } + } catch(error) { console.error('[schedule.services.getSchedulingUnitBlueprint]',error); - }); + } return res; }, + //>>>>>> TODO: Remove this method by using/modifying other functions with additional parameters + getTaskBPWithSubtaskTemplate: async function(id) { + let result; + try { + result = await axios.get('/api/task_blueprint/'+id); + if (result.data) { + result.data.template = await TaskService.getTaskTemplate(result.data.specifications_template_id); + } + if (result.data) { + let subTasks = []; + let subTasktemplate = {} + for (const subtaskId of result.data.subtasks_ids) { + const subTask = await TaskService.getSubtaskDetails(subtaskId); + //To avoid repeated api call for template if it has already loaded + if (subTasktemplate[subTask.specifications_template_id]) { + subTask.subTaskTemplate = subTasktemplate[subTask.specifications_template_id]; + } else { + const subTaskTemplate = await TaskService.getSubtaskTemplate(subTask.specifications_template_id); + subTask.subTaskTemplate = subTaskTemplate; + subTasktemplate[subTask.specifications_template_id] = subTaskTemplate; + } + subTasks.push((subTask)); + } + result.data.subTasks = subTasks; + } + } catch(error) { + console.error('[schedule.services.getTaskBlueprintById]',error); + } + return result; + }, getSchedulingUnitBlueprintById: async function (id){ try { const response = await axios.get('/api/scheduling_unit_blueprint/'+id); @@ -91,7 +125,27 @@ const ScheduleService = { } return taskblueprintsList; }, - getTasksBySchedulingUnit: async function(id, loadTemplate){ + //>>>>>> TODO: Remove this method by using/modifying other functions with additional parameters + getTaskBPWithSubtaskTemplateOfSU: async function(scheduleunit){ + // there no single api to fetch associated task_blueprint, so iterate the task_blueprint id to fetch associated task_blueprint + let taskblueprintsList = []; + if (scheduleunit.task_blueprints_ids){ + 
for(const id of scheduleunit.task_blueprints_ids) { + await this.getTaskBPWithSubtaskTemplate(id).then(response =>{ + let taskblueprint = response.data; + taskblueprint['tasktype'] = 'Blueprint'; + taskblueprint['actionpath'] = '/task/view/blueprint/'+taskblueprint['id']; + taskblueprint['blueprint_draft'] = taskblueprint['draft']; + taskblueprint['relative_start_time'] = 0; + taskblueprint['relative_stop_time'] = 0; + taskblueprint.duration = moment.utc((taskblueprint.duration || 0)*1000).format('HH:mm:ss'); + taskblueprintsList.push(taskblueprint); + }) + } + } + return taskblueprintsList; + }, + getTasksBySchedulingUnit: async function(id, loadTemplate, loadSubtasks, loadSubtaskTemplate){ let scheduletasklist=[]; // let taskblueprints = []; // Common keys for Task and Blueprint @@ -128,6 +182,7 @@ const ScheduleService = { // if (o.draft_id === task['id']) return o; // }); + let subTasktemplate = {} for(const blueprint of draftBlueprints){ let taskblueprint = []; taskblueprint['tasktype'] = 'Blueprint'; @@ -146,6 +201,24 @@ const ScheduleService = { if (loadTemplate) { taskblueprint.template = scheduletask.template; } + if (loadSubtasks) { + let subTasks = []; + for (const subtaskId of blueprint.subtasks_ids) { + const subTask = await TaskService.getSubtaskDetails(subtaskId); + if (loadSubtaskTemplate) { + //To avoid repeated api call for template if it has already loaded + if (subTasktemplate[subTask.specifications_template_id]) { + subTask.subTaskTemplate = subTasktemplate[subTask.specifications_template_id]; + } else { + const subTaskTemplate = await TaskService.getSubtaskTemplate(subTask.specifications_template_id); + subTask.subTaskTemplate = subTaskTemplate; + subTasktemplate[subTask.specifications_template_id] = subTaskTemplate; + } + } + subTasks.push((subTask)); + } + taskblueprint.subTasks = subTasks; + } //Add Blue print details to array scheduletasklist.push(taskblueprint); } @@ -350,6 +423,14 @@ const ScheduleService = { console.error('[project.services.getSchedulingUnitBySet]',error); } }, + createSchedulingUnitBlueprintTree: async function(id) { + try { + const response = await axios.get(`/api/scheduling_unit_draft/${id}/create_blueprints_and_subtasks`); + return response.data; + } catch(error) { + console.error(error); + } + }, getStationGroup: async function() { try { // const response = await axios.get('/api/station_type/'); diff --git a/SAS/TMSS/services/CMakeLists.txt b/SAS/TMSS/services/CMakeLists.txt index 7ca90e1a5220ba1c278a45e986029e408c2506d6..cc7f8cb954f815663766cb72e8950d78e621d84f 100644 --- a/SAS/TMSS/services/CMakeLists.txt +++ b/SAS/TMSS/services/CMakeLists.txt @@ -1,4 +1,5 @@ lofar_add_package(TMSSSchedulingService scheduling) lofar_add_package(TMSSFeedbackHandlingService feedback_handling) lofar_add_package(TMSSPostgresListenerService tmss_postgres_listener) +lofar_add_package(TMSSWorkflowService workflow_service) diff --git a/SAS/TMSS/services/scheduling/lib/constraints/template_constraints_v1.py b/SAS/TMSS/services/scheduling/lib/constraints/template_constraints_v1.py index 6eb1f5084d741164d127812a55da7729e379ad7b..27c764a783f8fdf149ef3d961a7ef7a532673191 100644 --- a/SAS/TMSS/services/scheduling/lib/constraints/template_constraints_v1.py +++ b/SAS/TMSS/services/scheduling/lib/constraints/template_constraints_v1.py @@ -32,21 +32,26 @@ from dateutil import parser from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.tmss.tmssapp.conversions import create_astroplan_observer_for_station, Time, timestamps_and_stations_to_sun_rise_and_set, 
coordinates_and_timestamps_to_separation_from_bodies +from lofar.sas.tmss.tmss.exceptions import TMSSException from . import ScoredSchedulingUnit def can_run_within_timewindow(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool: '''determine if the given scheduling_unit can run withing the given timewindow evaluating all constraints from the "constraints" version 1 template''' if has_manual_scheduler_constraint(scheduling_unit): + logger.info("SchedulingUnitBlueprint id=%s has manual scheduler constraint and cannot be dynamically scheduled." % (scheduling_unit.id)) return False if not can_run_within_timewindow_with_time_constraints(scheduling_unit, lower_bound, upper_bound): + logger.info("SchedulingUnitBlueprint id=%s does not meet time constraints between %s and %s." % (scheduling_unit.id, lower_bound, upper_bound)) return False if not can_run_within_timewindow_with_sky_constraints(scheduling_unit, lower_bound, upper_bound): + logger.info("SchedulingUnitBlueprint id=%s does not meet sky constraints between %s and %s." % (scheduling_unit.id, lower_bound, upper_bound)) return False if not can_run_within_timewindow_with_daily_constraints(scheduling_unit, lower_bound, upper_bound): + logger.info("SchedulingUnitBlueprint id=%s does not meet daily constraints between %s and %s." % (scheduling_unit.id, lower_bound, upper_bound)) return False return True @@ -72,50 +77,87 @@ def has_manual_scheduler_constraint(scheduling_unit: models.SchedulingUnitBluepr def can_run_within_timewindow_with_daily_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool: - '''evaluate the daily contraint''' - constraints = scheduling_unit.draft.scheduling_constraints_doc - if not (constraints['daily']['require_day'] and constraints['daily']['require_night']): - # no day/night restrictions, can run any time - return True - - if constraints['daily']['require_day'] or constraints['daily']['require_night']: - # TODO: TMSS-254 and TMSS-255 - # TODO: take avoid_twilight into account - # Please note that this first crude proof of concept treats sunset/sunrise as 'events', - # whereas in our definition they are transition periods. See: TMSS-435 - - # Ugly code. Should be improved. Works for demo. - # create a series of timestamps in the window of opportunity, and evaluate of there are all during day or night - possible_start_time = get_earliest_possible_start_time(scheduling_unit, lower_bound) - - # ToDo: use specified total observation duration, and ignore pipelines who don't care about day/night - possible_stop_time = possible_start_time + scheduling_unit.duration - timestamps = [possible_start_time] - while timestamps[-1] < possible_stop_time - timedelta(hours=8): - timestamps.append(timestamps[-1] + timedelta(hours=8)) - timestamps.append(possible_stop_time) - - LOFAR_CENTER_OBSERVER = create_astroplan_observer_for_station('CS002') - if constraints['daily']['require_night'] and all(LOFAR_CENTER_OBSERVER.is_night(timestamp) for timestamp in timestamps): - return True - - if constraints['daily']['require_day'] and all(not LOFAR_CENTER_OBSERVER.is_night(timestamp) for timestamp in timestamps): + """ + Checks whether it is possible to run the scheduling unit /somewhere/ in the given time window, considering the duration of the involved observation. 
+ :return: True if there is at least one possibility to place the scheduling unit in a way that all daily constraints are met over the runtime of the observation, else False. + """ + main_observation_task_name = get_target_observation_task_name_from_requirements_doc(scheduling_unit) + duration = timedelta(seconds=scheduling_unit.requirements_doc['tasks'][main_observation_task_name]['specifications_doc']['duration']) + window_lower_bound = lower_bound + while window_lower_bound + duration < upper_bound: + window_upper_bound = window_lower_bound + duration + if can_run_anywhere_within_timewindow_with_daily_constraints(scheduling_unit, window_lower_bound, window_upper_bound): return True + window_lower_bound += min(timedelta(hours=1), upper_bound - window_lower_bound) return False +def can_run_anywhere_within_timewindow_with_daily_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool: + """ + Checks whether it is possible to place the scheduling unit arbitrarily in the given time window, i.e. the daily constraints must be met over the full time window. + :return: True if all daily constraints are met over the entire time window, else False. + """ + main_observation_task_name = get_target_observation_task_name_from_requirements_doc(scheduling_unit) + constraints = scheduling_unit.draft.scheduling_constraints_doc + if constraints['daily']['require_day'] or constraints['daily']['require_night'] or constraints['daily']['avoid_twilight']: + + if (upper_bound - lower_bound).days >= 1: + logger.info("SchedulingUnitBlueprint id=%s has daily constraints, but bounds span %s" % (scheduling_unit.id, (upper_bound - lower_bound))) + return False + + if upper_bound < lower_bound: + raise ValueError("Provided upper_bound=%s is earlier than provided lower_bound=%s" % (upper_bound, lower_bound)) + + stations = scheduling_unit.requirements_doc['tasks'][main_observation_task_name]['specifications_doc']['stations'] + + # check contraint and return false on first failure + for station in stations: + # get day/night times for bounds + # we could sample in between bounds, but will instead do some checks so that bounds are sufficient + if constraints['daily']['require_day'] and lower_bound.date() != upper_bound.date(): + logger.info("SchedulingUnitBlueprint id=%s cannot meet require_day constraint when starting and ending on different days." % scheduling_unit.id) + return False + timestamps = [lower_bound, upper_bound] + sun_events = timestamps_and_stations_to_sun_rise_and_set(timestamps=tuple(timestamps), stations=(station,))[station] + if constraints['daily']['require_day']: + for i in range(len(timestamps)): + if timestamps[i] < sun_events['day'][i]['start'] or timestamps[i] > sun_events['day'][i]['end']: + logger.info("SchedulingUnitBlueprint id=%s does not meet require_day constraint at timestamp=%s" % (scheduling_unit.id, timestamps[i])) + return False + + if constraints['daily']['require_night']: + if sun_events['night'][0]['start'].date() != sun_events['night'][1]['start'].date(): + logger.info("SchedulingUnitBlueprint id=%s cannot meet require_night constraint when starting and ending in different nights." 
% scheduling_unit.id) + return False + for i in range(len(timestamps)): + if timestamps[i] < sun_events['night'][i]['start'] or timestamps[i] > sun_events['night'][i]['end']: + logger.info("SchedulingUnitBlueprint id=%s does not meet require_night constraint at timestamp=%s" % (scheduling_unit.id, timestamps[i])) + return False + + if constraints['daily']['avoid_twilight']: + # Note: the same index for sun_events everywhere is not a typo, but to make sure it's the _same_ night or day for both bounds or obs will span over twilight + if not ((timestamps[0] > sun_events['day'][0]['start'] and timestamps[0] < sun_events['day'][0]['end'] and + timestamps[1] > sun_events['day'][0]['start'] and timestamps[1] < sun_events['day'][0]['end']) or + (timestamps[0] > sun_events['night'][0]['start'] and timestamps[0] < sun_events['night'][0]['end'] and + timestamps[1] > sun_events['night'][0]['start'] and timestamps[1] < sun_events['night'][0]['end'])): + logger.info("SchedulingUnitBlueprint id=%s does not meet avoid_twilight constraint." % scheduling_unit.id) + return False + + return True + + def can_run_within_timewindow_with_time_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool: '''evaluate the time contraint(s)''' constraints = scheduling_unit.draft.scheduling_constraints_doc # TODO: TMSS-244 (and more?), evaluate the constraints in constraints['time'] if has_manual_scheduler_constraint(scheduling_unit): at = parser.parse(constraints['time']['at'], ignoretz=True) - return at >= lower_bound and at+scheduling_unit.duration <= upper_bound + return at >= lower_bound and at+scheduling_unit.duration <= upper_bound # todo: suggestion: use scheduling_unit.requirements_doc['tasks']['Observation']['specifications_doc']['duration'] if 'before' in constraints['time']: before = parser.parse(constraints['time']['before'], ignoretz=True) - return before <= upper_bound-scheduling_unit.duration + return before <= upper_bound-scheduling_unit.duration # todo: suggestion: use scheduling_unit.requirements_doc['tasks']['Observation']['specifications_doc']['duration'] if 'after' in constraints['time']: after = parser.parse(constraints['time']['after'], ignoretz=True) @@ -129,30 +171,62 @@ def can_run_within_timewindow_with_time_constraints(scheduling_unit: models.Sche def can_run_within_timewindow_with_sky_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool: - '''evaluate the time contraint(s)''' + """ + Checks whether it is possible to run the scheduling unit /somewhere/ in the given time window, considering the duration of the involved observation. + :return: True if there is at least one possibility to place the scheduling unit in a way that all sky constraints are met over the runtime of the observation, else False. 
+ """ + for task in scheduling_unit.requirements_doc['tasks'].values(): + if 'specifications_doc' in task: + if 'duration' in task['specifications_doc']: + duration = timedelta(seconds=task['specifications_doc']['duration']) + window_lower_bound = lower_bound + while window_lower_bound + duration < upper_bound: + window_upper_bound = window_lower_bound + duration + if can_run_anywhere_within_timewindow_with_sky_constraints(scheduling_unit, window_lower_bound, window_upper_bound): + return True + window_lower_bound += min(timedelta(hours=1), upper_bound - window_lower_bound) + return False + + +def can_run_anywhere_within_timewindow_with_sky_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool: + """ + Checks whether it is possible to place the scheduling unit arbitrarily in the given time window, i.e. the sky constraints must be met over the full time window. + :return: True if all sky constraints are met over the entire time window, else False. + """ constraints = scheduling_unit.draft.scheduling_constraints_doc - # TODO: TMSS-245 TMSS-250 (and more?), evaluate the constraints in constraints['sky'] - # maybe even split this method into sub methods for the very distinct sky constraints: min_calibrator_elevation, min_target_elevation, transit_offset & min_distance - - beam = scheduling_unit.requirements_doc['tasks']['Observation']['specifications_doc']['tile_beam'] - angle1 = beam['angle1'] - angle2 = beam['angle2'] - direction_type = beam['direction_type'] - if "sky" in constraints and 'min_distance' in constraints['sky']: - distances = coordinates_and_timestamps_to_separation_from_bodies(angle1=angle1, angle2=angle2, direction_type=direction_type, timestamps=(lower_bound, upper_bound), bodies=tuple(constraints['sky']['min_distance'].keys())) - for body, timestamps in distances.items(): - for timestamp, angle in timestamps.items(): - min_distance = constraints['sky']['min_distance'][body] - if angle.rad < min_distance: - logger.info('Distance=%s from body=%s does not meet min_distance=%s constraint at timestamp=%s' % (angle.rad, body, min_distance, timestamp)) - return False + for task in scheduling_unit.requirements_doc['tasks'].values(): + if 'specifications_doc' in task: + if 'tile_beam' in task['specifications_doc']: + beam = task['specifications_doc']['tile_beam'] + angle1 = beam['angle1'] + angle2 = beam['angle2'] + direction_type = beam['direction_type'] + if "sky" in constraints and 'min_distance' in constraints['sky']: + # currently we only check at bounds, we probably want to add some more samples in between later on + distances = coordinates_and_timestamps_to_separation_from_bodies(angle1=angle1, angle2=angle2, direction_type=direction_type, timestamps=(lower_bound, upper_bound), bodies=tuple(constraints['sky']['min_distance'].keys())) + for body, min_distance in constraints['sky']['min_distance'].items(): + timestamps = distances[body] + for timestamp, angle in timestamps.items(): + if angle.rad < min_distance: + logger.info('Distance=%s from body=%s does not meet min_distance=%s constraint at timestamp=%s' % (angle.rad, body, min_distance, timestamp)) + return False return True +def get_target_observation_task_name_from_requirements_doc(scheduling_unit: models.SchedulingUnitBlueprint) -> str: + for task_name, task in scheduling_unit.requirements_doc['tasks'].items(): + if 'specifications_template' in task: + if task['specifications_template'] == 'target observation': + return task_name + raise TMSSException("Cannot 
find target observation in scheduling_unit requirements_doc") + + def get_earliest_possible_start_time(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime) -> datetime: constraints = scheduling_unit.draft.scheduling_constraints_doc + main_observation_task_name = get_target_observation_task_name_from_requirements_doc(scheduling_unit) + duration = timedelta(seconds=scheduling_unit.requirements_doc['tasks'][main_observation_task_name]['specifications_doc']['duration']) try: if has_manual_scheduler_constraint(scheduling_unit) and 'at' in constraints['time']: at = parser.parse(constraints['time']['at'], ignoretz=True) @@ -162,40 +236,53 @@ def get_earliest_possible_start_time(scheduling_unit: models.SchedulingUnitBluep return parser.parse(constraints['time']['after'], ignoretz=True) if constraints['daily']['require_day'] or constraints['daily']['require_night'] or constraints['daily']['avoid_twilight']: - - # TODO: TMSS-254 and TMSS-255 - # TODO: make sure contraints are met by all stations of this observation, not just CS002. - sun_events = timestamps_and_stations_to_sun_rise_and_set(timestamps=(lower_bound,lower_bound+timedelta(days=1)), stations=('CS002',))['CS002'] - day = sun_events['day'][0] - night = sun_events['night'][0] - next_day = sun_events['day'][1] - next_night = sun_events['night'][1] - if constraints['daily']['require_day']: - # TODO: Do we need to check for observations that are too long and can e.g. only be run in summer? - if lower_bound+scheduling_unit.duration > day['end']: - return next_day['start'] - if lower_bound >= day['start']: - return lower_bound - return day['start'] - - if constraints['daily']['require_night']: - if lower_bound + scheduling_unit.duration > night['end']: - return next_night['start'] - if lower_bound >= night['start']: - return lower_bound - return night['start'] - - if constraints['daily']['avoid_twilight']: - if lower_bound + scheduling_unit.duration < day['end']: + stations = scheduling_unit.requirements_doc['tasks'][main_observation_task_name]['specifications_doc']['stations'] + all_sun_events = timestamps_and_stations_to_sun_rise_and_set(timestamps=(lower_bound,lower_bound+timedelta(days=1)), stations=tuple(stations)) + start_time_per_station = {} + for station in stations: + sun_events = all_sun_events[station] + day = sun_events['day'][0] + night = sun_events['night'][0] + next_day = sun_events['day'][1] + next_night = sun_events['night'][1] + if constraints['daily']['require_day']: + # TODO: Do we need to check for observations that are too long and can e.g. only be run in summer? -> recursively traverse through days or sth? 
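+                    # Illustration of the three branches below, using the same numbers as the unit tests in
+                    # t_dynamic_scheduling.py (2h observation, station day from 09:30 to 15:30):
+                    #   lower_bound 14:00 -> the observation no longer fits before 15:30, so this station gets the next day's 09:30
+                    #   lower_bound 10:00 -> already within daytime, so this station keeps lower_bound itself
+                    #   lower_bound 04:00 -> before daytime, so this station gets today's 09:30
+                    # The overall result is the latest of the per-station start times.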
+ if lower_bound + duration > day['end']: + start_time_per_station[station] = next_day['start'] + continue if lower_bound >= day['start']: - return lower_bound - return day['start'] - if lower_bound + scheduling_unit.duration < night['end']: + start_time_per_station[station] = lower_bound + continue + start_time_per_station[station] = day['start'] + continue + + if constraints['daily']['require_night']: + if lower_bound + duration > night['end']: + start_time_per_station[station] = next_night['start'] + continue if lower_bound >= night['start']: - return lower_bound - return night['start'] - return next_day['start'] - + start_time_per_station[station] = lower_bound + continue + start_time_per_station[station] = night['start'] + continue + + if constraints['daily']['avoid_twilight']: + if lower_bound >= day['start'] and lower_bound + duration < day['end']: + # starts and ends in daytime + start_time_per_station[station] = lower_bound + continue + if lower_bound >= night['start'] and lower_bound + duration < night['end']: + # starts and ends in nighttime + start_time_per_station[station] = lower_bound + continue + if lower_bound < day['start'] and lower_bound + duration >= night['end']: + # ends in morning twilight + start_time_per_station[station] = day['start'] + continue + # ends in evening twilight + start_time_per_station[station] = night['start'] + continue + return max(start_time_per_station.values()) except Exception as e: logger.exception(str(e)) diff --git a/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.py b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.py index 5d95558568f61159c5975fcb073b7fd0a12ca3c0..10cd3206512e03b8c52a20796ce75b46dfe10384 100755 --- a/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.py +++ b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.py @@ -50,6 +50,7 @@ tmss_test_env = TMSSTestEnvironment(exchange=tmp_exchange.address, start_ra_test_environment=True, enable_viewflow=False, start_dynamic_scheduler=False) # do not start the dynamic scheduler in the testenv, because it is the object-under-test. tmss_test_env.start() +from django.test import TestCase def tearDownModule(): tmss_test_env.stop() @@ -62,15 +63,17 @@ from lofar.sas.tmss.tmss.tmssapp.subtasks import unschedule_subtask from lofar.common.postgres import PostgresDatabaseConnection # the module under test +import lofar.sas.tmss.services.scheduling.constraints.template_constraints_v1 as tc1 from lofar.sas.tmss.services.scheduling.dynamic_scheduling import * -@unittest.skip('Disabled until scheduler can deal with failing constraints. 
(Currently causes infinite loop.)') -class TestDynamicScheduling(unittest.TestCase): + +class TestDynamicScheduling(TestCase): # Note: we use django.test.TestCase instead of unittest.TestCase to avoid manual cleanup of objects created by other tests ''' Tests for the Dynamic Scheduling ''' @classmethod def setUpClass(cls) -> None: + super(TestDynamicScheduling, cls).setUpClass() # make some re-usable projects with high/low priority cls.project_low = models.Project.objects.create(**Project_test_data("dynamic scheduling test project %s"% (uuid.uuid4(),), priority_rank=1)) cls.project_medium = models.Project.objects.create(**Project_test_data("dynamic scheduling test project %s"% (uuid.uuid4(),), priority_rank=2)) @@ -271,73 +274,472 @@ class TestDynamicScheduling(unittest.TestCase): self.assertGreaterEqual(scheduling_unit_blueprint_high.start_time - scheduling_unit_blueprint_manual.stop_time, DEFAULT_INTER_OBSERVATION_GAP) -class TestSchedulingConstraints(unittest.TestCase): +class TestDailyConstraints(TestCase): ''' Tests for the constraint checkers used in dynamic scheduling ''' - @classmethod - def setUpClass(cls) -> None: - cls.obs_duration = 120 * 60 + def setUp(self) -> None: + # scheduling unit + self.obs_duration = 120 * 60 scheduling_set = models.SchedulingSet.objects.create(**SchedulingSet_test_data()) - scheduling_unit_draft = TestDynamicScheduling.create_simple_observation_scheduling_unit("scheduling unit for contraints tests", + scheduling_unit_draft = TestDynamicScheduling.create_simple_observation_scheduling_unit("scheduling unit for ...%s" % self._testMethodName[30:], scheduling_set=scheduling_set, - obs_duration=cls.obs_duration) - cls.scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + obs_duration=self.obs_duration) + self.scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) - def setUp(self) -> None: + # mock out conversions for speedup and assertable timestamps + # earliest_start_time requests timestamp and timestamp+1day self.sunrise_data = { - 'CS002': {"sunrise": [{"start": datetime(2020, 1, 1, 7, 30, 0), "end": datetime(2020, 1, 1, 9, 30, 0)},{"start": datetime(2020, 1, 2, 7, 30, 0), "end": datetime(2020, 1, 2, 9, 30, 0)}], + 'CS001': {"sunrise": [{"start": datetime(2020, 1, 1, 7, 30, 0), "end": datetime(2020, 1, 1, 9, 30, 0)}, {"start": datetime(2020, 1, 2, 7, 30, 0), "end": datetime(2020, 1, 2, 9, 30, 0)}], + "day": [{"start": datetime(2020, 1, 1, 9, 30, 0), "end": datetime(2020, 1, 1, 15, 30, 0)}, {"start": datetime(2020, 1, 2, 9, 30, 0), "end": datetime(2020, 1, 2, 15, 30, 0)}], + "sunset": [{"start": datetime(2020, 1, 1, 15, 30, 0), "end": datetime(2020, 1, 1, 17, 30, 0)},{"start": datetime(2020, 1, 2, 15, 30, 0), "end": datetime(2020, 1, 2, 17, 30, 0)}], + "night": [{"start": datetime(2020, 1, 1, 17, 30, 0), "end": datetime(2020, 1, 2, 7, 30, 0)}, {"start": datetime(2020, 1, 2, 17, 30, 0), "end": datetime(2020, 1, 3, 7, 30, 0)}]}, + 'DE601': {"sunrise": [{"start": datetime(2020, 1, 1, 7, 45, 0), "end": datetime(2020, 1, 1, 9, 45, 0)}, {"start": datetime(2020, 1, 2, 7, 45, 0), "end": datetime(2020, 1, 2, 9, 45, 0)}], + "day": [{"start": datetime(2020, 1, 1, 9, 45, 0), "end": datetime(2020, 1, 1, 15, 45, 0)}, {"start": datetime(2020, 1, 2, 9, 45, 0), "end": datetime(2020, 1, 2, 15, 45, 0)}], + "sunset": [{"start": datetime(2020, 1, 1, 15, 45, 0), "end": datetime(2020, 1, 1, 17, 45, 0)}, {"start": datetime(2020, 1, 2, 15, 45, 0), "end": 
datetime(2020, 1, 2, 17, 45, 0)}], + "night": [{"start": datetime(2020, 1, 1, 17, 45, 0), "end": datetime(2020, 1, 2, 7, 45, 0)}, {"start": datetime(2020, 1, 2, 17, 45, 0), "end": datetime(2020, 1, 3, 7, 45, 0)}]}} + + # variant for timestamp before sunrise, which returns the previous night + self.sunrise_data_early_night = { + 'CS001': {"sunrise": [{"start": datetime(2020, 1, 1, 7, 30, 0), "end": datetime(2020, 1, 1, 9, 30, 0)}, {"start": datetime(2020, 1, 2, 7, 30, 0), "end": datetime(2020, 1, 2, 9, 30, 0)}], + "day": [{"start": datetime(2020, 1, 1, 9, 30, 0), "end": datetime(2020, 1, 1, 15, 30, 0)}, {"start": datetime(2020, 1, 2, 9, 30, 0), "end": datetime(2020, 1, 2, 15, 30, 0)}], + "sunset": [{"start": datetime(2020, 1, 1, 15, 30, 0), "end": datetime(2020, 1, 1, 17, 30, 0)},{"start": datetime(2020, 1, 2, 15, 30, 0), "end": datetime(2020, 1, 2, 17, 30, 0)}], + "night": [{"start": datetime(2019, 12, 31, 17, 30, 0), "end": datetime(2020, 1, 1, 7, 30, 0)}, {"start": datetime(2020, 1, 1, 17, 30, 0), "end": datetime(2020, 1, 2, 7, 30, 0)}]}, + 'DE601': {"sunrise": [{"start": datetime(2020, 1, 1, 7, 45, 0), "end": datetime(2020, 1, 1, 9, 45, 0)}, {"start": datetime(2020, 1, 2, 7, 45, 0), "end": datetime(2020, 1, 2, 9, 45, 0)}], + "day": [{"start": datetime(2020, 1, 1, 9, 45, 0), "end": datetime(2020, 1, 1, 15, 45, 0)}, {"start": datetime(2020, 1, 2, 9, 45, 0), "end": datetime(2020, 1, 2, 15, 45, 0)}], + "sunset": [{"start": datetime(2020, 1, 1, 15, 45, 0), "end": datetime(2020, 1, 1, 17, 45, 0)},{"start": datetime(2020, 1, 2, 15, 45, 0), "end": datetime(2020, 1, 2, 17, 45, 0)}], + "night": [{"start": datetime(2019, 12, 31, 17, 45, 0), "end": datetime(2020, 1, 1, 7, 45, 0)}, {"start": datetime(2020, 1, 1, 17, 45, 0), "end": datetime(2020, 1, 2, 7, 45, 0)}]}} + + + # constraint checker requests lower and upper bound, so we need some variants for various cases + self.sunrise_data_early_night_early_night = { + 'CS001': {"sunrise": [{"start": datetime(2020, 1, 1, 7, 30, 0), "end": datetime(2020, 1, 1, 9, 30, 0)}, {"start": datetime(2020, 1, 1, 7, 30, 0), "end": datetime(2020, 1, 1, 9, 30, 0)}], + "day": [{"start": datetime(2020, 1, 1, 9, 30, 0), "end": datetime(2020, 1, 1, 15, 30, 0)}, {"start": datetime(2020, 1, 1, 9, 30, 0), "end": datetime(2020, 1, 1, 15, 30, 0)}], + "sunset": [{"start": datetime(2020, 1, 1, 15, 30, 0), "end": datetime(2020, 1, 1, 17, 30, 0)},{"start": datetime(2020, 1, 1, 15, 30, 0), "end": datetime(2020, 1, 1, 17, 30, 0)}], + "night": [{"start": datetime(2019, 12, 31, 17, 30, 0), "end": datetime(2020, 1, 2, 7, 30, 0)}, {"start": datetime(2019, 12, 31, 17, 30, 0), "end": datetime(2020, 1, 1, 7, 30, 0)}]}} + + self.sunrise_data_early_night_late_night = { + 'CS001': {"sunrise": [{"start": datetime(2020, 1, 1, 7, 30, 0), "end": datetime(2020, 1, 1, 9, 30, 0)}, {"start": datetime(2020, 1, 1, 7, 30, 0), "end": datetime(2020, 1, 1, 9, 30, 0)}], + "day": [{"start": datetime(2020, 1, 1, 9, 30, 0), "end": datetime(2020, 1, 1, 15, 30, 0)}, {"start": datetime(2020, 1, 1, 9, 30, 0), "end": datetime(2020, 1, 1, 15, 30, 0)}], + "sunset": [{"start": datetime(2020, 1, 1, 15, 30, 0), "end": datetime(2020, 1, 1, 17, 30, 0)},{"start": datetime(2020, 1, 1, 15, 30, 0), "end": datetime(2020, 1, 1, 17, 30, 0)}], + "night": [{"start": datetime(2019, 12, 31, 17, 30, 0), "end": datetime(2020, 1, 2, 7, 30, 0)}, {"start": datetime(2020, 1, 1, 17, 30, 0), "end": datetime(2020, 1, 2, 7, 30, 0)}]}} + + self.sunrise_data_late_night_late_night = { + 'CS001': {"sunrise": [{"start": datetime(2020, 1, 1, 7, 30, 
0), "end": datetime(2020, 1, 1, 9, 30, 0)}, {"start": datetime(2020, 1, 1, 7, 30, 0), "end": datetime(2020, 1, 1, 9, 30, 0)}], + "day": [{"start": datetime(2020, 1, 1, 9, 30, 0), "end": datetime(2020, 1, 1, 15, 30, 0)}, {"start": datetime(2020, 1, 1, 9, 30, 0), "end": datetime(2020, 1, 1, 15, 30, 0)}], + "sunset": [{"start": datetime(2020, 1, 1, 15, 30, 0), "end": datetime(2020, 1, 1, 17, 30, 0)},{"start": datetime(2020, 1, 1, 15, 30, 0), "end": datetime(2020, 1, 1, 17, 30, 0)}], + "night": [{"start": datetime(2020, 1, 1, 17, 30, 0), "end": datetime(2020, 1, 2, 7, 30, 0)}, {"start": datetime(2020, 1, 1, 17, 30, 0), "end": datetime(2020, 1, 2, 7, 30, 0)}]}} + + self.sunrise_data_late_night_early_night_next_day = { + 'CS001': {"sunrise": [{"start": datetime(2020, 1, 1, 7, 30, 0), "end": datetime(2020, 1, 1, 9, 30, 0)}, {"start": datetime(2020, 1, 2, 7, 30, 0), "end": datetime(2020, 1, 2, 9, 30, 0)}], + "day": [{"start": datetime(2020, 1, 1, 9, 30, 0), "end": datetime(2020, 1, 1, 15, 30, 0)}, {"start": datetime(2020, 1, 2, 9, 30, 0), "end": datetime(2020, 1, 2, 15, 30, 0)}], + "sunset": [{"start": datetime(2020, 1, 1, 15, 30, 0), "end": datetime(2020, 1, 1, 17, 30, 0)},{"start": datetime(2020, 1, 2, 15, 30, 0), "end": datetime(2020, 1, 2, 17, 30, 0)}], + "night": [{"start": datetime(2020, 1, 1, 17, 30, 0), "end": datetime(2020, 1, 2, 7, 30, 0)}, {"start": datetime(2020, 1, 1, 17, 30, 0), "end": datetime(2020, 1, 2, 7, 30, 0)}]}} + + self.sunrise_data_late_night_late_night_next_day = { + 'CS001': {"sunrise": [{"start": datetime(2020, 1, 1, 7, 30, 0), "end": datetime(2020, 1, 1, 9, 30, 0)}, {"start": datetime(2020, 1, 2, 7, 30, 0), "end": datetime(2020, 1, 2, 9, 30, 0)}], "day": [{"start": datetime(2020, 1, 1, 9, 30, 0), "end": datetime(2020, 1, 1, 15, 30, 0)}, {"start": datetime(2020, 1, 2, 9, 30, 0), "end": datetime(2020, 1, 2, 15, 30, 0)}], "sunset": [{"start": datetime(2020, 1, 1, 15, 30, 0), "end": datetime(2020, 1, 1, 17, 30, 0)},{"start": datetime(2020, 1, 2, 15, 30, 0), "end": datetime(2020, 1, 2, 17, 30, 0)}], "night": [{"start": datetime(2020, 1, 1, 17, 30, 0), "end": datetime(2020, 1, 2, 7, 30, 0)}, {"start": datetime(2020, 1, 2, 17, 30, 0), "end": datetime(2020, 1, 3, 7, 30, 0)}]}} + + self.sunrise_patcher = mock.patch('lofar.sas.tmss.services.scheduling.constraints.template_constraints_v1.timestamps_and_stations_to_sun_rise_and_set') self.sunrise_mock = self.sunrise_patcher.start() self.sunrise_mock.return_value = self.sunrise_data self.addCleanup(self.sunrise_patcher.stop) - self.distance_data = { - "sun": {datetime(2020, 1, 1, 10, 0, 0): Angle("0.3rad"), datetime(2020, 1, 1, 12, 0, 0): Angle("0.35rad")}, - "moon": {datetime(2020, 1, 1, 10, 0, 0): Angle("0.2rad"), datetime(2020, 1, 1, 12, 0, 0): Angle("0.25rad")}, - "jupiter": {datetime(2020, 1, 1, 10, 0, 0): Angle("0.1rad"), datetime(2020, 1, 1, 12, 0, 0): Angle("0.15rad")} - } - self.distance_patcher = mock.patch('lofar.sas.tmss.services.scheduling.constraints.template_constraints_v1.coordinates_and_timestamps_to_separation_from_bodies') - self.distance_mock = self.distance_patcher.start() - self.distance_mock.return_value = self.distance_data - self.addCleanup(self.distance_patcher.stop) + # require_day + + def test_get_earliest_possible_start_time_with_daytime_constraint_returns_day_start(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_day'] = True + self.scheduling_unit_blueprint.save() + self.sunrise_mock.return_value = self.sunrise_data_early_night + timestamp = datetime(2020, 1, 
1, 4, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, self.sunrise_data['CS001']['day'][0]['start']) - def test_get_earliest_possible_start_time_with_daytime_constraint_timestamp_returns_day_start(self): + def test_get_earliest_possible_start_time_with_daytime_constraint_returns_day_start_of_latest_station(self): + self.scheduling_unit_blueprint.requirements_doc['tasks']['Observation']['specifications_doc']['stations'] = ['CS001', 'DE601'] self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_day'] = True self.scheduling_unit_blueprint.save() + self.sunrise_mock.return_value = self.sunrise_data_early_night timestamp = datetime(2020, 1, 1, 4, 0, 0) returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) - self.assertEqual(returned_time, self.sunrise_data['CS002']['day'][0]['start']) + self.assertEqual(returned_time, self.sunrise_data['DE601']['day'][0]['start']) - def test_get_earliest_possible_start_time_with_daytime_constraint_timestamp_returns_timestamp(self): + def test_get_earliest_possible_start_time_with_daytime_constraint_returns_timestamp(self): self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_day'] = True self.scheduling_unit_blueprint.save() timestamp = datetime(2020, 1, 1, 10, 0, 0) returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) self.assertEqual(returned_time, timestamp) - # todo: add more daytime checks with 255 + def test_get_earliest_possible_start_time_with_daytime_constraint_returns_next_day_start(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_day'] = True + self.scheduling_unit_blueprint.save() + timestamp = datetime(2020, 1, 1, 20, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, self.sunrise_data['CS001']['day'][1]['start']) + + def test_get_earliest_possible_start_time_with_daytime_constraint_returns_next_day_start_when_obs_does_not_fit(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_day'] = True + self.scheduling_unit_blueprint.save() + timestamp = datetime(2020, 1, 1, 14, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, self.sunrise_data['CS001']['day'][1]['start']) + + def test_can_run_anywhere_within_timewindow_with_daily_constraints_with_daytime_constraint_returns_true(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_day'] = True + self.scheduling_unit_blueprint.save() + + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 10, 0, 0) + upper_bound = datetime(2020, 1, 1, 15, 0, 0) + self.assertTrue(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + def test_can_run_anywhere_within_timewindow_with_daily_constraints_with_daytime_constraint_returns_false_when_not_daytime(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_day'] = True + self.scheduling_unit_blueprint.save() + + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 20, 0, 0) + upper_bound = datetime(2020, 1, 1, 23, 0, 0) + 
self.assertFalse(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + def test_can_run_anywhere_within_timewindow_with_daily_constraints_with_daytime_constraint_returns_false_when_partially_not_daytime(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_day'] = True + self.scheduling_unit_blueprint.save() + + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 14, 0, 0) + upper_bound = datetime(2020, 1, 1, 18, 0, 0) + self.assertFalse(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 8, 0, 0) + upper_bound = datetime(2020, 1, 1, 12, 0, 0) + self.assertFalse(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + def test_can_run_within_timewindow_with_daytime_constraint_returns_correct_value(self): + # todo: for time ranges across dates, consider removing the mock for this because the moving window cannot be easily mocked + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky']['min_distance'] = {} # remove sky constraint + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_day'] = True + self.scheduling_unit_blueprint.save() + + # can run in day + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 8, 0, 0) + upper_bound = datetime(2020, 1, 1, 15, 0, 0) + self.assertTrue(can_run_within_timewindow(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + # cannot run at night + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 15, 0, 0) + upper_bound = datetime(2020, 1, 1, 23, 0, 0) + self.assertFalse(can_run_within_timewindow(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + # require_night + + def test_get_earliest_possible_start_time_with_nighttime_constraint_returns_night_start(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_night'] = True + self.scheduling_unit_blueprint.save() + timestamp = datetime(2020, 1, 1, 14, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, self.sunrise_data['CS001']['night'][0]['start']) + + def test_get_earliest_possible_start_time_with_nighttime_constraint_returns_night_start_of_latest_station(self): + self.scheduling_unit_blueprint.requirements_doc['tasks']['Observation']['specifications_doc']['stations'] = ['CS001', 'DE601'] + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_night'] = True + self.scheduling_unit_blueprint.save() + timestamp = datetime(2020, 1, 1, 14, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, self.sunrise_data['DE601']['night'][0]['start']) + + def test_get_earliest_possible_start_time_with_nighttime_constraint_returns_timestamp(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_night'] = True + self.scheduling_unit_blueprint.save() + + # late night + timestamp = datetime(2020, 1, 1, 23, 0, 0) + returned_time = 
get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, timestamp) + + # early night + self.sunrise_mock.return_value = self.sunrise_data_early_night + timestamp = datetime(2020, 1, 1, 3, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, timestamp) - # todo: add nighttime checks with 254 + def test_get_earliest_possible_start_time_with_nighttime_constraint_returns_next_night_start_when_obs_does_not_fit(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_night'] = True + self.scheduling_unit_blueprint.save() + + # early night + self.sunrise_mock.return_value = self.sunrise_data_early_night + timestamp = datetime(2020, 1, 1, 6, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, self.sunrise_data_early_night['CS001']['night'][1]['start']) + + def test_can_run_anywhere_within_timewindow_with_daily_constraints_with_nighttime_constraint_returns_true(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_night'] = True + self.scheduling_unit_blueprint.save() + + # early night + self.sunrise_mock.return_value = self.sunrise_data_early_night_early_night + lower_bound = datetime(2020, 1, 1, 1, 0, 0) + upper_bound = datetime(2020, 1, 1, 3, 0, 0) + self.assertTrue(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + # late night + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 20, 0, 0) + upper_bound = datetime(2020, 1, 1, 23, 0, 0) + self.assertTrue(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + # night-night next day + self.sunrise_mock.return_value = self.sunrise_data_late_night_early_night_next_day + lower_bound = datetime(2020, 1, 1, 23, 0, 0) + upper_bound = datetime(2020, 1, 2, 3, 0, 0) + self.assertTrue(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + def test_can_run_anywhere_within_timewindow_with_daily_constraints_with_nighttime_constraint_returns_false_when_not_nighttime(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_night'] = True + self.scheduling_unit_blueprint.save() + + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 10, 0, 0) + upper_bound = datetime(2020, 1, 1, 14, 0, 0) + self.assertFalse(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + def test_can_run_anywhere_within_timewindow_with_daily_constraints_with_nighttime_constraint_returns_false_when_partially_not_nighttime(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_night'] = True + self.scheduling_unit_blueprint.save() + + # night-day next day + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night_next_day + lower_bound = datetime(2020, 1, 1, 23, 0, 0) + upper_bound = datetime(2020, 1, 2, 10, 0, 0) + self.assertFalse(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + # day-night next day + self.sunrise_mock.return_value = 
self.sunrise_data_late_night_early_night_next_day + lower_bound = datetime(2020, 1, 1, 14, 0, 0) + upper_bound = datetime(2020, 1, 2, 3, 0, 0) + self.assertFalse(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + # day-night same day + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 14, 0, 0) + upper_bound = datetime(2020, 1, 1, 20, 0, 0) + self.assertFalse(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + # night-day same day + self.sunrise_mock.return_value = self.sunrise_data_early_night_late_night + lower_bound = datetime(2020, 1, 1, 3, 0, 0) + upper_bound = datetime(2020, 1, 1, 10, 0, 0) + self.assertFalse(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + # day-night-day + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night_next_day + lower_bound = datetime(2020, 1, 1, 14, 0, 0) + upper_bound = datetime(2020, 1, 2, 10, 0, 0) + self.assertFalse(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + # night-day-night + self.sunrise_mock.return_value = self.sunrise_data_early_night_late_night + lower_bound = datetime(2020, 1, 1, 3, 0, 0) + upper_bound = datetime(2020, 1, 1, 23, 0, 0) + self.assertFalse(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + def test_can_run_within_timewindow_with_nighttime_constraint_returns_correct_value(self): + # todo: for time ranges across dates, consider removing the mock for this because the moving window cannot be easily mocked + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky']['min_distance'] = {} # remove sky constraint + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_night'] = True + self.scheduling_unit_blueprint.save() + + # cannot run in day + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 8, 0, 0) + upper_bound = datetime(2020, 1, 1, 15, 0, 0) + self.assertFalse(can_run_within_timewindow(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + # can run at night + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 15, 0, 0) + upper_bound = datetime(2020, 1, 1, 23, 0, 0) + self.assertTrue(can_run_within_timewindow(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + + # avoid_twilight + + def test_get_earliest_possible_start_time_with_twilight_constraint_returns_day_start(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['avoid_twilight'] = True + self.scheduling_unit_blueprint.save() + + self.sunrise_mock.return_value = self.sunrise_data_early_night + timestamp = datetime(2020, 1, 1, 9, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, self.sunrise_data['CS001']['day'][0]['start']) + + def test_get_earliest_possible_start_time_with_twilight_constraint_returns_day_start_of_latest_station(self): + self.scheduling_unit_blueprint.requirements_doc['tasks']['Observation']['specifications_doc']['stations'] = ['CS001', 'DE601'] + 
self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['avoid_twilight'] = True + self.scheduling_unit_blueprint.save() + + self.sunrise_mock.return_value = self.sunrise_data_early_night + timestamp = datetime(2020, 1, 1, 9, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, self.sunrise_data['DE601']['day'][0]['start']) + + def test_get_earliest_possible_start_time_with_twilight_constraint_returns_night_start(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['avoid_twilight'] = True + self.scheduling_unit_blueprint.save() + + self.sunrise_mock.return_value = self.sunrise_data + timestamp = datetime(2020, 1, 1, 17, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, self.sunrise_data['CS001']['night'][0]['start']) + + def test_get_earliest_possible_start_time_with_twilight_constraint_returns_night_start_of_latest_station(self): + self.scheduling_unit_blueprint.requirements_doc['tasks']['Observation']['specifications_doc']['stations'] = ['CS001', 'DE601'] + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['avoid_twilight'] = True + self.scheduling_unit_blueprint.save() + + self.sunrise_mock.return_value = self.sunrise_data + timestamp = datetime(2020, 1, 1, 17, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, self.sunrise_data['DE601']['night'][0]['start']) + + def test_get_earliest_possible_start_time_with_twilight_constraint_returns_timestamp(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['avoid_twilight'] = True + self.scheduling_unit_blueprint.save() + + # daytime + timestamp = datetime(2020, 1, 1, 10, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, timestamp) + + # late time + timestamp = datetime(2020, 1, 1, 20, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, timestamp) + + # early night + self.sunrise_mock.return_value = self.sunrise_data_early_night + timestamp = datetime(2020, 1, 1, 3, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, timestamp) + + def test_get_earliest_possible_start_time_with_twilight_constraint_returns_day_or_night_start_when_obs_does_not_fit(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['avoid_twilight'] = True + self.scheduling_unit_blueprint.save() + + timestamp = datetime(2020, 1, 1, 15, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, self.sunrise_data['CS001']['night'][0]['start']) + + self.sunrise_mock.return_value = self.sunrise_data_early_night + timestamp = datetime(2020, 1, 1, 7, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, self.sunrise_data['CS001']['day'][0]['start']) + + def test_can_run_anywhere_within_timewindow_with_daily_constraints_with_twilight_constraint_returns_true(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['avoid_twilight'] = True + self.scheduling_unit_blueprint.save() + + 
self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 10, 0, 0) + upper_bound = datetime(2020, 1, 1, 15, 0, 0) + self.assertTrue(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + def test_can_run_anywhere_within_timewindow_with_daily_constraints_with_twilight_constraint_returns_false_when_in_twilight(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['avoid_twilight'] = True + self.scheduling_unit_blueprint.save() + + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 8, 0, 0) + upper_bound = datetime(2020, 1, 1, 9, 0, 0) + self.assertFalse(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 16, 0, 0) + upper_bound = datetime(2020, 1, 1, 17, 0, 0) + self.assertFalse(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + def test_can_run_anywhere_within_timewindow_with_daily_constraints_with_twilight_constraint_returns_false_when_partially_in_twilight(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky']['min_distance'] = {} # remove sky constraint + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['avoid_twilight'] = True + self.scheduling_unit_blueprint.save() + + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 10, 0, 0) + upper_bound = datetime(2020, 1, 1, 18, 0, 0) + self.assertFalse(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 8, 0, 0) + upper_bound = datetime(2020, 1, 1, 10, 0, 0) + self.assertFalse(tc1.can_run_anywhere_within_timewindow_with_daily_constraints(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + def test_can_run_within_timewindow_with_twilight_constraint_returns_correct_value(self): + # todo: for time ranges across dates, consider removing the mock for this because the moving window cannot be easily mocked + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky']['min_distance'] = {} # remove sky constraint + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['avoid_twilight'] = True + self.scheduling_unit_blueprint.save() + + # can run in day + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 8, 0, 0) + upper_bound = datetime(2020, 1, 1, 15, 0, 0) + self.assertTrue(can_run_within_timewindow(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + # can run at night + self.sunrise_mock.return_value = self.sunrise_data_late_night_late_night + lower_bound = datetime(2020, 1, 1, 15, 0, 0) + upper_bound = datetime(2020, 1, 1, 23, 0, 0) + self.assertTrue(can_run_within_timewindow(self.scheduling_unit_blueprint, lower_bound, upper_bound)) + + +class TestSkyConstraints(unittest.TestCase): + ''' + Tests for the constraint checkers used in dynamic scheduling + ''' + + def setUp(self) -> None: + # scheduling unit + self.obs_duration = 120 * 60 + scheduling_set = 
models.SchedulingSet.objects.create(**SchedulingSet_test_data()) + scheduling_unit_draft = TestDynamicScheduling.create_simple_observation_scheduling_unit("scheduling unit for ...%s" % self._testMethodName[30:], + scheduling_set=scheduling_set, + obs_duration=self.obs_duration) + self.scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + + # mock out conversions for speedup and assertable timestamps + self.distance_data = { + "sun": {datetime(2020, 1, 1, 10, 0, 0): Angle("0.3rad"), datetime(2020, 1, 1, 12, 0, 0): Angle("0.35rad")}, + "moon": {datetime(2020, 1, 1, 10, 0, 0): Angle("0.2rad"), datetime(2020, 1, 1, 12, 0, 0): Angle("0.25rad")}, + "jupiter": {datetime(2020, 1, 1, 10, 0, 0): Angle("0.1rad"), datetime(2020, 1, 1, 12, 0, 0): Angle("0.15rad")} + } + self.distance_patcher = mock.patch('lofar.sas.tmss.services.scheduling.constraints.template_constraints_v1.coordinates_and_timestamps_to_separation_from_bodies') + self.distance_mock = self.distance_patcher.start() + self.distance_mock.return_value = self.distance_data + self.addCleanup(self.distance_patcher.stop) - # todo: add twilight checks with 256 + # min_distance - def test_can_run_within_timewindow_with_min_distance_constraint_returns_true_when_met(self): + def test_can_run_anywhere_within_timewindow_with_sky_constraints_with_min_distance_constraint_returns_true_when_met(self): self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky']['min_distance'] = {'sun': 0.1, 'moon': 0.1, 'jupiter': 0.1} self.scheduling_unit_blueprint.save() timestamp = datetime(2020, 1, 1, 10, 0, 0) - returned_value = can_run_within_timewindow(self.scheduling_unit_blueprint, timestamp, timestamp + timedelta(seconds=self.obs_duration)) + returned_value = tc1.can_run_anywhere_within_timewindow_with_sky_constraints(self.scheduling_unit_blueprint, timestamp, timestamp + timedelta(seconds=self.obs_duration)) self.assertTrue(returned_value) - def test_can_run_within_timewindow_with_min_distance_constraint_returns_false_when_not_met(self): + def test_can_run_anywhere_within_timewindow_with_sky_constraints_with_min_distance_constraint_returns_false_when_not_met(self): self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky']['min_distance'] = {'sun': 0.2, 'moon': 0.2, 'jupiter': 0.2} self.scheduling_unit_blueprint.save() timestamp = datetime(2020, 1, 1, 10, 0, 0) - returned_value = can_run_within_timewindow(self.scheduling_unit_blueprint, timestamp, timestamp + timedelta(seconds=self.obs_duration)) + returned_value = tc1.can_run_anywhere_within_timewindow_with_sky_constraints(self.scheduling_unit_blueprint, timestamp, timestamp + timedelta(seconds=self.obs_duration)) self.assertFalse(returned_value) diff --git a/SAS/TMSS/services/workflow_service/CMakeLists.txt b/SAS/TMSS/services/workflow_service/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..5c5d502c21bb7b28fb93be9f02f0498a44fda702 --- /dev/null +++ b/SAS/TMSS/services/workflow_service/CMakeLists.txt @@ -0,0 +1,7 @@ +lofar_package(TMSSWorkflowService 0.1 DEPENDS TMSSClient PyCommon pyparameterset PyMessaging) + +lofar_find_package(PythonInterp 3.4 REQUIRED) + +add_subdirectory(lib) +add_subdirectory(bin) + diff --git a/SAS/TMSS/services/workflow_service/bin/CMakeLists.txt b/SAS/TMSS/services/workflow_service/bin/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..2e7ec964e60e2c8a2becb4db91c456e8b201a015 --- /dev/null +++ 
b/SAS/TMSS/services/workflow_service/bin/CMakeLists.txt @@ -0,0 +1,4 @@ +lofar_add_bin_scripts(tmss_workflow_service) + +# supervisord config files +lofar_add_sysconf_files(tmss_workflow_service.ini DESTINATION supervisord.d) diff --git a/SAS/TMSS/services/workflow_service/bin/tmss_workflow_service b/SAS/TMSS/services/workflow_service/bin/tmss_workflow_service new file mode 100755 index 0000000000000000000000000000000000000000..51dd037a08aaa765c994f5aed0df7ca1f2d296e2 --- /dev/null +++ b/SAS/TMSS/services/workflow_service/bin/tmss_workflow_service @@ -0,0 +1,22 @@ +#!/usr/bin/python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +if __name__ == '__main__': + from lofar.sas.tmss.services.workflow_service import main + main() diff --git a/SAS/TMSS/services/workflow_service/bin/tmss_workflow_service.ini b/SAS/TMSS/services/workflow_service/bin/tmss_workflow_service.ini new file mode 100644 index 0000000000000000000000000000000000000000..0f80770faf3c580ff8a0558e62399adb66e2fa76 --- /dev/null +++ b/SAS/TMSS/services/workflow_service/bin/tmss_workflow_service.ini @@ -0,0 +1,9 @@ +[program:tmss_workflow_service] +command=/bin/bash -c 'source $LOFARROOT/lofarinit.sh;exec tmss_workflow_service' +user=lofarsys +stopsignal=INT ; KeyboardInterrupt +stopasgroup=true ; bash does not propagate signals +stdout_logfile=%(program_name)s.log +redirect_stderr=true +stderr_logfile=NONE +stdout_logfile_maxbytes=0 diff --git a/SAS/TMSS/services/workflow_service/lib/CMakeLists.txt b/SAS/TMSS/services/workflow_service/lib/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..f18bbb8b7ccbc3554a0f0e1c5fb32a44145fab22 --- /dev/null +++ b/SAS/TMSS/services/workflow_service/lib/CMakeLists.txt @@ -0,0 +1,10 @@ +lofar_find_package(PythonInterp 3.4 REQUIRED) +include(PythonInstall) + +set(_py_files + workflow_service.py + ) + +python_install(${_py_files} + DESTINATION lofar/sas/tmss/services) + diff --git a/SAS/TMSS/services/workflow_service/lib/workflow_service.py b/SAS/TMSS/services/workflow_service/lib/workflow_service.py new file mode 100644 index 0000000000000000000000000000000000000000..c38bde688e87903f9b66a4c9f2d6234814a4c808 --- /dev/null +++ b/SAS/TMSS/services/workflow_service/lib/workflow_service.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python3 + +# subtask_scheduling.py +# +# Copyright (C) 2015 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. 
+# The LOFAR software suite is free software: you can redistribute it +# and/or modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +# + +import os +import logging +logger = logging.getLogger(__name__) + +from lofar.sas.tmss.client.tmssbuslistener import * + +class SchedulingUnitEventMessageHandler(TMSSEventMessageHandler): + + def onSchedulingUnitBlueprintStatusChanged(self, id: int, status: str): + try: + # import here and not at top of module because we need the django.setup() to be run first, either from this module's main, or from the TMSSTestEnvironment + from lofar.sas.tmss.tmss.workflowapp.signals import scheduling_unit_blueprint_signal + from lofar.sas.tmss.tmss.tmssapp.models import SchedulingUnitBlueprint + + logger.info("SchedulingUnitBlueprint id=%s status changed to '%s', signalling workflow...", id, status) + scheduling_unit_blueprint = SchedulingUnitBlueprint.objects.get(pk=id) + scheduling_unit_blueprint_signal.send(sender=self.__class__, instance=scheduling_unit_blueprint, status=status) + except Exception as e: + logger.error(e) + + +def create_workflow_service(exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER): + return TMSSBusListener(handler_type=SchedulingUnitEventMessageHandler, + handler_kwargs={}, + exchange=exchange, broker=broker) + +def main(): + # make sure we run in UTC timezone + os.environ['TZ'] = 'UTC' + + from optparse import OptionParser, OptionGroup + from lofar.common import dbcredentials + + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + + # Check the invocation arguments + parser = OptionParser('%prog [options]', + description='run the tmss_workflow_service which forwards TMSS events to the workflow engine.') + + group = OptionGroup(parser, 'Messaging options') + group.add_option('-b', '--broker', dest='broker', type='string', default=DEFAULT_BROKER, + help='Address of the message broker, default: %default') + group.add_option('-e', "--exchange", dest="exchange", type="string", default=DEFAULT_BUSNAME, + help="Bus or queue where the TMSS messages are published. 
[default: %default]") + parser.add_option_group(group) + + parser.add_option_group(dbcredentials.options_group(parser)) + parser.set_defaults(dbcredentials=os.environ.get('TMSS_DBCREDENTIALS', 'TMSS')) + (options, args) = parser.parse_args() + + dbcreds = dbcredentials.parse_options(options) + logger.info("Using TMSS dbcreds: %s" % dbcreds.stringWithHiddenPassword()) + + # setup django + os.environ["TMSS_DBCREDENTIALS"] = options.dbcredentials + os.environ["DJANGO_SETTINGS_MODULE"] = "lofar.sas.tmss.tmss.settings" + os.environ['TMSS_ENABLE_VIEWFLOW'] = 'True' + import django + django.setup() + + with create_workflow_service(options.exchange, options.broker): + waitForInterrupt() + +if __name__ == '__main__': + main() diff --git a/SAS/TMSS/src/tmss/exceptions.py b/SAS/TMSS/src/tmss/exceptions.py index e45ba40745dbfac84a842d9334b3fd687ad2cc23..c918a64950632d8573d7e1cef3f2745f2383dcdc 100644 --- a/SAS/TMSS/src/tmss/exceptions.py +++ b/SAS/TMSS/src/tmss/exceptions.py @@ -14,6 +14,9 @@ class BlueprintCreationException(ConversionException): class SubtaskCreationException(ConversionException): pass +class SubtaskException(TMSSException): + pass + class SchedulingException(TMSSException): pass diff --git a/SAS/TMSS/src/tmss/settings.py b/SAS/TMSS/src/tmss/settings.py index 9ba919e02252205cd5b2d7c0e83565bd2cf088c4..7c6334e185e7e757cad6d70c0d69e0ea5cedb546 100644 --- a/SAS/TMSS/src/tmss/settings.py +++ b/SAS/TMSS/src/tmss/settings.py @@ -93,7 +93,7 @@ INSTALLED_APPS = [ 'django_filters', 'material', 'material.frontend' -] + ] MIDDLEWARE = [ 'django.middleware.gzip.GZipMiddleware', @@ -114,7 +114,6 @@ if show_debug_toolbar(): INSTALLED_APPS.append('debug_toolbar') MIDDLEWARE.insert(MIDDLEWARE.index('django.middleware.gzip.GZipMiddleware')+1, 'debug_toolbar.middleware.DebugToolbarMiddleware') - if bool(os.environ.get('TMSS_ENABLE_VIEWFLOW', False)): INSTALLED_APPS.extend(['viewflow', 'viewflow.frontend', 'lofar.sas.tmss.tmss.workflowapp']) @@ -124,7 +123,7 @@ ROOT_URLCONF = 'lofar.sas.tmss.tmss.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [BASE_DIR, os.path.join(BASE_DIR, 'templates'), os.path.join(os.environ.get('LOFARROOT'), 'lib64/python3.6/site-packages/lofar/SAS/TMSS/frontend','tmss_webapp')], + 'DIRS': [BASE_DIR, os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR, '../frontend','tmss_webapp')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ @@ -138,7 +137,7 @@ TEMPLATES = [ ] STATICFILES_DIRS = [ - os.path.join(os.environ.get('LOFARROOT'), 'lib64/python3.6/site-packages/lofar/SAS/TMSS/frontend','tmss_webapp/build/static') + os.path.join(BASE_DIR, '../frontend','tmss_webapp/build/static') ] WSGI_APPLICATION = 'lofar.sas.tmss.tmss.wsgi.application' diff --git a/SAS/TMSS/src/tmss/tmssapp/conversions.py b/SAS/TMSS/src/tmss/tmssapp/conversions.py index af5d004637c17f20118bd660e4e761b22fef288a..335b8937493b5c3e26fa9f0a80b798bee31107c0 100644 --- a/SAS/TMSS/src/tmss/tmssapp/conversions.py +++ b/SAS/TMSS/src/tmss/tmssapp/conversions.py @@ -30,7 +30,9 @@ SUN_SET_RISE_PRECISION = 30 # n_grid_points; higher is more precise but very co @lru_cache(maxsize=256, typed=False) # does not like lists, so use tuples to allow caching def timestamps_and_stations_to_sun_rise_and_set(timestamps: tuple, stations: tuple, angle_to_horizon: Angle=SUN_SET_RISE_ANGLE_TO_HORIZON) -> dict: """ - compute sunrise, sunset, day and night of the given stations at the given timestamps + Compute sunrise, sunset, day and night of the given stations at the given 
timestamps. + The day/sunrise/sunset is always on the date of the timestamp. + The night is usually the one _starting_ on the date of the time stamp, unless the given timestamp falls before sunrise, in which case it is the night _ending_ on the timestamp date. :param timestamps: tuple of datetimes, e.g. (datetime(2020, 1, 1), datetime(2020, 1, 2)) :param stations: tuple of station names, e.g. ("CS002",) :return A dict that maps station names to a nested dict that contains lists of start and end times for sunrise, sunset, etc, on each requested date. @@ -50,24 +52,27 @@ def timestamps_and_stations_to_sun_rise_and_set(timestamps: tuple, stations: tup return_dict = {} for station in stations: for timestamp in timestamps: + # todo: this can probably be made faster by moving the following logic to an own function with single station/timestamp as input and putting the lru_cache on there. + # This also means that we have to strip the time from the datetime. Can this be safely done? observer = create_astroplan_observer_for_station(station) - sunrise_start = observer.sun_rise_time(time=Time(timestamp), which='previous', n_grid_points=SUN_SET_RISE_PRECISION) - if sunrise_start.to_datetime().date() < timestamp.date(): - sunrise_start = observer.sun_rise_time(time=Time(timestamp), horizon=-angle_to_horizon, which='nearest', n_grid_points=SUN_SET_RISE_PRECISION) - if sunrise_start.to_datetime().date() < timestamp.date(): - sunrise_start = observer.sun_rise_time(time=Time(timestamp), horizon=-angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION) - sunrise_end = observer.sun_rise_time(time=Time(timestamp), horizon=angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION) + sunrise_start = observer.sun_rise_time(time=Time(datetime.combine(timestamp.date(), dtime(12,0,0))), which='previous', n_grid_points=SUN_SET_RISE_PRECISION) + sunrise_end = observer.sun_rise_time(time=Time(sunrise_start), horizon=angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION) sunset_start = observer.sun_set_time(time=sunrise_end, horizon=angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION) - sunset_end = observer.sun_set_time(time=sunrise_end, horizon=-angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION) - sunrise_next_start = observer.sun_rise_time(time=sunset_end, horizon=-angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION) + sunset_end = observer.sun_set_time(time=sunset_start, horizon=-angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION) return_dict.setdefault(station, {}).setdefault("sunrise", []).append({"start": sunrise_start.to_datetime(), "end": sunrise_end.to_datetime()}) return_dict[station].setdefault("sunset", []).append({"start": sunset_start.to_datetime(), "end": sunset_end.to_datetime()}) return_dict[station].setdefault("day", []).append({"start": sunrise_end.to_datetime(), "end": sunset_start.to_datetime()}) - return_dict[station].setdefault("night", []).append({"start": sunset_end.to_datetime(), "end": sunrise_next_start.to_datetime()}) + if timestamp >= sunrise_start: + sunrise_next_start = observer.sun_rise_time(time=sunset_end, horizon=-angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION) + return_dict[station].setdefault("night", []).append({"start": sunset_end.to_datetime(), "end": sunrise_next_start.to_datetime()}) + else: + sunset_previous_end = observer.sun_set_time(time=sunrise_start, horizon=-angle_to_horizon, which='previous', n_grid_points=SUN_SET_RISE_PRECISION) + 
return_dict[station].setdefault("night", []).append({"start": sunset_previous_end.to_datetime(), "end": sunrise_start.to_datetime()}) + return return_dict -# Depending on usage patterns, we should consider refactoring this a little so that we cache on a function with a single timestamp as input. Requests with similar (but not identical) timestamps or bodies currently make no use of cached results for the subset computed in previous requests. +# todo: Depending on usage patterns, we should consider refactoring this a little so that we cache on a function with a single timestamp as input. Requests with similar (but not identical) timestamps or bodies currently make no use of cached results for the subset computed in previous requests. @lru_cache(maxsize=256, typed=False) # does not like lists, so use tuples to allow caching def coordinates_and_timestamps_to_separation_from_bodies(angle1: float, angle2: float, direction_type: str, timestamps: tuple, bodies: tuple) -> dict: """ diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py index 4ae9725ed8dbaa5c4897e7d309adfdb2fe9ff126..691a5c9ce300985aea3eb03552bc75c72f2c7aa4 100644 --- a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py +++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 3.0.9 on 2020-11-24 11:24 +# Generated by Django 3.0.9 on 2020-12-03 10:10 from django.conf import settings import django.contrib.postgres.fields @@ -243,6 +243,19 @@ class Migration(migrations.Migration): 'abstract': False, }, ), + migrations.CreateModel( + name='DefaultReservationTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ], + options={ + 'abstract': False, + }, + ), migrations.CreateModel( name='DefaultSchedulingConstraintsTemplate', fields=[ @@ -405,6 +418,39 @@ class Migration(migrations.Migration): 'abstract': False, }, ), + migrations.CreateModel( + name='Reservation', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='Short description for this reservation, used in overviews', max_length=255)), + ('start_time', models.DateTimeField(help_text='Start of this reservation.')), + ('duration', models.IntegerField(help_text='Duration of this reservation (in seconds). 
If null, then this reservation is indefinitely.', null=True)), + ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Properties of this reservation')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ReservationTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(blank=True, default='', help_text='A longer description of this object.', max_length=255)), + ('version', models.IntegerField(editable=False, help_text='Version of this template (with respect to other templates of the same name)')), + ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), + ], + options={ + 'abstract': False, + }, + ), migrations.CreateModel( name='ResourceType', fields=[ @@ -1147,6 +1193,20 @@ class Migration(migrations.Migration): name='quantity', field=models.ForeignKey(help_text='The quantity of this resource type.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Quantity'), ), + migrations.AddConstraint( + model_name='reservationtemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='reservationtemplate_unique_name_version'), + ), + migrations.AddField( + model_name='reservation', + name='project', + field=models.ForeignKey(help_text='Reservation will be accounted for this project.', null=True, on_delete=django.db.models.deletion.CASCADE, related_name='reservations', to='tmssapp.Project'), + ), + migrations.AddField( + model_name='reservation', + name='specifications_template', + field=models.ForeignKey(help_text='Schema used for specifications_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.ReservationTemplate'), + ), migrations.AddField( model_name='projectquota', name='project', @@ -1211,6 +1271,11 @@ class Migration(migrations.Migration): name='template', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingConstraintsTemplate'), ), + migrations.AddField( + model_name='defaultreservationtemplate', + name='template', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ReservationTemplate'), + ), migrations.AddField( model_name='defaultgeneratortemplate', name='template', @@ -1380,6 +1445,10 @@ class Migration(migrations.Migration): model_name='defaultschedulingconstraintstemplate', index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_480bbd_gin'), ), + migrations.AddIndex( + model_name='defaultreservationtemplate', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_58d7a4_gin'), + ), migrations.AddIndex( model_name='defaultgeneratortemplate', index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_def_tags_89c89d_gin'), diff --git a/SAS/TMSS/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/src/tmss/tmssapp/models/specification.py index 
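As a side note on the nullable duration introduced in this migration: a null duration means the reservation is open ended. A plain-Python sketch of the resulting stop-time rule (not the model code itself, just the semantics it implements):

from datetime import datetime, timedelta
from typing import Optional

def reservation_stop_time(start_time: datetime, duration_seconds: Optional[int]) -> Optional[datetime]:
    # no (or zero) duration means the reservation never ends, so there is no stop time
    if not duration_seconds:
        return None
    return start_time + timedelta(seconds=duration_seconds)

print(reservation_stop_time(datetime.utcnow(), 2 * 3600))   # ends two hours after the start
print(reservation_stop_time(datetime.utcnow(), None))       # indefinite reservation, prints None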
503bb9dbe020d3e462d936e5dc2f47052a1453d5..b3629f35cfd18d93ccf77af17911b7f9928271cd 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/specification.py @@ -375,6 +375,16 @@ class DefaultTaskRelationSelectionTemplate(BasicCommon): name = CharField(max_length=128, unique=True) template = ForeignKey("TaskRelationSelectionTemplate", on_delete=PROTECT) + +class ReservationTemplate(Template): + pass + + +class DefaultReservationTemplate(BasicCommon): + name = CharField(max_length=128, unique=True) + template = ForeignKey("ReservationTemplate", on_delete=PROTECT) + + # # Instance Objects # @@ -1084,3 +1094,22 @@ class TaskSchedulingRelationDraft(BasicCommon): raise ValidationError("Time_offset must be >= 0") super().save(force_insert, force_update, using, update_fields) + +class Reservation(NamedCommon): + project = ForeignKey('Project', null=True, related_name='reservations', on_delete=CASCADE, help_text='Reservation will be accounted for this project.') + description = CharField(max_length=255, help_text='Short description for this reservation, used in overviews') + start_time = DateTimeField(help_text='Start of this reservation.') + duration = IntegerField(null=True, help_text='Duration of this reservation (in seconds). If null, then this reservation is indefinitely.') + specifications_doc = JSONField(help_text='Properties of this reservation') + specifications_template = ForeignKey('ReservationTemplate', on_delete=CASCADE, help_text='Schema used for specifications_doc.') + + @property + def stop_time(self) -> datetime.datetime: + '''The stop_time based on start_time+duration if duration is known, else None''' + if self.duration: + return self.start_time + datetime.timedelta(seconds=self.duration) + return None + + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): + annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template') + super().save(force_insert, force_update, using, update_fields) diff --git a/SAS/TMSS/src/tmss/tmssapp/populate.py b/SAS/TMSS/src/tmss/tmssapp/populate.py index 05ec07e83f2f102caa1f65d1bcadf8ffb3447935..73116db990983bddcc038740907cae326e5d75b3 100644 --- a/SAS/TMSS/src/tmss/tmssapp/populate.py +++ b/SAS/TMSS/src/tmss/tmssapp/populate.py @@ -68,6 +68,16 @@ def populate_test_data(): if 'Commissioning' not in tmss_project.tags: continue + # for test purposes also add a reservation object + reservation_template = models.ReservationTemplate.objects.get(name="resource reservation") + reservation_template_spec = get_default_json_object_for_schema(reservation_template.schema) + Reservation.objects.create(name="DummyReservation", + description="Just A non-scheduled reservation as example", + project=tmss_project, + specifications_template=reservation_template, + specifications_doc=reservation_template_spec, + start_time=datetime.now()) + for scheduling_set in tmss_project.scheduling_sets.all(): for unit_nr in range(2): for strategy_template in [uc1_strategy_template, simple_strategy_template]: diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/reservation_template-reservation-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/reservation_template-reservation-1.json new file mode 100644 index 0000000000000000000000000000000000000000..cdcbb306ad3f90ddde1701666ac2eb10845695a7 --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/schemas/reservation_template-reservation-1.json @@ -0,0 +1,132 @@ +{ + "$id": "http://tmss.lofar.org/api/schemas/tasktemplate/reservation/1#", 
+ "$schema": "http://json-schema.org/draft-06/schema#", + "title": "resource reservation", + "description": "This schema defines the parameters to reserve instrument resources, and to annotate the reservation.", + "version": 1, + "type": "object", + "properties": { + "activity": { + "title": "Activity", + "description": "Description of the activity during this reservation", + "type": "object", + "additonalProperties": false, + "default":{}, + "properties": { + "type": { + "title": "Type", + "description": "Reason for this reservation", + "type": "string", + "enum": [ "maintenance", "test", "upgrade", "outage", "pr", "stand-alone mode", "test system", "other" ], + "default": "maintenance" + }, + "description": { + "title": "Description", + "description": "Free-form explanation of the reason", + "type": "string", + "default": "" + }, + "contact": { + "title": "Contact", + "description": "Who coordinates this maintenance", + "type": "string", + "default": "" + }, + "subject": { + "title": "Subject", + "description": "What will be modified or affected (select 'system' if multiple)", + "type": "string", + "enum": [ "environment", "hardware", "firmware", "software", "system", "network", "nothing" ], + "default": "nothing" + }, + "planned": { + "title": "Planned", + "description": "Was this planned?", + "type": "boolean", + "default": true + } + }, + "required": [ + "type" + ] + }, + "resources": { + "title": "Resources", + "description": "Which resources are affected", + "type": "object", + "additonalProperties": false, + "default":{}, + "properties": { + "stations": { + "title": "Stations", + "description": "List of stations", + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list" + } + }, + "required": [] + }, + "schedulability": { + "title": "Schedulability", + "description": "Schedulability of the reserved resources", + "type": "object", + "additonalProperties": false, + "default":{}, + "properties": { + "manual": { + "title": "Manual", + "description": "Manual scheduling is allowed", + "type": "boolean", + "default": true + }, + "dynamic": { + "title": "Dynamic", + "description": "Dynamic scheduling is allowed", + "type": "boolean", + "default": false + }, + "project_exclusive": { + "title": "Schedule only for this project", + "description": "Only tasks from this project can be scheduled", + "type": "boolean", + "default": true + } + } + }, + "effects": { + "title": "Effect", + "description": "Effect the actions have during this reservation", + "type": "object", + "additonalProperties": false, + "default":{}, + "properties": { + "lba_rfi": { + "title": "LBA RFI", + "description": "RFI increases in the LBA spectrum during this maintenance", + "type": "boolean", + "default": false + }, + "hba_rfi": { + "title": "HBA RFI", + "description": "RFI increases in the HBA spectrum during this maintenance", + "type": "boolean", + "default": false + }, + "expert": { + "title": "Expert mode", + "description": "Quality cannot be guaranteed", + "type": "boolean", + "default": true + } + }, + "required": [] + } + }, + "required": [ + "activity", "resources", "effects" + ] +} + + + + + diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/subtask_template-reservation-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/subtask_template-reservation-1.json new file mode 100644 index 0000000000000000000000000000000000000000..dd1d1996959e6630bc0b78f5be64a6422ec6721f --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/schemas/subtask_template-reservation-1.json @@ -0,0 +1,27 @@ +{ + 
"$id": "http://tmss.lofar.org/api/schemas/subtasktemplate/reservation/1#", + "$schema": "http://json-schema.org/draft-06/schema#", + "title": "resource reservation", + "description": "This schema defines reserved resources", + "version": 1, + "type": "object", + "properties": { + "resources": { + "title": "Resources", + "description": "Which resources are reserved", + "type": "object", + "additonalProperties": false, + "properties": { + "stations": { + "title": "Stations", + "description": "List of stations", + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list" + } + }, + "required": [] + } + }, + "required": [ + "resources" + ] +} diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json b/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json index 6e1d2c710101efe1a396935340fcdee899fe3ded..55f5cd2d4401c5592f0faaa3b627557e29f6cace 100644 --- a/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json +++ b/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json @@ -126,14 +126,18 @@ "file_name": "sap_template-1.json", "template": "sap_template" }, - { + { "file_name": "subtask_template-ingest-1.json", "template": "subtask_template", "type": "copy" - }, - { + }, + { "file_name": "task_template-ingest-1.json", "template": "task_template", "type": "ingest" + }, + { + "file_name": "reservation_template-reservation-1.json", + "template": "reservation_template" } ] \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py index 0c215aa57d1915e0660bd31572775bd3992d00d9..55eab8b6118553a53fd2ec6f9e4e9b15cf405532 100644 --- a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py @@ -385,3 +385,23 @@ class TaskTypeSerializer(RelationalHyperlinkedModelSerializer): model = models.TaskType fields = '__all__' + +class ReservationTemplateSerializer(AbstractTemplateSerializer): + class Meta: + model = models.ReservationTemplate + fields = '__all__' + + +class DefaultReservationTemplateSerializer(RelationalHyperlinkedModelSerializer): + class Meta: + model = models.DefaultReservationTemplate + fields = '__all__' + + +class ReservationSerializer(RelationalHyperlinkedModelSerializer): + specifications_doc = JSONEditorField(schema_source='specifications_template.schema') + + class Meta: + model = models.Reservation + fields = '__all__' + extra_fields = ['stop_time'] diff --git a/SAS/TMSS/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/src/tmss/tmssapp/subtasks.py index 2c0a5a50dbce25b18ba327c0872c7fa31cc7ea56..468666fb8e83630762d693bade374662db92ba3e 100644 --- a/SAS/TMSS/src/tmss/tmssapp/subtasks.py +++ b/SAS/TMSS/src/tmss/tmssapp/subtasks.py @@ -9,7 +9,7 @@ from lofar.common import isProductionEnvironment from lofar.common.json_utils import add_defaults_to_json_object_for_schema, get_default_json_object_for_schema from lofar.common.lcu_utils import get_current_stations -from lofar.sas.tmss.tmss.exceptions import SubtaskCreationException, SubtaskSchedulingException +from lofar.sas.tmss.tmss.exceptions import SubtaskCreationException, SubtaskSchedulingException, SubtaskException from datetime import datetime, timedelta from lofar.common.datetimeutils import parseDatetime @@ -19,6 +19,7 @@ from lofar.sas.resourceassignment.resourceassigner.rarpc import RARPC from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset_dict from 
lofar.sas.resourceassignment.taskprescheduler.cobaltblocksize import CorrelatorSettings, BlockConstraints, BlockSize +from lofar.sas.resourceassignment.resourceassigner.schedulers import ScheduleException from lofar.sas.tmss.tmss.tmssapp.conversions import antennafields_for_antennaset_and_station @@ -616,36 +617,161 @@ def check_prerequities_for_scheduling(subtask: Subtask) -> bool: return True -def _assign_or_unassign_resources(subtask: Subtask): - if subtask.state.value not in [SubtaskState.Choices.SCHEDULING.value, SubtaskState.Choices.UNSCHEDULING.value]: - raise SubtaskSchedulingException("Cannot assign resources for subtask id=%d because it is not in (UN)SCHEDULING state. " + +def _create_ra_specification(_subtask): + # Should we do something with station list, for 'detecting' conflicts it can be empty + parset_dict = convert_to_parset_dict(_subtask) + return { 'tmss_id': _subtask.id, + 'task_type': _subtask.specifications_template.type.value.lower(), + 'task_subtype': parset_dict.get("Observation.processSubtype","").lower(), + 'status': 'prescheduled' if _subtask.state.value == SubtaskState.Choices.SCHEDULING.value else 'approved', + 'starttime': _subtask.start_time, + 'endtime': _subtask.stop_time, + 'cluster': _subtask.cluster.name, + 'station_requirements': [], + 'specification': parset_dict } + + +def assign_or_unassign_resources(subtask: Subtask): + """ + :param subtask: + """ + MAX_NBR_ASSIGNMENTS = 10 + + if subtask.state.value != SubtaskState.Choices.SCHEDULING.value: + raise SubtaskSchedulingException("Cannot assign resources for subtask id=%d because it is not in SCHEDULING state. " "Current state=%s" % (subtask.pk, subtask.state.value)) - def create_ra_specification(_subtask): - parset_dict = convert_to_parset_dict(_subtask) - return { 'tmss_id': _subtask.id, - 'task_type': _subtask.specifications_template.type.value.lower(), - 'task_subtype': parset_dict.get("Observation.processSubtype","").lower(), - 'status': 'prescheduled' if subtask.state.value == SubtaskState.Choices.SCHEDULING.value else 'approved', - 'starttime': _subtask.start_time, - 'endtime': _subtask.stop_time, - 'cluster': _subtask.cluster.name, - 'station_requirements': [], - 'specification': parset_dict } - - ra_spec = create_ra_specification(subtask) + ra_spec = _create_ra_specification(subtask) ra_spec['predecessors'] = [] for pred in subtask.predecessors.all(): try: - ra_spec['predecessors'].append(create_ra_specification(pred)) + ra_spec['predecessors'].append(_create_ra_specification(pred)) except: pass - + assigned = False + cnt_do_assignments = 1 with RARPC.create() as rarpc: - assigned = rarpc.do_assignment(ra_spec) - + while not assigned and cnt_do_assignments < MAX_NBR_ASSIGNMENTS: + try: + cnt_do_assignments += 1 + assigned = rarpc.do_assignment(ra_spec) + except ScheduleException as e: + logger.info("Conflicts in assignment detected, lets check the stations in conflict and re-assign if possible") + # Try to re-assign if not assigned yet + if not assigned: + lst_stations_in_conflict = get_stations_in_conflict(subtask.id) + lst_stations = determine_stations_which_can_be_assigned(subtask, lst_stations_in_conflict) + ra_spec = update_specification(ra_spec, lst_stations) + + # At the end still not possible to assign, give Exception. if not assigned: - raise SubtaskSchedulingException("Cannot schedule subtask id=%d because the required resources are not (fully) available." % (subtask.pk, )) + raise SubtaskSchedulingException("Cannot schedule subtask id=%d within %d number of attempts. 
" + "The required resources are not (fully) available." % (subtask.pk, cnt_do_assignments)) + + +def get_stations_in_conflict(subtask_id): + """ + Retrieve a list of station names which RADB 'marked' as a resource in conflict after the last resource assignment + :param subtask_id: The subtask id + :return: lst_stations_in_conflict List of station names (string) which are in conflict + """ + lst_stations_in_conflict = [] + with RADBRPC.create() as radbrpc: + task_id = radbrpc.getTask(tmss_id=subtask_id)['id'] + conflict_claims = radbrpc.getResourceClaims(task_ids=[task_id], status="conflict", extended=True) + # Conflicts_claims are resources which are in conflict. Determine the resource names in conflict which are + # for example ['CS001rcu', 'CS001chan0', 'CS001bw0', 'CS001chan1', 'CS001bw1'] + resource_names_in_conflict = [] + for resc in conflict_claims: + # cross check on status in conflict + if resc["status"] == "conflict": + resource_names_in_conflict.append(resc["resource_name"]) + logger.info("Resource names with conflict %s" % resource_names_in_conflict) + + # Now get for all the resources in conflict its parent_id. Check for all parent_id which is + # resource_group_type 'station', this will be the station name in conflict which we need + resource_group_memberships = radbrpc.getResourceGroupMemberships() + parent_ids = [] + for resc in resource_group_memberships["resources"].values(): + if resc["resource_name"] in resource_names_in_conflict: + parent_ids.extend(resc['parent_group_ids']) + + logger.info("Parent group ids with conflict %s" % parent_ids) + for parent_id in list(set(parent_ids)): + resc_group_item = resource_group_memberships["groups"][parent_id] + if resc_group_item["resource_group_type"] == "station": + lst_stations_in_conflict.append(resc_group_item["resource_group_name"]) + logger.info("Stations in conflict %s", lst_stations_in_conflict) + return lst_stations_in_conflict + + +def determine_stations_which_can_be_assigned(subtask, lst_stations_in_conflict): + """ + Determine which stations can be assigned when conflict of stations are occurred + Station in conflict should be removed. + Use the max_nr_missing from the task specifications and the conflicted station list to create a station list + which should be possible to assign. If the number of max missing in a station group is larger than the station + to be skipped, then new assignment is not possible so raise an SubtaskSchedulingException with context + :param subtask: + :param lst_stations_in_conflict: + :return: lst_stations: List of station which can be assigned + """ + # Get the station list from specification and remove the conflict stations + lst_specified_stations = subtask.specifications_doc["stations"]["station_list"] + lst_stations = list(set(lst_specified_stations) - set(lst_stations_in_conflict)) + logger.info("Determine stations which can be assigned %s" % lst_stations) + + # Check whether the removing of the conflict station the requirements of max_nr_missing per station_group is + # still fulfilled. If that is OK then we are done otherwise we will raise an Exception + stations_groups = get_station_groups(subtask) + for sg in stations_groups: + nbr_missing = len(set(sg["stations"]) & set(lst_stations_in_conflict)) + if nbr_missing > sg["max_nr_missing"]: + raise SubtaskSchedulingException("There are more stations in conflict than the specification is given " + "(%d is larger than %d). The stations that are in conflict are '%s'." 
+ "Please check station of subtask %d " % + (nbr_missing, sg["max_nr_missing"], lst_stations_in_conflict, subtask.pk)) + return lst_stations + + +def get_station_groups(subtask): + """ + Retrieve the stations_group specifications of the given subtask + Need to retrieve it from (related) Target Observation Task + Note list can be empty (some testcase) which result in no checking max_nr_missing + :param subtask: + :return: station_groups which is a list of dict. { station_list, max_nr_missing } + """ + station_groups = [] + if 'calibrator' in subtask.task_blueprint.specifications_template.name.lower(): + # Calibrator requires related Target Task Observation for some specifications + target_task_blueprint = get_related_target_observation_task_blueprint(subtask.task_blueprint) + if target_task_blueprint is None: + raise SubtaskException("Cannot retrieve related target observation of task_blueprint %d (subtask %d)" % + (subtask.task_blueprint.id, subtask.id)) + if "station_groups" in target_task_blueprint.specifications_doc.keys(): + station_groups = target_task_blueprint.specifications_doc["station_groups"] + else: + if "station_groups" in subtask.task_blueprint.specifications_doc.keys(): + station_groups = subtask.task_blueprint.specifications_doc["station_groups"] + return station_groups + + +def update_specification(ra_spec, lst_stations): + """ + Update the RA Specification dictionary with the correct list of stations + :param ra_spec: Dictionary of the RA specification + :param lst_stations: List of stations to 'assign' + :return: Dictionary with updated RA specification + """ + if len(lst_stations) == 0: + raise SubtaskSchedulingException("Cannot re-assign resources after conflict for subtask id=%d " + "because there are no stations left to assign. " % ra_spec["tmss_id"]) + updated_ra_spec = ra_spec + updated_ra_spec["specification"]["Observation.VirtualInstrument.stationList"] = "[%s]" % ','.join(s for s in lst_stations) + # ?? should the station_requirements also be updated or just leave that empty '[]' assume for now it can be empty + return updated_ra_spec def schedule_qafile_subtask(qafile_subtask: Subtask): @@ -854,7 +980,7 @@ def schedule_observation_subtask(observation_subtask: Subtask): sap=sap) for sb_nr in pointing['subbands']]) # step 4: resource assigner (if possible) - _assign_or_unassign_resources(observation_subtask) + assign_or_unassign_resources(observation_subtask) # TODO: TMSS-382: evaluate the scheduled stations and see if the requiments given in the subtask.task_bluepring.specifications_doc are met for the station_groups and max_nr_missing. 
@@ -948,7 +1074,7 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask): DataproductTransform.objects.bulk_create(transforms) # step 4: resource assigner (if possible) - _assign_or_unassign_resources(pipeline_subtask) + assign_or_unassign_resources(pipeline_subtask) # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it) pipeline_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value) diff --git a/SAS/TMSS/src/tmss/tmssapp/views.py b/SAS/TMSS/src/tmss/tmssapp/views.py index 3c927861dc1153f3563613e4696b8f7d1f5565f6..b099d553958475e0d7410dc7bef529503b7f9d4d 100644 --- a/SAS/TMSS/src/tmss/tmssapp/views.py +++ b/SAS/TMSS/src/tmss/tmssapp/views.py @@ -38,7 +38,7 @@ def subtask_parset(request, subtask_pk:int): def index(request): - return render(request, os.path.join(os.environ.get('LOFARROOT'), 'lib64/python3.6/site-packages/lofar/SAS/TMSS/frontend','tmss_webapp/build/index.html')) + return render(request, os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), '../../frontend','tmss_webapp/build/index.html')) #return render(request, "../../../frontend/frontend_poc/build/index.html") @@ -167,7 +167,6 @@ def get_sun_rise_and_set(request): else: stations = tuple(stations.split(',')) - # todo: to improve speed for the frontend, we should probably precompute/cache these and return those (where available), to revisit after constraint table / TMSS-190 is done return JsonResponse(timestamps_and_stations_to_sun_rise_and_set(timestamps, stations)) diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py index f4f1e95ddbe38152855429597c6360be6448e4dc..64c4e9e588e228a509c12be4f66a687b132ae096 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py @@ -174,6 +174,21 @@ class DefaultTaskRelationSelectionTemplateViewSet(LOFARViewSet): serializer_class = serializers.DefaultTaskRelationSelectionTemplateSerializer +class DefaultReservationTemplateViewSet(LOFARViewSet): + queryset = models.DefaultReservationTemplate.objects.all() + serializer_class = serializers.DefaultReservationTemplateSerializer + + +class ReservationTemplateViewSet(AbstractTemplateViewSet): + queryset = models.ReservationTemplate.objects.all() + serializer_class = serializers.ReservationTemplateSerializer + + +class ReservationViewSet(LOFARViewSet): + queryset = models.Reservation.objects.all() + serializer_class = serializers.ReservationSerializer + + class RoleViewSet(LOFARViewSet): queryset = models.Role.objects.all() serializer_class = serializers.RoleSerializer @@ -914,4 +929,5 @@ class TaskRelationBlueprintNestedViewSet(LOFARNestedViewSet): class TaskTypeViewSet(LOFARViewSet): queryset = models.TaskType.objects.all() - serializer_class = serializers.TaskTypeSerializer \ No newline at end of file + serializer_class = serializers.TaskTypeSerializer + diff --git a/SAS/TMSS/src/tmss/urls.py b/SAS/TMSS/src/tmss/urls.py index 623d43642732d4a11463f252adffb0938259d9c9..42c6ac8971fdf9632d1c2de771c1387f8c6845c1 100644 --- a/SAS/TMSS/src/tmss/urls.py +++ b/SAS/TMSS/src/tmss/urls.py @@ -127,12 +127,14 @@ router.register(r'scheduling_unit_template', viewsets.SchedulingUnitTemplateView router.register(r'scheduling_constraints_template', viewsets.SchedulingConstraintsTemplateViewSet) router.register(r'task_template', viewsets.TaskTemplateViewSet) router.register(r'task_relation_selection_template', 
viewsets.TaskRelationSelectionTemplateViewSet) +router.register(r'reservation_template', viewsets.ReservationTemplateViewSet) router.register(r'task_connector_type', viewsets.TaskConnectorTypeViewSet) router.register(r'default_generator_template', viewsets.DefaultGeneratorTemplateViewSet) router.register(r'default_scheduling_unit_template', viewsets.DefaultSchedulingUnitTemplateViewSet) router.register(r'default_scheduling_constraints_template', viewsets.DefaultSchedulingConstraintsTemplateViewSet) router.register(r'default_task_template', viewsets.DefaultTaskTemplateViewSet) router.register(r'default_task_relation_selection_template', viewsets.DefaultTaskRelationSelectionTemplateViewSet) +router.register(r'default_reservation_template', viewsets.DefaultReservationTemplateViewSet) # instances router.register(r'cycle', viewsets.CycleViewSet) @@ -141,6 +143,7 @@ router.register(r'project', viewsets.ProjectViewSet) router.register(r'resource_type', viewsets.ResourceTypeViewSet) router.register(r'project_quota', viewsets.ProjectQuotaViewSet) router.register(r'setting', viewsets.SettingViewSet) +router.register(r'reservation', viewsets.ReservationViewSet) router.register(r'scheduling_set', viewsets.SchedulingSetViewSet) router.register(r'scheduling_unit_draft', viewsets.SchedulingUnitDraftViewSet) @@ -204,8 +207,6 @@ router.register(r'user', viewsets.UserViewSet) router.register(r'sap', viewsets.SAPViewSet) router.register(r'sip_identifier', viewsets.SIPidentifierViewSet) -# --- - urlpatterns.extend(router.urls) frontend_urlpatterns = [ @@ -222,21 +223,24 @@ urlpatterns = [url(r'^api$', RedirectView.as_view(url='/api/')), ] - -# --- # QA Workflow steps if bool(os.environ.get('TMSS_ENABLE_VIEWFLOW', False)): from .workflowapp import viewsets as workflow_viewsets + viewflow_urlpatterns = [] viewflow_router = OptionalSlashRouter() viewflow_router.APIRootView = TMSSAPIRootView - viewflow_router.register('scheduling_unit_flow/su', workflow_viewsets.SchedulingUnitFlowViewSet, basename='su') + from .workflowapp import viewsets as workflow_viewsets viewflow_router.register('scheduling_unit_flow/qa_reporting_to', workflow_viewsets.QAReportingTOViewSet, basename='qa_reporting_to') viewflow_router.register('scheduling_unit_flow/qa_reporting_sos', workflow_viewsets.QAReportingSOSViewSet, basename='qa_reporting_sos') viewflow_router.register('scheduling_unit_flow/qa_pi_verification', workflow_viewsets.PIVerificationViewSet, basename='qa_pi_verification') viewflow_router.register('scheduling_unit_flow/qa_decide_acceptance', workflow_viewsets.DecideAcceptanceViewSet, basename='qa_decide_acceptance') viewflow_router.register('scheduling_unit_flow/qa_scheduling_unit_process', workflow_viewsets.SchedulingUnitProcessViewSet, basename='qa_scheduling_unit_process') + viewflow_router.register('scheduling_unit_flow/qa_scheduling_unit_task', workflow_viewsets.SchedulingUnitTaskViewSet, basename='qa_scheduling_unit_task') + + viewflow_urlpatterns.extend(viewflow_router.urls) - urlpatterns.extend([url(r'^workflow$', RedirectView.as_view(url='/workflow/', permanent=False)), - url(r'^workflow_api/', include(viewflow_router.urls))]) + urlpatterns.insert(0,url(r'^workflow$', RedirectView.as_view(url='/workflow/', permanent=False))) + #Doesn't work if it is at the end of urlpatterns + urlpatterns.insert(0,url(r'^workflow_api/', include(viewflow_urlpatterns))) \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/CMakeLists.txt index 
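With the router entries above, reservations and their templates become ordinary REST resources. A minimal sketch of how they could be listed, assuming a locally running TMSS instance; host, port and credentials are assumptions here, mirroring how the test suite talks to the API:

import requests

BASE_URL = "http://localhost:8000/api"    # assumed local dev server
AUTH = ("test", "test")                   # assumed test credentials

reservation_templates = requests.get(BASE_URL + "/reservation_template/", auth=AUTH).json()
reservations = requests.get(BASE_URL + "/reservation/", auth=AUTH).json()
print(reservation_templates)
print(reservations)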
e7c3171661a6fd3927e6b4214251c21f0240d0b1..495fd6fd253557a1af5b9ae7c8231db36c5d1083 100644 --- a/SAS/TMSS/src/tmss/workflowapp/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/workflowapp/CMakeLists.txt @@ -5,7 +5,7 @@ set(_py_files __init__.py admin.py apps.py - tests.py + signals.py ) python_install(${_py_files} @@ -17,4 +17,5 @@ add_subdirectory(flows) add_subdirectory(viewsets) add_subdirectory(forms) add_subdirectory(templates) +add_subdirectory(tests) add_subdirectory(serializers) diff --git a/SAS/TMSS/src/tmss/workflowapp/apps.py b/SAS/TMSS/src/tmss/workflowapp/apps.py index d70dc7921a32145aa2a76285c3362041e091a358..b4d7b07f0d004048aa3fac119f0354ee4bf5a38e 100644 --- a/SAS/TMSS/src/tmss/workflowapp/apps.py +++ b/SAS/TMSS/src/tmss/workflowapp/apps.py @@ -1,5 +1,8 @@ +import os from django.apps import AppConfig class WorkflowappConfig(AppConfig): + name = 'workflowapp' + diff --git a/SAS/TMSS/src/tmss/workflowapp/flows/__init__.py b/SAS/TMSS/src/tmss/workflowapp/flows/__init__.py index a0ae3713747c0b28c5595736d06f4bcb800da5b5..abd9afee878556c103eaad7ef61ce33695f58a50 100644 --- a/SAS/TMSS/src/tmss/workflowapp/flows/__init__.py +++ b/SAS/TMSS/src/tmss/workflowapp/flows/__init__.py @@ -1,2 +1,2 @@ -from .helloworldflow import * +#from .helloworldflow import * from .schedulingunitflow import * \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitflow.py b/SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitflow.py index 8d01c51a15bc840bdb775acce1297938234a1611..0bf572d1e31c9c7817a845a12d8d6abdbbb23f8f 100644 --- a/SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitflow.py +++ b/SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitflow.py @@ -11,8 +11,17 @@ from viewflow import mixins from .. import models from .. import viewsets +from lofar.sas.tmss.tmss.tmssapp.models import Subtask + +from django.dispatch import receiver +from lofar.sas.tmss.tmss.workflowapp.signals import scheduling_unit_blueprint_signal + from viewflow import frontend, ThisObject from viewflow.activation import STATUS +from viewflow.models import Process + +import logging +logger = logging.getLogger(__name__) class ConditionActivation(FuncActivation): @classmethod @@ -45,11 +54,11 @@ class Condition(Signal): sent with Task instance. """ self.condition_check = condition_check - super(Condition, self).__init__(signal, self.signal_handler, sender, task_loader, **kwargs) @method_decorator(flow.flow_signal) def signal_handler(self, activation, sender, instance, **signal_kwargs): + if activation.get_status() == STATUS.DONE: # race condition -- condition was true on activation but we also receive the signal now return @@ -57,31 +66,41 @@ class Condition(Signal): activation.prepare() if activation.flow_task.condition_check(activation, instance): activation.done() + def ready(self): """Resolve internal `this`-references. 
and subscribe to the signal.""" + if isinstance(self.condition_check, ThisObject): self.condition_check = getattr(self.flow_class.instance, self.condition_check.name) super(Condition, self).ready() + @frontend.register class SchedulingUnitFlow(Flow): process_class = models.SchedulingUnitProcess start = ( flow.StartSignal( - post_save, + scheduling_unit_blueprint_signal, this.on_save_can_start, - sender=models.SchedulingUnit - ).Next(this.wait_schedulable) + ).Next(this.wait_scheduled) + ) + + wait_scheduled = ( + Condition( + this.check_condition_scheduled, + scheduling_unit_blueprint_signal, + task_loader=this.get_scheduling_unit_task + ) + .Next(this.wait_processed) ) - wait_schedulable = ( + wait_processed = ( Condition( - this.check_condition, - post_save, - sender=models.SchedulingUnit, + this.check_condition_processed, + scheduling_unit_blueprint_signal, task_loader=this.get_scheduling_unit_task ) .Next(this.qa_reporting_to) @@ -154,21 +173,28 @@ class SchedulingUnitFlow(Flow): this.do_mark_sub ).Next(this.end) ) - + end = flow.End() - + @method_decorator(flow.flow_start_signal) - def on_save_can_start(self, activation, sender, instance, created, **signal_kwargs): - if created: - activation.prepare() - activation.process.su = instance - - activation.done() - print("workflow started") - else: - print("no workflow started") - return activation - + def on_save_can_start(self, activation, sender, instance, status, **signal_kwargs): + + if status == "schedulable": + try: + process = models.SchedulingUnitProcess.objects.get(su=instance) + + except Process.DoesNotExist: + activation.prepare() + activation.process.su = instance + activation.done() + logger.info("workflow started") + + except Process.MultipleObjectsReturned: + logger.info("QA Workflow for process %s already exists",process) + else: + logger.info("no workflow started") + return activation + def do_mark_sub(self, activation): @@ -177,24 +203,23 @@ class SchedulingUnitFlow(Flow): and (activation.process.qa_reporting_sos is not None and activation.process.qa_reporting_sos.sos_accept_show_pi) and (activation.process.decide_acceptance is not None and activation.process.decide_acceptance.sos_accept_after_pi)) - print("!!!!!!!!!!!END FLOW!!!!!!!!!!!") - print ("can_delete:") - print (activation.process.can_delete) - print ("results_accepted:") - print (activation.process.results_accepted) - + logger.info("End of schedulingunit workflow: can_delete: %s, results_accepted: %s", activation.process.can_delete, activation.process.results_accepted) return activation - - def check_condition(self, activation, instance): + def check_condition_scheduled(self, activation, instance): if instance is None: instance = activation.process.su + + condition = instance.status == "scheduled" + return condition - condition = instance.state == 5 - print("condition is ",condition) + def check_condition_processed(self, activation, instance): + if instance is None: + instance = activation.process.su + + condition = instance.status == "processed" return condition def get_scheduling_unit_task(self, flow_task, sender, instance, **kwargs): - print(kwargs) process = models.SchedulingUnitProcess.objects.get(su=instance) return Task.objects.get(process=process,flow_task=flow_task) diff --git a/SAS/TMSS/src/tmss/workflowapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/workflowapp/migrations/0001_initial.py index cdea4f733fe87cb65a93715ce9fe5f4ebf25f750..8119f3254e3b5d89bd1593f16b715b9d6f2d0d7a 100644 --- a/SAS/TMSS/src/tmss/workflowapp/migrations/0001_initial.py +++ 
b/SAS/TMSS/src/tmss/workflowapp/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 3.0.9 on 2020-11-24 11:24 +# Generated by Django 3.0.9 on 2020-12-02 20:16 from django.db import migrations, models import django.db.models.deletion @@ -9,6 +9,7 @@ class Migration(migrations.Migration): initial = True dependencies = [ + ('tmssapp', '0001_initial'), ('viewflow', '0008_jsonfield_and_artifact'), ] @@ -45,27 +46,6 @@ class Migration(migrations.Migration): ('operator_accept', models.BooleanField(default=False)), ], ), - migrations.CreateModel( - name='SchedulingUnit', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=50)), - ('state', models.IntegerField()), - ], - ), - migrations.CreateModel( - name='HelloWorldProcess', - fields=[ - ], - options={ - 'verbose_name': 'World Request', - 'verbose_name_plural': 'World Requests', - 'proxy': True, - 'indexes': [], - 'constraints': [], - }, - bases=('viewflow.process',), - ), migrations.CreateModel( name='SchedulingUnitProcess', fields=[ @@ -76,7 +56,7 @@ class Migration(migrations.Migration): ('pi_verification', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflowapp.PIVerification')), ('qa_reporting_sos', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflowapp.QAReportingSOS')), ('qa_reporting_to', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflowapp.QAReportingTO')), - ('su', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflowapp.SchedulingUnit')), + ('su', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='tmssapp.SchedulingUnitBlueprint')), ], options={ 'abstract': False, diff --git a/SAS/TMSS/src/tmss/workflowapp/models/__init__.py b/SAS/TMSS/src/tmss/workflowapp/models/__init__.py index a0ae3713747c0b28c5595736d06f4bcb800da5b5..bfdfbc84e07beb363937412fd7fb6d5788c684d0 100644 --- a/SAS/TMSS/src/tmss/workflowapp/models/__init__.py +++ b/SAS/TMSS/src/tmss/workflowapp/models/__init__.py @@ -1,2 +1 @@ -from .helloworldflow import * from .schedulingunitflow import * \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitflow.py b/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitflow.py index 3e340fbf8c9713fbd37daec0dc977e3d453eb69f..d33f462ed653833794709d701e3d0e0be47f05a4 100644 --- a/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitflow.py +++ b/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitflow.py @@ -1,7 +1,12 @@ # Create your models here. 
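Since the process now points at a real SchedulingUnitBlueprint, the workflow state of a blueprint can be inspected through viewflow's Task model, in the same way the new test further below does. A small sketch (hypothetical helper, assuming one process per blueprint as guarded in on_save_can_start):

from viewflow.models import Task
from lofar.sas.tmss.tmss.workflowapp.models.schedulingunitflow import SchedulingUnitProcess

def workflow_tasks_for_blueprint(scheduling_unit_blueprint):
    # one SchedulingUnitProcess per blueprint is assumed here
    process = SchedulingUnitProcess.objects.get(su=scheduling_unit_blueprint)
    return [(t.flow_task.name if t.flow_task else None, t.status)
            for t in Task.objects.filter(process=process)]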
from django.db.models import CharField, IntegerField,BooleanField, ForeignKey, CASCADE, Model,NullBooleanField -from viewflow.models import Process +from viewflow.models import Process, Task +from viewflow.fields import FlowReferenceField +from viewflow.compat import _ + +from lofar.sas.tmss.tmss.tmssapp.models import SchedulingUnitBlueprint + class QAReportingTO(Model): operator_report = CharField(max_length=150) @@ -23,16 +28,11 @@ class DecideAcceptance(Model): sos_accept_after_pi = BooleanField(default=False) -class SchedulingUnit(Model): - name = CharField(max_length=50) - state = IntegerField() - - class SchedulingUnitProcess(Process): - su = ForeignKey(SchedulingUnit, blank=True, null=True, on_delete=CASCADE) + su = ForeignKey(SchedulingUnitBlueprint, blank=True, null=True, on_delete=CASCADE) qa_reporting_to=ForeignKey(QAReportingTO, blank=True, null=True, on_delete=CASCADE) qa_reporting_sos=ForeignKey(QAReportingSOS, blank=True, null=True, on_delete=CASCADE) pi_verification=ForeignKey(PIVerification, blank=True, null=True, on_delete=CASCADE) decide_acceptance=ForeignKey(DecideAcceptance, blank=True, null=True, on_delete=CASCADE) can_delete = BooleanField(default=False) - results_accepted = BooleanField(default=False) \ No newline at end of file + results_accepted = BooleanField(default=False) diff --git a/SAS/TMSS/src/tmss/workflowapp/serializers/schedulingunitflow.py b/SAS/TMSS/src/tmss/workflowapp/serializers/schedulingunitflow.py index e29cf3cb9796afcce95e94e63636fe300791f5b0..694f7f7310cdf9476f1c32d5219737584e5b368f 100644 --- a/SAS/TMSS/src/tmss/workflowapp/serializers/schedulingunitflow.py +++ b/SAS/TMSS/src/tmss/workflowapp/serializers/schedulingunitflow.py @@ -4,14 +4,10 @@ from lofar.sas.tmss.tmss.workflowapp import models from django.views import generic from django.forms.models import modelform_factory +from viewflow.models import Task -from .. import forms -#View to add a fake Scheduling Unit for the QA Workflow -class SchedulingUnitSerializer(ModelSerializer): - class Meta: - model = models.SchedulingUnit - fields = '__all__' +from .. import forms #Viewsets and serializers to access intermediate steps of the QA Workflow #through DRF @@ -37,5 +33,10 @@ class DecideAcceptanceSerializer(ModelSerializer): class SchedulingUnitProcessSerializer(ModelSerializer): class Meta: - model = models.SchedulingUnitProcess - fields = '__all__' \ No newline at end of file + model = models.SchedulingUnitProcess + fields = '__all__' + +class SchedulingUnitTaskSerializer(ModelSerializer): + class Meta: + model = Task + fields = '__all__' \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/signals.py b/SAS/TMSS/src/tmss/workflowapp/signals.py new file mode 100644 index 0000000000000000000000000000000000000000..6087fb1615c6b7a8a5c33f897a4e1cbcce36c6f2 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/signals.py @@ -0,0 +1,3 @@ +import django.dispatch + +scheduling_unit_blueprint_signal = django.dispatch.Signal() \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/tests.py b/SAS/TMSS/src/tmss/workflowapp/tests.py deleted file mode 100644 index 7ce503c2dd97ba78597f6ff6e4393132753573f6..0000000000000000000000000000000000000000 --- a/SAS/TMSS/src/tmss/workflowapp/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. 
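The new scheduling_unit_blueprint_signal above is the contract between TMSS and the QA workflow: a sender emits it with the blueprint instance and a status string, and the flow's start and wait steps react to statuses such as "schedulable", "scheduled" and "processed". A minimal sketch of both sides (the emitting sender shown here is hypothetical; only the signal itself comes from this module):

from django.dispatch import receiver
from lofar.sas.tmss.tmss.workflowapp.signals import scheduling_unit_blueprint_signal

@receiver(scheduling_unit_blueprint_signal)
def log_blueprint_status(sender, instance, status, **kwargs):
    # the flow in schedulingunitflow.py keys on these status values
    print("scheduling unit blueprint %s reached status %s" % (instance, status))

# the emitting side (wherever blueprint status changes are tracked) would do something like:
# scheduling_unit_blueprint_signal.send(sender=SchedulingUnitBlueprint, instance=blueprint, status="scheduled")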
diff --git a/SAS/TMSS/src/tmss/workflowapp/tests/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/tests/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..5636f54c85752e46b971f8c3fa94a7b1003536e3 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/tests/CMakeLists.txt @@ -0,0 +1,8 @@ +# $Id: CMakeLists.txt 32679 2015-10-26 09:31:56Z schaap $ + +if(BUILD_TESTING) + include(LofarCTest) + + lofar_add_test(t_workflow_qaworkflow) + set_tests_properties(t_workflow_qaworkflow PROPERTIES TIMEOUT 300) +endif() diff --git a/SAS/TMSS/src/tmss/workflowapp/tests/t_workflow_qaworkflow.py b/SAS/TMSS/src/tmss/workflowapp/tests/t_workflow_qaworkflow.py new file mode 100755 index 0000000000000000000000000000000000000000..342438051afe19d583061a88b29c5ae7a698066e --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/tests/t_workflow_qaworkflow.py @@ -0,0 +1,109 @@ +import os +import unittest +import requests + +import logging +logger = logging.getLogger(__name__) +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + +from lofar.common.test_utils import skip_integration_tests +if skip_integration_tests(): + exit(3) + +from lofar.messaging.messagebus import TemporaryExchange +import uuid + + +class SchedulingUnitFlowTest(unittest.TestCase): + + @classmethod + def setUpClass(cls) -> None: + cls.TEST_UUID = uuid.uuid1() + + cls.tmp_exchange = TemporaryExchange("%s_%s" % (cls.__name__, cls.TEST_UUID)) + cls.tmp_exchange.open() + + # override DEFAULT_BUSNAME + import lofar + lofar.messaging.config.DEFAULT_BUSNAME = cls.tmp_exchange.address + + # import here, and not at top of module, because DEFAULT_BUSNAME needs to be set before importing + from lofar.sas.resourceassignment.resourceassigner.test.ra_test_environment import RATestEnvironment + from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment + from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator + + cls.ra_test_env = RATestEnvironment(exchange=cls.tmp_exchange.address) + cls.ra_test_env.start() + + cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address, populate_schemas=True, populate_test_data=False, + start_subtask_scheduler=False, start_postgres_listener=True, start_ra_test_environment=True, + start_dynamic_scheduler=False, enable_viewflow=True, start_workflow_service=True) + cls.tmss_test_env.start() + + + @classmethod + def tearDownClass(cls) -> None: + cls.tmss_test_env.stop() + cls.ra_test_env.stop() + cls.tmp_exchange.close() + + + def test_qa_workflow(self): + from lofar.sas.tmss.tmss.workflowapp.flows.schedulingunitflow import SchedulingUnitFlow + + from lofar.sas.tmss.tmss.tmssapp import models + from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprints_and_subtasks_from_scheduling_unit_draft + from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingSet_test_data + + from lofar.sas.tmss.tmss.workflowapp.models.schedulingunitflow import SchedulingUnitProcess + from viewflow.models import Task + + #check if one QA Workflow is created after scheduling unit blueprint creation + self.assertEqual(0, len(SchedulingUnitProcess.objects.all())) + strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines") + + scheduling_unit_draft = models.SchedulingUnitDraft.objects.create( + name="Test Scheduling Unit UC1", + requirements_doc=strategy_template.template, + requirements_template=strategy_template.scheduling_unit_template, + observation_strategy_template=strategy_template, + 
copy_reason=models.CopyReason.objects.get(value='template'), + generator_instance_doc="para", + copies=None, + scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data())) + + create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + + scheduling_unit_draft.refresh_from_db() + task_drafts = scheduling_unit_draft.task_drafts.all() + scheduling_unit_blueprints = scheduling_unit_draft.scheduling_unit_blueprints.all() + scheduling_unit_blueprint = scheduling_unit_blueprints[0] + task_blueprints = scheduling_unit_blueprint.task_blueprints.all() + qa_workflow = SchedulingUnitProcess.objects.all() + self.assertEqual(1, len(qa_workflow)) + + #test that QA workflow have two tasks + self.assertEqual(2, len(Task.objects.all())) + self.assertEqual(Task.objects.get(id=1).flow_task.name, 'start') + self.assertEqual(Task.objects.get(id=1).status, 'DONE') + self.assertEqual(Task.objects.get(id=2).flow_task.name, 'wait_scheduled') + self.assertEqual(Task.objects.get(id=2).status, 'NEW') + + #Change subtask status to scheduled + for task_blueprint in task_blueprints: + for subtask in task_blueprint.subtasks.all(): + subtask.state = models.SubtaskState.objects.get(value='scheduled') + subtask.save() + + #Check the QA Workflow is now with 3 Task + self.assertEqual(3, len(Task.objects.all())) + self.assertEqual(Task.objects.get(id=2).flow_task.name, 'wait_scheduled') + self.assertEqual(Task.objects.get(id=2).status, 'DONE') + self.assertEqual(Task.objects.get(id=3).flow_task.name, 'wait_processed') + self.assertEqual(Task.objects.get(id=3).status, 'NEW') + + + +if __name__ == '__main__': + #run the unit tests + unittest.main() diff --git a/SAS/TMSS/src/tmss/workflowapp/tests/t_workflow_qaworkflow.run b/SAS/TMSS/src/tmss/workflowapp/tests/t_workflow_qaworkflow.run new file mode 100755 index 0000000000000000000000000000000000000000..f4f60358833b8b424de8c55201f3c1672720bef2 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/tests/t_workflow_qaworkflow.run @@ -0,0 +1,6 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "*tmss*" t_workflow_qaworkflow.py + diff --git a/SAS/TMSS/src/tmss/workflowapp/tests/t_workflow_qaworkflow.sh b/SAS/TMSS/src/tmss/workflowapp/tests/t_workflow_qaworkflow.sh new file mode 100755 index 0000000000000000000000000000000000000000..ec908c9e200cdce26adc79bcc75f33a3b44e9ae6 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/tests/t_workflow_qaworkflow.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_workflow_qaworkflow \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitflow.py b/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitflow.py index 1c70e87e110fd31d5f2533712165f973d0701733..acaa7459631c3341df848686df8f8ba371f2dbd4 100644 --- a/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitflow.py +++ b/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitflow.py @@ -11,18 +11,10 @@ from viewflow.flow.views.utils import get_next_task_url from django.forms import CharField, CheckboxInput from django.forms.models import modelform_factory +from viewflow.models import Task from .. 
import forms, models, serializers -class SchedulingUnitFlowViewSet(viewsets.ModelViewSet): - queryset = models.SchedulingUnit.objects.all() - serializer_class = serializers.SchedulingUnitSerializer - - @action(methods=['get'], detail=True) - def trigger(self, request, pk=None): - SchedulingUnitFlow - return Response("ok") - #Viewsets and serializers to access intermediate steps of the QA Workflow #through DRF class QAReportingTOViewSet(viewsets.ModelViewSet): @@ -45,6 +37,10 @@ class SchedulingUnitProcessViewSet(viewsets.ModelViewSet): queryset = models.SchedulingUnitProcess.objects.all() serializer_class = serializers.SchedulingUnitProcessSerializer +class SchedulingUnitTaskViewSet(viewsets.ModelViewSet): + queryset = Task.objects.all() + serializer_class = serializers.SchedulingUnitTaskSerializer + class QAReportingTOView(FlowMixin, generic.CreateView): template_name = 'qa_reporting.html' model = models.QAReportingTO diff --git a/SAS/TMSS/test/t_conversions.py b/SAS/TMSS/test/t_conversions.py index f153900312eac5e6ebab6a268c80386892983c26..18865051aecdd7bd80946e68ef80487e58f8b815 100755 --- a/SAS/TMSS/test/t_conversions.py +++ b/SAS/TMSS/test/t_conversions.py @@ -165,6 +165,38 @@ class UtilREST(unittest.TestCase): response_date = dateutil.parser.parse(r_dict['CS002']['sunrise'][i]['start']).date() self.assertEqual(expected_date, response_date) + def test_util_sun_rise_and_set_returns_correct_date_of_day_sunrise_and_sunset(self): + timestamps = ['2020-01-01T02-00-00'] + r = requests.get(BASE_URL + '/util/sun_rise_and_set?timestamps=%s' % ','.join(timestamps), auth=AUTH) + self.assertEqual(r.status_code, 200) + r_dict = json.loads(r.content.decode('utf-8')) + + # assert day of timestamp matches day of returned values + expected_date = dateutil.parser.parse(timestamps[0]).date() + self.assertEqual(expected_date, dateutil.parser.parse(r_dict['CS002']['sunrise'][0]['start']).date()) + self.assertEqual(expected_date, dateutil.parser.parse(r_dict['CS002']['sunrise'][0]['end']).date()) + self.assertEqual(expected_date, dateutil.parser.parse(r_dict['CS002']['day'][0]['start']).date()) + self.assertEqual(expected_date, dateutil.parser.parse(r_dict['CS002']['day'][0]['end']).date()) + self.assertEqual(expected_date, dateutil.parser.parse(r_dict['CS002']['sunset'][0]['start']).date()) + self.assertEqual(expected_date, dateutil.parser.parse(r_dict['CS002']['sunset'][0]['end']).date()) + + def test_util_sun_rise_and_set_returns_correct_date_of_night(self): + timestamps = ['2020-01-01T02-00-00', '2020-01-01T12-00-00'] + r = requests.get(BASE_URL + '/util/sun_rise_and_set?timestamps=%s' % ','.join(timestamps), auth=AUTH) + self.assertEqual(r.status_code, 200) + r_dict = json.loads(r.content.decode('utf-8')) + + # assert timestamp before sunrise returns night ending on day of timestamp (last night) + expected_date = dateutil.parser.parse(timestamps[0]).date() + response_date = dateutil.parser.parse(r_dict['CS002']['night'][0]['end']).date() + self.assertEqual(expected_date, response_date) + + # assert timestamp after sunrise returns night starting on day of timestamp (next night) + expected_date = dateutil.parser.parse(timestamps[1]).date() + response_date = dateutil.parser.parse(r_dict['CS002']['night'][1]['start']).date() + self.assertEqual(expected_date, response_date) + + def test_util_angular_separation_from_bodies_yields_error_when_no_pointing_is_given(self): r = requests.get(BASE_URL + '/util/angular_separation_from_bodies', auth=AUTH) diff --git a/SAS/TMSS/test/t_scheduling.py 
b/SAS/TMSS/test/t_scheduling.py index ef00fc0a9956c05a7ce6425db34220e3777165ff..c52ae56ed47dfa46c0a30a0b04f93e95a566dc3d 100755 --- a/SAS/TMSS/test/t_scheduling.py +++ b/SAS/TMSS/test/t_scheduling.py @@ -76,6 +76,27 @@ def create_subtask_object_for_testing(subtask_type_value, subtask_state_value): return models.Subtask.objects.create(**subtask_data) +def create_reserved_stations_for_testing(station_list): + """ + Helper function to create stations in reservation, in other words assigned in Resource Assigner + :param station_list: List of station names to assign + """ + with RARPC.create() as rarpc: + ra_spec = {'task_type': 'reservation', + 'task_subtype': 'maintenance', + 'status': 'prescheduled', + 'starttime': datetime.utcnow() - timedelta(hours=1), + 'endtime': datetime.utcnow() + timedelta(hours=2), + 'cluster': None, + 'specification': {}} + inner_spec = {'Observation.VirtualInstrument.stationList': station_list, + 'Observation.startTime': ra_spec['starttime'], + 'Observation.endTime': ra_spec['starttime']} + ra_spec['specification'] = inner_spec + assigned = rarpc.do_assignment(ra_spec) + return assigned + + class SchedulingTest(unittest.TestCase): def setUp(self): # clean all specs/tasks/claims in RADB (cascading delete) @@ -106,23 +127,47 @@ class SchedulingTest(unittest.TestCase): self.assertEqual('scheduled', subtask['state_value']) self.assertEqual('scheduled', tmss_test_env.ra_test_environment.radb.getTask(tmss_id=subtask_id)['status']) - def test_schedule_observation_subtask_with_blocking_reservations(self): - - # create a reservation on station CS001 - with RARPC.create() as rarpc: - ra_spec = { 'task_type': 'reservation', - 'task_subtype': 'maintenance', - 'status': 'prescheduled', - 'starttime': datetime.utcnow()-timedelta(hours=1), - 'endtime': datetime.utcnow() + timedelta(hours=1), - 'cluster': None, - 'specification': {} } - inner_spec = { 'Observation.VirtualInstrument.stationList': ['CS001'], - 'Observation.startTime': '2020-01-08 06:30:00', - 'Observation.endTime': '2021-07-08 06:30:00' } - ra_spec['specification'] = inner_spec - assigned = rarpc.do_assignment(ra_spec) - self.assertTrue(assigned) + def test_schedule_observation_subtask_with_one_blocking_reservation_failed(self): + """ + Set (Resource Assigner) station CS001 to reserved + Schedule subtask with station CS001 + Check if schedule of the subtask fail + """ + self.assertTrue(create_reserved_stations_for_testing(['CS001'])) + + with tmss_test_env.create_tmss_client() as client: + task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url']) + task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(task_blueprint_data, '/task_blueprint/') + subtask_template = client.get_subtask_template("observation control") + spec = get_default_json_object_for_schema(subtask_template['schema']) + spec['stations']['digital_pointings'][0]['subbands'] = [0] + cluster_url = client.get_path_as_json_object('/cluster/1')['url'] + + subtask_data = test_data_creator.Subtask(specifications_template_url=subtask_template['url'], + specifications_doc=spec, + cluster_url=cluster_url, + start_time=datetime.utcnow() + timedelta(minutes=5), + task_blueprint_url=task_blueprint['url']) + subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/') + subtask_id = subtask['id'] + test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/') + + 
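# Move the subtask to 'defined' so it can be scheduled; with CS001 reserved above, + # the schedule call below is expected to fail, leaving the subtask in 'error' and the RADB task in 'conflict'. + 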
client.set_subtask_status(subtask_id, 'defined') + + with self.assertRaises(Exception): + client.schedule_subtask(subtask_id) + + subtask = client.get_subtask(subtask_id) + self.assertEqual('error', subtask['state_value']) + self.assertEqual('conflict', tmss_test_env.ra_test_environment.radb.getTask(tmss_id=subtask_id)['status']) + + def test_schedule_observation_subtask_with_blocking_reservations_failed(self): + """ + Set (Resource Assigner) station CS001, CS002, CS401, CS501 to reserved + Schedule subtask with stations CS001, CS002, CS401 + Check if schedule of the subtask fail + """ + self.assertTrue(create_reserved_stations_for_testing(['CS001','CS002','CS501','CS401' ])) with tmss_test_env.create_tmss_client() as client: task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url']) @@ -131,11 +176,14 @@ class SchedulingTest(unittest.TestCase): subtask_template = client.get_subtask_template("observation control") spec = get_default_json_object_for_schema(subtask_template['schema']) spec['stations']['digital_pointings'][0]['subbands'] = [0] + spec['stations']['station_list'] = ['CS001', 'CS002', 'CS401'] + cluster_url = client.get_path_as_json_object('/cluster/1')['url'] subtask_data = test_data_creator.Subtask(specifications_template_url=subtask_template['url'], specifications_doc=spec, cluster_url=cluster_url, + start_time=datetime.utcnow() + timedelta(minutes=5), task_blueprint_url=task_blueprint['url']) subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/') subtask_id = subtask['id'] @@ -152,6 +200,38 @@ class SchedulingTest(unittest.TestCase): self.assertIsNotNone(ra_task) self.assertEqual('conflict', ra_task['status']) + def test_schedule_observation_subtask_with_blocking_reservation_ok(self): + """ + Set (Resource Assigner) station CS001 to reserved + Schedule subtask with station CS001, CS002, CS003 + Check if schedule of the subtasks do not fail (it can schedule with station CS002 and CS003) + """ + self.assertTrue(create_reserved_stations_for_testing(['CS001','CS003'])) + + with tmss_test_env.create_tmss_client() as client: + task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url']) + task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(task_blueprint_data,'/task_blueprint/') + subtask_template = client.get_subtask_template("observation control") + spec = get_default_json_object_for_schema(subtask_template['schema']) + spec['stations']['digital_pointings'][0]['subbands'] = [0] + cluster_url = client.get_path_as_json_object('/cluster/1')['url'] + spec['stations']['station_list'] = ['CS001', 'CS002', 'CS003'] + subtask_data = test_data_creator.Subtask(specifications_template_url=subtask_template['url'], + specifications_doc=spec, + cluster_url=cluster_url, + start_time=datetime.utcnow()+timedelta(minutes=5), + task_blueprint_url=task_blueprint['url']) + subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/') + subtask_id = subtask['id'] + test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), + '/subtask_output/') + + client.set_subtask_status(subtask_id, 'defined') + + subtask = client.schedule_subtask(subtask_id) + self.assertEqual('scheduled', subtask['state_value']) + self.assertEqual('scheduled', tmss_test_env.ra_test_environment.radb.getTask(tmss_id=subtask_id)['status']) + def 
test_schedule_pipeline_subtask_with_enough_resources_available(self): with tmss_test_env.create_tmss_client() as client: cluster_url = client.get_path_as_json_object('/cluster/1')['url'] @@ -312,7 +392,7 @@ class SubtaskInputOutputTest(unittest.TestCase): setting.value = True setting.save() - @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks._assign_or_unassign_resources") + @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks.assign_or_unassign_resources") def test_schedule_pipeline_subtask_filters_predecessor_output_dataproducts_for_input(self, assign_resources_mock): # setup: # create observation subtask and outputs and dataproducts @@ -388,7 +468,7 @@ class SAPTest(unittest.TestCase): self.assertEqual(models.SAP.objects.first().specifications_doc['pointing']['angle1'], pointing['angle1']) self.assertEqual(models.SAP.objects.first().specifications_doc['pointing']['angle2'], pointing['angle2']) - @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks._assign_or_unassign_resources") + @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks.assign_or_unassign_resources") def test_schedule_pipeline_subtask_copies_sap_from_input_to_output(self, assign_resources_mock): # setup: # create observation subtask and outputs and dataproducts @@ -466,6 +546,31 @@ class TestWithUC1Specifications(unittest.TestCase): cls.scheduling_unit_blueprints = scheduling_unit_draft.scheduling_unit_blueprints.all() cls.scheduling_unit_blueprint = cls.scheduling_unit_blueprints[0] cls.task_blueprints = cls.scheduling_unit_blueprint.task_blueprints.all() + # SubtaskId of the first observation subtask + observation_tbp = list(tb for tb in list(cls.task_blueprints) if tb.specifications_template.type.value == TaskType.Choices.OBSERVATION.value) + observation_tbp.sort(key=lambda tb: tb.relative_start_time) + cls.subtask_id_of_first_observation = list(st for st in observation_tbp[0].subtasks.all() + if st.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value)[0].id + + def setUp(self): + # clean all specs/tasks/claims in RADB (cascading delete) + for spec in tmss_test_env.ra_test_environment.radb.getSpecifications(): + tmss_test_env.ra_test_environment.radb.deleteSpecification(spec['id']) + # Set subtask back to 'defined', start_time to now (and no stoptime) + for tb in self.task_blueprints: + for subtask in tb.subtasks.all(): + subtask.state = models.SubtaskState.objects.get(value="defined") + subtask.stop_time = None + subtask.start_time = datetime.utcnow() + subtask.save() + + def _schedule_subtask_with_failure(self, station_reserved): + with tmss_test_env.create_tmss_client() as client: + with self.assertRaises(Exception) as context: + client.schedule_subtask(self.subtask_id_of_first_observation) + self.assertTrue("There are more stations in conflict than the specification is given" in str(context.exception).lower()) + for station in station_reserved: + self.assertTrue(station in str(context.exception).lower()) def test_create_task_blueprints_and_subtasks_from_scheduling_unit_draft(self): """ @@ -537,6 +642,66 @@ class TestWithUC1Specifications(unittest.TestCase): self.assertEqual(timedelta(0), task_blueprint.relative_start_time) self.assertEqual(timedelta(0), task_blueprint.relative_stop_time) + def test_dutch_stations_conflicts_exception(self): + """ + Test conflict of 'Dutch' station which are have a default of max_nr_missing=4, + Assign stations equal to max_nr_missing+1 before schedule it and check if it can NOT be scheduled + Check the context of the Exception + """ + station_reserved = ['CS002', 
'CS003', 'CS004', 'CS401', 'CS501'] + self.assertTrue(create_reserved_stations_for_testing(station_reserved)) + self._schedule_subtask_with_failure(station_reserved) + + def test_dutch_stations_conflicts_ok(self): + """ + Test conflicts for 'Dutch' stations, which have a default of max_nr_missing=4. + Reserve a number of stations equal to max_nr_missing before scheduling and check that the subtask can be scheduled. + """ + station_reserved = ['CS002', 'CS003', 'CS004', 'CS401'] + self.assertTrue(create_reserved_stations_for_testing(station_reserved)) + with tmss_test_env.create_tmss_client() as client: + client.schedule_subtask(self.subtask_id_of_first_observation) + + def test_international_stations_conflicts_failed(self): + """ + Test conflicts for 'International' stations, which have a default of max_nr_missing=2. + Reserve a number of stations equal to max_nr_missing+1 before scheduling and check that the subtask can NOT be scheduled. + Check the message of the raised Exception. + """ + station_reserved = ['SE607', 'PL610', 'PL612'] + self.assertTrue(create_reserved_stations_for_testing(station_reserved)) + self._schedule_subtask_with_failure(station_reserved) + + def test_international_stations_conflicts_ok(self): + """ + Test conflicts for 'International' stations, which have a default of max_nr_missing=2. + Reserve a number of stations equal to max_nr_missing before scheduling and check that the subtask can be scheduled. + """ + station_reserved = ['SE607', 'PL612'] + self.assertTrue(create_reserved_stations_for_testing(station_reserved)) + with tmss_test_env.create_tmss_client() as client: + client.schedule_subtask(self.subtask_id_of_first_observation) + + def test_international_required_stations_conflicts_failed(self): + """ + Test conflicts for 'International Required' stations, which have a default of max_nr_missing=1. + Reserve a number of stations equal to max_nr_missing+1 before scheduling and check that the subtask can NOT be scheduled. + Check the message of the raised Exception. + """ + station_reserved = ['DE601', 'DE605'] + self.assertTrue(create_reserved_stations_for_testing(station_reserved)) + self._schedule_subtask_with_failure(station_reserved) + + def test_international_required_stations_conflicts_ok(self): + """ + Test conflicts for 'International Required' stations, which have a default of max_nr_missing=1. + Reserve a number of stations equal to max_nr_missing before scheduling and check that the subtask can be scheduled. + """ + station_reserved = ['DE605'] + self.assertTrue(create_reserved_stations_for_testing(station_reserved)) + with tmss_test_env.create_tmss_client() as client: + client.schedule_subtask(self.subtask_id_of_first_observation) + if __name__ == "__main__": os.environ['TZ'] = 'UTC' diff --git a/SAS/TMSS/test/t_tmssapp_specification_REST_API.py b/SAS/TMSS/test/t_tmssapp_specification_REST_API.py index 018c985f4b69f7b626564bda91f076dcc49591b9..e69a0a55f0bf1cbd466253a051ec5db88cd42392 100755 --- a/SAS/TMSS/test/t_tmssapp_specification_REST_API.py +++ b/SAS/TMSS/test/t_tmssapp_specification_REST_API.py @@ -267,6 +267,70 @@ class SchedulingConstraintsTemplateTestCase(unittest.TestCase): DELETE_and_assert_gone(self, url) + +class ReservationTemplateTestCase(unittest.TestCase): + def test_reservation_template_list_apiformat(self): + r = requests.get(BASE_URL + '/reservation_template/?format=api', auth=AUTH) + self.assertEqual(r.status_code, 200) + self.assertTrue("Reservation Template List" in r.content.decode('utf8')) + + def test_reservation_template_GET_nonexistant_raises_error(self): + GET_and_assert_equal_expected_code(self, BASE_URL + '/reservation_template/1234321/', 404) + + 
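# The CRUD tests below follow the same POST/GET/PUT/PATCH/DELETE pattern as the other template test cases in this module. + 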
def test_reservation_template_POST_and_GET(self): + # POST and GET a new item and assert correctness + test_data = test_data_creator.ReservationTemplate() + expected_data = test_data_creator.update_schema_from_template("reservationtemplate", test_data) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/reservation_template/', test_data, 201, expected_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url+'?format=json', expected_data) + + def test_reservation_template_PUT_invalid_raises_error(self): + test_data = test_data_creator.ReservationTemplate() + PUT_and_assert_expected_response(self, BASE_URL + '/reservation_template/9876789876/', test_data, 404, {}) + + def test_reservation_template_PUT(self): + # POST new item, verify + test_data = test_data_creator.ReservationTemplate() + expected_data = test_data_creator.update_schema_from_template("reservationtemplate", test_data) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/reservation_template/', test_data, 201, expected_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, expected_data) + # PUT new values, verify + test_data2 = test_data_creator.ReservationTemplate("reservationtemplate2") + expected_data2 = test_data_creator.update_schema_from_template("reservationtemplate", test_data2) + PUT_and_assert_expected_response(self, url, test_data2, 200, expected_data2) + GET_OK_and_assert_equal_expected_response(self, url, expected_data2) + + def test_reservation_template_PATCH(self): + # POST new item, verify + test_data = test_data_creator.ReservationTemplate() + expected_data = test_data_creator.update_schema_from_template("reservationtemplate", test_data) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/reservation_template/', test_data, 201, expected_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, expected_data) + + test_patch = {"name": "new_name", + "description": "better description", + "schema": minimal_json_schema(properties={"mykey": {"type":"string", "default":"my better value"}})} + + # PATCH item and verify + expected_patch_data = test_data_creator.update_schema_from_template("reservationtemplate", test_patch) + PATCH_and_assert_expected_response(self, url, test_patch, 200, expected_patch_data) + expected_data = dict(test_data) + expected_data.update(expected_patch_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) + + def test_reservation_template_DELETE(self): + # POST new item, verify + test_data = test_data_creator.ReservationTemplate() + expected_data = test_data_creator.update_schema_from_template("reservationtemplate", test_data) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/reservation_template/', test_data, 201, expected_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, expected_data) + # DELETE and check it's gone + DELETE_and_assert_gone(self, url) + + class TaskTemplateTestCase(unittest.TestCase): def test_task_template_list_apiformat(self): @@ -1099,7 +1163,6 @@ class ProjectQuotaTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, project_quota_url, project_quota_test_data) - class SchedulingSetTestCase(unittest.TestCase): def test_scheduling_set_list_apiformat(self): r = requests.get(BASE_URL + '/scheduling_set/?format=api', auth=AUTH) @@ -2661,6 +2724,84 @@ class TaskSchedulingRelationDraftTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, BASE_URL + 
'/task_scheduling_relation_draft/%s/' % id2, test_data_2) +class ReservationTestCase(unittest.TestCase): + def test_reservation_list_apiformat(self): + r = requests.get(BASE_URL + '/reservation/?format=api', auth=AUTH) + self.assertEqual(r.status_code, 200) + self.assertTrue("Reservation List" in r.content.decode('utf8')) + + def test_reservation_GET_nonexistant_raises_error(self): + GET_and_assert_equal_expected_code(self, BASE_URL + '/reservation/1234321/', 404) + + def test_reservation_POST_and_GET(self): + reservation_test_data = test_data_creator.Reservation(duration=60) + + # POST and GET a new item and assert correctness + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/reservation/', reservation_test_data, 201, reservation_test_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, reservation_test_data) + + def test_reservation_PUT_invalid_raises_error(self): + reservation_test_data = test_data_creator.Reservation(duration=60) + PUT_and_assert_expected_response(self, BASE_URL + '/reservation/9876789876/', reservation_test_data, 404, {}) + + def test_reservation_PUT(self): + project_url = test_data_creator.post_data_and_get_url(test_data_creator.Project(), '/project/') + reservation_test_data = test_data_creator.Reservation(name="reservation 1", duration=50, project_url=project_url) + + # POST new item, verify + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/reservation/', reservation_test_data, 201, reservation_test_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, reservation_test_data) + + reservation_test_data2 = test_data_creator.Reservation(name="reservation2", project_url=project_url) + # PUT new values, verify + PUT_and_assert_expected_response(self, url, reservation_test_data2, 200, reservation_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, reservation_test_data2) + + def test_reservation_PATCH(self): + reservation_test_data = test_data_creator.Reservation(duration=60) + + # POST new item, verify + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/reservation/', reservation_test_data, 201, reservation_test_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, reservation_test_data) + + test_patch = {"description": "This is a new and improved description", + "duration": 90} + + # PATCH item and verify + expected_patch_data = test_data_creator.update_schema_from_template("reservationtemplate", test_patch) + PATCH_and_assert_expected_response(self, url, test_patch, 200, expected_patch_data) + expected_data = dict(reservation_test_data) + expected_data.update(test_patch) + GET_OK_and_assert_equal_expected_response(self, url, expected_patch_data) + + def test_reservation_DELETE(self): + reservation_test_data = test_data_creator.Reservation(duration=30, start_time=datetime.utcnow() + timedelta(days=1)) + # POST new item, verify + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/reservation/', reservation_test_data, 201, reservation_test_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, reservation_test_data) + + # DELETE and check it's gone + DELETE_and_assert_gone(self, url) + + def test_GET_Reservation_list_shows_entry(self): + test_data_1 = Reservation_test_data(duration=3600) + models.Reservation.objects.create(**test_data_1) + nbr_results = models.Reservation.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/reservation/', test_data_1, 
nbr_results) + + def test_GET_Reservation_view_returns_correct_entry(self): + test_data_1 = Reservation_test_data(name="Reservation 1", duration=60, start_time=datetime.utcnow() + timedelta(days=1)) + test_data_2 = Reservation_test_data(name="Reservation 2", duration=120, start_time=datetime.utcnow() + timedelta(days=2)) + id1 = models.Reservation.objects.create(**test_data_1).id + id2 = models.Reservation.objects.create(**test_data_2).id + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/reservation/' + str(id1) + '/', test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/reservation/' + str(id2) + '/', test_data_2) + + if __name__ == "__main__": logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) diff --git a/SAS/TMSS/test/test_utils.py b/SAS/TMSS/test/test_utils.py index 1029deb3474ce830e83f3d8d0a26f07c9bf3620f..95bbec3a5cf6b123fd123be5501feaeaa6e4bf60 100644 --- a/SAS/TMSS/test/test_utils.py +++ b/SAS/TMSS/test/test_utils.py @@ -272,7 +272,8 @@ class TMSSTestEnvironment: populate_schemas:bool=False, populate_test_data:bool=False, start_ra_test_environment: bool=False, start_postgres_listener: bool=False, start_subtask_scheduler: bool=False, start_dynamic_scheduler: bool=False, - start_pipeline_control: bool=False, enable_viewflow: bool=False): + start_pipeline_control: bool=False, + start_workflow_service: bool=False, enable_viewflow: bool=False): self._exchange = exchange self._broker = broker self._populate_schemas = populate_schemas @@ -302,8 +303,10 @@ class TMSSTestEnvironment: self._start_pipeline_control = start_pipeline_control self.pipeline_control = None - if enable_viewflow: - os.environ['TMSS_ENABLE_VIEWFLOW'] = 'True' + self.enable_viewflow = enable_viewflow or start_workflow_service + self._start_workflow_service = start_workflow_service + self.workflow_service = None + os.environ['TMSS_ENABLE_VIEWFLOW'] = str(self.enable_viewflow) # Check for correct Django version, should be at least 3.0 if django.VERSION[0] < 3: @@ -366,6 +369,11 @@ class TMSSTestEnvironment: self.pipeline_control = PipelineControlTMSS(exchange=self._exchange, broker=self._broker) self.pipeline_control.start_listening() + if self._start_workflow_service: + from lofar.sas.tmss.services.workflow_service import create_workflow_service + self.workflow_service = create_workflow_service(exchange=self._exchange, broker=self._broker) + self.workflow_service.start_listening() + if self._populate_schemas or self._populate_test_data: self.populate_schemas() @@ -374,6 +382,10 @@ class TMSSTestEnvironment: def stop(self): + if self.workflow_service is not None: + self.workflow_service.stop_listening() + self.workflow_service = None + if self.postgres_listener is not None: self.postgres_listener.stop() self.postgres_listener = None @@ -488,7 +500,8 @@ def main_test_environment(): populate_schemas=options.schemas, populate_test_data=options.data, start_ra_test_environment=options.services, start_postgres_listener=options.services, start_subtask_scheduler=options.services, start_dynamic_scheduler=options.services, - start_pipeline_control=options.services, enable_viewflow=options.viewflow) as tmss_test_env: + start_pipeline_control=options.services, + start_workflow_service=options.services and options.viewflow, enable_viewflow=options.viewflow) as tmss_test_env: # print some nice info for the user to use the test servers... # use print instead of log for clean lines. 
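A minimal usage sketch of the new start_workflow_service flag (the module path below is an assumption for illustration; the keyword arguments follow the constructor shown above, and start_workflow_service implies enable_viewflow):

    # Sketch only: bring up a TMSS test environment with the workflow service listening.
    # The import path is assumed; all flags shown exist in the constructor above.
    from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment

    with TMSSTestEnvironment(populate_schemas=True,
                             start_ra_test_environment=True,
                             start_workflow_service=True) as tmss_test_env:
        with tmss_test_env.create_tmss_client() as client:
            pass  # interact with TMSS while QA workflow events are being handled
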
diff --git a/SAS/TMSS/test/tmss_test_data_django_models.py b/SAS/TMSS/test/tmss_test_data_django_models.py index 7f5f266be4ac768d006f7860abd9d8f85351c723..8c59a5c8959d5252825dff208bb07bce1574cb65 100644 --- a/SAS/TMSS/test/tmss_test_data_django_models.py +++ b/SAS/TMSS/test/tmss_test_data_django_models.py @@ -68,7 +68,6 @@ def SchedulingConstraintsTemplate_test_data(name="my_SchedulingConstraintsTempla "tags": ["TMSS", "TESTING"]} - def SchedulingUnitObservingStrategyTemplate_test_data(name="my_SchedulingUnitObservingStrategyTemplate", scheduling_unit_template:models.SchedulingUnitTemplate=None, template:dict=None) -> dict: @@ -495,3 +494,32 @@ def SAPTemplate_test_data() -> dict: "schema": minimal_json_schema(), "tags": ["TMSS", "TESTING"]} + +def ReservationTemplate_test_data(name="my_ReservationTemplate", schema:dict=None) -> dict: + if schema is None: + schema = minimal_json_schema(properties={ "foo" : { "type": "string", "default": "bar" } }, required=["foo"]) + + return {"name": name, + "description": 'My ReservationTemplate description', + "schema": schema, + "tags": ["TMSS", "TESTING"]} + + +def Reservation_test_data(name="MyReservation", duration=None, start_time=None, project: models.Project = None) -> dict: + if project is None: + project = models.Project.objects.create(**Project_test_data()) + + if start_time is None: + start_time = datetime.utcnow() + timedelta(hours=12) + + specifications_template = models.ReservationTemplate.objects.create(**ReservationTemplate_test_data()) + specifications_doc = get_default_json_object_for_schema(specifications_template.schema) + + return {"name": name, + "project": project, + "description": "Test Reservation", + "tags": ["TMSS", "TESTING"], + "start_time": start_time, + "duration": duration, # can be None + "specifications_doc": specifications_doc, + "specifications_template": specifications_template} diff --git a/SAS/TMSS/test/tmss_test_data_rest.py b/SAS/TMSS/test/tmss_test_data_rest.py index 1a16d480f10c74cd783b3ea88d39fd363b1c2cfc..17f78eaf04f89360724f2c1896037f65c2029445 100644 --- a/SAS/TMSS/test/tmss_test_data_rest.py +++ b/SAS/TMSS/test/tmss_test_data_rest.py @@ -130,6 +130,16 @@ class TMSSRESTTestDataCreator(): "tags": ["TMSS", "TESTING"]} + def ReservationTemplate(self, name="reservationtemplate1", schema:dict=None) -> dict: + if schema is None: + schema = minimal_json_schema(properties={"foo": {"type": "string", "default": "bar"}}) + + return { "name": name, + "description": 'My description', + "schema": schema, + "tags": ["TMSS", "TESTING"]} + + def SchedulingUnitObservingStrategyTemplate(self, name="my_SchedulingUnitObservingStrategyTemplate", scheduling_unit_template_url=None, template:dict=None) -> dict: @@ -669,4 +679,30 @@ class TMSSRESTTestDataCreator(): return {"specifications_doc": specifications_doc, "specifications_template": specifications_template_url, - "tags": ['tmss', 'testing']} \ No newline at end of file + "tags": ['tmss', 'testing']} + + def Reservation(self, name="My Reservation", duration=None, start_time=None, project_url=None, + specifications_template_url=None, specifications_doc=None) -> dict: + + if project_url is None: + project_url = self.post_data_and_get_url(self.Project(), '/project/') + if start_time is None: + start_time = datetime.utcnow() + timedelta(hours=12) + + if specifications_template_url is None: + specifications_template_url = self.post_data_and_get_url(self.ReservationTemplate(), '/reservation_template/') + + if specifications_doc is None: + specifications_doc = 
self.get_response_as_json_object(specifications_template_url + '/default') + + if isinstance(start_time, datetime): + start_time = start_time.isoformat() + + return {"name": name, + "project": project_url, + "description": "Test Reservation", + "tags": ["TMSS", "TESTING"], + "start_time": start_time, + "duration": duration, # can be None + "specifications_doc": specifications_doc, + "specifications_template": specifications_template_url} \ No newline at end of file
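
A usage sketch showing how the new REST test-data helpers compose (all helper names and endpoints follow this diff; the test_data_creator instance and a running test environment are assumed):

    # Sketch only: build and POST a Reservation through the new test-data helpers.
    # test_data_creator is the TMSSRESTTestDataCreator instance used throughout the tests.
    project_url = test_data_creator.post_data_and_get_url(test_data_creator.Project(), '/project/')
    template_url = test_data_creator.post_data_and_get_url(test_data_creator.ReservationTemplate(), '/reservation_template/')
    reservation = test_data_creator.Reservation(name="CS001 maintenance", duration=3600,
                                                project_url=project_url,
                                                specifications_template_url=template_url)
    reservation_url = test_data_creator.post_data_and_get_url(reservation, '/reservation/')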