diff --git a/RTCP/Cobalt/CoInterface/src/LTAFeedback.cc b/RTCP/Cobalt/CoInterface/src/LTAFeedback.cc index 6b79305d214c3683af91bd861412fd270d66da70..04dfe22d839838008ae7d984bad82f6d262aa176 100644 --- a/RTCP/Cobalt/CoInterface/src/LTAFeedback.cc +++ b/RTCP/Cobalt/CoInterface/src/LTAFeedback.cc @@ -231,6 +231,10 @@ namespace LOFAR ps.add(beamPrefix + "Offset.angle1", str(format("%f") % (tabDir.angle1 - sap.direction.angle1))); ps.add(beamPrefix + "Offset.angle2", str(format("%f") % (tabDir.angle2 - sap.direction.angle2))); + ostringstream antennaFieldNamesStr; + antennaFieldNamesStr << settings.beamFormer.pipelines[tab.pipelineNr].antennaFieldNames; + ps.add(beamPrefix + "antennaFields", antennaFieldNamesStr.str()); + } else if (type == "IncoherentStokesBeam") { ostringstream antennaFieldNamesStr; antennaFieldNamesStr << settings.beamFormer.pipelines[tab.pipelineNr].antennaFieldNames; ps.add(beamPrefix + "antennaFields", antennaFieldNamesStr.str()); diff --git a/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation221197_feedback b/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation221197_feedback index e99e7b034760290e88b463f81bd092d187fd0bf8..c73a8bacf305a319720df90bc4df5bdc6e77e10d 100644 --- a/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation221197_feedback +++ b/RTCP/Cobalt/CoInterface/test/tLTAFeedback.in_reference/Observation221197_feedback @@ -1433,6 +1433,7 @@ Observation.DataProducts.Output_Beamformed_[12].IncoherentStokesBeam[0].Pointing Observation.DataProducts.Output_Beamformed_[12].IncoherentStokesBeam[0].Pointing.target=B0329+54 Observation.DataProducts.Output_Beamformed_[12].IncoherentStokesBeam[0].SAP=0 Observation.DataProducts.Output_Beamformed_[12].IncoherentStokesBeam[0].TAB=12 +Observation.DataProducts.Output_Beamformed_[12].IncoherentStokesBeam[0].antennaFields=[CS002HBA0,CS002HBA1,CS003HBA0,CS003HBA1,CS004HBA0,CS004HBA1,CS005HBA0,CS005HBA1,CS006HBA0,CS006HBA1,CS007HBA0,CS007HBA1] Observation.DataProducts.Output_Beamformed_[12].IncoherentStokesBeam[0].antennaSet=HBA_DUAL Observation.DataProducts.Output_Beamformed_[12].IncoherentStokesBeam[0].centralFrequencies=[119531250.0000, 119726562.5000, 119921875.0000, 120117187.5000, 120312500.0000, 120507812.5000, 120703125.0000, 120898437.5000, 121093750.0000, 121289062.5000, 121484375.0000, 121679687.5000, 121875000.0000, 122070312.5000, 122265625.0000, 122460937.5000, 122656250.0000, 122851562.5000, 123046875.0000, 123242187.5000, 123437500.0000, 123632812.5000, 123828125.0000, 124023437.5000, 124218750.0000, 124414062.5000, 124609375.0000, 124804687.5000, 125000000.0000, 125195312.5000, 125390625.0000, 125585937.5000, 125781250.0000, 125976562.5000, 126171875.0000, 126367187.5000, 126562500.0000, 126757812.5000, 126953125.0000, 127148437.5000, 127343750.0000, 127539062.5000, 127734375.0000, 127929687.5000, 128125000.0000, 128320312.5000, 128515625.0000, 128710937.5000, 128906250.0000, 129101562.5000, 129296875.0000, 129492187.5000, 129687500.0000, 129882812.5000, 130078125.0000, 130273437.5000, 130468750.0000, 130664062.5000, 130859375.0000, 131054687.5000, 131250000.0000, 131445312.5000, 131640625.0000, 131835937.5000, 132031250.0000, 132226562.5000, 132421875.0000, 132617187.5000, 132812500.0000, 133007812.5000, 133203125.0000, 133398437.5000, 133593750.0000, 133789062.5000, 133984375.0000, 134179687.5000, 134375000.0000, 134570312.5000, 134765625.0000, 134960937.5000, 135156250.0000, 135351562.5000, 135546875.0000, 135742187.5000, 135937500.0000, 136132812.5000, 
136328125.0000, 136523437.5000, 136718750.0000, 136914062.5000, 137109375.0000, 137304687.5000, 137500000.0000, 137695312.5000, 137890625.0000, 138085937.5000, 138281250.0000, 138476562.5000, 138671875.0000, 138867187.5000, 139062500.0000, 139257812.5000, 139453125.0000, 139648437.5000, 139843750.0000, 140039062.5000, 140234375.0000, 140429687.5000, 140625000.0000, 140820312.5000, 141015625.0000, 141210937.5000, 141406250.0000, 141601562.5000, 141796875.0000, 141992187.5000, 142187500.0000, 142382812.5000, 142578125.0000, 142773437.5000, 142968750.0000, 143164062.5000, 143359375.0000, 143554687.5000, 143750000.0000, 143945312.5000, 144140625.0000, 144335937.5000, 144531250.0000, 144726562.5000, 144921875.0000, 145117187.5000, 145312500.0000, 145507812.5000, 145703125.0000, 145898437.5000, 146093750.0000, 146289062.5000, 146484375.0000, 146679687.5000, 146875000.0000, 147070312.5000, 147265625.0000, 147460937.5000, 147656250.0000, 147851562.5000, 148046875.0000, 148242187.5000, 148437500.0000, 148632812.5000, 148828125.0000, 149023437.5000, 149218750.0000, 149414062.5000, 149609375.0000, 149804687.5000, 150000000.0000, 150195312.5000, 150390625.0000, 150585937.5000, 150781250.0000, 150976562.5000] Observation.DataProducts.Output_Beamformed_[12].IncoherentStokesBeam[0].channelWidth=12207.031250 @@ -2889,6 +2890,7 @@ Observation.DataProducts.Output_Beamformed_[160].IncoherentStokesBeam[0].Pointin Observation.DataProducts.Output_Beamformed_[160].IncoherentStokesBeam[0].Pointing.target=FIELD 3 Observation.DataProducts.Output_Beamformed_[160].IncoherentStokesBeam[0].SAP=2 Observation.DataProducts.Output_Beamformed_[160].IncoherentStokesBeam[0].TAB=12 +Observation.DataProducts.Output_Beamformed_[160].IncoherentStokesBeam[0].antennaFields=[CS002HBA0,CS002HBA1,CS003HBA0,CS003HBA1,CS004HBA0,CS004HBA1,CS005HBA0,CS005HBA1,CS006HBA0,CS006HBA1,CS007HBA0,CS007HBA1] Observation.DataProducts.Output_Beamformed_[160].IncoherentStokesBeam[0].antennaSet=HBA_DUAL Observation.DataProducts.Output_Beamformed_[160].IncoherentStokesBeam[0].centralFrequencies=[119531250.0000, 119726562.5000, 119921875.0000, 120117187.5000, 120312500.0000, 120507812.5000, 120703125.0000, 120898437.5000, 121093750.0000, 121289062.5000, 121484375.0000, 121679687.5000, 121875000.0000, 122070312.5000, 122265625.0000, 122460937.5000, 122656250.0000, 122851562.5000, 123046875.0000, 123242187.5000, 123437500.0000, 123632812.5000, 123828125.0000, 124023437.5000, 124218750.0000, 124414062.5000, 124609375.0000, 124804687.5000, 125000000.0000, 125195312.5000, 125390625.0000, 125585937.5000, 125781250.0000, 125976562.5000, 126171875.0000, 126367187.5000, 126562500.0000, 126757812.5000, 126953125.0000, 127148437.5000, 127343750.0000, 127539062.5000, 127734375.0000, 127929687.5000, 128125000.0000, 128320312.5000, 128515625.0000, 128710937.5000, 128906250.0000, 129101562.5000, 129296875.0000, 129492187.5000, 129687500.0000, 129882812.5000, 130078125.0000, 130273437.5000, 130468750.0000, 130664062.5000, 130859375.0000, 131054687.5000, 131250000.0000, 131445312.5000, 131640625.0000, 131835937.5000, 132031250.0000, 132226562.5000, 132421875.0000, 132617187.5000, 132812500.0000, 133007812.5000, 133203125.0000, 133398437.5000, 133593750.0000, 133789062.5000, 133984375.0000, 134179687.5000, 134375000.0000, 134570312.5000, 134765625.0000, 134960937.5000, 135156250.0000, 135351562.5000, 135546875.0000, 135742187.5000, 135937500.0000, 136132812.5000, 136328125.0000, 136523437.5000, 136718750.0000, 136914062.5000, 137109375.0000, 137304687.5000, 
137500000.0000, 137695312.5000, 137890625.0000, 138085937.5000, 138281250.0000, 138476562.5000, 138671875.0000, 138867187.5000, 139062500.0000, 139257812.5000, 139453125.0000, 139648437.5000, 139843750.0000, 140039062.5000, 140234375.0000, 140429687.5000, 140625000.0000, 140820312.5000, 141015625.0000, 141210937.5000, 141406250.0000, 141601562.5000, 141796875.0000, 141992187.5000, 142187500.0000, 142382812.5000, 142578125.0000, 142773437.5000, 142968750.0000, 143164062.5000, 143359375.0000, 143554687.5000, 143750000.0000, 143945312.5000, 144140625.0000, 144335937.5000, 144531250.0000, 144726562.5000, 144921875.0000, 145117187.5000, 145312500.0000, 145507812.5000, 145703125.0000, 145898437.5000, 146093750.0000, 146289062.5000, 146484375.0000, 146679687.5000, 146875000.0000, 147070312.5000, 147265625.0000, 147460937.5000, 147656250.0000, 147851562.5000, 148046875.0000, 148242187.5000, 148437500.0000, 148632812.5000, 148828125.0000, 149023437.5000, 149218750.0000, 149414062.5000, 149609375.0000, 149804687.5000, 150000000.0000, 150195312.5000, 150390625.0000, 150585937.5000, 150781250.0000, 150976562.5000] Observation.DataProducts.Output_Beamformed_[160].IncoherentStokesBeam[0].channelWidth=12207.031250 @@ -8860,6 +8862,7 @@ Observation.DataProducts.Output_Beamformed_[86].IncoherentStokesBeam[0].Pointing Observation.DataProducts.Output_Beamformed_[86].IncoherentStokesBeam[0].Pointing.target=FIELD 2 Observation.DataProducts.Output_Beamformed_[86].IncoherentStokesBeam[0].SAP=1 Observation.DataProducts.Output_Beamformed_[86].IncoherentStokesBeam[0].TAB=12 +Observation.DataProducts.Output_Beamformed_[86].IncoherentStokesBeam[0].antennaFields=[CS002HBA0,CS002HBA1,CS003HBA0,CS003HBA1,CS004HBA0,CS004HBA1,CS005HBA0,CS005HBA1,CS006HBA0,CS006HBA1,CS007HBA0,CS007HBA1] Observation.DataProducts.Output_Beamformed_[86].IncoherentStokesBeam[0].antennaSet=HBA_DUAL Observation.DataProducts.Output_Beamformed_[86].IncoherentStokesBeam[0].centralFrequencies=[119531250.0000, 119726562.5000, 119921875.0000, 120117187.5000, 120312500.0000, 120507812.5000, 120703125.0000, 120898437.5000, 121093750.0000, 121289062.5000, 121484375.0000, 121679687.5000, 121875000.0000, 122070312.5000, 122265625.0000, 122460937.5000, 122656250.0000, 122851562.5000, 123046875.0000, 123242187.5000, 123437500.0000, 123632812.5000, 123828125.0000, 124023437.5000, 124218750.0000, 124414062.5000, 124609375.0000, 124804687.5000, 125000000.0000, 125195312.5000, 125390625.0000, 125585937.5000, 125781250.0000, 125976562.5000, 126171875.0000, 126367187.5000, 126562500.0000, 126757812.5000, 126953125.0000, 127148437.5000, 127343750.0000, 127539062.5000, 127734375.0000, 127929687.5000, 128125000.0000, 128320312.5000, 128515625.0000, 128710937.5000, 128906250.0000, 129101562.5000, 129296875.0000, 129492187.5000, 129687500.0000, 129882812.5000, 130078125.0000, 130273437.5000, 130468750.0000, 130664062.5000, 130859375.0000, 131054687.5000, 131250000.0000, 131445312.5000, 131640625.0000, 131835937.5000, 132031250.0000, 132226562.5000, 132421875.0000, 132617187.5000, 132812500.0000, 133007812.5000, 133203125.0000, 133398437.5000, 133593750.0000, 133789062.5000, 133984375.0000, 134179687.5000, 134375000.0000, 134570312.5000, 134765625.0000, 134960937.5000, 135156250.0000, 135351562.5000, 135546875.0000, 135742187.5000, 135937500.0000, 136132812.5000, 136328125.0000, 136523437.5000, 136718750.0000, 136914062.5000, 137109375.0000, 137304687.5000, 137500000.0000, 137695312.5000, 137890625.0000, 138085937.5000, 138281250.0000, 138476562.5000, 138671875.0000, 
138867187.5000, 139062500.0000, 139257812.5000, 139453125.0000, 139648437.5000, 139843750.0000, 140039062.5000, 140234375.0000, 140429687.5000, 140625000.0000, 140820312.5000, 141015625.0000, 141210937.5000, 141406250.0000, 141601562.5000, 141796875.0000, 141992187.5000, 142187500.0000, 142382812.5000, 142578125.0000, 142773437.5000, 142968750.0000, 143164062.5000, 143359375.0000, 143554687.5000, 143750000.0000, 143945312.5000, 144140625.0000, 144335937.5000, 144531250.0000, 144726562.5000, 144921875.0000, 145117187.5000, 145312500.0000, 145507812.5000, 145703125.0000, 145898437.5000, 146093750.0000, 146289062.5000, 146484375.0000, 146679687.5000, 146875000.0000, 147070312.5000, 147265625.0000, 147460937.5000, 147656250.0000, 147851562.5000, 148046875.0000, 148242187.5000, 148437500.0000, 148632812.5000, 148828125.0000, 149023437.5000, 149218750.0000, 149414062.5000, 149609375.0000, 149804687.5000, 150000000.0000, 150195312.5000, 150390625.0000, 150585937.5000, 150781250.0000, 150976562.5000] Observation.DataProducts.Output_Beamformed_[86].IncoherentStokesBeam[0].channelWidth=12207.031250 diff --git a/SAS/DataManagement/Cleanup/CleanupService/service.py b/SAS/DataManagement/Cleanup/CleanupService/service.py index 4234cff4cbd0fbccb4902fd66c1ea1243e4b098c..f43b5438fb1aa8780ae6719c98b9984caae31bd5 100644 --- a/SAS/DataManagement/Cleanup/CleanupService/service.py +++ b/SAS/DataManagement/Cleanup/CleanupService/service.py @@ -482,7 +482,7 @@ class TMSSEventMessageHandlerForCleanup(TMSSEventMessageHandler): # when an ingest subtask finishes, then it is safe for the related cleanup subtask(s) to be started subtasks = self._tmss_client.get_subtasks_in_same_scheduling_unit(subtask) ingest_subtasks = [s for s in subtasks if s['subtask_type'] == 'ingest'] - unfinished_ingest_subtasks = [s for s in ingest_subtasks if s['state_value'] != 'finished'] + unfinished_ingest_subtasks = [s for s in ingest_subtasks if s['state_value'] != 'finished' and s['obsolete_since'] is None] if len(unfinished_ingest_subtasks) > 0: logger.info("cleanup subtask id=%s is scheduled, but waiting for ingest id=%s to finish before queueing the cleanup subtask...", diff --git a/SAS/TMSS/backend/services/scheduling/lib/constraints.py b/SAS/TMSS/backend/services/scheduling/lib/constraints.py index 84003a51f427f43e3fedede05550957aded13f8e..5a65a637b02c19b97d9017f2f8176fea7ed9b995 100644 --- a/SAS/TMSS/backend/services/scheduling/lib/constraints.py +++ b/SAS/TMSS/backend/services/scheduling/lib/constraints.py @@ -40,7 +40,7 @@ from lofar.sas.tmss.tmss.tmssapp.conversions import * from lofar.common.util import noop from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.tmss.exceptions import * -from lofar.sas.tmss.tmss.tmssapp.subtasks import enough_stations_available +from lofar.sas.tmss.tmss.tmssapp.subtasks import enough_stations_available, get_missing_stations from lofar.sas.tmss.tmss.tmssapp.tasks import mark_independent_subtasks_in_scheduling_unit_blueprint_as_unschedulable import logging @@ -379,7 +379,7 @@ def sort_scheduling_units_scored_by_constraints(scheduling_units: [models.Schedu x.scheduling_unit.created_at), reverse=True) -def can_run_within_timewindow(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime, gridder: Gridder=None) -> bool: +def can_run_within_timewindow(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime, gridder: Gridder=None, raise_if_interruped: Callable=noop) -> bool: '''determine if the given 
scheduling_unit can run withing the given timewindow evaluating all constraints from the "constraints" version 1 template :param raise_if_interruped: a callable function which raises under an externally set condition (an 'interrupt' flag was set). This function is/can_be used to interrupt a long-running scheduling call to do an early exit and start a new scheduling call. Default used function is noop (no-operation), thus no interruptable behaviour. ''' @@ -387,7 +387,7 @@ def can_run_within_timewindow(scheduling_unit: models.SchedulingUnitBlueprint, l gridder = Gridder() # Seek the earliest_possible_start_time. If existing and within window, then the unit can run within this window - earliest_possible_start_time = get_earliest_possible_start_time(scheduling_unit, lower_bound, upper_bound, gridder) + earliest_possible_start_time = get_earliest_possible_start_time(scheduling_unit, lower_bound, upper_bound, gridder, raise_if_interruped) if earliest_possible_start_time is not None: earliest_possible_stop_time = earliest_possible_start_time + scheduling_unit.specified_main_observation_duration if earliest_possible_start_time >= lower_bound and earliest_possible_stop_time <= upper_bound: @@ -994,7 +994,7 @@ def evaluate_sky_min_distance_constraint(scheduling_unit: models.SchedulingUnitB return result @lru_cache(maxsize=10000) -def get_earliest_possible_start_time_for_sky_min_elevation(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime=None, gridder: Gridder=Gridder()) -> datetime: +def get_earliest_possible_start_time_for_sky_min_elevation(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime=None, gridder: Gridder=Gridder(), raise_if_interruped: Callable=noop) -> datetime: # do expensive search from lower_bound until 24 hours later with small steps # (sky constrains are (almost) cyclic over 24 hours). # first occurrence where min_elevation constraint is met is taken as rough estimate of earliest_possible_start_time @@ -1004,6 +1004,8 @@ def get_earliest_possible_start_time_for_sky_min_elevation(scheduling_unit: mode upper_bound = lower_bound + timedelta(hours=24) upper_bound = max(lower_bound, upper_bound) while possible_start_time < upper_bound: + raise_if_interruped() + result = evaluate_sky_min_elevation_constraint(scheduling_unit, possible_start_time, gridder=gridder) logger.debug('get_earliest_possible_start_time_for_sky_min_elevation %s', result) @@ -1031,7 +1033,7 @@ def get_earliest_possible_start_time_for_sky_min_elevation(scheduling_unit: mode return None -def get_earliest_possible_start_time_for_sky_transit_offset(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime=None, gridder: Gridder=None) -> datetime: +def get_earliest_possible_start_time_for_sky_transit_offset(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime=None, gridder: Gridder=None, raise_if_interruped: Callable=noop) -> datetime: # compute the transit time, and thus the optimal_start_time and earliest_possible_start_time if gridder is None: gridder = Gridder() @@ -1044,6 +1046,8 @@ def get_earliest_possible_start_time_for_sky_transit_offset(scheduling_unit: mod allow_quick_jump = True # see below, we can quick jump once, but use monotonous increments so ensure an exit of the while loop. 
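As the docstring above notes, the raise_if_interruped callback is now threaded through every earliest-possible-start-time search so a long scan can bail out between grid steps (the callback is expected to raise SchedulerInterruptedException, which callers re-raise). A minimal sketch of that pattern, assuming an event-based interrupt flag; only the exception name is taken from this patch, the other names are illustrative:

    import threading
    from datetime import datetime, timedelta

    class SchedulerInterruptedException(Exception):
        """Raised by the interrupt callback to abort a long-running constraint search."""

    def make_raise_if_interrupted(stop_event: threading.Event):
        """Build a callable that raises as soon as stop_event is set (the 'interrupt flag')."""
        def raise_if_interrupted():
            if stop_event.is_set():
                raise SchedulerInterruptedException("scheduling search was interrupted")
        return raise_if_interrupted

    def search_earliest_start_time(lower_bound: datetime, upper_bound: datetime, step: timedelta,
                                   constraint_is_met, raise_if_interrupted=lambda: None):
        """Scan the window in fixed steps; the callback is checked once per iteration so the scan can stop early."""
        possible_start_time = lower_bound
        while possible_start_time < upper_bound:
            raise_if_interrupted()              # early-exit hook, as added to the loops in this file
            if constraint_is_met(possible_start_time):
                return possible_start_time
            possible_start_time += step
        return None
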
while possible_start_time < upper_bound: + raise_if_interruped() + gridded_possible_start_time = gridder.grid_time(possible_start_time) result = evaluate_sky_transit_constraint(scheduling_unit, gridded_possible_start_time, gridder=gridder, which='next') logger.debug('get_earliest_possible_start_time_for_sky_transit_offset %s', result) @@ -1068,7 +1072,7 @@ def get_earliest_possible_start_time_for_sky_transit_offset(scheduling_unit: mod return None @lru_cache(maxsize=10000) -def get_earliest_possible_start_time_for_sky_min_distance(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime=None, gridder: Gridder=None) -> datetime: +def get_earliest_possible_start_time_for_sky_min_distance(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime=None, gridder: Gridder=None, raise_if_interruped: Callable=noop) -> datetime: # do expensive search from lower_bound until 24 hours later with small steps # (sky constrains are (almost) cyclic over 24 hours). # first occurrence where min_distance constraint is met is taken as rough estimate of earliest_possible_start_time @@ -1081,6 +1085,8 @@ def get_earliest_possible_start_time_for_sky_min_distance(scheduling_unit: model upper_bound = max(lower_bound, upper_bound) while possible_start_time < upper_bound: + raise_if_interruped() + result = evaluate_sky_min_distance_constraint(scheduling_unit, possible_start_time, gridder=gridder) logger.debug('get_earliest_possible_start_time_for_sky_min_distance %s', result) @@ -1247,7 +1253,7 @@ def evaluate_daily_constraints(scheduling_unit: models.SchedulingUnitBlueprint, return result @lru_cache(maxsize=10000) -def get_earliest_possible_start_time_for_daily_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime=None, gridder: Gridder=None) -> datetime: +def get_earliest_possible_start_time_for_daily_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime=None, gridder: Gridder=None, raise_if_interruped: Callable=noop) -> datetime: # search from lower_bound until 24 hours later with 6 hour steps # (daily constrains are (almost) cyclic over 24 hours) if gridder is None: @@ -1255,28 +1261,35 @@ def get_earliest_possible_start_time_for_daily_constraints(scheduling_unit: mode gridded_lower_bound = gridder.grid_time(lower_bound) possible_start_time = gridded_lower_bound while possible_start_time < lower_bound+timedelta(hours=24): + raise_if_interruped() + result = evaluate_daily_constraints(scheduling_unit, possible_start_time, gridder=gridder) logger.debug('get_earliest_possible_start_time_for_daily_constraints %s', result) if not result.has_constraint: return None - if result.earliest_possible_start_time is None or result.earliest_possible_start_time < lower_bound: - # advance with a grid step, and evaluate again - possible_start_time += gridder.as_timedelta() - continue - if not result.is_constraint_met and result.earliest_possible_start_time is not None: # advance straight to earliest_possible_start_time, and evaluate again to ensure the constraint is met possible_start_time = gridder.grid_time(result.earliest_possible_start_time) continue if result.is_constraint_met: - return result.earliest_possible_start_time + logger.debug('get_earliest_possible_start_time_for_daily_constraints(id=%s, lb=%s, up=%s) result=%s', scheduling_unit.id, lower_bound, upper_bound, result) + if result.earliest_possible_start_time >= lower_bound: + if upper_bound is 
None or result.earliest_possible_start_time < upper_bound: + return result.earliest_possible_start_time + else: + # do not advance past upper_bound + return None + + # advance with a grid step, and evaluate again + possible_start_time += gridder.as_timedelta() + return None -def get_earliest_possible_start_time_for_time_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime=None, gridder: Gridder=None) -> datetime: +def get_earliest_possible_start_time_for_time_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime=None, gridder: Gridder=None, raise_if_interruped: Callable=noop) -> datetime: ''' ''' constraints = scheduling_unit.scheduling_constraints_doc @@ -1292,10 +1305,11 @@ def get_earliest_possible_start_time_for_time_constraints(scheduling_unit: model if 'after' in constraints['time']: after = parser.parse(constraints['time']['after'], ignoretz=True) - if lower_bound is not None: - earliest_possible_start_times.add(max(lower_bound, after)) - else: - earliest_possible_start_times.add(after) + if upper_bound is None or after <= upper_bound - scheduling_unit.specified_main_observation_duration: + if lower_bound is not None: + earliest_possible_start_times.add(max(lower_bound, after)) + else: + earliest_possible_start_times.add(after) if 'before' in constraints['time']: before = parser.parse(constraints['time']['before'], ignoretz=True) @@ -1329,6 +1343,10 @@ def get_earliest_possible_start_time_for_time_constraints(scheduling_unit: model earliest_possible_start_times.add(potential_earliest_possible) break + if not earliest_possible_start_times and not constraints.get('time'): + # an empty time constraint means it can just run at/after lower_bound + return lower_bound + if lower_bound is not None: earliest_possible_start_times = [t for t in earliest_possible_start_times if t >= lower_bound] @@ -1338,7 +1356,7 @@ def get_earliest_possible_start_time_for_time_constraints(scheduling_unit: model if earliest_possible_start_times: return max(earliest_possible_start_times) - return lower_bound + return None def get_at_constraint_timestamp(scheduling_unit: models.SchedulingUnitBlueprint) -> datetime: @@ -1348,7 +1366,7 @@ def get_at_constraint_timestamp(scheduling_unit: models.SchedulingUnitBlueprint) return at return None -def get_earliest_possible_start_time(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime=None, gridder: Gridder=None) -> datetime: +def get_earliest_possible_start_time(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime=None, gridder: Gridder=None, raise_if_interruped: Callable=noop) -> datetime: ''' ''' _method_start_timestamp = datetime.utcnow() @@ -1372,9 +1390,11 @@ def get_earliest_possible_start_time(scheduling_unit: models.SchedulingUnitBluep get_earliest_possible_start_time_for_sky_transit_offset, get_earliest_possible_start_time_for_sky_min_distance): try: - earliest_possible_start_time = get_earliest_possible_start_time_method(scheduling_unit, lower_bound, upper_bound, gridder) + earliest_possible_start_time = get_earliest_possible_start_time_method(scheduling_unit, lower_bound, upper_bound, gridder, raise_if_interruped) if earliest_possible_start_time is not None: earliest_possible_start_times.add(earliest_possible_start_time) + except SchedulerInterruptedException: + raise except Exception as e: logger.exception(e) @@ -1574,7 +1594,7 @@ def 
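The removed comment refers to the memoization that keeps this reason-finding pass cheap: the per-constraint helpers above carry @lru_cache(maxsize=10000), so re-running them here reuses results computed during the failed can_run_within_timewindow call, and the tests below reset them via wipe_evaluate_constraints_caches(). A rough sketch of that caching-plus-reset pattern, with illustrative names rather than the real TMSS signatures:

    from functools import lru_cache
    from datetime import datetime

    @lru_cache(maxsize=10000)
    def earliest_start_for_constraint(unit_id: int, lower_bound: datetime, upper_bound: datetime):
        """Expensive per-constraint search; identical (unit, window) calls are answered from the cache."""
        # placeholder for the real evaluation; returns None when the constraint cannot be met in the window
        return None

    def wipe_constraint_caches():
        """Clear memoized results, e.g. between test cases that change a unit's constraints."""
        earliest_start_for_constraint.cache_clear()
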
compute_scheduling_unit_scores(scheduling_unit: models.SchedulingUnitBluepri # return the actual (not the gridded) weighted_start_time start_time=weighted_start_time) -def get_min_earliest_possible_start_time(scheduling_units: [models.SchedulingUnitBlueprint], lower_bound: datetime, upper_bound: datetime=None, raise_if_interruped: Callable=noop, gridder: Gridder=None) -> datetime: +def get_min_earliest_possible_start_time(scheduling_units: [models.SchedulingUnitBlueprint], lower_bound: datetime, upper_bound: datetime=None, gridder: Gridder=None, raise_if_interruped: Callable=noop) -> datetime: '''deterimine the earliest possible starttime over all given scheduling units, taking into account all their constraints :param raise_if_interruped: a callable function which raises under an externally set condition (an 'interrupt' flag was set). This function is/can_be used to interrupt a long-running scheduling call to do an early exit and start a new scheduling call. Default used function is noop (no-operation), thus no interruptable behaviour. ''' @@ -1585,7 +1605,7 @@ def get_min_earliest_possible_start_time(scheduling_units: [models.SchedulingUni raise_if_interruped() if scheduling_unit.scheduling_constraints_template is not None: - earliest_possible_start_time = get_earliest_possible_start_time(scheduling_unit, lower_bound, upper_bound, gridder) + earliest_possible_start_time = get_earliest_possible_start_time(scheduling_unit, lower_bound, upper_bound, gridder, raise_if_interruped) if earliest_possible_start_time is not None: if min_earliest_possible_start_time is None or earliest_possible_start_time < min_earliest_possible_start_time: min_earliest_possible_start_time = earliest_possible_start_time @@ -1606,40 +1626,74 @@ def can_run_within_station_reservations(scheduling_unit: models.SchedulingUnitBl return True -def determine_unschedulable_reason_and_mark_unschedulable_if_needed(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime, gridder: Gridder) -> models.SchedulingUnitBlueprint: +def get_missing_stations_for_scheduling_unit(scheduling_unit: models.SchedulingUnitBlueprint) -> []: + observation_subtasks = models.Subtask.independent_subtasks().filter(task_blueprint__scheduling_unit_blueprint_id=scheduling_unit.id).filter(specifications_template__type__value=models.SubtaskType.Choices.OBSERVATION.value).all() + missing_stations = set() + + for subtask in observation_subtasks: + for station in get_missing_stations(subtask): + missing_stations.add(station) + + return sorted((list(missing_stations))) + +def determine_unschedulable_reason_and_mark_unschedulable_if_needed(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime, gridder: Gridder, raise_if_interruped: Callable=noop) -> models.SchedulingUnitBlueprint: try: + logger.debug("determine_unschedulable_reason_and_mark_unschedulable_if_needed: scheduling_unit id=%s", scheduling_unit.id) + if not can_run_within_station_reservations(scheduling_unit): + missing_stations = get_missing_stations_for_scheduling_unit(scheduling_unit) + msg = "Stations %s are reserved" % (missing_stations, ) + return mark_independent_subtasks_in_scheduling_unit_blueprint_as_unschedulable(scheduling_unit, msg) + if gridder is None: gridder = Gridder() - # this method relies on caching in the used methods in order to be fast. 
- # check if the unit can run at all in the given window - if not can_run_within_timewindow(scheduling_unit, lower_bound, upper_bound, gridder): + + if not can_run_within_timewindow(scheduling_unit, lower_bound, upper_bound, gridder, raise_if_interruped): # nope, can't run, so check each type of constraint + unmet_constraints = [] - at = get_at_constraint_timestamp(scheduling_unit) - if at: - lower_bound = at - upper_bound = at + if 'time' in scheduling_unit.scheduling_constraints_doc: + if scheduling_unit.scheduling_constraints_doc['time'].get('between', []): + # recurse for each of the 'between' intervals until unschedulable + for between in scheduling_unit.scheduling_constraints_doc['time']['between']: + between_from = parser.parse(between["from"], ignoretz=True) + between_to = parser.parse(between["to"], ignoretz=True) + if between_from != lower_bound or between_to != upper_bound: + scheduling_unit = determine_unschedulable_reason_and_mark_unschedulable_if_needed(scheduling_unit, between_from, between_to, gridder) + if scheduling_unit.status.value == models.SchedulingUnitStatus.Choices.UNSCHEDULABLE.value: + return scheduling_unit + + # check 'at' constraint + at = get_at_constraint_timestamp(scheduling_unit) + if at: + if at < lower_bound or at + scheduling_unit.specified_main_observation_duration > upper_bound: + msg = "constraint time.at='%s' falls outside of window ['%s', '%s']" % (round_to_second_precision(at), + round_to_second_precision(lower_bound), + round_to_second_precision(upper_bound)) + return mark_independent_subtasks_in_scheduling_unit_blueprint_as_unschedulable(scheduling_unit, msg) + + # use the 'at' timestamp as bounds for the remaining checks below + lower_bound = at + upper_bound = at - unmet_constraints = [] if 'sky' in scheduling_unit.scheduling_constraints_doc: if 'min_elevation' in scheduling_unit.scheduling_constraints_doc['sky']: - if get_earliest_possible_start_time_for_sky_min_elevation(scheduling_unit, lower_bound, upper_bound, gridder) is None: - unmet_constraints.append("sky min elevation") + if get_earliest_possible_start_time_for_sky_min_elevation(scheduling_unit, lower_bound, upper_bound, gridder, raise_if_interruped) is None: + unmet_constraints.append("sky min_elevation") if 'transit_offset' in scheduling_unit.scheduling_constraints_doc['sky']: - if get_earliest_possible_start_time_for_sky_transit_offset(scheduling_unit, lower_bound, upper_bound, gridder) is None: - unmet_constraints.append("sky transit offset") + if get_earliest_possible_start_time_for_sky_transit_offset(scheduling_unit, lower_bound, upper_bound, gridder, raise_if_interruped) is None: + unmet_constraints.append("sky transit_offset") if 'min_distance' in scheduling_unit.scheduling_constraints_doc['sky']: - if get_earliest_possible_start_time_for_sky_min_distance(scheduling_unit, lower_bound, upper_bound, gridder) is None: - unmet_constraints.append("sky min distance") + if get_earliest_possible_start_time_for_sky_min_distance(scheduling_unit, lower_bound, upper_bound, gridder, raise_if_interruped) is None: + unmet_constraints.append("sky min_distance") if 'time' in scheduling_unit.scheduling_constraints_doc: - if get_earliest_possible_start_time_for_time_constraints(scheduling_unit, lower_bound, upper_bound, gridder) is None: + if get_earliest_possible_start_time_for_time_constraints(scheduling_unit, lower_bound, upper_bound, gridder, raise_if_interruped) is None: unmet_constraints.append("time") if 'daily' in scheduling_unit.scheduling_constraints_doc: - if 
get_earliest_possible_start_time_for_daily_constraints(scheduling_unit, lower_bound, upper_bound, gridder) is None: + if get_earliest_possible_start_time_for_daily_constraints(scheduling_unit, lower_bound, upper_bound, gridder, raise_if_interruped) is None: unmet_constraints.append("daily") if unmet_constraints: @@ -1649,10 +1703,13 @@ def determine_unschedulable_reason_and_mark_unschedulable_if_needed(scheduling_u else: mark_independent_subtasks_in_scheduling_unit_blueprint_as_unschedulable(scheduling_unit, "sorry, unknown unschedulable reason.") + except SchedulerInterruptedException: + raise except Exception as e: logger.exception(e) mark_independent_subtasks_in_scheduling_unit_blueprint_as_unschedulable(scheduling_unit, str(e)) + scheduling_unit.refresh_from_db() return scheduling_unit diff --git a/SAS/TMSS/backend/services/scheduling/lib/dynamic_scheduling.py b/SAS/TMSS/backend/services/scheduling/lib/dynamic_scheduling.py index 6c9c7b2eb71b72ed381db5a442d855118e085f26..831746b088bc57b819e627cbe62ea6e1cdec4b4c 100644 --- a/SAS/TMSS/backend/services/scheduling/lib/dynamic_scheduling.py +++ b/SAS/TMSS/backend/services/scheduling/lib/dynamic_scheduling.py @@ -257,13 +257,13 @@ class Scheduler: logger.error(e) mark_independent_subtasks_in_scheduling_unit_blueprint_as_unschedulable(schedulable_unit, reason=str(e)) else: - msg = "fixed_time-scheduled scheduling unit id=%d cannot be scheduled at '%s'" % (schedulable_unit.id, start_time) - logger.warning(msg) - mark_independent_subtasks_in_scheduling_unit_blueprint_as_unschedulable(schedulable_unit, reason=msg) + logger.warning("fixed_time-scheduled scheduling unit id=%d cannot be scheduled at '%s'", schedulable_unit.id, start_time) + determine_unschedulable_reason_and_mark_unschedulable_if_needed(schedulable_unit, start_time, start_time) self.log_schedule(log_level=logging.DEBUG) except Exception as e: logger.exception("Could not schedule fixed_time-scheduled scheduling unit id=%d: %s", schedulable_unit.id, e) + mark_independent_subtasks_in_scheduling_unit_blueprint_as_unschedulable(schedulable_unit, reason=str(e)) else: logger.info("there are no schedulable scheduling units with fixed_time at constraint for active projects to schedule") @@ -287,7 +287,12 @@ class Scheduler: if scheduled_unit: scheduled_units.append(scheduled_unit) - # can we fit any B-prio units in the new gap(s) in the schedule? + # mark the B-prio schedulable units as not placed yet.... + for scheduling_unit in get_dynamically_schedulable_scheduling_units(priority_queue=models.PriorityQueueType.objects.get(value=models.PriorityQueueType.Choices.B.value)).filter(placed=True): + scheduling_unit.placed = False + scheduling_unit.save() + + # ... and see if we can fit any B-prio units in the new gap(s) in the schedule? 
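The new placed flag (added to SchedulingUnitBlueprint later in this patch) lets the scheduler distinguish units it has actually positioned from units whose scheduled start/stop times are only best guesses: placements are cleared before re-planning, and only unplaced B-priority units are considered for the gaps. A toy, ORM-free sketch of that bookkeeping, with the placement itself reduced to a trivial pairing of units and gaps:

    from dataclasses import dataclass
    from typing import List, Tuple
    from datetime import datetime

    @dataclass
    class Unit:
        id: int
        placed: bool = False   # positioned by the scheduler, or is its start time just a best guess?

    def replan_b_priority_units(b_units: List[Unit], gaps: List[Tuple[datetime, datetime]]) -> List[Unit]:
        """Clear stale placements, then place unplaced B-priority units into the available gaps."""
        for unit in b_units:
            unit.placed = False                            # mirrors the reset loop in this hunk
        placed_units = []
        candidates = [u for u in b_units if not u.placed]  # mirrors .filter(placed=False)
        for unit, gap in zip(candidates, gaps):
            unit.placed = True                             # mirrors update_subtasks_start_times_for_scheduling_unit(..., placed=True)
            placed_units.append(unit)
        return placed_units
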
scheduled_B_units = self.place_B_priority_units_in_gaps_around_scheduling_unit(scheduled_unit, do_schedule=True) scheduled_units.extend(scheduled_B_units) @@ -337,7 +342,7 @@ class Scheduler: logger.info("find_best_next_schedulable_unit: units meeting constraints in window ['%s', '%s']: %s", lower_bound_start_time, upper_bound_stop_time, ','.join([str(su.id) for su in sorted(filtered_scheduling_units, key=lambda x: x.id)]) or 'None') if not filtered_scheduling_units: - logger.warning("find_best_next_schedulable_unit: no units meeting constraints in window ['%s', '%s']", lower_bound_start_time, upper_bound_stop_time) + logger.info("find_best_next_schedulable_unit: no units meeting constraints in window ['%s', '%s']", lower_bound_start_time, upper_bound_stop_time) return None # then, check if there is a subset that can only run exclusively in this window and not later. @@ -479,14 +484,14 @@ class Scheduler: # nothing was found, or an error occurred. # it may be that in the mean time some scheduling_units are not (dynamically) schedulable anymore, filter those out. for su in candidate_units: - determine_unschedulable_reason_and_mark_unschedulable_if_needed(su, lower_bound_start_time, upper_bound_stop_time, self.search_gridder) + determine_unschedulable_reason_and_mark_unschedulable_if_needed(su, lower_bound_start_time, upper_bound_stop_time, self.search_gridder, raise_if_interruped=self._raise_if_triggered) # all units are refreshed and either schedulable or unschedulable. # refresh list of schedulable_units to be considered in next round (only schedulable) candidate_units = [su for su in candidate_units if su.status.value==models.SchedulingUnitStatus.Choices.SCHEDULABLE.value] # advance the window - min_earliest_possible_start_time = get_min_earliest_possible_start_time(candidate_units, lower_bound_start_time+timedelta(hours=1), lower_bound_start_time+timedelta(hours=25), self._raise_if_triggered, gridder=self.search_gridder) + min_earliest_possible_start_time = get_min_earliest_possible_start_time(candidate_units, lower_bound_start_time+timedelta(hours=1), lower_bound_start_time+timedelta(hours=25), gridder=self.search_gridder, raise_if_interruped=self._raise_if_triggered) if min_earliest_possible_start_time is None: lower_bound_start_time += timedelta(hours=6) else: @@ -531,7 +536,7 @@ class Scheduler: '''try to schedule one or more scheduling units from queue B in the gap between the given scheduled_unit and its previous observed+ unit''' placed_units = [] - schedulable_units_queue_B = get_dynamically_schedulable_scheduling_units(priority_queue=models.PriorityQueueType.objects.get(value=models.PriorityQueueType.Choices.B.value)).exclude(id=scheduling_unit.id) + schedulable_units_queue_B = get_dynamically_schedulable_scheduling_units(priority_queue=models.PriorityQueueType.objects.get(value=models.PriorityQueueType.Choices.B.value)).exclude(id=scheduling_unit.id).filter(placed=False) if not schedulable_units_queue_B.exists(): return placed_units @@ -553,14 +558,12 @@ class Scheduler: if do_schedule: self.try_schedule_unit(best_B_candidate_for_gap.scheduling_unit, best_B_candidate_for_gap.start_time) else: - update_subtasks_start_times_for_scheduling_unit(best_B_candidate_for_gap.scheduling_unit, best_B_candidate_for_gap.start_time) + update_subtasks_start_times_for_scheduling_unit(best_B_candidate_for_gap.scheduling_unit, best_B_candidate_for_gap.start_time, placed=True) placed_units.append(best_B_candidate_for_gap.scheduling_unit) - # TODO: at this moment we have no means to distinguish 
between a placed and an unplaced schedulable (B) unit. When we can make that distinction, remove the "if do_schedule" below, and always recurse. - if do_schedule: - # Recurse. There may be a new gap, so let's try to squeeze in more. - placed_units.extend(self.place_B_priority_units_in_gaps_around_scheduling_unit(best_B_candidate_for_gap.scheduling_unit, do_schedule=do_schedule)) + # Recurse. There may be a new gap, so let's try to squeeze in more. + placed_units.extend(self.place_B_priority_units_in_gaps_around_scheduling_unit(best_B_candidate_for_gap.scheduling_unit, do_schedule=do_schedule)) return placed_units @@ -568,6 +571,12 @@ class Scheduler: '''''' logger.info("Estimating mid-term schedule with lower_bound_start_time=%s ..." % lower_bound_start_time) + # mark the schedulable units that they are not 'placed' yet by the scheduler. + for scheduling_unit in get_dynamically_schedulable_scheduling_units(): + if scheduling_unit.placed: + scheduling_unit.placed = False + scheduling_unit.save() + # use relatively coarser gridders for mid-term schedule. Just to get a rough idea. self.search_gridder = Gridder(grid_minutes=3*60) self.fine_gridder = Gridder(grid_minutes=15) @@ -596,7 +605,7 @@ class Scheduler: scheduling_unit = best_scored_scheduling_unit.scheduling_unit start_time = round_to_second_precision(best_scored_scheduling_unit.start_time) logger.info("mid-term schedule: next scheduling unit id=%s '%s' start_time=%s", scheduling_unit.id, scheduling_unit.name, start_time) - update_subtasks_start_times_for_scheduling_unit(scheduling_unit, start_time) + update_subtasks_start_times_for_scheduling_unit(scheduling_unit, start_time, placed=True) self.log_schedule(log_level=logging.DEBUG) @@ -611,7 +620,7 @@ class Scheduler: scheduling_units.remove(placed_B_unit) else: # search again in a later timeslot - min_earliest_possible_start_time = get_min_earliest_possible_start_time(scheduling_units, lower_bound_start_time+timedelta(minutes=60), lower_bound_start_time+timedelta(hours=25), self._raise_if_triggered, gridder=self.search_gridder) + min_earliest_possible_start_time = get_min_earliest_possible_start_time(scheduling_units, lower_bound_start_time+timedelta(minutes=60), lower_bound_start_time+timedelta(hours=25), gridder=self.search_gridder, raise_if_interruped=self._raise_if_triggered) logger.info("lower_bound_start_time='%s', min_earliest_possible_start_time='%s'", lower_bound_start_time, min_earliest_possible_start_time) if min_earliest_possible_start_time is not None and min_earliest_possible_start_time > lower_bound_start_time: lower_bound_start_time = min_earliest_possible_start_time @@ -800,7 +809,7 @@ class TMSSDynamicSchedulingMessageHandler(TMSSEventMessageHandler): scheduling_unit_blueprint = models.SchedulingUnitBlueprint.objects.get(id=id) at = get_at_constraint_timestamp(scheduling_unit_blueprint) if at is not None: - update_subtasks_start_times_for_scheduling_unit(scheduling_unit_blueprint, at) + update_subtasks_start_times_for_scheduling_unit(scheduling_unit_blueprint, at, placed=True) except: pass self.onSchedulingUnitBlueprintConstraintsRankOrQueueUpdated(id) diff --git a/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py b/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py index e00056c44dcb6152e5bcca55500ce2d8374cdea2..c9bd0d7660056b7a62087d4ea0bceb1c9e17d649 100755 --- a/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py +++ b/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py @@ -98,7 +98,8 @@ class 
BaseDynamicSchedulingTestCase(unittest.TestCase): weight_factor.weight = 0 weight_factor.save() - def clean_environment(self): + @staticmethod + def clean_environment(): # wipe all scheduling_unit_drafts in between tests, so the tests don't influence each other tmss_test_env.delete_scheduling_unit_drafts_cascade() models.Reservation.objects.all().delete() @@ -1508,6 +1509,58 @@ class TestDynamicScheduling(BaseDynamicSchedulingTestCase): # this test passes when there are no failed templates self.assertTrue(len(failed_templates)==0, msg='failed_templates: %s' % ([(t.name, t.version) for t in failed_templates])) + def test_unschedulable_reasons_due_to_unmet_constraints(self): + """ + Test if the correct unschedulable_reason is set when a constraint is not met. + """ + # use a short cycle, far in the future, for which we known how the target behaves in this period (transit, elevation, etc) + cycle = models.Cycle.objects.create(**Cycle_test_data(start=datetime(2030, 1, 1), stop=datetime(2030, 1, 7))) + project = models.Project.objects.create(**Project_test_data(name=str(uuid.uuid4()), project_state=models.ProjectState.objects.get(value=models.ProjectState.Choices.ACTIVE.value))) + project.cycles.add(cycle) + project.save() + scheduling_set = models.SchedulingSet.objects.create(**SchedulingSet_test_data(project=project)) + scheduling_unit_draft = self.create_simple_observation_scheduling_unit(str(uuid.uuid4()), scheduling_set=scheduling_set, obs_duration=3600) + + # keep matters simple, use one station + obs_task_draft = scheduling_unit_draft.task_drafts.first() + obs_task_draft.specifications_doc['station_configuration']['station_groups'][0]['stations'] = ['CS002'] + obs_task_draft.save() + + scheduling_unit_blueprint = create_scheduling_unit_blueprint_and_tasks_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + + # set density_vs_optimal to 1, meaning "as close to transit as possible" + weight_factor, created = models.SchedulingConstraintsWeightFactor.objects.get_or_create(scheduling_constraints_template=models.SchedulingConstraintsTemplate.get_latest(name="constraints"), constraint_name="density_vs_optimal") + weight_factor.weight = 0 + weight_factor.save() + + for expected_reason, constraints in (("constraint time.at='2029-12-31 00:00:00' falls outside of window ['2030-01-01 00:00:00', '2030-01-08 00:00:00']", {'time': {'at': (cycle.start - timedelta(hours=24)).isoformat()}}), + ("time constraint is not met anywhere between 2030-01-01 00:00:00 and 2030-01-08 00:00:00", {'time': {'before': (cycle.start - timedelta(hours=24)).isoformat()}}), + ("time constraint is not met anywhere between 2030-01-01 00:00:00 and 2030-01-08 00:00:00", {'time': {'after': (cycle.stop + timedelta(hours=24)).isoformat()}}), + ("sky min_elevation constraint is not met anywhere between 2030-01-01 00:00:00 and 2030-01-08 00:00:00", {'sky': {'min_elevation': {'target': 1.57}}}), + ("sky min_distance constraint is not met anywhere between 2030-01-01 00:00:00 and 2030-01-08 00:00:00", {'sky': {'min_distance': {'sun': 1.57}}}), + ("sky transit_offset constraint is not met anywhere between 2030-01-01 00:00:00 and 2030-01-01 07:00:00", {'sky': {'transit_offset': {'from': -900, 'to': 900}}, 'time': {'between': [{'from': '2030-01-01T00:00:00Z', 'to': '2030-01-01T07:00:00Z'}]}}), + ("daily constraint is not met anywhere between 2030-01-01 00:00:00 and 2030-01-01 07:00:00", {'daily': {'require_day': True }, 'time': {'between': [{'from': '2030-01-01T00:00:00Z', 'to': '2030-01-01T07:00:00Z'}]}}), + ("daily 
constraint is not met anywhere between 2030-01-01 11:00:00 and 2030-01-01 15:00:00", {'daily': {'require_night': True }, 'time': {'between': [{'from': '2030-01-01T11:00:00Z', 'to': '2030-01-01T15:00:00Z'}]}}), + ("daily constraint is not met anywhere between 2030-01-01 07:00:00 and 2030-01-01 09:00:00", {'daily': {'avoid_twilight': True }, 'time': {'between': [{'from': '2030-01-01T07:00:00Z', 'to': '2030-01-01T09:00:00Z'}]}}), + ): + # reset unit... + mark_independent_subtasks_in_scheduling_unit_blueprint_as_schedulable(scheduling_unit_blueprint) + self.assertEqual('', scheduling_unit_blueprint.unschedulable_reason) + wipe_evaluate_constraints_caches() + + # set the constraints + scheduling_unit_blueprint.scheduling_constraints_doc = {'scheduler': 'dynamic', **constraints} + scheduling_unit_blueprint.save() + + # try to schedule, should fail. + scheduled_scheduling_units = self.scheduler.do_dynamic_schedule() + self.assertEqual(0, len(scheduled_scheduling_units)) + + # Assert the scheduling_unit has not been scheduled and that it has the correct expected unschedulable_reason + scheduling_unit_blueprint.refresh_from_db() + self.assertEqual('unschedulable', scheduling_unit_blueprint.status.value) + self.assertEqual(expected_reason, scheduling_unit_blueprint.unschedulable_reason) + class TestReservedStationsTimeWindows(BaseDynamicSchedulingTestCase): """ @@ -1525,6 +1578,8 @@ class TestReservedStationsTimeWindows(BaseDynamicSchedulingTestCase): """ @classmethod def setUpClass(cls) -> None: + cls.clean_environment() + super().setUpClass() # create a three re-usable variants scheduling_unit_blueprint, based on the "IM HBA - 1 Beam" strategy @@ -1830,6 +1885,37 @@ class TestReservedStationsTimeWindows(BaseDynamicSchedulingTestCase): start_time=self.scheduling_unit_blueprint.scheduled_start_time) self.assertTrue(can_run_within_station_reservations(self.scheduling_unit_blueprint)) + def test_unschedulable_reason_when_reserved_stations_block_unit(self): + """ + Test station reservation when 2 station (CS001,CS002) are reserved and stations (CS001, CS002) are used in scheduling_unit + """ + reservation_two = self.create_station_reservation("Two", ["CS001", "CS002"]) + # reservation start_time > SUB start_time and reservation stop_time > SUB stop_time + self.set_1_reservation_start_time_gt_sub_start_time_and_stop_time_gt_sub_stop_time(reservation_two, self.scheduling_unit_blueprint_cs001_cs002) + + # set the constraints for scheduling_unit_blueprint_cs001_cs002 + self.scheduling_unit_blueprint_cs001_cs002.scheduling_constraints_doc['scheduler'] = 'dynamic' + self.scheduling_unit_blueprint_cs001_cs002.scheduling_constraints_doc['time'] = { + 'between': [ {"from": reservation_two.start_time.isoformat(), + "to": reservation_two.stop_time.isoformat()} ]} + self.scheduling_unit_blueprint_cs001_cs002.save() + + # mark the other units as fixed time, so they won't interfere + self.scheduling_unit_blueprint.scheduling_constraints_doc = {'scheduler': 'fixed_time', 'time': {'at': (reservation_two.stop_time + timedelta(days=1)).isoformat()}} + self.scheduling_unit_blueprint.save() + self.scheduling_unit_blueprint_cs001.scheduling_constraints_doc = {'scheduler': 'fixed_time', 'time': {'at': (reservation_two.stop_time + timedelta(days=1)).isoformat()}} + self.scheduling_unit_blueprint_cs001.save() + + # try to schedule, should fail. 
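The assertion just below expects the new reservation-based reason built by get_missing_stations_for_scheduling_unit earlier in this patch: the missing stations of all observation subtasks are collected into a sorted list and formatted into the message. A small stand-in (not the real ORM-backed helpers) showing how that string comes out for the stations used in this test:

    def missing_stations_for_unit(per_subtask_requested, per_subtask_usable):
        """Sorted union of per-subtask missing stations (requested minus usable after reservations)."""
        missing = set()
        for requested, usable in zip(per_subtask_requested, per_subtask_usable):
            missing |= set(requested) - set(usable)
        return sorted(missing)

    # With CS001 and CS002 reserved, nothing usable remains for a CS001+CS002 unit:
    missing = missing_stations_for_unit([["CS001", "CS002"]], [[]])
    print("Stations %s are reserved" % (missing,))   # Stations ['CS001', 'CS002'] are reserved
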
+ wipe_evaluate_constraints_caches() + scheduled_scheduling_units = self.scheduler.do_dynamic_schedule() + self.assertEqual(0, len(scheduled_scheduling_units)) + + # Assert the scheduling_unit has not been scheduled and that it has the correct expected unschedulable_reason + self.scheduling_unit_blueprint_cs001_cs002.refresh_from_db() + self.assertEqual('unschedulable', self.scheduling_unit_blueprint_cs001_cs002.status.value) + self.assertEqual("Stations ['CS001', 'CS002'] are reserved", self.scheduling_unit_blueprint_cs001_cs002.unschedulable_reason) + class TestTriggers(BaseDynamicSchedulingTestCase): """ diff --git a/SAS/TMSS/backend/services/websocket/lib/websocket_service.py b/SAS/TMSS/backend/services/websocket/lib/websocket_service.py index 3f58b2a4d21e7b49af6d9a09b078d8146898df09..ddd66efa952e380583cdcf075b170444dc9d2cc8 100644 --- a/SAS/TMSS/backend/services/websocket/lib/websocket_service.py +++ b/SAS/TMSS/backend/services/websocket/lib/websocket_service.py @@ -125,16 +125,31 @@ class TMSSEventMessageHandlerForWebsocket(TMSSEventMessageHandler): self._run_ws = False # Stop the ws server self.t.join() - def _get_authorised_clients_for_object_in_websocket(self, obj_name): + def _get_authorised_clients_for_object_in_websocket(self, obj): from django.contrib.auth import get_user_model User = get_user_model() + from lofar.sas.tmss.tmss.tmssapp.viewsets.permissions import get_project_roles_for_user, get_project_roles_with_permission + from lofar.sas.tmss.tmss.tmssapp.models import ProjectRole + auth_clients = [] for ws in list(self._ws_server.connections.values()): if ws.authenticated: # Check user permissions for the object user = User.objects.get(username=ws.user) - if user.has_perm("tmssapp.view_%s" % obj_name.replace('_','')): + if user.is_superuser: + auth_clients.append(ws) + elif user.has_perm("tmssapp.view_%s" % type(obj).__name__.lower()): auth_clients.append(ws) + else: + # project-based permission + permitted_project_roles = get_project_roles_with_permission(type(obj).__name__.lower(), 'GET') + user_project_roles = get_project_roles_for_user(user) + related_project = getattr(obj, 'project', None) + for project_role in user_project_roles: + if related_project: + if project_role['project'].lower() == related_project.name.lower() and \ + ProjectRole.objects.get(value=project_role['role']) in permitted_project_roles: + auth_clients.append(ws) return auth_clients def _broadcast_notify_to_clients_websocket(self, msg, clients): @@ -145,20 +160,20 @@ class TMSSEventMessageHandlerForWebsocket(TMSSEventMessageHandler): ws.sendMessage(json_msg) def _post_update_on_websocket(self, pk, object_type, action): - # Get WS clients authorised to get object changes information - auth_ws_clients = self._get_authorised_clients_for_object_in_websocket(object_type.value) - if len(auth_ws_clients) <= 0: # There are no authorised clients, just return - return - - # Prepare the json_blob_template - json_blob = {'object_details': {}, 'object_type': object_type.value, 'action': action.value} if action == self.ObjActions.CREATE or action == self.ObjActions.UPDATE: + # determine model class, and fetch fresh instance + model_class = apps.get_model("tmssapp", object_type.value.replace('_', '')) + model_instance = model_class.objects.get(pk=pk) + model_instance.refresh_from_db() + + # Get WS clients authorised to get object changes information + auth_ws_clients = self._get_authorised_clients_for_object_in_websocket(model_instance) + if len(auth_ws_clients) <= 0: # There are no authorised clients, just return 
+ return + + # Prepare the json_blob_template + json_blob = {'object_details': {}, 'object_type': object_type.value, 'action': action.value} try: - # determine model class, and fetch fresh instance - model_class = apps.get_model("tmssapp", object_type.value.replace('_','')) - model_instance = model_class.objects.get(pk=pk) - model_instance.refresh_from_db() - # insert primary key name and value pk_name = model_class._meta.pk.attname json_blob['object_details'][pk_name] = pk @@ -177,6 +192,8 @@ class TMSSEventMessageHandlerForWebsocket(TMSSEventMessageHandler): json_blob['object_details']['status_value'] = model_instance.status.value if hasattr(model_instance, 'state'): json_blob['object_details']['state_value'] = model_instance.state.value + if hasattr(model_instance, 'placed'): + json_blob['object_details']['placed'] = model_instance.placed except Exception as e: logger.error("Cannot get object details for %s: %s", json_blob, e) diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0042_schedulingunitblueprint_placed.py b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0042_schedulingunitblueprint_placed.py new file mode 100644 index 0000000000000000000000000000000000000000..fdfeb1733811a7329e1bf06c484571f2d2803763 --- /dev/null +++ b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0042_schedulingunitblueprint_placed.py @@ -0,0 +1,18 @@ +# Generated by Django 3.0.9 on 2022-12-23 13:45 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('tmssapp', '0041_dataproduct_constraint'), + ] + + operations = [ + migrations.AddField( + model_name='schedulingunitblueprint', + name='placed', + field=models.BooleanField(db_index=True, default=False, help_text='Was this unit placed by the scheduler, or are the scheduled_start/stop_time just best guesses?'), + ), + ] diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py index f459e5a59f597f83b4d84384f2bc916867f2023d..8a85c22c7538558af78c893e5601eb702f5ccd20 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py @@ -1030,6 +1030,7 @@ class SchedulingUnitBlueprint(ProjectPropertyMixin, TemplateSchemaMixin, NamedCo results_accepted = BooleanField(default=None, null=True, help_text='boolean (default None) which indicates whether this unit was accepted as successful or not.') global_identifier = OneToOneField('SIPidentifier', null=False, editable=False, on_delete=PROTECT, help_text='The global unique identifier for LTA SIP.') path_to_project = 'draft__scheduling_set__project' + placed = BooleanField(default=False, db_index=True, null=False, help_text='Was this unit placed by the scheduler, or are the scheduled_start/stop_time just best guesses?') class Meta(NamedCommon.Meta): constraints = [CheckConstraint(check=Q(rank__gte=SchedulingUnitRank.HIGHEST.value) & Q(rank__lte=SchedulingUnitRank.LOWEST.value), name='schedulingunitblueprint_rank_range_constraint')] diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template/stations-9.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template/stations-9.json index 3e21b45eb0c355098fece58049ca6dfc7534beaa..3e59fe54ed959c0576320601143caa15e78f5a59 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template/stations-9.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template/stations-9.json @@ -136,8 +136,6 @@ "default": "HBA_110_190", 
"description": "Must match antenna type", "enum": [ - "LBA_10_70", - "LBA_30_70", "LBA_10_90", "LBA_30_90", "HBA_110_190", diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py index 7443de570f6f67a8ce34c583e80549caa7bafff7..cd615925cf656df83c6aa3dbfd70b62fc153e6d6 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py @@ -278,7 +278,7 @@ class SchedulingUnitBlueprintSerializer(DynamicRelationalHyperlinkedModelSeriali class Meta: model = models.SchedulingUnitBlueprint fields = '__all__' - read_only_fields = ['interrupts_telescope', 'unschedulable_reason', 'error_reason'] + read_only_fields = ['interrupts_telescope', 'unschedulable_reason', 'error_reason', 'placed'] extra_fields = ['task_blueprints', 'output_pinned', 'unschedulable_reason', 'error_reason'] expandable_fields = { 'specifications_template': 'lofar.sas.tmss.tmss.tmssapp.serializers.SchedulingUnitTemplateSerializer', diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py index 2a56682e91a32394af3418f3c7dc75de630f62e2..615319ff50700e5b11ac846e6643814bf4e10978 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py @@ -1113,7 +1113,7 @@ def schedule_subtask_and_update_successor_start_times(subtask: Subtask) -> Subta return scheduled_subtask -def update_subtasks_start_times_for_scheduling_unit(scheduling_unit: SchedulingUnitBlueprint, start_time: datetime): +def update_subtasks_start_times_for_scheduling_unit(scheduling_unit: SchedulingUnitBlueprint, start_time: datetime, placed: bool=None): with transaction.atomic(): for task_blueprint in scheduling_unit.task_blueprints.all(): defined_independend_subtasks = task_blueprint.subtasks.filter(state__value='defined').filter(inputs=None).all() @@ -1123,8 +1123,13 @@ def update_subtasks_start_times_for_scheduling_unit(scheduling_unit: SchedulingU else: update_start_time_and_shift_successors_until_after_stop_time(subtask, start_time + subtask.task_blueprint.relative_start_time) - # update cached start/stop times - scheduling_unit.refresh_from_db() + # update cached start/stop times + scheduling_unit.refresh_from_db() + + if placed is not None: + scheduling_unit.placed = placed + scheduling_unit.save() + return scheduling_unit def update_start_time_and_shift_successors_until_after_stop_time(subtask: Subtask, start_time: datetime): @@ -1197,6 +1202,7 @@ def mark_independent_subtasks_in_task_blueprint_as_schedulable(task_blueprint: T def mark_subtask_as_unschedulable(subtask: Subtask, reason: str): '''Convenience method: Mark the subtask as unschedulable. Unschedules first if needed.''' with transaction.atomic(): + logger.info("marking subtask id=%s from scheduling_unit_id=%s as unschedulable. reason: %s", subtask.id, subtask.task_blueprint.scheduling_unit_blueprint.id, reason) if subtask.state.value == SubtaskState.Choices.SCHEDULED.value: unschedule_subtask(subtask, post_state=SubtaskState.objects.get(value=SubtaskState.Choices.UNSCHEDULABLE.value)) else: @@ -1218,6 +1224,7 @@ def mark_subtasks_and_successors_as_unschedulable(subtask: Subtask, reason: str) def mark_subtask_as_defined(subtask: Subtask): '''Convenience method: Mark the subtask as defined, making it's task & scheduling_unit schedulable. 
Unschedules first if needed.''' with transaction.atomic(): + logger.info("marking subtask id=%s from scheduling_unit_id=%s as defined/schedulable.", subtask.id, subtask.task_blueprint.scheduling_unit_blueprint.id) if subtask.state.value == SubtaskState.Choices.SCHEDULED.value: unschedule_subtask(subtask, post_state=SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value)) else: @@ -1731,7 +1738,7 @@ def convert_task_station_groups_specification_to_station_list_without_used_and_o available_stations = requested_stations - unavailable_stations missing_stations = requested_stations - available_stations max_nr_missing = station_group.get('max_nr_missing', 0) - if len(missing_stations) > max_nr_missing: + if raise_when_too_many_missing and len(missing_stations) > max_nr_missing: # early exit. No need to evaluate more groups when one groups does not meet the requirements raise TooManyStationsUnavailableException('Subtask id=%s is missing more than max_nr_missing=%s stations which are available between \'%s\' and \'%s\'\nunavailable=%s\nrequested=%s\navailable=%s' % ( subtask.id, max_nr_missing, @@ -1766,15 +1773,14 @@ def get_missing_stations(subtask: Subtask) -> []: if subtask.specifications_template.type.value != SubtaskType.Choices.OBSERVATION.value: return [] - # the observation has to be scheduled or "further" - if subtask.state.value in (SubtaskState.Choices.DEFINED.value, SubtaskState.Choices.SCHEDULING.value): - return [] - # fetch the requested stations from the spec (without removing the unavailable ones! and not raising!) requested_stations = set(convert_task_station_groups_specification_to_station_list_without_used_and_or_reserved_stations(subtask, remove_reserved_stations=False, remove_used_stations=False, raise_when_too_many_missing=False)) - # fetch the used_stations from the subtask spec - used_stations = set(subtask.specifications_doc.get('stations', {}).get('station_list', [])) + # fetch the used_stations from the subtask spec, depends on if it's scheduled-and-further + if subtask.state.value in (SubtaskState.Choices.DEFINED.value, SubtaskState.Choices.SCHEDULING.value): + used_stations = set(convert_task_station_groups_specification_to_station_list_without_used_and_or_reserved_stations(subtask, remove_reserved_stations=True, remove_used_stations=True, raise_when_too_many_missing=False)) + else: + used_stations = set(subtask.specifications_doc.get('stations', {}).get('station_list', [])) # missing is the difference return sorted(list(requested_stations-used_stations)) diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py b/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py index f2f6975264c2ce1139321d2417c588d0c121bb66..3bffbd9b999bc225b75ca086b0a621ec83b8bae5 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py @@ -631,6 +631,11 @@ def schedule_independent_subtasks_in_scheduling_unit_blueprint(scheduling_unit_b schedule_independent_subtasks_in_task_blueprint(task_blueprint, start_time=start_time+task_blueprint.relative_start_time) scheduling_unit_blueprint.refresh_from_db() + + if not scheduling_unit_blueprint.placed: + scheduling_unit_blueprint.placed = True + scheduling_unit_blueprint.save() + return scheduling_unit_blueprint diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/excelview.schedulingset.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/excelview.schedulingset.js index 856859cf3abdf5c023814b67fb8fbf66cc90f416..7470afb39e3e3196a0f43e407af74f82af61a5a4 100644 --- 
a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/excelview.schedulingset.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/excelview.schedulingset.js @@ -866,7 +866,7 @@ export class SchedulingSetCreate extends Component { if (constraint.time.between){ observationProps['between'] = this.getBetweenStringValue(constraint.time.between); } - if (constraint.time.between){ + if (constraint.time.not_between){ observationProps['notbetween'] = this.getBetweenStringValue(constraint.time.not_between); } @@ -1961,8 +1961,8 @@ export class SchedulingSetCreate extends Component { tmpRowData =this.state.commonRowData; tmpRowData[0] = row; await this.setState({commonRowData: tmpRowData}); - this.state.topGridApi.setRowData(this.state.commonRowData); - this.state.topGridApi.redrawRows(); + //this.state.topGridApi.setRowData(this.state.commonRowData); + // this.state.topGridApi.redrawRows(); } else { row = this.state.rowData[rowIndex]; @@ -1972,8 +1972,8 @@ export class SchedulingSetCreate extends Component { tmpRowData[rowIndex] = row; await this.setState({rowData: tmpRowData,isDirty: true}); publish('edit-dirty', true); - this.state.gridApi.setRowData(this.state.rowData); - this.state.gridApi.redrawRows(); + // this.state.gridApi.setRowData(this.state.rowData); + // this.state.gridApi.redrawRows(); } if(isSetFocus === true) { if (field.startsWith('gdef_')) { @@ -2632,9 +2632,13 @@ export class SchedulingSetCreate extends Component { if (this.isNotEmpty(between)){ constraint.time.between = between; + } else { + delete constraint.time.between; } if (this.isNotEmpty(notbetween)){ constraint.time.not_between = notbetween; + } else { + delete constraint.time.not_between; } this.state.daily.forEach(daily => { if (_.includes(suRow.daily, daily)){ @@ -3362,7 +3366,7 @@ export class SchedulingSetCreate extends Component { } else if (copiedValues.length === 2 && focusedCell && focusedCell.column['colId'] !== '0') { if (focusedCell.rowIndex >= 0) { this.tmpRowData[focusedCell.rowIndex][focusedCell.column['colId']] = this.getValidCellValue(focusedCell.column['colId'], copiedValues[0], copiedValues[1]); - this.tmpRowData[focusedCell.rowIndex][focusedCell.column['isDirty']] = true; + this.tmpRowData[focusedCell.rowIndex]['isDirty'] = true; if (selectedRows && selectedRows.length === 0 ) { if (_.includes(this.constraintVariables, focusedCell.column['colId']) ) { this.validateSchedulingConstraints(focusedCell.rowIndex, this.tmpRowData[focusedCell.rowIndex]); @@ -3376,7 +3380,7 @@ export class SchedulingSetCreate extends Component { await selectedRows.map(selectedRow =>{ selectedRowIndex = selectedRow.rowIndex; this.tmpRowData[selectedRowIndex][focusedCell.column['colId']] = this.getValidCellValue(focusedCell.column['colId'], copiedValues[0], copiedValues[1]); - this.tmpRowData[selectedRowIndex][focusedCell.column['isDirty']] = true; + this.tmpRowData[selectedRowIndex]['isDirty'] = true; if (_.includes(this.constraintVariables, focusedCell.column['colId']) ) { this.validateSchedulingConstraints(selectedRowIndex, this.tmpRowData[selectedRowIndex]); } @@ -3455,7 +3459,7 @@ export class SchedulingSetCreate extends Component { isDirty: true }); this.actions = null; - publish('edit-dirty', true); + publish('edit-dirty', true); this.state.gridApi.setRowData(this.state.rowData); this.state.gridApi.redrawRows(); } @@ -3477,7 +3481,7 @@ export class SchedulingSetCreate extends Component { } } else if (_.includes(["true","false", "FALSE", "TRUE"], value)) { return JSON.parse(_.lowerCase(value)); - } else if 
(_.includes(value, ',')) { + } else if (_.includes(value, ',') && (key !== 'between' && key !== 'notbetween')) { return value.split(','); } else { return value; @@ -3993,6 +3997,7 @@ export class SchedulingSetCreate extends Component { suppressMultiRangeSelection={true} data-testid="sulist" testid="sulist" tooltipShowDelay={50} + suppressScrollOnNewData={true} > </AgGridReact> </div>