diff --git a/LCS/PyCommon/CMakeLists.txt b/LCS/PyCommon/CMakeLists.txt
index 044b7da9bfbaaeb4b0364febcb55f347e08e386a..f5040f3b74119e61788d2cf3793230496216397b 100644
--- a/LCS/PyCommon/CMakeLists.txt
+++ b/LCS/PyCommon/CMakeLists.txt
@@ -34,6 +34,7 @@ set(_py_files
     json_utils.py
     locking.py
     test_utils.py
+    typing.py
     ring_coordinates.py)

 python_install(${_py_files} DESTINATION lofar/common)
diff --git a/LCS/PyCommon/test/CMakeLists.txt b/LCS/PyCommon/test/CMakeLists.txt
index bf1bfce981f17ca4553ce3fba4329c4d350298d9..624f130d2336df98ce720564ea572792c39bc894 100644
--- a/LCS/PyCommon/test/CMakeLists.txt
+++ b/LCS/PyCommon/test/CMakeLists.txt
@@ -28,6 +28,7 @@ IF(BUILD_TESTING)
     lofar_add_test(t_util)
     lofar_add_test(t_test_utils)
     lofar_add_test(t_cep4_utils)
+    lofar_add_test(t_typing)

     IF(PYTHON_JSONSCHEMA)
         lofar_add_test(t_json_utils)
@@ -37,4 +38,4 @@ IF(BUILD_TESTING)
         lofar_add_test(t_postgres)
     ENDIF()

-ENDIF()
\ No newline at end of file
+ENDIF()
diff --git a/LCS/PyCommon/test/t_typing.py b/LCS/PyCommon/test/t_typing.py
new file mode 100755
index 0000000000000000000000000000000000000000..55eb4fc32e433106d39371da9ce59ade2b227060
--- /dev/null
+++ b/LCS/PyCommon/test/t_typing.py
@@ -0,0 +1,178 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+import logging
+logger = logging.getLogger(__name__)
+logging.basicConfig(format='%(asctime)s %(process)s %(threadName)s %(levelname)s %(message)s', level=logging.DEBUG)
+
+from lofar.common.typing import check_type_hints
+
+import typing
+import unittest
+
+class TestCheckTypeHints(unittest.TestCase):
+    def test_no_argument(self):
+        """ Elementary test for the type hint of the return type. """
+
+        @check_type_hints
+        def myfunc() -> str:
+            return "ok"
+
+        self.assertEqual("ok", myfunc())
+
+    def test_one_argument(self):
+        """ Elementary test for one argument with a type hint. """
+
+        @check_type_hints
+        def myfunc(i: int) -> str:
+            return str(i)
+
+        self.assertEqual("1", myfunc(1))
+
+        with self.assertRaises(TypeError):
+            myfunc("1")
+
+        with self.assertRaises(TypeError):
+            myfunc(i="1")
+
+    def test_argument_default(self):
+        """ Check whether argument defaults still function correctly. """
+
+        @check_type_hints
+        def myfunc(i: int = 1) -> str:
+            return str(i)
+
+        self.assertEqual("1", myfunc())
+
+    def test_multiple_arguments(self):
+        """ Check whether multiple arguments are handled correctly with various calling conventions. """
""" + + @check_type_hints + def myfunc(i: int, j:int) -> str: + return "%d %d" % (i,j) + + self.assertEqual("1 2", myfunc(1,2)) + self.assertEqual("1 2", myfunc(1,j=2)) + self.assertEqual("1 2", myfunc(i=1,j=2)) + + with self.assertRaises(TypeError): + myfunc("1",2) + + with self.assertRaises(TypeError): + myfunc(1,"2") + + with self.assertRaises(TypeError): + myfunc(1, j="2") + + with self.assertRaises(TypeError): + myfunc(i="1", j=2) + + def test_wrong_return_value(self): + """ Check whether return values are validated. """ + + @check_type_hints + def myfunc(i: int) -> str: + return i + + with self.assertRaises(TypeError): + myfunc(1) + + def test_inheritance(self): + """ Provided values can also be subclasses of the types provided in the hints. """ + + @check_type_hints + def myfunc(i: int) -> int: + return i + + class DerivedInt(int): + pass + + myfunc(DerivedInt(1)) + + def test_no_hints(self): + """ Functions without any hints should always work. """ + + @check_type_hints + def myfunc(i): + return str(i) + + self.assertEqual("1", myfunc(1)) + self.assertEqual("1", myfunc("1")) + + def test_some_hints(self): + """ Not all parameters are necessarily annotated. """ + + @check_type_hints + def myfunc(i, j: int): + return str(i) + + self.assertEqual("1", myfunc(1, 2)) + self.assertEqual("1", myfunc("1", 2)) + + with self.assertRaises(TypeError): + self.assertEqual("1", myfunc("1", "2")) + + def test_union_hint(self): + """ Python allows supplying multiple types as a list, any of which is valid. """ + + @check_type_hints + def myfunc(i: [int, str]): + return str(i) + + self.assertEqual("1", myfunc(1)) + self.assertEqual("1", myfunc("1")) + + with self.assertRaises(TypeError): + self.assertEqual("1", myfunc(1.0)) + + def test_args_kwargs(self): + """ Check whether args & kwargs don't break. """ + + @check_type_hints + def myfunc(*args, **kwargs): + return str(kwargs["i"]) + + self.assertEqual("1", myfunc(i=1)) + self.assertEqual("1", myfunc(i="1")) + + + def test_asterics(self): + """ Check whether forced named arguments don't break. """ + + @check_type_hints + def myfunc(*, i: int): + return str(i) + + self.assertEqual("1", myfunc(i=1)) + + with self.assertRaises(TypeError): + self.assertEqual("1", myfunc(i="1")) + + def test_none(self): + """ Check whether None as an argument functions correctly. """ + + @check_type_hints + def myfunc(i: int) -> str: + return str(i) + + with self.assertRaises(TypeError): + myfunc(None) + +if __name__ == "__main__": + unittest.main() diff --git a/LCS/PyCommon/test/t_typing.run b/LCS/PyCommon/test/t_typing.run new file mode 100755 index 0000000000000000000000000000000000000000..6bc23fadc736235c1143d3317d88307ffeac0f67 --- /dev/null +++ b/LCS/PyCommon/test/t_typing.run @@ -0,0 +1,5 @@ +#!/bin/bash + +source python-coverage.sh +python_coverage_test "*json_utils*" t_typing.py + diff --git a/LCS/PyCommon/test/t_typing.sh b/LCS/PyCommon/test/t_typing.sh new file mode 100755 index 0000000000000000000000000000000000000000..d788f5a03bee1f34f0c524afadfee796de8e081a --- /dev/null +++ b/LCS/PyCommon/test/t_typing.sh @@ -0,0 +1,2 @@ +#!/bin/sh +./runctest.sh t_typing diff --git a/LCS/PyCommon/typing.py b/LCS/PyCommon/typing.py new file mode 100644 index 0000000000000000000000000000000000000000..cd154ec09a2352afe744e5605a460a989b6413bc --- /dev/null +++ b/LCS/PyCommon/typing.py @@ -0,0 +1,67 @@ +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. 
diff --git a/LCS/PyCommon/typing.py b/LCS/PyCommon/typing.py
new file mode 100644
index 0000000000000000000000000000000000000000..cd154ec09a2352afe744e5605a460a989b6413bc
--- /dev/null
+++ b/LCS/PyCommon/typing.py
@@ -0,0 +1,67 @@
+# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+from functools import wraps
+import inspect
+
+def check_type_hints(func):
+    """ Decorator that verifies the type hints of the decorated function.
+
+        Raises a TypeError if a type hint is not met, that is, if a parameter or the
+        return value carries a type hint and is given (or produces) a value that is
+        not of that type or a subclass of it.
+
+        Example usage:
+
+        @check_type_hints
+        def myfunc(i: int, j) -> str:
+            return "%d %s" % (i,j)
+
+        myfunc(1, 2)    # ok, type of i matches type hint
+        myfunc(1, "2")  # ok, type of j is not checked, as it has no type hint
+        myfunc("1", 2)  # raises TypeError, type of i does not match type hint
+    """
+
+    def check_type(obj, cls):
+        if isinstance(cls, list):
+            return any((isinstance(obj, c) for c in cls))
+
+        return isinstance(obj, cls)
+
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        argspec = inspect.getfullargspec(func)
+        hints = argspec.annotations
+
+        for i, (arg, argname) in enumerate(zip(args, argspec.args)):
+            if argname in hints:
+                argtype = hints[argname]
+                if not check_type(arg, argtype):
+                    raise TypeError("Positional parameter %d (named %s) must have type %s (has type %s)" % (i, argname, argtype, type(arg)))
+
+        for argname, argtype in hints.items():
+            if argname in kwargs:
+                if not check_type(kwargs[argname], argtype):
+                    raise TypeError("Parameter %s must have type %s (has type %s)" % (argname, argtype, type(kwargs[argname])))
+
+        return_value = func(*args, **kwargs)
+        if 'return' in hints:
+            if not check_type(return_value, hints['return']):
+                raise TypeError("Return value must have type %s (has type %s)" % (hints['return'], type(return_value)))
+
+        return return_value
+
+    return wrapper
diff --git a/SAS/ResourceAssignment/TaskPrescheduler/lib/cobaltblocksize.py b/SAS/ResourceAssignment/TaskPrescheduler/lib/cobaltblocksize.py
index daa5266fc31381ea20a84d6200d696383b0608e9..ac14727d9a2c2645de608bf7454bd9bf60e30175 100644
--- a/SAS/ResourceAssignment/TaskPrescheduler/lib/cobaltblocksize.py
+++ b/SAS/ResourceAssignment/TaskPrescheduler/lib/cobaltblocksize.py
@@ -47,7 +47,7 @@ class BlockConstraints(object):
     """ Provide the constraints for the block size, as derived from the correlator and beamformer settings. """
""" - def __init__(self, correlatorSettings=None, coherentStokesSettings=None, incoherentStokesSettings=None, clockMHz=200): + def __init__(self, correlatorSettings=None, coherentStokesSettings=[], incoherentStokesSettings=[], clockMHz=200): self.correlator = correlatorSettings self.coherentStokes = coherentStokesSettings self.incoherentStokes = incoherentStokesSettings @@ -107,28 +107,28 @@ class BlockConstraints(object): # Correlator.cu (minimum of 16 samples per channel) factor = lcm(factor, CORRELATOR_BLOCKSIZE * self.correlator.nrChannelsPerSubband * self.nrSubblocks()) - if self.coherentStokes: + for coherentStokes in self.coherentStokes: # DelayAndBandPass.cu factor = lcm(factor, BEAMFORMER_DELAYCOMPENSATION_BLOCKSIZE * BEAMFORMER_NR_DELAYCOMPENSATION_CHANNELS) # FIR_Filter.cu - factor = lcm(factor, NR_PPF_TAPS * self.coherentStokes.nrChannelsPerSubband) + factor = lcm(factor, NR_PPF_TAPS * coherentStokes.nrChannelsPerSubband) # CoherentStokesKernel.cc - factor = lcm(factor, MAX_THREADS_PER_BLOCK * self.coherentStokes.timeIntegrationFactor) + factor = lcm(factor, MAX_THREADS_PER_BLOCK * coherentStokes.timeIntegrationFactor) #CoherentStokes.cu (integration should fit) - factor = lcm(factor, 1024 * self.coherentStokes.timeIntegrationFactor * self.coherentStokes.nrChannelsPerSubband) + factor = lcm(factor, 1024 * coherentStokes.timeIntegrationFactor * coherentStokes.nrChannelsPerSubband) - if self.incoherentStokes: + for incoherentStokes in self.incoherentStokes: # DelayAndBandPass.cu factor = lcm(factor, BEAMFORMER_DELAYCOMPENSATION_BLOCKSIZE * BEAMFORMER_NR_DELAYCOMPENSATION_CHANNELS) # FIR_Filter.cu - factor = lcm(factor, NR_PPF_TAPS * self.incoherentStokes.nrChannelsPerSubband) + factor = lcm(factor, NR_PPF_TAPS * incoherentStokes.nrChannelsPerSubband) # IncoherentStokes.cu (integration should fit) - factor = lcm(factor, 1024 * self.incoherentStokes.timeIntegrationFactor * self.incoherentStokes.nrChannelsPerSubband) + factor = lcm(factor, 1024 * incoherentStokes.timeIntegrationFactor * incoherentStokes.nrChannelsPerSubband) return factor diff --git a/SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler.py b/SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler.py index e3cf4e6ccc1730279de43c26cb2617b56709e09a..5cf07d6b85ab9866355c1a352df47d1a3697e1ab 100644 --- a/SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler.py +++ b/SAS/ResourceAssignment/TaskPrescheduler/taskprescheduler.py @@ -69,7 +69,7 @@ def calculateCobaltSettings(spec): incoherent = None clock = parset["Observation.sampleClock"] - constraints = BlockConstraints(corr, coherent, incoherent, clock) + constraints = BlockConstraints(corr, [coherent], [incoherent], clock) calculator = BlockSize(constraints) return {'nrSubblocks': calculator.nrSubblocks, 'blockSize': calculator.blockSize, diff --git a/SAS/ResourceAssignment/TaskPrescheduler/test/t_cobaltblocksize.py b/SAS/ResourceAssignment/TaskPrescheduler/test/t_cobaltblocksize.py index fe7acef4cf0ab8d1c2fb3baa6938b2eeacfa7e1b..8eaec011e3fd642723377b9ace171db8a687dfd1 100644 --- a/SAS/ResourceAssignment/TaskPrescheduler/test/t_cobaltblocksize.py +++ b/SAS/ResourceAssignment/TaskPrescheduler/test/t_cobaltblocksize.py @@ -56,7 +56,7 @@ class TestBlockConstraints(unittest.TestCase): coh.nrChannelsPerSubband = 16 coh.timeIntegrationFactor = 4 - c = BlockConstraints(coherentStokesSettings=coh) + c = BlockConstraints(coherentStokesSettings=[coh]) self.assertEqual(c.nrSubblocks(), 1) self.assertGreaterEqual(c.factor(), 1) @@ -69,7 +69,7 @@ class 
         incoh.nrChannelsPerSubband = 16
         incoh.timeIntegrationFactor = 4

-        c = BlockConstraints(incoherentStokesSettings=incoh)
+        c = BlockConstraints(incoherentStokesSettings=[incoh])
         self.assertEqual(c.nrSubblocks(), 1)
         self.assertGreaterEqual(c.factor(), 1)

@@ -94,7 +94,7 @@ class TestBlockSize(unittest.TestCase):
         correlator.nrChannelsPerSubband = 64
         correlator.integrationTime = integrationTime

-        c = BlockConstraints( correlator, None, None )
+        c = BlockConstraints(correlator)
         bs = BlockSize(c)

         self.assertAlmostEquals(c._samples2time(bs.integrationSamples), integrationTime, delta = integrationTime * 0.05)
diff --git a/SAS/TMSS/backend/services/scheduling/lib/constraints/template_constraints_v1.py b/SAS/TMSS/backend/services/scheduling/lib/constraints/template_constraints_v1.py
index 2b68de96458c95d1cd2e068f7916dc9851ff3a45..550efab2b7be2627304ce24c24f4cf95cd5cb9c0 100644
--- a/SAS/TMSS/backend/services/scheduling/lib/constraints/template_constraints_v1.py
+++ b/SAS/TMSS/backend/services/scheduling/lib/constraints/template_constraints_v1.py
@@ -283,7 +283,9 @@ def can_run_anywhere_within_timewindow_with_sky_constraints(scheduling_unit: mod
     target_rise_and_set_times = coordinates_timestamps_and_stations_to_target_rise_and_set(angle1=angle1, angle2=angle2, direction_type=direction_type, timestamps=timestamps, stations=tuple(stations), angle_to_horizon=min_elevation)
     for station, times in target_rise_and_set_times.items():
         for i in range(len(timestamps)):
-            if not (timestamps[i] > times[0]['rise'] and timestamps[i] < times[0]['set']):
+            if times[0]['always_above_horizon']:
+                continue
+            if times[0]['always_below_horizon'] or not (timestamps[i] > times[0]['rise'] and timestamps[i] < times[0]['set']):
                 if task['specifications_template'] == 'calibrator observation':
                     logger.info('min_calibrator_elevation=%s constraint is not met at timestamp=%s' % (min_elevation.rad, timestamps[i]))
                 else:
diff --git a/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py b/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py
index 98c1c41ea5d494bcff75f928fc2da1216777ff51..e8fadb2c6085117007f7913c8ecee0fa3808b434 100755
--- a/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py
+++ b/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py
@@ -735,8 +735,11 @@ class TestSkyConstraints(unittest.TestCase):
         self.distance_mock.return_value = self.distance_data
         self.addCleanup(self.distance_patcher.stop)

-        self.target_rise_and_set_data = {"CS002": [{"rise": datetime(2020, 1, 1, 8, 0, 0), "set": datetime(2020, 1, 1, 12, 30, 0)},
-                                                   {"rise": datetime(2020, 1, 1, 8, 0, 0), "set": datetime(2020, 1, 1, 12, 30, 0)}]}
+        self.target_rise_and_set_data = {"CS002": [{"rise": datetime(2020, 1, 1, 8, 0, 0), "set": datetime(2020, 1, 1, 12, 30, 0), "always_above_horizon": False, "always_below_horizon": False},
+                                                   {"rise": datetime(2020, 1, 1, 8, 0, 0), "set": datetime(2020, 1, 1, 12, 30, 0), "always_above_horizon": False, "always_below_horizon": False}]}
+        self.target_rise_and_set_data_always_above = {"CS002": [{"rise": None, "set": None, "always_above_horizon": True, "always_below_horizon": False}]}
+        self.target_rise_and_set_data_always_below = {"CS002": [{"rise": None, "set": None, "always_above_horizon": False, "always_below_horizon": True}]}
+
         self.target_rise_and_set_patcher = mock.patch('lofar.sas.tmss.services.scheduling.constraints.template_constraints_v1.coordinates_timestamps_and_stations_to_target_rise_and_set')
         self.target_rise_and_set_mock = self.target_rise_and_set_patcher.start()
         self.target_rise_and_set_mock.return_value = self.target_rise_and_set_data

@@ -760,21 +763,45 @@ class TestSkyConstraints(unittest.TestCase):

     # min_target_elevation

-    def test_can_run_anywhere_within_timewindow_with_sky_constraints_with_min_target_elevation_constraint_returns_true_when_met(self):
+    def test_can_run_anywhere_within_timewindow_with_sky_constraints_with_min_target_elevation_constraint_returns_true(self):
+        self.target_rise_and_set_mock.return_value = self.target_rise_and_set_data
+
         self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky'] = {'min_target_elevation': 0.1}
         self.scheduling_unit_blueprint.save()
-        timestamp = datetime(2020, 1, 1, 10, 0, 0)
+        timestamp = datetime(2020, 1, 1, 10, 0, 0)  # target sets after obs ends (mocked response)
         returned_value = tc1.can_run_anywhere_within_timewindow_with_sky_constraints(self.scheduling_unit_blueprint, timestamp, timestamp + timedelta(seconds=self.obs_duration))
         self.assertTrue(returned_value)

-    def test_can_run_anywhere_within_timewindow_with_sky_constraints_with_min_target_elevation_constraint_returns_false_when_not_met(self):
-        self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky'] = {'min_target_elevation': 0.2}
+    def test_can_run_anywhere_within_timewindow_with_sky_constraints_with_min_target_elevation_when_target_always_above_returns_true(self):
+        self.target_rise_and_set_mock.return_value = self.target_rise_and_set_data_always_above
+
+        self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky'] = {'min_target_elevation': 0.1}
         self.scheduling_unit_blueprint.save()
-        timestamp = datetime(2020, 1, 1, 11, 0, 0)
+        timestamp = datetime(2020, 1, 1, 10, 0, 0)  # target is always up (mocked response)
+        returned_value = tc1.can_run_anywhere_within_timewindow_with_sky_constraints(self.scheduling_unit_blueprint, timestamp, timestamp + timedelta(seconds=self.obs_duration))
+        self.assertTrue(returned_value)
+
+    def test_can_run_anywhere_within_timewindow_with_sky_constraints_with_min_target_elevation_constraint_returns_false(self):
+        self.target_rise_and_set_mock.return_value = self.target_rise_and_set_data
+
+        self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky'] = {'min_target_elevation': 0.1}
+        self.scheduling_unit_blueprint.save()
+        timestamp = datetime(2020, 1, 1, 11, 0, 0)  # target sets before obs ends (mocked response)
+        returned_value = tc1.can_run_anywhere_within_timewindow_with_sky_constraints(self.scheduling_unit_blueprint, timestamp, timestamp + timedelta(seconds=self.obs_duration))
+        self.assertFalse(returned_value)
+
+    def test_can_run_anywhere_within_timewindow_with_sky_constraints_with_min_target_elevation_when_target_is_always_below_returns_false(self):
+        self.target_rise_and_set_mock.return_value = self.target_rise_and_set_data_always_below
+
+        self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky'] = {'min_target_elevation': 0.1}
+        self.scheduling_unit_blueprint.save()
+        timestamp = datetime(2020, 1, 1, 10, 0, 0)  # target is never up (mocked response)
         returned_value = tc1.can_run_anywhere_within_timewindow_with_sky_constraints(self.scheduling_unit_blueprint, timestamp, timestamp + timedelta(seconds=self.obs_duration))
         self.assertFalse(returned_value)
+
+
 class TestTimeConstraints(TestCase):
     """ Tests for the time constraint checkers used in dynamic scheduling with different boundaries
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py
index a4fd63788ffff44af3696a8b2c3e4be9999e4d49..68ed8f2dd2893a96bd4e678935fcf0b60cdec4ea 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/parset.py
@@ -107,15 +107,20 @@ def _convert_correlator_settings_to_parset_dict(subtask: models.Subtask, spec: d
         parset[beam_prefix+"Correlator.angle2"] = phase_center['pointing']['angle2']

-    # TODO: do not use SubtaskOutput.objects.filter but make subtask.subtask_outputs work
-    subtask_outputs = list(models.SubtaskOutput.objects.filter(subtask_id=subtask.id))
-    subtask_output_ids = [o.id for o in subtask_outputs]
+    dataproducts = list(subtask.output_dataproducts.filter(dataformat__value=Dataformat.Choices.MEASUREMENTSET.value).filter(datatype__value=Datatype.Choices.VISIBILITIES.value).order_by('filename'))

-    # TODO: don't assume ordering by filename is sufficient: we need to inspect the dataproduct properties to make sure saps and subbands are in the correct order
-    dataproducts = list(models.Dataproduct.objects.filter(producer_id__in=subtask_output_ids).filter(dataformat=Dataformat.Choices.MEASUREMENTSET.value).filter(datatype=Datatype.Choices.VISIBILITIES).order_by('filename'))
+    # marshall dataproducts, but only if they're supplied. in some use cases, we want a parset before the subtask is scheduled.
+    correlator_dataproducts = []
+    for digi_beam in digi_beams:
+        for subband in digi_beam["subbands"]:
+            dataproduct = [dp for dp in dataproducts
+                           if dp.specifications_doc["sap"] == digi_beam['name']
+                           and dp.specifications_doc["subband"] == subband]

-    parset["Observation.DataProducts.Output_Correlated.filenames"] = [dp.filename for dp in dataproducts]
-    parset["Observation.DataProducts.Output_Correlated.locations"] = ["%s:%s" % (subtask.cluster.name, dp.directory) for dp in dataproducts]
+            correlator_dataproducts.append(dataproduct[0] if dataproduct else null_dataproduct)
+
+    parset["Observation.DataProducts.Output_Correlated.filenames"] = [dp.filename for dp in correlator_dataproducts]
+    parset["Observation.DataProducts.Output_Correlated.locations"] = ["%s:%s" % (subtask.cluster.name, dp.directory) for dp in correlator_dataproducts]

     # mimic MoM placeholder thingy (the resource estimator parses this)
     parset["Observation.DataProducts.Output_Correlated.identifications"] = ["TMSS_subtask_%s.SAP%03d" % (subtask.id, sap_nr) for sap_nr in range(len(digi_beams))]
@@ -129,12 +134,8 @@ def _convert_beamformer_settings_to_parset_dict(subtask: models.Subtask, spec: d
     parset = {}

-    # TODO: do not use SubtaskOutput.objects.filter but make subtask.subtask_outputs work
-    subtask_outputs = list(models.SubtaskOutput.objects.filter(subtask_id=subtask.id))
-    subtask_output_ids = [o.id for o in subtask_outputs]
-
-    # TODO: don't assume ordering by filename is sufficient: we need to inspect the dataproduct properties to make sure saps and subbands are in the correct order
-    dataproducts = list(models.Dataproduct.objects.filter(producer_id__in=subtask_output_ids).filter(dataformat=Dataformat.Choices.MEASUREMENTSET.value).filter(datatype=Datatype.Choices.TIME_SERIES.value).order_by('filename'))
+    dataproducts = list(subtask.output_dataproducts.filter(dataformat__value=Dataformat.Choices.BEAMFORMED.value).filter(datatype__value=Datatype.Choices.TIME_SERIES.value).order_by('filename'))

     # Lists of coherent and incoherent dataproducts that will be produced, in the order COBALT wants them
     coherent_dataproducts = []
@@ -174,11 +175,17 @@
             # marshall dataproducts, but only if they're supplied. in some use cases, we want a parset before the subtask is scheduled.
             for s in range(nr_stokes):
                 for p in range(nr_parts):
-                    # TODO: don't assume ordering by filename is sufficient: we need to inspect the dataproduct properties to make sure saps and subbands are in the correct order
+                    dataproduct = [dp for dp in dataproducts
+                                   if dp.specifications_doc["sap"] == sap['name']
+                                   and dp.specifications_doc["identifiers"]["pipeline_index"] == pipeline_idx
+                                   and dp.specifications_doc["identifiers"]["tab_index"] == field_idx
+                                   and dp.specifications_doc["identifiers"]["stokes_index"] == s
+                                   and dp.specifications_doc["identifiers"]["part_index"] == p
+                                   and dp.specifications_doc["coherent"] == tab['coherent']]
                     if tab['coherent']:
-                        coherent_dataproducts.append(dataproducts.pop(0) if dataproducts else null_dataproduct)
+                        coherent_dataproducts.append(dataproduct[0] if dataproduct else null_dataproduct)
                     else:
-                        incoherent_dataproducts.append(dataproducts.pop(0) if dataproducts else null_dataproduct)
+                        incoherent_dataproducts.append(dataproduct[0] if dataproduct else null_dataproduct)

         if cobalt_version >= 2:
             pipeline_parset['Beam[%s].subbandList' % sap_idx] = sap['subbands']
@@ -192,7 +199,8 @@ def _convert_beamformer_settings_to_parset_dict(subtask: models.Subtask, spec: d
         beamformer_pipeline_parsets.append(pipeline_parset)

     # Process fly's eye pipelines
-    for pipeline in spec['COBALT']['beamformer']['flyseye_pipelines']:
+    pipeline_idx_offset = len(beamformer_pipeline_parsets)
+    for pipeline_idx, pipeline in enumerate(spec['COBALT']['beamformer']['flyseye_pipelines'], start=pipeline_idx_offset):
         pipeline_parset = {}
         pipeline_parset.update(_add_prefix(_stokes_settings_parset_subkeys(pipeline['coherent']), "CoherentStokes."))
         pipeline_parset['flysEye'] = True
@@ -206,7 +214,7 @@ def _convert_beamformer_settings_to_parset_dict(subtask: models.Subtask, spec: d
         antennaset = spec['stations']['antenna_set']
         fields = sum([list(antenna_fields(station, antennaset)) for station in stations], [])

-        for field in fields:
+        for field_idx, field in enumerate(fields):
             stokes_settings = pipeline['coherent']

             nr_subbands = len(sap['subbands'])
@@ -216,8 +224,14 @@ def _convert_beamformer_settings_to_parset_dict(subtask: models.Subtask, spec: d
             # marshall dataproducts, but only if they're supplied. in some use cases, we want a parset before the subtask is scheduled.
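
            # (Illustrative sketch, not part of the patch: the per-(s, p) list comprehension below
            #  re-scans all dataproducts for every stokes/part combination. If that ever becomes a
            #  hotspot, a hypothetical dict keyed on the identifier tuple would make each lookup O(1):
            #
            #      dp_index = {(dp.specifications_doc["sap"],
            #                   dp.specifications_doc["identifiers"]["pipeline_index"],
            #                   dp.specifications_doc["identifiers"]["tab_index"],
            #                   dp.specifications_doc["identifiers"]["stokes_index"],
            #                   dp.specifications_doc["identifiers"]["part_index"],
            #                   dp.specifications_doc["coherent"]): dp for dp in dataproducts}
            #      dataproduct = dp_index.get((sap["name"], pipeline_idx, field_idx, s, p, True))
            #
            #  with a null_dataproduct fallback when the key is absent; the keys follow
            #  dataproduct_specifications_template-timeseries-1.json introduced below.)
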
            for s in range(nr_stokes):
                for p in range(nr_parts):
-                    # TODO: don't assume ordering by filename is sufficient: we need to inspect the dataproduct properties to make sure saps and subbands are in the correct order
-                    coherent_dataproducts.append(dataproducts.pop(0) if dataproducts else null_dataproduct)
+                    dataproduct = [dp for dp in dataproducts
+                                   if dp.specifications_doc["sap"] == sap["name"]
+                                   and dp.specifications_doc["identifiers"]["pipeline_index"] == pipeline_idx
+                                   and dp.specifications_doc["identifiers"]["tab_index"] == field_idx
+                                   and dp.specifications_doc["identifiers"]["stokes_index"] == s
+                                   and dp.specifications_doc["identifiers"]["part_index"] == p
+                                   and dp.specifications_doc["coherent"] == True]
+                    coherent_dataproducts.append(dataproduct[0] if dataproduct else null_dataproduct)

         if cobalt_version >= 2:
             pipeline_parset['Beam[%s].stationList' % sap_idx] = pipeline['stations']
@@ -519,9 +533,15 @@ def _convert_to_parset_dict_for_pipelinecontrol_schema(subtask: models.Subtask)
     # TODO: do not use SubtaskOutput.objects.filter but make subtask.subtask_outputs work
     subtask_outputs = list(models.SubtaskOutput.objects.filter(subtask_id=subtask.id))
-    out_dataproducts = []
-    for subtask_output in subtask_outputs:
-        out_dataproducts = list(models.Dataproduct.objects.filter(producer_id=subtask_output.id))
+    unsorted_out_dataproducts = sum([list(models.Dataproduct.objects.filter(producer_id=subtask_output.id)) for subtask_output in subtask_outputs],[])
+
+    def find_dataproduct(dataproducts: list, specification_doc: dict):
+        hits = [dp for dp in dataproducts if dp.specifications_doc['sap'] == specification_doc['sap']
+                                         and dp.specifications_doc['subband'] == specification_doc['subband']]
+        return hits[0] if hits else null_dataproduct
+
+    # list output dataproducts in the same order as input dataproducts, matched by the identifiers
+    out_dataproducts = [find_dataproduct(unsorted_out_dataproducts, in_dp.specifications_doc) for in_dp in in_dataproducts]

     parset["Observation.DataProducts.Output_Correlated.enabled"] = "true"
     parset["Observation.DataProducts.Output_Correlated.filenames"] = "[%s]" % ",".join([dp.filename for dp in out_dataproducts])
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/conversions.py b/SAS/TMSS/backend/src/tmss/tmssapp/conversions.py
index ae926e172f4a39a4ff77a442346fbf25d4505e35..3c0e184ce79ac8e697043dcf8ced5dceba3bf1eb 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/conversions.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/conversions.py
@@ -126,9 +126,10 @@ def coordinates_timestamps_and_stations_to_target_rise_and_set(angle1: float, an
     :param stations: tuple of station names, e.g. ("CS002",)
     :param angle_to_horizon: the angle between horizon and given coordinates for which rise and set times are returned
     :return A dict that maps station names to a list of dicts with rise and set times for each requested date.
+            If rise and set are None, the target is always above or below horizon, and the respective boolean is True.
            E.g.
-            {"CS002": [{"rise": datetime(2020, 1, 1, 4, 0, 0), "set": datetime(2020, 1, 1, 11, 0, 0)},
-                       {"rise": datetime(2020, 1, 2, 4, 0, 0), "set": datetime(2020, 1, 2, 11, 0, 0)}]
+            {"CS002": [{"rise": datetime(2020, 1, 1, 4, 0, 0), "set": datetime(2020, 1, 1, 11, 0, 0), "always_above_horizon": False, "always_below_horizon": False},
+                       {"rise": datetime(2020, 1, 2, 4, 0, 0), "set": datetime(2020, 1, 2, 11, 0, 0), "always_above_horizon": False, "always_below_horizon": False}]
             }
     """
     if direction_type == "J2000":
@@ -140,10 +141,29 @@ def coordinates_timestamps_and_stations_to_target_rise_and_set(angle1: float, an
         for timestamp in timestamps:
             # todo: this can probably be made faster by moving the following logic to an own function with single station/timestamp as input and putting the lru_cache on there.
             observer = create_astroplan_observer_for_station(station)
-            target_set = observer.target_set_time(target=coord, time=Time(timestamp), horizon=angle_to_horizon, which='next', n_grid_points=TARGET_SET_RISE_PRECISION)
-            target_rise = observer.target_rise_time(target=coord, time=Time(target_set), horizon=angle_to_horizon, which='previous', n_grid_points=TARGET_SET_RISE_PRECISION)
+            try:
+                target_set = observer.target_set_time(target=coord, time=Time(timestamp), horizon=angle_to_horizon, which='next', n_grid_points=TARGET_SET_RISE_PRECISION)
+                target_rise = observer.target_rise_time(target=coord, time=Time(target_set), horizon=angle_to_horizon, which='previous', n_grid_points=TARGET_SET_RISE_PRECISION)
+                return_dict.setdefault(station, []).append(
+                    {"rise": target_rise.to_datetime(),
+                     "set": target_set.to_datetime(),
+                     "always_above_horizon": False,
+                     "always_below_horizon": False})
+            except TypeError as e:
+                if "numpy.float64" in str(e):
+                    # Note: when the target is always above or below horizon, astroplan raises the not very
+                    # meaningful error: 'numpy.float64' object does not support item assignment
+                    # Determine whether the target is always above or below horizon so that we can return some useful
+                    # additional info, e.g. for scheduling purposes.
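
                    # (Illustrative sketch, not part of the patch: a consumer can interpret the three
                    #  cases as a single visibility test, cf. the constraint check in
                    #  template_constraints_v1.py above:
                    #
                    #      def is_visible(entry: dict, timestamp: datetime) -> bool:
                    #          if entry["always_above_horizon"]:
                    #              return True
                    #          if entry["always_below_horizon"]:
                    #              return False
                    #          return entry["rise"] < timestamp < entry["set"]
                    #  )
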
+                    is_up = observer.target_is_up(target=coord, time=Time(timestamp), horizon=angle_to_horizon)
+                    return_dict.setdefault(station, []).append(
+                        {"rise": None,
+                         "set": None,
+                         "always_above_horizon": is_up,
+                         "always_below_horizon": not is_up})
+                else:
+                    raise

-            return_dict.setdefault(station, []).append({"rise": target_rise.to_datetime(), "set": target_set.to_datetime()})
     return return_dict
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py
index 6d5b024f76d499aa3f3ed6fe6b90e1481a3f69d6..a89b16fc3d51aaf571122805d7ccde6d3ae46213 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py
@@ -460,6 +460,22 @@ class Migration(migrations.Migration):
                 'abstract': False,
             },
         ),
+        migrations.CreateModel(
+            name='ReservationStrategyTemplate',
+            fields=[
+                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)),
+                ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')),
+                ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')),
+                ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)),
+                ('description', models.CharField(blank=True, default='', help_text='A longer description of this object.', max_length=255)),
+                ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)),
+                ('template', django.contrib.postgres.fields.jsonb.JSONField(help_text='JSON-data compliant with the JSON-schema in the reservation_template. This reservation strategy template is like a predefined recipe with all the correct settings, and defines which parameters the user can alter.')),
+            ],
+            options={
+                'abstract': False,
+            },
+        ),
         migrations.CreateModel(
             name='ReservationTemplate',
             fields=[
@@ -1196,6 +1212,11 @@ class Migration(migrations.Migration):
             model_name='reservationtemplate',
             constraint=models.UniqueConstraint(fields=('name', 'version'), name='reservationtemplate_unique_name_version'),
         ),
+        migrations.AddField(
+            model_name='reservationstrategytemplate',
+            name='reservation_template',
+            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ReservationTemplate'),
+        ),
         migrations.AddField(
             model_name='reservation',
             name='project',
@@ -1214,12 +1235,12 @@ class Migration(migrations.Migration):
         migrations.AddField(
             model_name='projectquotaarchivelocation',
             name='project_quota',
-            field=models.ForeignKey(help_text='The ProjectQuota for this archive location', on_delete=django.db.models.deletion.PROTECT, related_name='project_quota_archive_location', to='tmssapp.ProjectQuota'),
+            field=models.ForeignKey(help_text='Project to which this quota belongs.', on_delete=django.db.models.deletion.PROTECT, related_name='project_quota', to='tmssapp.ProjectQuota'),
         ),
         migrations.AddField(
             model_name='projectquota',
             name='project',
-            field=models.ForeignKey(help_text='Project to wich this quota belongs.', on_delete=django.db.models.deletion.PROTECT, related_name='quota', to='tmssapp.Project'),
+            field=models.ForeignKey(help_text='Project to which this quota belongs.', on_delete=django.db.models.deletion.PROTECT, related_name='quota', to='tmssapp.Project'),
         ),
         migrations.AddField(
             model_name='projectquota',
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
index 47c2ef0bd9cf8592a72c9a86764c522af68302e2..2ebdaf12e1bb2f89b818da644773373ba697da99 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
@@ -238,6 +238,26 @@ class DefaultTaskRelationSelectionTemplate(BasicCommon):
     template = ForeignKey("TaskRelationSelectionTemplate", on_delete=PROTECT)


+class ReservationStrategyTemplate(NamedCommon):
+    '''
+    A ReservationStrategyTemplate is a template in the sense that it serves as a template to fill in JSON data objects
+    conforming to its referred reservation_template.
+    It is however not derived from the (abstract) Template super-class, because the Template super-class is for
+    JSON schemas, not JSON data objects.
+    '''
+    version = CharField(max_length=128, help_text='Version of this template (with respect to other templates of the same name).')
+    template = JSONField(null=False, help_text='JSON-data compliant with the JSON-schema in the reservation_template. '
+                                               'This reservation strategy template is like a predefined recipe with all '
+                                               'the correct settings, and defines which parameters the user can alter.')
+    reservation_template = ForeignKey("ReservationTemplate", on_delete=PROTECT, null=False, help_text="")
+
+    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
+        if self.template and self.reservation_template_id and self.reservation_template.schema:
+            validate_json_against_schema(self.template, self.reservation_template.schema)
+
+        super().save(force_insert, force_update, using, update_fields)
+
+
 class ReservationTemplate(Template):
     pass

@@ -1066,3 +1086,4 @@ class Reservation(NamedCommon):
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
         annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template')
         super().save(force_insert, force_update, using, update_fields)
+
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/populate.py b/SAS/TMSS/backend/src/tmss/tmssapp/populate.py
index e4ae9bd9d75d4dd5b149f11bef291de8d0c033d4..43d73566a0ff3de217ac61dba88354a548f5a0b4 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/populate.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/populate.py
@@ -79,15 +79,18 @@ def populate_test_data():
             if 'Commissioning' not in tmss_project.tags:
                 continue

-            # for test purposes also add a reservation object
-            reservation_template = models.ReservationTemplate.objects.get(name="resource reservation")
-            reservation_template_spec = get_default_json_object_for_schema(reservation_template.schema)
-            Reservation.objects.create(name="DummyReservation",
-                                       description="Just A non-scheduled reservation as example",
-                                       project=tmss_project,
-                                       specifications_template=reservation_template,
-                                       specifications_doc=reservation_template_spec,
-                                       start_time=datetime.now())
+            # for test purposes also create reservation objects from all reservation strategies
+            for strategy_template in ReservationStrategyTemplate.objects.all():
+                reservation_spec = add_defaults_to_json_object_for_schema(strategy_template.template,
+                                                                          strategy_template.reservation_template.schema)
+                reservation = Reservation.objects.create(name=strategy_template.name,
+                                                         description="%s created from reservation strategy" % strategy_template.description,
+                                                         project=None,
+                                                         specifications_template=strategy_template.reservation_template,
+                                                         specifications_doc=reservation_spec,
+                                                         start_time=datetime.now()+timedelta(days=1),
+                                                         stop_time=None)
+                logger.info('created test reservation: %s', reservation.name)

             for scheduling_set in tmss_project.scheduling_sets.all():
                 for unit_nr in range(2):
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/dataproduct_specifications_template-timeseries-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/dataproduct_specifications_template-timeseries-1.json
new file mode 100644
index 0000000000000000000000000000000000000000..d11ec11cc085263e455984410ad0f4e3dcc8e5ca
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/dataproduct_specifications_template-timeseries-1.json
@@ -0,0 +1,71 @@
+{
+  "$id":"http://tmss.lofar.org/api/schemas/dataproductspecificationtemplate/timeseries/1#",
+  "$schema": "http://json-schema.org/draft-06/schema#",
+  "title": "timeseries",
+  "type": "object",
+  "default": {},
+  "properties": {
+    "sap": {
+      "type": "string",
+      "title": "SAP",
+      "default": ""
+    },
+    "identifiers": {
+      "title": "Identifiers",
+      "description": "Identification of this dataproduct within the producing subtask.",
+      "type": "object",
+      "default": {},
+      "properties": {
+        "sap_index": {
+          "title": "SAP index",
+          "type": "integer",
+          "default": 0,
+          "minimum": 0
+        },
+        "pipeline_index": {
+          "title": "Pipeline index",
+          "description": "Index of beamformer pipeline within COBALT",
+          "type": "integer",
+          "default": 0,
+          "minimum": 0
+        },
+        "tab_index": {
+          "title": "TAB index",
+          "description": "TAB index within the SAP",
+          "type": "integer",
+          "default": 0,
+          "minimum": 0
+        },
+        "part_index": {
+          "title": "Part index",
+          "description": "Part index within the TAB",
+          "type": "integer",
+          "default": 0,
+          "minimum": 0
+        },
+        "stokes_index": {
+          "title": "Stokes index",
+          "description": "Stokes index within the TAB",
+          "type": "integer",
+          "default": 0,
+          "minimum": 0,
+          "maximum": 3
+        },
+        "coherent": {
+          "title": "Coherent",
+          "description": "TAB is a coherent addition",
+          "type": "boolean",
+          "default": true
+        }
+      },
+      "required": [
+        "sap_index",
+        "tab_index",
+        "part_index",
+        "stokes_index",
+        "coherent"
+      ]
+    }
+  },
+  "required": [ "identifiers" ]
+}
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/dataproduct_specifications_template-visibilities-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/dataproduct_specifications_template-visibilities-1.json
new file mode 100644
index 0000000000000000000000000000000000000000..161f96803940afef59c4ceaf35787ad6012f5e66
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/dataproduct_specifications_template-visibilities-1.json
@@ -0,0 +1,22 @@
+{
+  "$id":"http://tmss.lofar.org/api/schemas/dataproductspecificationstemplate/visibilities/1#",
+  "$schema": "http://json-schema.org/draft-06/schema#",
+  "title": "visibilities",
+  "type": "object",
+  "default": {},
+  "properties": {
+    "sap": {
+      "type": "string",
+      "title": "SAP",
+      "default": ""
+    },
+    "subband": {
+      "type": "integer",
+      "title": "subband number",
+      "default": 0,
+      "minimum": 0,
+      "maximum": 511
+    }
+  },
+  "required": [ "sap", "subband" ]
+}
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/reservation-strategy-ILTswitch.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/reservation-strategy-ILTswitch.json
new file mode 100644
index 0000000000000000000000000000000000000000..73e493db102862eafe7a179489f7bac0631f605f
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/reservation-strategy-ILTswitch.json
@@ -0,0 +1,38 @@
+{
+  "activity": {
+    "type": "stand-alone mode",
+    "name": "ILT stations in local mode",
+    "description": "Planned switch of international stations for local use by station owners",
+    "contact": "Operator",
+    "subject": "system",
+    "planned": true
+  },
+  "resources": {
+    "stations": [
+      "DE601",
+      "DE602",
+      "DE603",
+      "DE604",
+      "DE605",
+      "DE609",
+      "FR606",
+      "SE607",
+      "UK608",
+      "PL610",
+      "PL611",
+      "PL612",
+      "IE613",
+      "LV614"
+    ]
+  },
+  "effects": {
+    "lba_rfi": false,
+    "hba_rfi": false,
+    "expert": false
+  },
+  "schedulability": {
+    "manual": false,
+    "dynamic": false,
+    "project_exclusive": false
+  }
+}
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/reservation-strategy-VLBIsession.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/reservation-strategy-VLBIsession.json
new file mode 100644
index 0000000000000000000000000000000000000000..7c25f0f83ed1efb86bedcbf5803e0dd7b56eb59b
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/reservation-strategy-VLBIsession.json
@@ -0,0 +1,38 @@
+{
+  "activity": {
+    "type": "stand-alone mode",
+    "name": "VLBI session",
+    "description": "VLBI session ongoing. International station network not available.",
+    "contact": "Operator",
+    "subject": "network",
+    "planned": true
+  },
+  "resources": {
+    "stations": [
+      "DE601",
+      "DE602",
+      "DE603",
+      "DE604",
+      "DE605",
+      "DE609",
+      "FR606",
+      "SE607",
+      "UK608",
+      "PL610",
+      "PL611",
+      "PL612",
+      "IE613",
+      "LV614"
+    ]
+  },
+  "effects": {
+    "lba_rfi": false,
+    "hba_rfi": false,
+    "expert": false
+  },
+  "schedulability": {
+    "manual": false,
+    "dynamic": false,
+    "project_exclusive": false
+  }
+}
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/reservation-strategy-core-stations.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/reservation-strategy-core-stations.json
new file mode 100644
index 0000000000000000000000000000000000000000..334ab09f6fdf28f42793add9565d0d38c2010fb7
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/reservation-strategy-core-stations.json
@@ -0,0 +1,47 @@
+{
+  "activity": {
+    "type": "maintenance",
+    "description": "Maintenance of all core stations",
+    "contact": "Operator",
+    "subject": "system",
+    "planned": true
+  },
+  "resources": {
+    "stations": [
+      "CS001",
+      "CS002",
+      "CS003",
+      "CS004",
+      "CS005",
+      "CS006",
+      "CS007",
+      "CS011",
+      "CS013",
+      "CS017",
+      "CS021",
+      "CS024",
+      "CS026",
+      "CS028",
+      "CS030",
+      "CS031",
+      "CS032",
+      "CS101",
+      "CS103",
+      "CS201",
+      "CS301",
+      "CS302",
+      "CS401",
+      "CS501"
+    ]
+  },
+  "effects": {
+    "lba_rfi": false,
+    "hba_rfi": false,
+    "expert": false
+  },
+  "schedulability": {
+    "manual": false,
+    "dynamic": false,
+    "project_exclusive": false
+  }
+}
\ No newline at end of file
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/reservation-strategy-maintenance.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/reservation-strategy-maintenance.json
new file mode 100644
index 0000000000000000000000000000000000000000..cd938b2737ac725fc13c1d7db31f8e2aca1fd26c
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/reservation-strategy-maintenance.json
@@ -0,0 +1,24 @@
+{
+  "activity": {
+    "type": "maintenance",
+    "name": "Regular station maintenance",
+    "description": "Planned station maintenance",
+    "contact": "Operator",
+    "subject": "system",
+    "planned": true
+  },
+  "resources": {
+    "stations": [
+    ]
+  },
+  "effects": {
+    "lba_rfi": false,
+    "hba_rfi": false,
+    "expert": false
+  },
+  "schedulability": {
+    "manual": false,
+    "dynamic": false,
+    "project_exclusive": false
+  }
+}
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/reservation-strategy-overheating.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/reservation-strategy-overheating.json
new file mode 100644
index 0000000000000000000000000000000000000000..c559225a8e5df256191f080bd8c7f3de3455c11c
--- /dev/null
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/reservation-strategy-overheating.json
@@ -0,0 +1,57 @@
+{
+  "activity": {
+    "type": "outage",
+    "name": "Station cool down",
+    "description": "Stations unavailable because of too high temperature",
+    "contact": "Operator",
+    "subject": "system",
+    "planned": true
+  },
+  "resources": {
+    "stations": [
+      "CS001",
+      "CS002",
+      "CS003",
+      "CS004",
+      "CS005",
+      "CS006",
+      "CS007",
+      "CS011",
+      "CS013",
+      "CS017",
+      "CS021",
+      "CS024",
+      "CS026",
+      "CS030",
+      "CS032",
+      "CS301",
+      "CS302",
+      "CS401",
+      "CS501",
+      "RS106",
+      "RS205",
+      "RS208",
+      "RS210",
+      "RS305",
+      "RS306",
+      "RS307",
+      "RS310",
+      "RS406",
+      "RS407",
+      "RS409",
+      "RS503",
+      "RS508",
+      "RS509"
+    ]
+  },
+  "effects": {
+    "lba_rfi": false,
+    "hba_rfi": false,
+    "expert": false
+  },
+  "schedulability": {
+    "manual": false,
+    "dynamic": false,
+    "project_exclusive": false
+  }
+}
\ No newline at end of file
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/subtask_template-observation-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/subtask_template-observation-1.json
index 985274ec00ccab31533717ae489dee21ad4a6b14..3555487e83beaf29a2c66bab6f7327c4cf6cee99 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/subtask_template-observation-1.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/subtask_template-observation-1.json
@@ -98,7 +98,7 @@
       "type":"integer",
       "title":"Specification version",
       "description":"Version of the COBALT specification to emit",
-      "default":2,
+      "default":1,
       "minimum":1,
       "maximum":2
     },
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/templates.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/templates.json
index 480d7a4abb715673befa1742ef8fedb6ac04a00f..33140a263020d32e0b1d705713bc7368d7844183 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/templates.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/templates.json
@@ -35,6 +35,14 @@
         "file_name": "dataproduct_specifications_template-empty-1.json",
         "template": "dataproduct_specifications_template"
     },
+    {
+        "file_name": "dataproduct_specifications_template-timeseries-1.json",
+        "template": "dataproduct_specifications_template"
+    },
+    {
+        "file_name": "dataproduct_specifications_template-visibilities-1.json",
+        "template": "dataproduct_specifications_template"
+    },
     {
         "file_name": "dataproduct_feedback_template-empty-1.json",
         "template": "dataproduct_feedback_template"
@@ -167,5 +175,51 @@
     {
         "file_name": "reservation_template-reservation-1.json",
         "template": "reservation_template"
+    },
+    {
+        "file_name": "reservation-strategy-core-stations.json",
+        "template": "reservation_strategy_template",
+        "reservation_template_name": "reservation",
+        "reservation_template_version": "1",
+        "name": "Simple Core Reservation",
+        "description": "This reservation strategy template defines a reservation of all core stations for system maintenance.",
+        "version": 1
+    },
+    {
+        "file_name": "reservation-strategy-ILTswitch.json",
+        "template": "reservation_strategy_template",
+        "reservation_template_name": "reservation",
+        "reservation_template_version": "1",
+        "name": "ILT stations in local mode",
+        "description": "Planned switch of international stations for local use by station owners",
+        "version": 1
+    },
+    {
+        "file_name": "reservation-strategy-maintenance.json",
+        "template": "reservation_strategy_template",
+        "reservation_template_name": "reservation",
+        "reservation_template_version": "1",
+        "name": "Regular station maintenance",
+        "description": "Planned station maintenance",
+        "version": 1
+    },
+    {
+        "file_name": "reservation-strategy-overheating.json",
+        "template": "reservation_strategy_template",
+        "reservation_template_name": "reservation",
+        "reservation_template_version": "1",
+        "name": "Station cool down",
+        "description": "Stations unavailable because of too high temperature",
+        "version": 1
+    },
+    {
+        "file_name": "reservation-strategy-VLBIsession.json",
+        "template": "reservation_strategy_template",
+        "reservation_template_name": "reservation",
+        "reservation_template_version": "1",
+        "name": "VLBI session",
+        "description": "VLBI session ongoing. International station network not available.",
+        "version": 1
+    }
 ]
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py
index d5643db21650bee2126de69c15884f2b26f9521f..47086104958108a4cc364a1c07c84c200d909d64 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/specification.py
@@ -364,6 +364,14 @@ class TaskTypeSerializer(DynamicRelationalHyperlinkedModelSerializer):
         fields = '__all__'


+class ReservationStrategyTemplateSerializer(DynamicRelationalHyperlinkedModelSerializer):
+    template = JSONEditorField(schema_source="reservation_template.schema")
+
+    class Meta:
+        model = models.ReservationStrategyTemplate
+        fields = '__all__'
+
+
 class ReservationTemplateSerializer(AbstractTemplateSerializer):
     class Meta:
         model = models.ReservationTemplate
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py
index 60499017cdc4a4247f7716881ca2840f45ffb96a..856c523be56c5a471099ab484f6eb04412b678a8 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py
@@ -4,12 +4,14 @@ logger = logging.getLogger(__name__)
 from copy import deepcopy
 from functools import cmp_to_key
 from collections.abc import Iterable
+from math import ceil

 from lofar.common.ring_coordinates import RingCoordinates
 from lofar.common.datetimeutils import formatDatetime, round_to_second_precision
 from lofar.common import isProductionEnvironment
 from lofar.common.json_utils import add_defaults_to_json_object_for_schema, get_default_json_object_for_schema
 from lofar.common.lcu_utils import get_current_stations
+from lofar.stationmodel.antennafields import antenna_fields

 from lofar.sas.tmss.tmss.exceptions import SubtaskCreationException, SubtaskSchedulingException, SubtaskException

@@ -20,7 +22,7 @@ from lofar.sas.tmss.tmss.tmssapp.models import *
 from lofar.sas.resourceassignment.resourceassigner.rarpc import RARPC
 from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC
 from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset_dict
-from lofar.sas.resourceassignment.taskprescheduler.cobaltblocksize import CorrelatorSettings, BlockConstraints, BlockSize
+from lofar.sas.resourceassignment.taskprescheduler.cobaltblocksize import CorrelatorSettings, StokesSettings, BlockConstraints, BlockSize
 from lofar.sas.resourceassignment.resourceassigner.schedulers import ScheduleException

 from lofar.sas.tmss.tmss.tmssapp.conversions import antennafields_for_antennaset_and_station
@@ -73,8 +75,9 @@ def create_subtasks_from_task_blueprint(task_blueprint: TaskBlueprint) -> [Subta
                              task_blueprint.id, task_blueprint.name, task_blueprint.specifications_template.type.value, task_blueprint.scheduling_unit_blueprint.id)
                 subtasks.append(subtask)
-            except SubtaskCreationException as e:
-                logger.error(e)
+            except Exception as e:
+                logger.exception(e)
+                raise SubtaskCreationException('Cannot create subtasks for task id=%s for its schema name=%s in generator %s' % (task_blueprint.pk, template_name, generator)) from e
         return subtasks
     else:
         logger.error('Cannot create subtasks for task id=%s because no generator exists for its schema name=%s' % (task_blueprint.pk, template_name))
@@ -152,6 +155,9 @@ def create_observation_subtask_specifications_from_observation_task_blueprint(ta
     # now go over the settings in the task_spec and 'copy'/'convert' them to the subtask_spec
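
    # (Illustrative sketch, not part of the patch: the cobalt_calculator_constraints object
    #  introduced below is filled incrementally while walking the task spec. A stand-alone use
    #  of the list-based API from cobaltblocksize.py above would look like this, with attribute
    #  names taken from that module and the values made up:
    #
    #      corr = CorrelatorSettings()
    #      corr.nrChannelsPerSubband = 64
    #      corr.integrationTime = 1.0
    #
    #      ss = StokesSettings()
    #      ss.nrChannelsPerSubband = 16
    #      ss.timeIntegrationFactor = 4
    #
    #      constraints = BlockConstraints(corr, coherentStokesSettings=[ss], incoherentStokesSettings=[])
    #      block_size = BlockSize(constraints).blockSize
    #  )
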
     task_spec = task_blueprint.specifications_doc

+    # block size calculator will need to be fed all the relevant specs
+    cobalt_calculator_constraints = BlockConstraints(None, [], [])
+
     # The calibrator has a minimal calibration-specific specification subset.
     # The rest of it's specs are 'shared' with the target observation.
     # So... copy the calibrator specs first, then loop over the shared target/calibrator specs...
@@ -195,6 +201,17 @@
             logger.info("Using station and correlator settings for calibrator observation task_blueprint id=%s from target observation task_blueprint id=%s",
                         task_blueprint.id, target_task_blueprint.id)

+    # correlator
+    subtask_spec["COBALT"]["correlator"] = { "enabled": False }
+
+    if "correlator" in task_spec:
+        subtask_spec["COBALT"]["correlator"]["enabled"] = True
+        subtask_spec["COBALT"]["correlator"]["channels_per_subband"] = task_spec["correlator"]["channels_per_subband"]
+
+        corr = CorrelatorSettings()
+        corr.nrChannelsPerSubband = task_spec["correlator"]["channels_per_subband"]
+        corr.integrationTime = task_spec["correlator"]["integration_time"]
+        cobalt_calculator_constraints.correlator = corr

     # At this moment of subtask creation we known which stations we *want* from the task_spec
     # But we do not know yet which stations are available at the moment of observing.
@@ -215,70 +232,89 @@
     # The beamformer obs has a beamformer-specific specification block.
     # The rest of it's specs is the same as in a target observation.
     # So... copy the beamformer specs first, then loop over the shared specs...
-    if 'beamforming' in task_blueprint.specifications_template.name.lower():
+    if 'beamformers' in task_spec:
         subtask_spec['COBALT']['beamformer']['tab_pipelines'] = []
         subtask_spec['COBALT']['beamformer']['flyseye_pipelines'] = []

-        if 'beamformers' in task_spec:
-            for task_beamformer_spec in task_spec['beamformers']:
-                task_beamformer_spec = deepcopy(task_beamformer_spec)
-
-                # the wanted/specified beamformer station list is the intersecion of the observation station list with the requested beamformer stations.
-                # at the moment of scheduling this list is re-evaluated for available stations, and the max_nr_missing is evaluated as well.
-                # this intersection is not needed per se, because COBALT plays nicely and does similar filtering for stations that are actually available,
-                # but hey, if cobalt can play nice, then so can we! :)
-                # So, let's come up with the correct complete beamforming-stations-list, and ask cobalt to explicitely uses these.
-                beamformer_station_list = []
-                if "station_groups" in task_beamformer_spec:
-                    # combine all stations in the groups...
-                    for station_group in task_beamformer_spec["station_groups"]:
-                        beamformer_station_list.extend(station_group["stations"])
-
-                    # make intersection with observing-stations...
-                    beamformer_station_set = set(beamformer_station_list).intersection(set(subtask_spec['stations']['station_list']))
-
-                    # make it a nice readable sorted list.
-                    beamformer_station_list = sorted(list(beamformer_station_list))
-                # use the beamformer_station_list below for the tab pipeline and/or flys eye
-
-                for stokes_type in ["coherent", "incoherent"]:
-                    if stokes_type in task_beamformer_spec:
-                        # SAPs
-                        saps = task_beamformer_spec[stokes_type]["SAPs"]
-                        for sap in saps:
-                            # determine absolute tab pointing for subtask by adding relative tab pointing from task to target sap pointing
-                            target_sap = _get_related_target_sap_by_name(task_blueprint, sap['name'])
-                            if "tabs" in sap:
-                                for tab in sap["tabs"]:
-                                    tab['coherent'] = (stokes_type == "coherent")
-                                    if "relative" in tab:
-                                        if tab.pop("relative"):
-                                            tab['pointing'] = _add_pointings(tab['pointing'], target_sap['digital_pointing'])
-                            elif stokes_type == 'incoherent':
-                                sap.setdefault('tabs', [])
-                                sap["tabs"] += [{'coherent': False}]  # todo: according to confluence. Is that needed?
-                            if "tab_rings" in sap:
-                                ring_pointings = _generate_tab_ring_pointings(target_sap["digital_pointing"], sap.pop("tab_rings"))
-                                sap['tabs'] += [{'coherent': (stokes_type == "coherent"), 'pointing': pointing} for pointing in ring_pointings]
-                            if "subbands" in sap:
-                                sap['subbands'] = _filter_subbands(target_sap['subbands'], sap['subbands'])
-
-                        # create a pipeline item and add it to the list
-                        beamformer_pipeline = {stokes_type: task_beamformer_spec[stokes_type]["settings"],
-                                               "stations": beamformer_station_list,
-                                               "SAPs": saps}
-                        subtask_spec['COBALT']['beamformer']['tab_pipelines'].append(beamformer_pipeline)
-                if task_beamformer_spec['flys eye'].get("enabled", False):
-                    flyseye_pipeline = {"coherent": task_beamformer_spec["flys eye"]["settings"],
-                                        "stations": beamformer_station_list}
-                    subtask_spec['COBALT']['beamformer']['flyseye_pipelines'].append(flyseye_pipeline)
-                # todo: Clarify if we can add a subbands_selection on the flys eye task spec, to filter down for sap['subbands']
-                # If I got that correctly, specifying subbands is not really supported later down the chain, so whatever we do here gets ignored anyway?
-                # for sap in task_spec["SAPs"]:
-                #    target_sap = _get_related_target_sap_by_name(task_blueprint, sap['name'])
-                #    sap['subbands'] = filter_subbands(...)
-                #    if sap['subbands'] == target_sap['subbands']:  # todo: is this really required? pseudo-code in confluence suggests so, but what harm does the list do?
-                #       sap['subbands'] = []
+        for task_beamformer_spec in task_spec['beamformers']:
+            # the wanted/specified beamformer station list is the intersection of the observation station list with the requested beamformer stations.
+            # at the moment of scheduling this list is re-evaluated for available stations, and the max_nr_missing is evaluated as well.
+            # this intersection is not needed per se, because COBALT plays nicely and does similar filtering for stations that are actually available,
+            # but hey, if cobalt can play nice, then so can we! :)
+            # So, let's come up with the correct complete beamforming-stations-list, and ask cobalt to explicitly use these.
+
+            # combine all stations in the groups...
+            beamformer_station_list = sum([station_group["stations"] for station_group in task_beamformer_spec["station_groups"]], [])
+
+            # make intersection with observing-stations...
+            beamformer_station_set = set(beamformer_station_list).intersection(set(subtask_spec['stations']['station_list']))
+
+            # make it a nice readable sorted list.
+            for stokes_type in ["coherent", "incoherent"]:
+                if not task_beamformer_spec[stokes_type]["SAPs"]:
+                    # nothing specified for this stokes type
+                    continue
+
+                # SAPs
+                subtask_saps = []
+                for sap in task_beamformer_spec[stokes_type]["SAPs"]:
+                    subtask_sap = { "name": sap["name"], "tabs": [] }
+
+                    target_sap = _get_related_target_sap_by_name(task_blueprint, sap['name'])
+                    if stokes_type == "coherent":
+                        for tab in sap["tabs"]:
+                            subtask_sap["tabs"].append({
+                                "coherent": True,
+                                # determine absolute tab pointing for subtask by adding relative tab pointing from task to target sap pointing
+                                "pointing": tab["pointing"] if not tab.get("relative", False) else _add_pointings(tab['pointing'], target_sap['digital_pointing'])
+                            })
+
+                        if "tab_rings" in sap:
+                            ring_pointings = _generate_tab_ring_pointings(target_sap["digital_pointing"], sap.pop("tab_rings"))
+                            subtask_sap['tabs'] += [{'coherent': True, 'pointing': pointing} for pointing in ring_pointings]
+                    else:
+                        subtask_sap["tabs"] = [{"coherent": False}]
+
+                    if "subbands" in sap:
+                        sap['subbands'] = _filter_subbands(target_sap['subbands'], sap['subbands'])
+
+                    subtask_saps.append(subtask_sap)
+
+                # create a pipeline item and add it to the list
+                beamformer_pipeline = {stokes_type: task_beamformer_spec[stokes_type]["settings"],
+                                       "stations": beamformer_station_list,
+                                       "SAPs": subtask_saps}
+                subtask_spec['COBALT']['beamformer']['tab_pipelines'].append(beamformer_pipeline)
+
+                # add constraints for calculator
+                ss = StokesSettings()
+                ss.nrChannelsPerSubband = task_beamformer_spec[stokes_type]["settings"]["channels_per_subband"]
+                ss.timeIntegrationFactor = task_beamformer_spec[stokes_type]["settings"]["time_integration_factor"]
+                if stokes_type == "coherent":
+                    cobalt_calculator_constraints.coherentStokes.append(ss)
+                else:
+                    cobalt_calculator_constraints.incoherentStokes.append(ss)
+
+            if task_beamformer_spec['flys eye']['enabled']:
+                # add constraints for calculator
+                ss = StokesSettings()
+                ss.nrChannelsPerSubband = task_beamformer_spec["flys eye"]["settings"]["channels_per_subband"]
+                ss.timeIntegrationFactor = task_beamformer_spec["flys eye"]["settings"]["time_integration_factor"]
+                cobalt_calculator_constraints.coherentStokes.append(ss)
+
+                flyseye_pipeline = {"coherent": task_beamformer_spec["flys eye"]["settings"],
+                                    "stations": beamformer_station_list}
+                subtask_spec['COBALT']['beamformer']['flyseye_pipelines'].append(flyseye_pipeline)
+            # todo: Clarify if we can add a subbands_selection on the flys eye task spec, to filter down for sap['subbands']
+            # If I got that correctly, specifying subbands is not really supported later down the chain, so whatever we do here gets ignored anyway?
+            # for sap in task_spec["SAPs"]:
+            #    target_sap = _get_related_target_sap_by_name(task_blueprint, sap['name'])
+            #    sap['subbands'] = filter_subbands(...)
+            #    if sap['subbands'] == target_sap['subbands']:  # todo: is this really required? pseudo-code in confluence suggests so, but what harm does the list do?
+            #       sap['subbands'] = []
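The coherent branch above makes each TAB pointing absolute: a TAB marked "relative" is offset against its target SAP's digital pointing, anything else is passed through. A tiny worked sketch of that rule; the stand-in add_pointings below is illustrative only, the real _add_pointings helper is defined elsewhere in this module and is not part of this diff:

    # Illustrative stand-in for _add_pointings: element-wise addition of the angles.
    def add_pointings(a: dict, b: dict) -> dict:
        return {"angle1": a["angle1"] + b["angle1"], "angle2": a["angle2"] + b["angle2"]}

    target_sap_pointing = {"angle1": 1.00, "angle2": 0.50}
    tabs = [{"pointing": {"angle1": 0.01, "angle2": -0.01}, "relative": True},  # offset from the SAP
            {"pointing": {"angle1": 2.00, "angle2": 0.00}}]                     # already absolute

    subtask_tabs = [{"coherent": True,
                     "pointing": add_pointings(tab["pointing"], target_sap_pointing) if tab.get("relative", False) else tab["pointing"]}
                    for tab in tabs]
    # -> pointings (1.01, 0.49) and (2.00, 0.00)
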
     subtask_spec['stations']["antenna_set"] = task_spec["antenna_set"]
     subtask_spec['stations']["filter"] = task_spec["filter"]
@@ -301,15 +337,15 @@ def create_observation_subtask_specifications_from_observation_task_blueprint(ta
                                              "angle1": task_spec["tile_beam"]["angle1"],
                                              "angle2": task_spec["tile_beam"]["angle2"] }
+
+
+    # Calculate block sizes and feed those to the spec
+    cobalt_calculator = BlockSize(constraints=cobalt_calculator_constraints)
+    subtask_spec["COBALT"]["blocksize"] = cobalt_calculator.blockSize
+
     if "correlator" in task_spec:
-        corr = CorrelatorSettings()
-        corr.nrChannelsPerSubband = task_spec["correlator"]["channels_per_subband"]
-        corr.integrationTime = task_spec["correlator"]["integration_time"]
-        calculator = BlockSize(constraints=BlockConstraints(correlatorSettings=corr))
-        subtask_spec["COBALT"]["correlator"] = {}
-        subtask_spec["COBALT"]["correlator"]["enabled"] = True
-        subtask_spec["COBALT"]["correlator"]["blocks_per_integration"] = calculator.nrBlocks
-        subtask_spec["COBALT"]["correlator"]["integrations_per_block"] = calculator.nrSubblocks
+        subtask_spec["COBALT"]["correlator"]["blocks_per_integration"] = cobalt_calculator.nrBlocks
+        subtask_spec["COBALT"]["correlator"]["integrations_per_block"] = cobalt_calculator.nrSubblocks

     # make sure that the subtask_spec is valid conform the schema
     validate_json_against_schema(subtask_spec, subtask_template.schema)
@@ -472,6 +508,18 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask)

 def create_qaplots_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> Subtask:
+    if 'calibrator' in task_blueprint.specifications_template.name.lower():
+        # Calibrator requires related Target Task Observation for some specifications
+        target_task_blueprint = get_related_target_observation_task_blueprint(task_blueprint)
+        if target_task_blueprint is None:
+            raise SubtaskCreationException("Cannot retrieve specifications for task id=%d because no related target observation is found" % task_blueprint.pk)
+    else:
+        target_task_blueprint = task_blueprint
+
+    if not target_task_blueprint.specifications_doc.get("QA", {}).get("file_conversion", {}).get("enabled", False):
+        logger.debug("Skipping creation of qaplots_subtask because QA.file_conversion is not enabled")
+        return None
+
     qafile_subtasks = [st for st in task_blueprint.subtasks.all() if st.specifications_template.type.value == SubtaskType.Choices.QA_FILES.value]
     if qafile_subtasks:
         qafile_subtask = qafile_subtasks[0] # TODO: decide what to do when there are multiple qafile subtasks?
@@ -673,7 +721,7 @@ def schedule_subtask(subtask: Subtask) -> Subtask:
                 logger.error(e2)
         finally:
             # ... and re-raise the original exception (wrapped)
-            raise SubtaskSchedulingException("Error while scheduling subtask id=%d: %s" % (subtask.pk, str(e)))
+            raise SubtaskSchedulingException("Error while scheduling subtask id=%d" % (subtask.pk,)) from e

 def unschedule_subtask(subtask: Subtask) -> Subtask:
@@ -1100,48 +1148,117 @@ def schedule_observation_subtask(observation_subtask: Subtask):
     # TODO: are there any observations that take input dataproducts?

     # step 3: create output dataproducts, and link these to the output
+    dataproducts = []
     specifications_doc = observation_subtask.specifications_doc
-    dataproduct_specifications_template = DataproductSpecificationsTemplate.objects.get(name="SAP") # todo: should this be derived from the task relation specification template?
dataproduct_feedback_template = DataproductFeedbackTemplate.objects.get(name="empty") subtask_output = observation_subtask.outputs.first() # TODO: make proper selection, not default first() - directory = "/data/%s/%s/L%s/uv" % ("projects" if isProductionEnvironment() else "test-projects", - observation_subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project.name, - observation_subtask.id) - - for sap_nr, pointing in enumerate(specifications_doc['stations']['digital_pointings']): - antennaset = specifications_doc['stations']['antenna_set'] - antennafields = [] - for station in specifications_doc['stations']['station_list']: - fields = antennafields_for_antennaset_and_station(antennaset, station) - antennafields += [{"station": station, "field": field, "type": antennaset.split('_')[0]} for field in fields] - - sap = SAP.objects.create(specifications_doc={ "name": "%s_%s" % (observation_subtask.id, pointing['name']), - "pointing": pointing['pointing'], - "time": {"start_time": observation_subtask.start_time.isoformat(), - "duration": (observation_subtask.stop_time - observation_subtask.start_time).total_seconds()}, - "antennas": { + + # create SAP objects, as observations create new beams + antennaset = specifications_doc['stations']['antenna_set'] + antennafields = [] + for station in specifications_doc['stations']['station_list']: + fields = antennafields_for_antennaset_and_station(antennaset, station) + antennafields += [{"station": station, "field": field, "type": antennaset.split('_')[0]} for field in fields] + + saps = [SAP.objects.create(specifications_doc={ "name": "%s_%s" % (observation_subtask.id, pointing['name']), + "pointing": pointing['pointing'], + "time": {"start_time": observation_subtask.start_time.isoformat(), + "duration": (observation_subtask.stop_time - observation_subtask.start_time).total_seconds()}, + "antennas": { "antenna_set": antennaset, "fields": antennafields - } - }, - specifications_template=SAPTemplate.objects.get(name="SAP")) - - # create dataproducts in bulk, and assign each dp its own unique global identifier - dp_global_identifiers = SIPidentifier.objects.bulk_create([SIPidentifier(source="TMSS") for _ in pointing['subbands']]) - Dataproduct.objects.bulk_create([Dataproduct(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr), - directory=directory, - dataformat=Dataformat.objects.get(value="MeasurementSet"), - datatype=Datatype.objects.get(value="visibilities"), - producer=subtask_output, - specifications_doc={"sap": [str(sap_nr)]}, - specifications_template=dataproduct_specifications_template, - feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema), - feedback_template=dataproduct_feedback_template, - size=0 if sb_nr%10==0 else 1024*1024*1024*sb_nr, - expected_size=1024*1024*1024*sb_nr, - sap=sap, - global_identifier=dp_global_identifier) - for sb_nr, dp_global_identifier in zip(pointing['subbands'], dp_global_identifiers)]) + } + }, + specifications_template=SAPTemplate.objects.get(name="SAP")) for sap_nr, pointing in enumerate(specifications_doc['stations']['digital_pointings'])] + + # store everything below this directory + directory = "/data/%s/%s/L%s" % ("projects" if isProductionEnvironment() else "test-projects", + observation_subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project.name, + observation_subtask.id) + + # create correlated dataproducts + if specifications_doc['COBALT']['correlator']['enabled']: + 
dataproduct_specifications_template_visibilities = DataproductSpecificationsTemplate.objects.get(name="visibilities")
+        sb_nr_offset = 0 # subband numbers run from 0 to (nr_subbands-1), increasing across SAPs
+
+        for sap_nr, pointing in enumerate(specifications_doc['stations']['digital_pointings']):
+            for sb_nr, subband in enumerate(pointing['subbands'], start=sb_nr_offset):
+                dataproducts.append(Dataproduct(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr),
+                                                directory=directory+"/uv",
+                                                dataformat=Dataformat.objects.get(value="MeasurementSet"),
+                                                datatype=Datatype.objects.get(value="visibilities"),
+                                                producer=subtask_output,
+                                                specifications_doc={"sap": pointing["name"], "subband": subband},
+                                                specifications_template=dataproduct_specifications_template_visibilities,
+                                                feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
+                                                feedback_template=dataproduct_feedback_template,
+                                                size=0,
+                                                expected_size=1024*1024*1024*sb_nr,
+                                                sap=saps[sap_nr],
+                                                global_identifier=None))
+
+            sb_nr_offset += len(pointing['subbands'])
+
+
+    # create beamformer dataproducts
+    dataproduct_specifications_template_timeseries = DataproductSpecificationsTemplate.objects.get(name="timeseries")
+
+    def _sap_index(saps: list, sap_name: str) -> int:
+        """ Return the SAP index in the observation given a certain SAP name. """
+
+        sap_indices = [idx for idx,sap in enumerate(saps) if sap['name'] == sap_name]
+
+        # needs to be exactly one hit
+        if len(sap_indices) != 1:
+            raise SubtaskSchedulingException("SAP name %s must appear exactly once in the specification. It appeared %d times. Available names: %s" % (sap_name, len(sap_indices), [sap['name'] for sap in saps]))
+
+        return sap_indices[0]
+
+    def tab_dataproducts(sap_nr, pipeline_nr, tab_nr, stokes_settings, coherent):
+        nr_subbands = len(sap['subbands']) or len(specifications_doc['stations']['digital_pointings'][sap_nr]['subbands'])
+        nr_stokes = len(stokes_settings['stokes'])
+        nr_parts = ceil(1.0 * nr_subbands / stokes_settings['subbands_per_file'])
+
+        return [Dataproduct(filename="L%d_SAP%03d_N%03d_B%03d_S%03d_P%03d_bf.h5" % (observation_subtask.id, sap_nr, pipeline_nr, tab_nr, stokes_nr, part_nr),
+                            directory=directory+("/cs" if coherent else "/is"),
+                            dataformat=Dataformat.objects.get(value="Beamformed"),
+                            datatype=Datatype.objects.get(value="time series"),
+                            producer=subtask_output,
+                            specifications_doc={"sap": specifications_doc['stations']['digital_pointings'][sap_nr]["name"], "coherent": coherent, "identifiers": {"pipeline_index": pipeline_nr, "tab_index": tab_nr, "stokes_index": stokes_nr, "part_index": part_nr}},
+                            specifications_template=dataproduct_specifications_template_timeseries,
+                            feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
+                            feedback_template=dataproduct_feedback_template,
+                            size=0,
+                            expected_size=1024*1024*1024*tab_nr,
+                            sap=saps[sap_nr],
+                            global_identifier=None)
+                for part_nr in range(nr_parts) for stokes_nr in range(nr_stokes)]
+
+
+    # beamformer pipelines: one set of dataproducts per TAB.
+ pipeline_nr_offset = 0 + for pipeline_nr, pipeline in enumerate(specifications_doc['COBALT']['beamformer']['tab_pipelines'], start=pipeline_nr_offset): + for sap in pipeline['SAPs']: + sap_idx = _sap_index(specifications_doc['stations']['digital_pointings'], sap['name']) + + for tab_idx, tab in enumerate(sap['tabs']): + dataproducts += tab_dataproducts(sap_idx, pipeline_nr, tab_idx, pipeline['coherent'] if tab['coherent'] else pipeline['incoherent'], tab['coherent']) + + # fly's eye pipelines: one set of dataproducts per antenna field. + pipeline_nr_offset += len(specifications_doc['COBALT']['beamformer']['tab_pipelines']) + for pipeline_nr, pipeline in enumerate(specifications_doc['COBALT']['beamformer']['flyseye_pipelines'], start=pipeline_nr_offset): + for sap_idx, sap in enumerate(specifications_doc['stations']['digital_pointings']): + stations = pipeline['stations'] or specifications_doc['stations']['station_list'] + fields = sum([list(antenna_fields(station, antennaset)) for station in stations], []) + for tab_idx, tab in enumerate(fields): + dataproducts += tab_dataproducts(sap_idx, pipeline_nr, tab_idx, pipeline['coherent'], True) + + # Bulk create identifiers, and then update the dataproducts with a link to the actual created objects. + # This is needed as bulk_create needs to have any relations resolved. + dp_global_identifiers = SIPidentifier.objects.bulk_create([SIPidentifier(source="TMSS") for _ in dataproducts]) + for dp, global_identifier in zip(dataproducts, dp_global_identifiers): + dp.global_identifier = global_identifier + Dataproduct.objects.bulk_create(dataproducts) # step 4: resource assigner (if possible) assign_or_unassign_resources(observation_subtask) @@ -1194,7 +1311,7 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask): pipeline_subtask.specifications_template.type)) # TODO: use existing and reasonable selection and specification templates for output when we have those, for now, use "empty" - dataproduct_specifications_template = DataproductSpecificationsTemplate.objects.get(name="empty") + dataproduct_specifications_template = DataproductSpecificationsTemplate.objects.get(name="visibilities") dataproduct_feedback_template = DataproductFeedbackTemplate.objects.get(name="empty") # iterate over all inputs @@ -1225,7 +1342,7 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask): dataformat=dataformat, datatype=Datatype.objects.get(value="visibilities"), # todo: is this correct? 
producer=pipeline_subtask_output, - specifications_doc=get_default_json_object_for_schema(dataproduct_specifications_template.schema), + specifications_doc=input_dp.specifications_doc, specifications_template=dataproduct_specifications_template, feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema), feedback_template=dataproduct_feedback_template, @@ -1485,10 +1602,13 @@ def specifications_doc_meets_selection_doc(specifications_doc, selection_doc): meets_criteria = False else: spec = specifications_doc[k] - if isinstance(spec, Iterable) and isinstance(v, Iterable): + if isinstance(spec, list) and isinstance(v, list): for spec_v in spec: if spec_v not in v: meets_criteria = False + elif isinstance(v, list): + if spec not in v: + meets_criteria = False else: if spec != v: meets_criteria = False diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py index 9605ead221a9ae4a18596d0c6d887b4ad2791bc2..d53ace784b028f01ba199a80e067090526a66a41 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py @@ -37,6 +37,7 @@ from rest_framework.filters import OrderingFilter import json import logging +import dateutil from django.core.exceptions import ObjectDoesNotExist @@ -199,6 +200,66 @@ class DefaultTaskRelationSelectionTemplateViewSet(LOFARViewSet): serializer_class = serializers.DefaultTaskRelationSelectionTemplateSerializer +class ReservationStrategyTemplateViewSet(LOFARViewSet): + queryset = models.ReservationStrategyTemplate.objects.all() + serializer_class = serializers.ReservationStrategyTemplateSerializer + + @swagger_auto_schema(responses={status.HTTP_201_CREATED: 'The newly created reservation', + status.HTTP_403_FORBIDDEN: 'forbidden'}, + operation_description="Create a new Reservation based on this ReservationStrategyTemplate, " + "with the given <name>, <description>, <start_time> and <stop_time>", + manual_parameters=[Parameter(name='start_time', required=True, type='string', in_='query', + description="The start time as a timestamp string in isoformat"), + Parameter(name='stop_time', required=True, type='string', in_='query', + description="The stop time as a timestamp string in isoformat"), + Parameter(name='name', required=False, type='string', in_='query', + description="The name for the newly created reservation"), + Parameter(name='description', required=False, type='string', in_='query', + description="The description for the newly created reservation"), + Parameter(name='project_id', required=False, type='integer', in_='query', + description="the id of the project which will be the parent of the newly created reservation"), + ]) + @action(methods=['get'], detail=True) + def create_reservation(self, request, pk=None): + strategy_template = get_object_or_404(models.ReservationStrategyTemplate, pk=pk) + reservation_template_spec = add_defaults_to_json_object_for_schema(strategy_template.template, + strategy_template.reservation_template.schema) + + start_time = request.query_params.get('start_time', None) + stop_time = request.query_params.get('stop_time', None) + if start_time: + start_time = dateutil.parser.parse(start_time) # string to datetime + else: + start_time = datetime.now() + if stop_time: + stop_time = dateutil.parser.parse(stop_time) # string to datetime + else: + stop_time = None + + project_id = request.query_params.get('project_id', None) + if project_id: + project = 
get_object_or_404(models.Project, pk=request.query_params['project_id'])
+        else:
+            project = None
+
+        reservation = Reservation.objects.create(name=request.query_params.get('name', "reservation"),
+                                                 description=request.query_params.get('description', ""),
+                                                 project=project,
+                                                 specifications_template=strategy_template.reservation_template,
+                                                 specifications_doc=reservation_template_spec,
+                                                 start_time=start_time,
+                                                 stop_time=stop_time)
+
+        reservation_strategy_template_path = request._request.path
+        base_path = reservation_strategy_template_path[:reservation_strategy_template_path.find('/reservation_strategy_template')]
+        reservation_path = '%s/reservation/%s/' % (base_path, reservation.id,)
+
+        # return a response with the new serialized Reservation, and a Location to the new instance in the header
+        return Response(serializers.ReservationSerializer(reservation, context={'request':request}).data,
+                        status=status.HTTP_201_CREATED,
+                        headers={'Location': reservation_path})
+
+
 class DefaultReservationTemplateViewSet(LOFARViewSet):
     queryset = models.DefaultReservationTemplate.objects.all()
     serializer_class = serializers.DefaultReservationTemplateSerializer
diff --git a/SAS/TMSS/backend/src/tmss/urls.py b/SAS/TMSS/backend/src/tmss/urls.py
index 039b531a658e3bed589f131860f3d1193bfc3b39..66e58162725f917a20c5020e6492ad6f39bed7d0 100644
--- a/SAS/TMSS/backend/src/tmss/urls.py
+++ b/SAS/TMSS/backend/src/tmss/urls.py
@@ -142,6 +142,7 @@ router.register(r'default_scheduling_constraints_template', viewsets.DefaultSche
 router.register(r'default_task_template', viewsets.DefaultTaskTemplateViewSet)
 router.register(r'default_task_relation_selection_template', viewsets.DefaultTaskRelationSelectionTemplateViewSet)
 router.register(r'default_reservation_template', viewsets.DefaultReservationTemplateViewSet)
+router.register(r'reservation_strategy_template', viewsets.ReservationStrategyTemplateViewSet)

 # instances
 router.register(r'cycle', viewsets.CycleViewSet)
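For illustration, the new action can be exercised like the other template endpoints. A hedged sketch of a client call; BASE_URL, AUTH and the template id are placeholders, and the query parameters are the ones declared in the swagger annotation above:

    # Hypothetical REST call against the new create_reservation action.
    import requests

    params = {"start_time": "2021-01-01T10:00:00",
              "stop_time": "2021-01-01T12:00:00",
              "name": "station maintenance"}
    r = requests.get(BASE_URL + "/reservation_strategy_template/1/create_reservation",
                     params=params, auth=AUTH)
    assert r.status_code == 201                  # a new Reservation was created
    new_reservation_url = r.headers["Location"]  # points at the new instance
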
diff --git a/SAS/TMSS/backend/test/t_conversions.py b/SAS/TMSS/backend/test/t_conversions.py
index 7f8d66d6e4b8758b3cf13bf04bf3d8488deb89ad..1773168c7b1ded14c41aee27f0fddd6683d9f9f7 100755
--- a/SAS/TMSS/backend/test/t_conversions.py
+++ b/SAS/TMSS/backend/test/t_conversions.py
@@ -362,6 +362,35 @@ class UtilREST(unittest.TestCase):
             self.assertNotEqual(rise, rise_last)
             rise_last = rise

+    def test_util_target_rise_and_set_detects_when_target_above_horizon(self):
+
+        # assert always below and always above are usually false
+        r = requests.get(BASE_URL + '/util/target_rise_and_set?angle1=0.5&angle2=0.8&timestamps=2020-01-01&horizon=0.2', auth=AUTH)
+        self.assertEqual(r.status_code, 200)
+        r_dict = json.loads(r.content.decode('utf-8'))
+        self.assertIsNotNone(r_dict['CS002'][0]['rise'])
+        self.assertIsNotNone(r_dict['CS002'][0]['set'])
+        self.assertFalse(r_dict['CS002'][0]['always_below_horizon'])
+        self.assertFalse(r_dict['CS002'][0]['always_above_horizon'])
+
+        # assert rise and set are None and flag is true when target is always above horizon
+        r = requests.get(BASE_URL + '/util/target_rise_and_set?angle1=0.5&angle2=0.8&timestamps=2020-01-01&horizon=0.1', auth=AUTH)
+        self.assertEqual(r.status_code, 200)
+        r_dict = json.loads(r.content.decode('utf-8'))
+        self.assertIsNone(r_dict['CS002'][0]['rise'])
+        self.assertIsNone(r_dict['CS002'][0]['set'])
+        self.assertTrue(r_dict['CS002'][0]['always_above_horizon'])
+        self.assertFalse(r_dict['CS002'][0]['always_below_horizon'])
+
+        # assert rise and set are None and flag is true when target is always below horizon
+        r = requests.get(BASE_URL + '/util/target_rise_and_set?angle1=0.5&angle2=-0.5&timestamps=2020-01-01&horizon=0.2', auth=AUTH)
+        self.assertEqual(r.status_code, 200)
+        r_dict = json.loads(r.content.decode('utf-8'))
+        self.assertIsNone(r_dict['CS002'][0]['rise'])
+        self.assertIsNone(r_dict['CS002'][0]['set'])
+        self.assertFalse(r_dict['CS002'][0]['always_above_horizon'])
+        self.assertTrue(r_dict['CS002'][0]['always_below_horizon'])
+

 if __name__ == "__main__":
     os.environ['TZ'] = 'UTC'
diff --git a/SAS/TMSS/backend/test/t_reservations.py b/SAS/TMSS/backend/test/t_reservations.py
index c6afaed6dc60041c8d811437d76c11d0e9d7a28f..9cc99f3a7da802c98d3e39f3dd608068351fbff1 100755
--- a/SAS/TMSS/backend/test/t_reservations.py
+++ b/SAS/TMSS/backend/test/t_reservations.py
@@ -240,4 +240,79 @@ class TestStationReservations(unittest.TestCase):
                          get_active_station_reservations_in_timewindow(reservation_start_time, reservation_stop_time-timedelta(days=5)))
         self.assertCountEqual(["CS001"],
                          get_active_station_reservations_in_timewindow(reservation_start_time, reservation_stop_time-timedelta(days=6)))

+from lofar.sas.tmss.tmss.exceptions import SchemaValidationException
+from django.core.exceptions import ValidationError
+
+
+class CreationFromReservationStrategyTemplate(unittest.TestCase):
+    """
+    Test that reservations can be created from a strategy template
+    """
+
+    def test_create_reservation_ok(self):
+        """
+        Check that reservations from the reservation strategy can be created via the api
+        """
+        strategy_template = models.ReservationStrategyTemplate.objects.get(name="Regular station maintenance")
+
+        reservation_spec = add_defaults_to_json_object_for_schema(strategy_template.template,
+                                                                  strategy_template.reservation_template.schema)
+        reservation = models.Reservation.objects.create(name=strategy_template.name,
+                                                        description="Unittest with %s" % strategy_template.description,
+                                                        project=None,
+                                                        specifications_template=strategy_template.reservation_template,
+                                                        specifications_doc=reservation_spec,
+                                                        start_time=datetime.now(),
+                                                        stop_time=None)
+
+        # Check URL of the reservation that is created
+        response = GET_and_assert_equal_expected_code(self, BASE_URL + '/reservation/%d' % reservation.pk, 200)
+        self.assertEqual(response['id'], reservation.pk)  # should return the id of the reservation just created
+
+        # Check that action call 'create_reservation' (no parameters) of strategy template creates a
+        # new reservation (with http result code 201)
+        response = GET_and_assert_equal_expected_code(self, BASE_URL + '/reservation_strategy_template/%d/create_reservation' % strategy_template.pk, 201)
+        self.assertNotEqual(response['id'], reservation.pk)  # should be a different id than the previous one created
+        self.assertLess(response['start_time'], datetime.utcnow().isoformat())  # start_time was set to now, so that was a few microseconds ago
+        self.assertEqual(response['stop_time'], None)
+        self.assertEqual(response['duration'], None)
+        self.assertEqual(response['name'], "reservation")
+        self.assertEqual(response['specifications_doc'], reservation_spec)
+
+    def test_create_reservation_exception(self):
+        """
+        Check that creating a reservation from the reservation strategy results in an Exception due to a wrong
+        station assignment
+        """
+        strategy_template = models.ReservationStrategyTemplate.objects.get(name="Regular station maintenance")
+        strategy_template.template['resources']['stations'] = ['CS999']
+        # using ValidationError seems not to work?
+        with self.assertRaises(Exception) as context:
+            strategy_template.save()
+        self.assertIn('is not one of', str(context.exception))
+        self.assertIn('Failed validating', str(context.exception))
+
+
+class ReservationTest(unittest.TestCase):
+    """
+    Check the Reservation model
+    TODO: more testcases to be added
+    """
+
+    def test_create_reservation_validation_error(self):
+        """
+        Check that creating a reservation results in a SchemaValidationException due to a wrong station assignment
+        """
+        reservation_template = models.ReservationTemplate.objects.get(pk=1)
+        reservation_spec = get_default_json_object_for_schema(reservation_template.schema)
+        reservation_spec['resources']['stations'] = ['CS999']
+        with self.assertRaises(SchemaValidationException) as context:
+            models.Reservation.objects.create(name="Test Reservation",
+                                              description="Unittest",
+                                              project=None,
+                                              specifications_template=reservation_template,
+                                              specifications_doc=reservation_spec,
+                                              start_time=datetime.now(),
+                                              stop_time=None)
+        self.assertIn('is not one of', str(context.exception))
diff --git a/SAS/TMSS/backend/test/t_scheduling.py b/SAS/TMSS/backend/test/t_scheduling.py
index 6dda9cf61de9fa857d009bec6204fad744de1e75..5bcfa16e9e29e9e82b75a3c5f13dff663a89289d 100755
--- a/SAS/TMSS/backend/test/t_scheduling.py
+++ b/SAS/TMSS/backend/test/t_scheduling.py
@@ -97,6 +97,18 @@ def create_reserved_stations_for_testing(station_list):
     assigned = rarpc.do_assignment(ra_spec)
     return assigned

+def duplicates(l: list) -> list:
+    # O(n^2), but that's good enough.
+    uniques = []
+    dupes = []
+
+    for e in l:
+        if e not in uniques:
+            uniques.append(e)
+        elif e not in dupes:
+            dupes.append(e)
+
+    return dupes

 class SchedulingTest(unittest.TestCase):
     def setUp(self):
@@ -113,13 +125,12 @@ class SchedulingTest(unittest.TestCase):

         test_data_creator.wipe_cache()

-    def test_schedule_observation_subtask_with_enough_resources_available(self):
+    def _test_schedule_observation_subtask_with_enough_resources_available(self, observation_specification_doc):
         with tmss_test_env.create_tmss_client() as client:
             task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url'])
             task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(task_blueprint_data, '/task_blueprint/')
             subtask_template = client.get_subtask_template("observation control")
-            spec = get_default_json_object_for_schema(subtask_template['schema'])
-            spec['stations']['digital_pointings'][0]['subbands'] = [0]
+            spec = add_defaults_to_json_object_for_schema(observation_specification_doc, subtask_template['schema'])

             cluster_url = client.get_path_as_json_object('/cluster/1')['url']
             subtask_data = test_data_creator.Subtask(specifications_template_url=subtask_template['url'],
@@ -137,6 +148,34 @@ class SchedulingTest(unittest.TestCase):
             self.assertEqual('scheduled', subtask['state_value'])
             self.assertEqual('scheduled', tmss_test_env.ra_test_environment.radb.getTask(tmss_id=subtask_id)['status'])

+            # test whether all dataproduct specifications are unique
+            outputs = Subtask.objects.get(pk=subtask_id).outputs.all()
+            dataproduct_specifications_docs = [dp.specifications_doc for output in outputs for dp in output.dataproducts.all()]
+            duplicate_dataproduct_specification_docs = duplicates(dataproduct_specifications_docs)
+
+            self.assertEqual([], duplicate_dataproduct_specification_docs)
+
+    def test_schedule_observation_subtask_with_enough_resources_available(self):
+        spec = { "stations": { "digital_pointings": [ { "subbands": [0] } ] } }
+        self._test_schedule_observation_subtask_with_enough_resources_available(spec)
+
+    def test_schedule_beamformer_observation_subtask_with_enough_resources_available(self):
+        spec = {
+          "stations": { "digital_pointings": [ { "name": "target0", "subbands": [0] } ] },
+          "COBALT": {
+              "version": 1,
+              "correlator": { "enabled": False },
+              "beamformer": {
+                  "tab_pipelines": [
+                      {
+                          "SAPs": [ { "name": "target0", "tabs": [ { "coherent": False }, { "coherent": True } ] } ]
+                      }
+                  ]
+              }
+          }
+        }
+        self._test_schedule_observation_subtask_with_enough_resources_available(spec)
+
     def test_schedule_observation_subtask_with_one_blocking_reservation_failed(self):
         """
         Set (Resource Assigner) station CS001 to reserved
@@ -260,6 +299,7 @@ class SchedulingTest(unittest.TestCase):
             obs_subtask = test_data_creator.post_data_and_get_response_as_json_object(obs_subtask_data, '/subtask/')
             obs_subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url']), '/subtask_output/')
             test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(filename="L%s_SB000.MS"%obs_subtask['id'],
+                                                                                  specifications_doc={"sap": "target0", "subband": 0 },
                                                                                   subtask_output_url=obs_subtask_output_url), '/dataproduct/')

             # now create the pipeline...
@@ -304,6 +344,7 @@ class SchedulingTest(unittest.TestCase):
             obs_subtask = test_data_creator.post_data_and_get_response_as_json_object(obs_subtask_data, '/subtask/')
             obs_subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url']), '/subtask_output/')
             test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(filename="L%s_SB000.MS"%obs_subtask['id'],
+                                                                                  specifications_doc={"sap": "target0", "subband": 0},
                                                                                   subtask_output_url=obs_subtask_output_url), '/dataproduct/')

             # now create the ingest...
@@ -416,6 +457,23 @@ class SubtaskInputOutputTest(unittest.TestCase):
         setting.value = True
         setting.save()

+
+    def test_specifications_doc_meets_selection_doc(self):
+        # empty selection matches all
+        self.assertTrue(specifications_doc_meets_selection_doc({'something else': 'target0'}, {}))
+
+        # specification is a list? specification must be a subset of the selection
+        self.assertTrue(specifications_doc_meets_selection_doc({'sap': ['target0']}, {'sap': ['target0']}))
+        self.assertFalse(specifications_doc_meets_selection_doc({'sap': ['target0','target1','target2']}, {'sap': ['target0','target1']}))
+
+        # specification is a value? it must appear in the selection
+        self.assertTrue(specifications_doc_meets_selection_doc({'sap': 'target0'}, {'sap': ['target0']}))
+        self.assertTrue(specifications_doc_meets_selection_doc({'sap': 'target0'}, {'sap': ['target0','target1']}))
+        self.assertTrue(specifications_doc_meets_selection_doc({'sap': 'target0'}, {'sap': 'target0'}))
+
+        # specification must contain the selection key
+        self.assertFalse(specifications_doc_meets_selection_doc({'something else': 'target0'}, {'sap': 'target0'}))
+
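In prose, the selection semantics exercised by this test are: a selection key must be present in the specification; a list-valued specification must be a subset of the selection; a scalar specification must equal the selection value or appear in a list-valued selection. A self-contained sketch, a simplified re-implementation for illustration only, mirroring the change to specifications_doc_meets_selection_doc in subtasks.py above:

    # Simplified re-implementation, for illustration; not the actual TMSS function.
    def meets_selection(specifications_doc: dict, selection_doc: dict) -> bool:
        for k, v in selection_doc.items():
            if k not in specifications_doc:
                return False                        # selection key must be present
            spec = specifications_doc[k]
            if isinstance(spec, list) and isinstance(v, list):
                if any(s not in v for s in spec):   # list spec: must be subset of selection
                    return False
            elif isinstance(v, list):
                if spec not in v:                   # scalar spec: must be member of selection list
                    return False
            elif spec != v:                         # scalar vs scalar: equality
                return False
        return True

    assert meets_selection({'sap': 'target0', 'subband': 0}, {'sap': ['target0', 'target1']})
    assert not meets_selection({'sap': ['target0', 'target2']}, {'sap': ['target0', 'target1']})
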
     @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks.assign_or_unassign_resources")
     def test_schedule_pipeline_subtask_filters_predecessor_output_dataproducts_for_input(self, assign_resources_mock):
         # setup:
@@ -431,12 +489,12 @@ class SubtaskInputOutputTest(unittest.TestCase):
         pipe_in2 = models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=pipe_st, producer=obs_out2, selection_doc={'sap': ['target1']}))

         # create obs output dataproducts with specs we can filter on
-        dp1_1 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out1, specifications_doc={'sap': ['target0']}))
-        dp1_2 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out1, specifications_doc={'sap': ['target1']}))
-        dp1_3 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out1, specifications_doc={'sap': ['target0']}))
+        dp1_1 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out1, specifications_doc={'sap': 'target0', 'subband': 0}))
+        dp1_2 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out1, specifications_doc={'sap': 'target1', 'subband': 0}))
+        dp1_3 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out1, specifications_doc={'sap': 'target0', 'subband': 1}))

-        dp2_1 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out2, specifications_doc={'sap': ['target0']}))
-        dp2_2 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out2, specifications_doc={'sap': ['target1']}))
+        dp2_1 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out2, specifications_doc={'sap': 'target0', 'subband': 0}))
+        dp2_2 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out2, specifications_doc={'sap': 'target1', 'subband': 0}))

         # trigger:
         # schedule pipeline, which should attach the correct subset of dataproducts to the pipeline inputs
@@ -488,6 +546,7 @@ class SAPTest(unittest.TestCase):
                 client.set_subtask_status(subtask_id, 'defined')
                 subtask = client.schedule_subtask(subtask_id)

+                self.assertEqual(1, subtask_model.output_dataproducts.count())
                 self.assertEqual(1, subtask_model.output_dataproducts.values('sap').count())
                 self.assertEqual(subtask_model.output_dataproducts.first().sap.specifications_doc['pointing']['angle1'], pointing['angle1'])
                 self.assertEqual(subtask_model.output_dataproducts.first().sap.specifications_doc['pointing']['angle2'], pointing['angle2'])
@@ -505,8 +564,8 @@ class SAPTest(unittest.TestCase):
         pipe_in = models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=pipe_st, producer=obs_out))

         # create obs output dataproducts
-        dp1_in = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out))
-        dp2_in = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out))
+        dp1_in = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out, specifications_doc={"identifiers": { "sap_index": 0, "subband_index": 0 }}))
+        dp2_in = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out, specifications_doc={"identifiers": { "sap_index": 0, "subband_index": 1 }}))

         # schedule pipeline, which should copy the SAP
         schedule_pipeline_subtask(pipe_st)
diff --git a/SAS/TMSS/backend/test/t_schemas.py b/SAS/TMSS/backend/test/t_schemas.py
index 0cf0157e39e2917d8baaa06384836c4795c41ab4..e9b25c35efca7a967bf7bf541c027cb15b836f7b 100755
--- a/SAS/TMSS/backend/test/t_schemas.py
+++ b/SAS/TMSS/backend/test/t_schemas.py
@@ -43,13 +43,17 @@ class TestSchemas(unittest.TestCase):
         """ Check whether the given schema is valid. """

         # Can all $refs be actually resolved?
-        logger.info("Resolving references for schema %s", name)
-        resolved_refs(schema)
+        try:
+            resolved_refs(schema)
+        except Exception as e:
+            raise Exception("Failed to resolve references in schema %s" % name) from e

         # Does this schema provide actually valid defaults?
-        logger.info("Validating defaults of schema %s", name)
-        defaults = get_default_json_object_for_schema(schema)
-        validate_json_against_schema(defaults, schema)
+        try:
+            defaults = get_default_json_object_for_schema(schema)
+            validate_json_against_schema(defaults, schema)
+        except Exception as e:
+            raise Exception("Failed to validate the defaults of schema %s" % name) from e

     def check_schema_table(self, model):
         """ Check all schemas present in the database for a given model. """
diff --git a/SAS/TMSS/backend/test/t_subtasks.py b/SAS/TMSS/backend/test/t_subtasks.py
index 806fcd682579d20829b1b010f5548fb530ae73e1..8086f231da703fba4bcdf574bed9940f0ee6d3d2 100755
--- a/SAS/TMSS/backend/test/t_subtasks.py
+++ b/SAS/TMSS/backend/test/t_subtasks.py
@@ -181,9 +181,9 @@ class SubTasksCreationFromTaskBluePrint(unittest.TestCase):
         self.assertEqual(None, subtask)

         # Next call will fail due to no qa_files object
-        # ValueError: Cannot create qa_plots subtask for task_blueprint id=1 because it has no qafile subtask(s)
-        with self.assertRaises(SubtaskCreationException):
-            subtask = create_qaplots_subtask_from_task_blueprint(task_blueprint)
+        subtask = create_qaplots_subtask_from_task_blueprint(task_blueprint)
+        # subtask object is None because QA file conversion is not enabled by default
+ self.assertEqual(None, subtask) def test_create_sequence_of_subtask_from_task_blueprint_with_QA_enabled(self): diff --git a/SAS/TMSS/backend/test/t_tasks.py b/SAS/TMSS/backend/test/t_tasks.py index 2652a8ff989b584ae69834b1b50beaf5dc51a2f2..88e4791390c6e46ff365372fe86cc79be91f24b3 100755 --- a/SAS/TMSS/backend/test/t_tasks.py +++ b/SAS/TMSS/backend/test/t_tasks.py @@ -44,6 +44,8 @@ rest_data_creator = TMSSRESTTestDataCreator(tmss_test_env.django_server.url, (tm from lofar.sas.tmss.tmss.tmssapp.tasks import * +from lofar.sas.tmss.tmss.exceptions import SchemaValidationException + class CreationFromSchedulingUnitDraft(unittest.TestCase): """ @@ -397,7 +399,6 @@ class TaskBlueprintStateTest(unittest.TestCase): self.assertEqual(expected_task_state, task_blueprint.status) - if __name__ == "__main__": os.environ['TZ'] = 'UTC' unittest.main() diff --git a/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py b/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py index d3dd67b17e87d2fbe5e8f3b110a70c054170f46d..f0c8c331dc951757c7e98c3a3c90b467591446f7 100755 --- a/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py +++ b/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py @@ -335,6 +335,69 @@ class ReservationTemplateTestCase(unittest.TestCase): DELETE_and_assert_gone(self, url) +class ReservationStrategyTemplateTestCase(unittest.TestCase): + def test_reservation_strategy_template_list_apiformat(self): + r = requests.get(BASE_URL + '/reservation_strategy_template/?format=api', auth=AUTH) + self.assertEqual(r.status_code, 200) + self.assertTrue("Reservation Strategy Template List" in r.content.decode('utf8')) + + def test_reservation_strategy_template_GET_nonexistant_raises_error(self): + GET_and_assert_equal_expected_code(self, BASE_URL + '/reservation_strategy_template/1234321/', 404) + + def test_reservation_strategy_template_POST_and_GET(self): + # POST and GET a new item and assert correctness + test_data = test_data_creator.ReservationStrategyTemplate() + expected_data = test_data_creator.update_schema_from_template("reservationtemplate", test_data) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/reservation_strategy_template/', test_data, 201, expected_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, expected_data) + + def test_reservation_strategy_template_PUT_invalid_raises_error(self): + test_data = test_data_creator.ReservationStrategyTemplate() + PUT_and_assert_expected_response(self, BASE_URL + '/reservation_strategy_template/9876789876/', test_data, 404, {}) + + def test_reservation_strategy_template_PUT(self): + # POST new item, verify + test_data = test_data_creator.ReservationStrategyTemplate() + expected_data = test_data_creator.update_schema_from_template("reservationtemplate", test_data) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/reservation_strategy_template/', test_data, 201, expected_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, expected_data) + # PUT new values, verify + test_data2 = test_data_creator.ReservationStrategyTemplate("reservationtemplate2") + expected_data2 = test_data_creator.update_schema_from_template("reservationtemplate", test_data2) + PUT_and_assert_expected_response(self, url, test_data2, 200, expected_data2) + GET_OK_and_assert_equal_expected_response(self, url, expected_data2) + + def test_reservation_strategy_template_PATCH(self): + # POST new item, verify + test_data = test_data_creator.ReservationStrategyTemplate() + 
expected_data = test_data_creator.update_schema_from_template("reservationtemplate", test_data) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/reservation_strategy_template/', test_data, 201, expected_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, expected_data) + + test_patch = {"name": "new_name", + "description": "better description"} + + # PATCH item and verify + expected_patch_data = test_data_creator.update_schema_from_template("reservationtemplate", test_patch) + PATCH_and_assert_expected_response(self, url, test_patch, 200, expected_patch_data) + expected_data = dict(test_data) + expected_data.update(expected_patch_data) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) + + def test_reservation_strategy_template_DELETE(self): + # POST new item, verify + test_data = test_data_creator.ReservationStrategyTemplate() + expected_data = test_data_creator.update_schema_from_template("reservationtemplate", test_data) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/reservation_strategy_template/', test_data, 201, expected_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, expected_data) + # DELETE and check it's gone + DELETE_and_assert_gone(self, url) + + class TaskTemplateTestCase(unittest.TestCase): def test_task_template_list_apiformat(self): diff --git a/SAS/TMSS/backend/test/tmss_test_data_django_models.py b/SAS/TMSS/backend/test/tmss_test_data_django_models.py index aa6e8d49db19e7c5daeef025e9fa50ec930089df..2a6c110bc7fd857e9706c061d9b7c237b924cd4a 100644 --- a/SAS/TMSS/backend/test/tmss_test_data_django_models.py +++ b/SAS/TMSS/backend/test/tmss_test_data_django_models.py @@ -580,6 +580,22 @@ def Reservation_test_data(name="MyReservation", duration=None, start_time=None, "specifications_template": specifications_template} +def ReservationStrategyTemplate_test_data(name="my_ReservationStrategyTemplate", + reservation_template:models.ReservationTemplate=None, + template:dict=None) -> dict: + if reservation_template is None: + reservation_template = models.ReservationTemplate.objects.create(**ReservationTemplate_test_data()) + + if template is None: + template = get_default_json_object_for_schema(reservation_template.schema) + + return {"name": name, + "description": 'My Reservation Template description', + "template": template, + "reservation_template": reservation_template, + "tags": ["TMSS", "TESTING"]} + + def ProjectPermission_test_data(name=None, GET=None, PUT=None, POST=None, PATCH=None, DELETE=None) -> dict: if name is None: name = 'MyProjectPermission_%s' % uuid.uuid4() diff --git a/SAS/TMSS/backend/test/tmss_test_data_rest.py b/SAS/TMSS/backend/test/tmss_test_data_rest.py index 16754bbfdb3b139a06f84fbf2bf800deb4f2db1d..340b365afe6f116d668e92ba6f31256cd64bb523 100644 --- a/SAS/TMSS/backend/test/tmss_test_data_rest.py +++ b/SAS/TMSS/backend/test/tmss_test_data_rest.py @@ -147,6 +147,29 @@ class TMSSRESTTestDataCreator(): "schema": schema, "tags": ["TMSS", "TESTING"]} + @property + def cached_reservation_template_url(self): + try: + return self._reservation_template_url + except AttributeError: + self._reservation_template_url = self.post_data_and_get_url(self.ReservationTemplate(), '/reservation_template/') + return self._reservation_template_url + + def ReservationStrategyTemplate(self, name="my_ReservationStrategyTemplate", + reservation_template_url=None, + template:dict=None) -> dict: + if reservation_template_url is None: + reservation_template_url = 
self.cached_reservation_template_url
+
+        if template is None:
+            template = self.get_response_as_json_object(reservation_template_url+'/default')
+
+        return {"name": name,
+                "description": 'My ReservationTemplate description',
+                "template": template,
+                "reservation_template": reservation_template_url,
+                "version": "1",
+                "tags": ["TMSS", "TESTING"]}

     def SchedulingUnitObservingStrategyTemplate(self, name="my_SchedulingUnitObservingStrategyTemplate",
                                                 scheduling_unit_template_url=None,
diff --git a/SAS/TMSS/client/lib/populate.py b/SAS/TMSS/client/lib/populate.py
index 6d3420403a6490f9b74c7117f4fb845bce66e9e5..ccadba3d1274599f1d78b56c40c2be74405085fd 100644
--- a/SAS/TMSS/client/lib/populate.py
+++ b/SAS/TMSS/client/lib/populate.py
@@ -38,14 +38,18 @@ def populate_schemas(schema_dir: str=None, templates_filename: str=None):
     # keep track of the templates, json schemas and references
     templates_dict = {}
     observing_strategy_templates = []
+    reservation_strategy_templates = []
     schema_references = {}
     all_references = set()

     # load all templates and schemas and prepare them for upload.
     # determine the dependencies, and upload the dependents first, and the rest in parallel later.
     for template in templates:
-        with open(os.path.join(schema_dir, template['file_name'])) as schema_file:
-            json_schema = json.loads(schema_file.read())
+        try:
+            with open(os.path.join(schema_dir, template['file_name'])) as schema_file:
+                json_schema = json.loads(schema_file.read())
+        except Exception as e:
+            raise Exception("Could not decode JSON schema %s" % template['file_name']) from e

         # add template name/description/version from schema if not already in template
         template['name'] = template.get('name', json_schema.get('title', '<no name>'))
@@ -71,7 +75,7 @@ def populate_schemas(schema_dir: str=None, templates_filename: str=None):
         # get the id without trailing # and/or /
         json_schema_id = json_schema.get('$id', "").rstrip("#").rstrip("/")

-        if template_name == 'scheduling_unit_observing_strategy_template':
+        if 'strategy_template' in template_name:
             template['template'] = json_schema
         else:
             template['schema'] = json_schema
@@ -83,10 +87,17 @@ def populate_schemas(schema_dir: str=None, templates_filename: str=None):

         # store the prepared template for upload
         if template_name == 'scheduling_unit_observing_strategy_template':
+            template["strategy_template_name"] = template_name  # remember the 'strategy_template' name
+            template["template_name"] = "scheduling_unit_template"
             observing_strategy_templates.append(template)
+        elif template_name == 'reservation_strategy_template':
+            template["strategy_template_name"] = template_name
+            template["template_name"] = "reservation_template"
+            reservation_strategy_templates.append(template)
         else:
             templates_dict[json_schema_id] = template

+
     # helper functions for uploading
     def upload_template(template: dict):
         logger.info("Uploading template with name='%s' version='%s'", template['name'], template['version'])
@@ -103,13 +114,18 @@ def populate_schemas(schema_dir: str=None, templates_filename: str=None):
             template = templates_dict.pop(id)
             upload_template(template)

-    # helper functions for uploading observing_strategy_templates
-    def upload_observing_strategy_templates(template: dict):
-        scheduling_unit_templates = client.get_path_as_json_object('scheduling_unit_template?name=' + template.get('scheduling_unit_template_name') + '&version=' + template.get('scheduling_unit_template_version'))
-        scheduling_unit_template = scheduling_unit_templates[0]
-        template['scheduling_unit_template'] = 
scheduling_unit_template['url'] - logger.info("Uploading observation strategy with name='%s' version='%s'", template['name'], template['version']) - client.post_template(template_path='scheduling_unit_observing_strategy_template', **template) + def upload_strategy_templates(template: dict): + """ + Helper function for uploading strategy_templates + Use template["strategy_template_name"] for the name of the 'strategy_template' to be uploaded + Use template["template_name"] for the name of the template (used for validation) + """ + tn = template.get('template_name') + response_templates = client.get_path_as_json_object(tn+'?name=' + template.get(tn+'_name') + '&version=' + template.get(tn+'_version')) + template[tn] = response_templates[0]['url'] + logger.info("Uploading strategy with name='%s' version='%s'", template['name'], template['version']) + client.post_template(template_path=template.get('strategy_template_name'), **template) + # first, upload all dependent templates for ref in all_references: @@ -118,11 +134,15 @@ def populate_schemas(schema_dir: str=None, templates_filename: str=None): # then, upload the remaining templates in parallel rest_templates = [template for template in templates_dict.values()] with ThreadPoolExecutor() as executor: - executor.map(upload_template, rest_templates) + executor.map(upload_template, rest_templates) + + # the reservation_strategy_templates + with ThreadPoolExecutor() as executor: + executor.map(upload_strategy_templates, reservation_strategy_templates) # and finally, the observing_strategy_templates with ThreadPoolExecutor() as executor: - executor.map(upload_observing_strategy_templates, observing_strategy_templates) + executor.map(upload_strategy_templates, observing_strategy_templates) scheduling_constraints_templates = client.get_path_as_json_object('scheduling_constraints_template') if scheduling_constraints_templates: diff --git a/SAS/TMSS/frontend/tmss_webapp/package.json b/SAS/TMSS/frontend/tmss_webapp/package.json index e9cc1d244a28ffcb034292897693fb7875c7a0f9..5de0cf8841f3e116bcd8cf264c26613650b2467f 100644 --- a/SAS/TMSS/frontend/tmss_webapp/package.json +++ b/SAS/TMSS/frontend/tmss_webapp/package.json @@ -38,6 +38,7 @@ "react-bootstrap-datetimepicker": "0.0.22", "react-calendar-timeline": "^0.27.0", "react-dom": "^16.13.1", + "react-flatpickr": "^3.10.7", "react-frame-component": "^4.1.2", "react-json-to-table": "^0.1.7", "react-json-view": "^1.19.1", diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/CustomDateComp.js b/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/CustomDateComp.js index 98d84429009f475b44411113fe6d7d6d319dcf88..50623578335782048c11ba4ff25bb4f182370119 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/CustomDateComp.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/CustomDateComp.js @@ -1,7 +1,11 @@ import React, { Component } from 'react'; +import Flatpickr from "react-flatpickr"; import {Calendar} from 'primereact/calendar'; import moment from 'moment'; import UIConstants from '../../utils/ui.constants'; +import UtilService from '../../services/util.service'; + +import "flatpickr/dist/flatpickr.css"; //const DATE_TIME_FORMAT = 'YYYY-MM-DD HH:mm:ss'; @@ -16,9 +20,13 @@ export default class CustomDateComp extends Component { componentDidMount(){ let parentRows = this.props.agGridReact.props.rowData[this.props.node.rowIndex]; let parentCellData = parentRows[this.props.colDef.field]; - this.setState({ - date:parentCellData - }) + UtilService.getUTC() + 
.then(systemTime => { + this.setState({ + date:parentCellData, + systemTime: moment.utc(systemTime) + }) + }); } isPopup() { @@ -33,30 +41,30 @@ export default class CustomDateComp extends Component { } render() { - return ( - <Calendar - d dateFormat = {UIConstants.CALENDAR_DATE_FORMAT} - value= {this.state.date} - onChange= {e => {this.updateDateChanges(e)}} - // onBlur= {e => {this.updateDateChanges(e)}} - //data-testid="start" - todayButtonClassName="today-calendar-btn" - showButtonBar - showTime= {true} - showSeconds= {true} - hourFormat= "24" - showIcon= {false} inline - /> - ); + return this.state.systemTime?( + <Flatpickr + data-enable-time + options={{ + "inline": true, + "enableSeconds": true, + "time_24hr": true, + "defaultDate": this.state.systemTime?this.state.systemTime.format(UIConstants.CALENDAR_DEFAULTDATE_FORMAT):"", + "defaultHour": this.state.systemTime?this.state.systemTime.hours():12, + "defaultMinute": this.state.systemTime?this.state.systemTime.minutes():0 + }} + value={this.state.date} + onChange= {value => {this.updateDateChanges(value[0]?value[0]:this.state.date)}} + /> + ):""; } updateDateChanges(e){ - this.setState({date : e.value || ''}); + this.setState({date : e || ''}); } ondatechange(e){ - this.setState({date : e.value}); + this.setState({date : e}); } getDate() { diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js index 35725047eaeceb182457875029cbba90b4cd7320..396b74fd9c413e4ca93798b76ba3462889ac7dd0 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js @@ -4,6 +4,7 @@ import _ from 'lodash'; import Jeditor from '../../components/JSONEditor/JEditor'; import UnitConversion from '../../utils/unit.converter'; import UIConstants from '../../utils/ui.constants'; +import UtilService from '../../services/util.service'; /* eslint-disable react-hooks/exhaustive-deps */ export default (props) => { @@ -11,6 +12,7 @@ export default (props) => { const { parentFunction = (editorFn) => { editorFunction = editorFn;} } = props; const [constraintSchema, setConstraintSchema] = useState(); const [initialValue, setInitialValue] = useState(); + const [systemTime, setSystemTime] = useState(); //SU Constraint Editor Property Order,format and validation const configureProperties = (properties) => { for (const propertyKey in properties) { @@ -69,7 +71,8 @@ export default (props) => { } }; //DateTime flatPicker component enabled with seconds - const setDateTimeOption = (propertyValue) => { + const setDateTimeOption = async(propertyValue) => { + const systemTime = moment.utc((await UtilService.getUTC())); propertyValue.format = 'datetime-local'; propertyValue.validationType = 'dateTime'; propertyValue.skipFormat = true; @@ -83,6 +86,9 @@ export default (props) => { "enableSeconds": true, "time_24hr": true, "allowInput": true, + "defaultDate": systemTime.format(UIConstants.CALENDAR_DEFAULTDATE_FORMAT), + "defaultHour": systemTime.hour(), + "defaultMinute": systemTime.minutes() } }; }; @@ -101,7 +107,7 @@ export default (props) => { } else if(definitionName === 'timewindow') { for (let property in schema.definitions.timewindow.properties) { if(property === 'to' || property === 'from'){ - setDateTimeOption(schema.definitions.timewindow.properties[property]); + // setDateTimeOption(schema.definitions.timewindow.properties[property]); if (property === 
'from') { schema.definitions.timewindow.properties[property].propertyOrder = 1; } else { @@ -148,7 +154,9 @@ export default (props) => { } } - const constraintStrategy = () => { + const constraintStrategy = async() => { + const currentSystemTime = moment.utc(await UtilService.getUTC()) + setSystemTime(currentSystemTime); // const constraintTemplate = { ...props.constraintTemplate } const constraintTemplate = _.cloneDeep(props.constraintTemplate); if (constraintTemplate.schema) { @@ -196,6 +204,9 @@ export default (props) => { if (!props.constraintTemplate) { return; } + UtilService.getUTC().then(utcTime => { + setSystemTime(moment.utc(utcTime)); + }); if (props.initValue) { modifyInitiValue(); } diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/reservation.create.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/reservation.create.js index 9436b57558dd41712e24a988ccb18751884cb32a..e1b884053169d5a05dd9ab001e45af3c7ae0804a 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/reservation.create.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/reservation.create.js @@ -6,6 +6,7 @@ import { Growl } from 'primereact/components/growl/Growl'; import AppLoader from '../../layout/components/AppLoader'; import PageHeader from '../../layout/components/PageHeader'; import UIConstants from '../../utils/ui.constants'; +import Flatpickr from "react-flatpickr"; import { Calendar } from 'primereact/calendar'; import { InputMask } from 'primereact/inputmask'; import { Dropdown } from 'primereact/dropdown'; @@ -18,6 +19,9 @@ import ProjectService from '../../services/project.service'; import ReservationService from '../../services/reservation.service'; import UnitService from '../../utils/unit.converter'; import Jeditor from '../../components/JSONEditor/JEditor'; +import UtilService from '../../services/util.service'; + +import "flatpickr/dist/flatpickr.css"; /** * Component to create a new Reservation @@ -38,7 +42,7 @@ export class ReservationCreate extends Component { reservation: { name: '', description: '', - start_time: '', + start_time: null, duration: '', project: (props.match?props.match.params.project:null) || null, }, @@ -78,9 +82,11 @@ export class ReservationCreate extends Component { async initReservation() { const promises = [ ProjectService.getProjectList(), ReservationService.getReservationTemplates(), + UtilService.getUTC() ]; let emptyProjects = [{url: null, name: "Select Project"}]; Promise.all(promises).then(responses => { + let systemTime = moment.utc(responses[2]); this.projects = emptyProjects.concat(responses[0]); this.reservationTemplates = responses[1]; @@ -95,8 +101,9 @@ export class ReservationCreate extends Component { paramsSchema: schema, isLoading: false, reservationTemplate: reservationTemplate, + systemTime: systemTime }); - }); + }); } @@ -209,7 +216,6 @@ export class ReservationCreate extends Component { } } } - this.setState({errors: errors, validFields: validFields}); if (Object.keys(validFields).length === Object.keys(this.formRules).length) { validForm = true; @@ -356,20 +362,27 @@ export class ReservationCreate extends Component { <div className="p-field p-grid"> <label htmlFor="reservationName" className="col-lg-2 col-md-2 col-sm-12">From Date <span style={{color:'red'}}>*</span></label> <div className="col-lg-3 col-md-3 col-sm-12"> - <Calendar - d dateFormat="yy-mm-dd" - value= {this.state.reservation.start_time} - onChange= {e => this.setParams('start_time',e.value)} - data-testid="start_time" - tooltip="Moment at which the 
reservation starts from, that is, when its reservation can run." tooltipOptions={this.tooltipOptions} - showIcon={true} - showTime= {true} - showSeconds= {true} - hourFormat= "24" - /> - - <label className={this.state.errors.from?"error":"info"}> - {this.state.errors.start_time ? this.state.errors.start_time : ""} + <Flatpickr data-enable-time data-input options={{ + "inlineHideInput": true, + "wrap": true, + "enableSeconds": true, + "time_24hr": true, + "allowInput": true, + "defaultDate": this.state.systemTime.format(UIConstants.CALENDAR_DEFAULTDATE_FORMAT), + "defaultHour": this.state.systemTime.hours(), + "defaultMinute": this.state.systemTime.minutes() + }} + title="Start of this reservation" + value={this.state.reservation.start_time} + onChange= {value => {this.setParams('start_time', value[0]?value[0]:this.state.reservation.start_time); + this.setReservationParams('start_time', value[0]?value[0]:this.state.reservation.start_time)}} > + <input type="text" data-input className={`p-inputtext p-component ${this.state.errors.start_time && this.state.touched.start_time?'input-error':''}`} /> + <i className="fa fa-calendar" data-toggle style={{position: "absolute", marginLeft: '-25px', marginTop:'5px', cursor: 'pointer'}} ></i> + <i className="fa fa-times" style={{position: "absolute", marginLeft: '-50px', marginTop:'5px', cursor: 'pointer'}} + onClick={e => {this.setParams('start_time', ''); this.setReservationParams('start_time', '')}}></i> + </Flatpickr> + <label className={this.state.errors.start_time && this.state.touched.start_time?"error":"info"}> + {this.state.errors.start_time && this.state.touched.start_time ? this.state.errors.start_time : ""} </label> </div> <div className="col-lg-1 col-md-1 col-sm-12"></div> @@ -380,6 +393,8 @@ export class ReservationCreate extends Component { value={this.state.reservation.duration} mask="99:99:99" placeholder="HH:mm:ss" + tooltip="Duration of this reservation. If it is empty, then this reservation is indefinite." + tooltipOptions={this.tooltipOptions} onChange= {e => this.setParams('duration',e.value)} ref={input =>{this.input = input}} /> diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js index 828386d19450af0473e199ad44430a7b4491e8ed..56437953e4ee0ed1bd91c69fda2fd9512bb7f703 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js @@ -845,9 +845,9 @@ export class TimelineView extends Component { if (this.state.redirect) { return <Redirect to={ {pathname: this.state.redirect} }></Redirect> } - if (this.state.loader) { - return <AppLoader /> - } + // if (this.state.loader) { + // return <AppLoader /> + // } const isSUDetsVisible = this.state.isSUDetsVisible; const isTaskDetsVisible = this.state.isTaskDetsVisible; const canExtendSUList = this.state.canExtendSUList;