diff --git a/LCS/Messaging/python/messaging/messagebus.py b/LCS/Messaging/python/messaging/messagebus.py index 7bc94c6719b6a51193969591d8b8e9c39a5deea9..1e0375f1fe91f5ec80380b0f74c16d19a1253cc3 100644 --- a/LCS/Messaging/python/messaging/messagebus.py +++ b/LCS/Messaging/python/messaging/messagebus.py @@ -942,7 +942,7 @@ class TemporaryExchange: uuid.uuid4().hex[:8])) logger.debug("Creating TemporaryExchange at %s ...", self.address) create_exchange(name=self.address, broker=self.broker) - logger.debug("Created TemporaryExchange at %s", self.address) + logger.info("Created TemporaryExchange at %s", self.address) def close(self): """ @@ -954,7 +954,7 @@ class TemporaryExchange: delete_exchange(self.address) except Exception as e: logger.error(e) - logger.debug("Closed TemporaryExchange at %s", self.address) + logger.info("Closed TemporaryExchange at %s", self.address) self.address = None def __str__(self): diff --git a/LCS/PyCommon/ring_coordinates.py b/LCS/PyCommon/ring_coordinates.py index cc536c4ccf04445217d3f0073c5e5380d462f544..1113ff871821d7bd4c35abba6323f82a9df0d314 100755 --- a/LCS/PyCommon/ring_coordinates.py +++ b/LCS/PyCommon/ring_coordinates.py @@ -9,8 +9,8 @@ class RingCoordinates: """ This has been taken from RTCP/Conbalt test tRinGCoordinates.py - Original RingCoordinates implementation (+ Vlad's fix). Taken from parset.py in - RTCP\Run\src\LOFAR\parset + Original RingCoordinates implementation (+ Vlad's fix). + Taken from parset.py in RTCP\\Run\\src\\LOFAR\\parset """ def __init__(self, numrings, width, center, dirtype): self.numrings = numrings diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestjobmanagementserver.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestjobmanagementserver.py index 78d2cd998044a685f9b5eba177d30f43648f72b8..7452f5bf6d1cad4b264b67d1280eba140db587fc 100644 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestjobmanagementserver.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingestjobmanagementserver.py @@ -57,6 +57,16 @@ class IngestJobManager: self._tobus = ToBus(exchange=exchange, broker=broker) + self._incoming_jobs_listener = BusListener(IngestIncomingJobsHandler, {'job_manager': self}, + exchange=self._tobus.exchange, broker=self._tobus.broker, + routing_key="%s.#" % DEFAULT_INGEST_INCOMING_JOB_SUBJECT) + + self._ingest_event_listener = IngestEventMesssageBusListener(IngestEventMessageHandlerForJobManager, {'job_manager': self}, + exchange=self._tobus.exchange, broker=self._tobus.broker) + + self._ingest_service = RPCService(DEFAULT_INGEST_SERVICENAME, IngestServiceMessageHandler, {'job_manager': self}, + exchange=self._tobus.exchange, broker=self._tobus.broker, num_threads=4) + self.__running_jobs_log_timestamp = datetime.utcnow() self.__last_putStalledJobsBackToToDo_timestamp = datetime.utcnow() @@ -86,19 +96,8 @@ class IngestJobManager: logger.info('starting listening for new jobs and notifications') - incoming_jobs_listener = BusListener(IngestIncomingJobsHandler, {'job_manager': self}, - exchange=self._tobus.exchange, broker=self._tobus.broker, - routing_key="%s.#" % DEFAULT_INGEST_INCOMING_JOB_SUBJECT) - - ingest_event_listener = IngestEventMesssageBusListener(IngestEventMessageHandlerForJobManager, - {'job_manager': self}, - exchange=self._tobus.exchange, broker=self._tobus.broker) - - ingest_service = RPCService(DEFAULT_INGEST_SERVICENAME, IngestServiceMessageHandler, {'job_manager': self}, - exchange=self._tobus.exchange, broker=self._tobus.broker, num_threads=4) - # 
open exchange connections... - with incoming_jobs_listener, ingest_event_listener, ingest_service, self._tobus: + with self._incoming_jobs_listener, self._ingest_event_listener, self._ingest_service, self._tobus: with self.__lock: # start with full jobs dir scan to retreive state from disk self.scanJobsdir() diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingesttmssadapter.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingesttmssadapter.py index 3f89b769ebdcd86c9131ebf1da31f4ee648041e3..7fd829007bf08bc58122d8ba8b1ad33e8f62c1ff 100644 --- a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingesttmssadapter.py +++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingesttmssadapter.py @@ -131,8 +131,12 @@ class TMSSEventMessageHandlerForIngestTMSSAdapter(UsingToBusMixin, TMSSEventMess self.tmss_client.close() def init_tobus(self, exchange, broker): - logger.warning("FOR COMMISSIONING WE LET THE INGESTTMSSADAPTER SEND ITS INGEST JOBS TO THE PRODUCTION BROKER!") - self._tobus = ToBus(exchange='lofar', broker='scu001.control.lofar') + from lofar.common import isDevelopmentEnvironment + if isDevelopmentEnvironment(): + self._tobus = ToBus(exchange=exchange, broker=broker) + else: + logger.warning("FOR COMMISSIONING WE LET THE INGESTTMSSADAPTER SEND ITS INGEST JOBS TO THE PRODUCTION BROKER!") + self._tobus = ToBus(exchange='lofar', broker='scu001.control.lofar') def onSubTaskStatusChanged(self, id: int, status: str): super().onSubTaskStatusChanged(id, status) @@ -162,7 +166,7 @@ class TMSSEventMessageHandlerForIngestTMSSAdapter(UsingToBusMixin, TMSSEventMess obs_id=producing_subtask['id'], # the name 'obs_id' is somewhat misleading, but that's a legacy name 'forced' by MoM/OTDB. TODO: refactor when removing MoM/OTDB. 
dataproduct_name=input_dp['filename'], archive_id=dp_global_identifier['unique_identifier'], - location=subtask['cluster_value']+':'+os.path.join(input_dp['directory'], input_dp['filename']), + location=subtask['cluster_name']+':'+os.path.join(input_dp['directory'], input_dp['filename']), tmss_ingest_subtask_id=subtask['id'], tmss_input_dataproduct_id=input_dp['id']) @@ -180,13 +184,16 @@ class IngestTMSSAdapter: It has two purpouses: 1) create and enqueue ingest jobs upon receiving an ingest-subtask scheduled event and 2) track progress of the ingest-subtask (number of dataproducts transferred) and updating the (finished) state of the ingest-subtask''' def __init__(self, tmss_creds: DBCredentials, exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER): + from lofar.common import isDevelopmentEnvironment self.ingest2tmss_adapter = IngestEventMesssageBusListener(handler_type=IngestEventMessageHandlerForIngestTMSSAdapter, handler_kwargs={'tmss_creds': tmss_creds}, - exchange='lofar', broker='scu001.control.lofar') # TODO: replace hardcoded commissioning brokers by parameters + exchange=exchange if isDevelopmentEnvironment() else 'lofar', # TODO: replace hardcoded commissioning exchange by parameter + broker=broker if isDevelopmentEnvironment() else 'scu001.control.lofar') # TODO: replace hardcoded commissioning brokers by parameter self.tmss2ingest_adapter = TMSSBusListener(handler_type=TMSSEventMessageHandlerForIngestTMSSAdapter, handler_kwargs={'tmss_creds': tmss_creds}, - routing_key=TMSS_ALL_EVENTS_FILTER, - exchange='test.lofar', broker='scu199.control.lofar') # TODO: replace hardcoded commissioning brokers by parameters + routing_key=TMSS_SUBTASK_STATUS_EVENT_PREFIX+'.#', + exchange=exchange if isDevelopmentEnvironment() else 'test.lofar', # TODO: replace hardcoded commissioning brokers by parameter + broker=broker if isDevelopmentEnvironment() else 'scu199.control.lofar') # TODO: replace hardcoded commissioning brokers by parameter def open(self): self.ingest2tmss_adapter.start_listening() diff --git a/LTA/LTAIngest/LTAIngestServer/test/CMakeLists.txt b/LTA/LTAIngest/LTAIngestServer/test/CMakeLists.txt index 5e0c61f4f40ed78dd3a5c1147604321e9cab1e00..fd2961464fc6c66d5687648b5ea7890d57157ea2 100644 --- a/LTA/LTAIngest/LTAIngestServer/test/CMakeLists.txt +++ b/LTA/LTAIngest/LTAIngestServer/test/CMakeLists.txt @@ -1,6 +1,8 @@ include(LofarCTest) -IF(BUILD_TESTING) +IF(BUILD_TMSSBackend) lofar_add_test(t_ingest_tmss_integration_test) set_tests_properties(t_ingest_tmss_integration_test PROPERTIES TIMEOUT 600) -ENDIF(BUILD_TESTING) +ELSE() + message(WARNING "Skipping t_ingest_tmss_integration_test because it depends on the TMSSBackend package which is not included in the build") +ENDIF(BUILD_TMSSBackend) diff --git a/LTA/LTAIngest/LTAIngestServer/test/t_ingest_tmss_integration_test.py b/LTA/LTAIngest/LTAIngestServer/test/t_ingest_tmss_integration_test.py index 82f9593d8d5d08fb318f6956488bcf69f2e0f69a..0300cb0df79de6dbf8aba4d8a25f3c70d4e8a47a 100755 --- a/LTA/LTAIngest/LTAIngestServer/test/t_ingest_tmss_integration_test.py +++ b/LTA/LTAIngest/LTAIngestServer/test/t_ingest_tmss_integration_test.py @@ -1,8 +1,5 @@ #!/usr/bin/env python3 -#TODO: Fix test -exit(3) - import unittest from unittest import mock from random import randint @@ -31,7 +28,7 @@ class TestIngestTMSSIntegration(unittest.TestCase): def test(self): with TemporaryExchange("TestIngestTMSSIntegration") as tmp_exchange: - # override DEFAULT_BUSNAME + # override DEFAULT_BUSNAME (which is used in a call from TMSS to RA to schedule) import 
lofar lofar.messaging.config.DEFAULT_BUSNAME = tmp_exchange.address @@ -87,49 +84,64 @@ class TestIngestTMSSIntegration(unittest.TestCase): # mock throttling method transfer_server.enoughResourcesAvailable = lambda: True - strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines") - scheduling_unit_spec = add_defaults_to_json_object_for_schema(strategy_template.template, strategy_template.scheduling_unit_template.schema) - # limit the number of subbands - for task_name, task in scheduling_unit_spec['tasks'].items(): - if 'SAPs' in task['specifications_doc']: - SAPs = task['specifications_doc']['SAPs'] - for SAP in SAPs: - SAP['subbands'] = [0] - scheduling_unit_spec['tasks'][task_name]['specifications_doc']['SAPs'] = SAPs - - scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data(template=strategy_template.scheduling_unit_template, requirements_doc=scheduling_unit_spec)) - scheduling_unit = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) - ingest_subtask = models.Subtask.objects.get(task_blueprint__scheduling_unit_blueprint__id=scheduling_unit.id, - specifications_template__type__value=models.SubtaskType.Choices.INGEST.value) - schedule_independent_subtasks_in_scheduling_unit_blueprint(scheduling_unit, datetime.utcnow()) - - # make sure each dataproduct uses TEST_DATA_DIR as root - for task in scheduling_unit.task_blueprints.all(): - for subtask in task.subtasks.all(): - if subtask.state.value == models.SubtaskState.Choices.SCHEDULED.value: - for output_dp in subtask.output_dataproducts.all(): - output_dp.directory = output_dp.directory.replace('/data', TEST_DATA_DIR) - output_dp.save() - - # start a simulator, forcing the scheduling_unit to "run" the observations and pipelines.... - # and let the ingest server act on the eventmessages. 
- # as a result, the scheduling_unit should be finished at the end, and the dataproducts should be "archived" (not in the real LTA of course, because we faked the transfer) - stop_event = threading.Event() - with create_scheduling_unit_blueprint_simulator(scheduling_unit.id, stop_event, - handle_ingest=False, handle_observations=True, handle_QA=True, handle_pipelines=True, create_output_dataproducts=True, - delay=0, duration=0, - exchange=tmp_exchange.address) as simulator: - self.assertTrue(stop_event.wait(300)) - - scheduling_unit.refresh_from_db() - self.assertEqual("finished", scheduling_unit.status) - - ingest_subtask.refresh_from_db() - self.assertGreater(ingest_subtask.output_dataproducts.count(), 0) - - for output_dp in ingest_subtask.output_dataproducts.all(): - self.assertEqual(1, models.DataproductArchiveInfo.objects.filter(dataproduct__id=output_dp.id).count()) - + # cleanup queues with janitor + with BusListenerJanitor(ingest_job_manager._incoming_jobs_listener), BusListenerJanitor(ingest_job_manager._ingest_event_listener), BusListenerJanitor(ingest_job_manager._ingest_service), \ + BusListenerJanitor(ingest_tmss_adapter.ingest2tmss_adapter), BusListenerJanitor(ingest_tmss_adapter.tmss2ingest_adapter), BusListenerJanitor(transfer_server.incoming_jobs_listener): + + strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="Short Test Observation - Pipeline - Ingest") + scheduling_unit_spec = add_defaults_to_json_object_for_schema(strategy_template.template, strategy_template.scheduling_unit_template.schema) + # limit the number of subbands, and disable QA subtasks, and cleanup task + for task_name, task in list(scheduling_unit_spec['tasks'].items()): + if 'SAPs' in task['specifications_doc']: + SAPs = task['specifications_doc']['SAPs'] + for SAP in SAPs: + SAP['subbands'] = [0] + scheduling_unit_spec['tasks'][task_name]['specifications_doc']['SAPs'] = SAPs + if 'QA' in task['specifications_doc']: + task['specifications_doc']['QA']['plots']['enabled'] = False + task['specifications_doc']['QA']['file_conversion']['enabled'] = False + if task['specifications_template'] == 'cleanup': + # remove cleanup task and its relations + scheduling_unit_spec['tasks'].pop(task_name) + scheduling_unit_spec['task_relations'] = [task_rel for task_rel in scheduling_unit_spec['task_relations'] if task_rel['consumer'] != task_name] + + scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data(template=strategy_template.scheduling_unit_template, requirements_doc=scheduling_unit_spec)) + scheduling_unit = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + ingest_subtask = models.Subtask.objects.get(task_blueprint__scheduling_unit_blueprint__id=scheduling_unit.id, + specifications_template__type__value=models.SubtaskType.Choices.INGEST.value) + schedule_independent_subtasks_in_scheduling_unit_blueprint(scheduling_unit, datetime.utcnow()) + + # make sure each dataproduct uses TEST_DATA_DIR as root + for task in scheduling_unit.task_blueprints.all(): + for subtask in task.subtasks.all(): + if subtask.state.value == models.SubtaskState.Choices.SCHEDULED.value: + for output_dp in subtask.output_dataproducts.all(): + output_dp.directory = output_dp.directory.replace('/data', TEST_DATA_DIR) + output_dp.save() + + # start a simulator, forcing the scheduling_unit to "run" the observations and pipelines.... + # and let the ingest server act on the eventmessages. 
+ # as a result, the scheduling_unit should be finished at the end, and the dataproducts should be "archived" (not in the real LTA of course, because we faked the transfer) + stop_event = threading.Event() + with create_scheduling_unit_blueprint_simulator(scheduling_unit.id, stop_event, + handle_ingest=False, handle_observations=True, handle_QA=True, handle_pipelines=True, create_output_dataproducts=True, + delay=0, duration=0, auto_grant_ingest_permission=True, + exchange=tmp_exchange.address) as simulator: + + # wait until the observations/pipelines finished simulating + stop_event.wait(300) + + # scheduling_unit (including ingest) should be finished + scheduling_unit.refresh_from_db() + self.assertEqual("finished", scheduling_unit.status) + ingest_subtask.refresh_from_db() + self.assertEqual("finished", ingest_subtask.state.value) + + # check ingested dataproducts + self.assertGreater(ingest_subtask.output_dataproducts.count(), 0) + for output_dp in ingest_subtask.output_dataproducts.all(): + self.assertTrue(output_dp.filepath.startswith("srm://")) + self.assertEqual(1, models.DataproductArchiveInfo.objects.filter(dataproduct__id=output_dp.id).count()) logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) diff --git a/SAS/DataManagement/Cleanup/CleanupClient/rpc.py b/SAS/DataManagement/Cleanup/CleanupClient/rpc.py index cda86ffe8ec9efff420c28fbe61c51519b0dc50d..1247c5f4724881995ddde755631428bab5984e4e 100644 --- a/SAS/DataManagement/Cleanup/CleanupClient/rpc.py +++ b/SAS/DataManagement/Cleanup/CleanupClient/rpc.py @@ -25,17 +25,20 @@ class CleanupRPC(RPCClientContextManagerMixin): def getPathForOTDBId(self, otdb_id): return self._rpc_client.execute('GetPathForOTDBId', otdb_id=otdb_id) + def getPathForTMSSId(self, tmss_id): + return self._rpc_client.execute('GetPathForTMSSId', tmss_id=tmss_id) + def removePath(self, path): return self._rpc_client.execute('RemovePath', path=path) - def removeTaskData(self, otdb_id, delete_is=True, delete_cs=True, delete_uv=True, delete_im=True, delete_img=True, delete_pulp=True, delete_scratch=True, force=False): - return self._rpc_client.execute('RemoveTaskData', otdb_id=otdb_id, delete_is=delete_is, delete_cs=delete_cs, delete_uv=delete_uv, delete_im=delete_im, delete_img=delete_img, delete_pulp=delete_pulp, delete_scratch=delete_scratch, force=force) + def removeTaskData(self, otdb_id=None, tmss_id=None, delete_is=True, delete_cs=True, delete_uv=True, delete_im=True, delete_img=True, delete_pulp=True, delete_scratch=True, force=False): + return self._rpc_client.execute('RemoveTaskData', otdb_id=otdb_id, tmss_id=tmss_id, delete_is=delete_is, delete_cs=delete_cs, delete_uv=delete_uv, delete_im=delete_im, delete_img=delete_img, delete_pulp=delete_pulp, delete_scratch=delete_scratch, force=force) - def setTaskDataPinned(self, otdb_id, pinned=True): - return self._rpc_client.execute('SetTaskDataPinned', otdb_id=otdb_id, pinned=pinned) + def setTaskDataPinned(self, otdb_id, tmss_id=None, pinned=True): + return self._rpc_client.execute('SetTaskDataPinned', otdb_id=otdb_id, tmss_id=tmss_id, pinned=pinned) - def isTaskDataPinned(self, otdb_id): - return convertStringDigitKeysToInt(self._rpc_client.execute('IsTaskDataPinned', otdb_id=otdb_id)).get(otdb_id, False) + def isTaskDataPinned(self, otdb_id=None, tmss_id=None): + return self._rpc_client.execute('IsTaskDataPinned', otdb_id=otdb_id, tmss_id=tmss_id) def getPinnedStatuses(self): return convertStringDigitKeysToInt(self._rpc_client.execute('GetPinnedStatuses')) @@ -45,12 
+48,12 @@ def main(): from optparse import OptionParser # Check the invocation arguments - parser = OptionParser('%prog [options] <otdb_id>', + parser = OptionParser('%prog [options] <otdb_id/tmss_id>', description='do cleanup actions on cep4 from the commandline') - parser.add_option('-d', '--delete', dest='delete', action='store_true', help='delete the data for the given otdb_id (see also --force option)') + parser.add_option('-d', '--delete', dest='delete', action='store_true', help='delete the data for the given otdb_id/tmss_id (see also --force option)') parser.add_option('-f', '--force', dest='force', action='store_true', help='in combination with --delete, always delete the data even when safety checks block deletion. (But pinned data is still kept, even when this force flag is supplied.)') - parser.add_option('-p', '--pin', dest='pin', action='store_true', help='pin the data for the given otdb_id') - parser.add_option('-u', '--unpin', dest='unpin', action='store_true', help='unpin the data for the given otdb_id') + parser.add_option('-p', '--pin', dest='pin', action='store_true', help='pin the data for the given otdb_id/tmss_id') + parser.add_option('-u', '--unpin', dest='unpin', action='store_true', help='unpin the data for the given otdb_id/tmss_id') parser.add_option('-b', '--broker', dest='broker', type='string', default=DEFAULT_BROKER, help='Address of the broker, default: localhost') parser.add_option('-e', '--exchange', dest='exchange', type='string', default=DEFAULT_BUSNAME, @@ -66,29 +69,32 @@ def main(): level=logging.INFO if options.verbose else logging.WARN) with CleanupRPC.create(exchange=options.exchange, broker=options.broker) as rpc: - otdb_id = int(args[0]) + # the cmdline given id is either an otdb_id or a tmss_id, based on the fact that tmss_id's start at 2000000 + id = int(args[0]) + otdb_id = id if id < 2000000 else None + tmss_id = id if id >= 2000000 else None if options.pin or options.unpin: - rpc.setTaskDataPinned(otdb_id, bool(options.pin)) + rpc.setTaskDataPinned(otdb_id=otdb_id, tmss_id=tmss_id, pinned=bool(options.pin)) elif not options.delete: - print('data for otdb_id %s is %spinned' % (otdb_id, '' if rpc.isTaskDataPinned(otdb_id) else 'not ')) + print('data for otdb_id=%s tmss_id=%s is %spinned' % (otdb_id, tmss_id, '' if rpc.isTaskDataPinned(otdb_id=otdb_id, tmss_id=tmss_id) else 'not ')) if options.delete: if options.pin: print("You can't delete and pin data at the same time!") exit(1) - path_result = rpc.getPathForOTDBId(otdb_id) + path_result = rpc.getPathForOTDBId(otdb_id) if otdb_id is not None else rpc.getPathForTMSSId(tmss_id) if path_result['found']: path = path_result['path'] scratch_paths = path_result.get('scratch_paths', []) paths = scratch_paths + [path] print("This will delete everything in '%s'." % ', '.join(paths)) if input("Are you sure? 
(y/n) ") == 'y': - result = rpc.removeTaskData(otdb_id, force=options.force) + result = rpc.removeTaskData(otdb_id=otdb_id, tmss_id=tmss_id, force=options.force) print() if not result['deleted']: - print('Could not delete data for task with otdb_id=%s' % otdb_id) + print('Could not delete data for task with otdb_id=%s tmss_id=%s' % (otdb_id, tmss_id)) print(result['message']) exit(0 if result['deleted'] else 1) else: diff --git a/SAS/DataManagement/Cleanup/CleanupService/CMakeLists.txt b/SAS/DataManagement/Cleanup/CleanupService/CMakeLists.txt index e5455f56fc0c486e136a98037e12bace6930822f..0b3ab070dfb77de051931f3d05f659981c64d07c 100644 --- a/SAS/DataManagement/Cleanup/CleanupService/CMakeLists.txt +++ b/SAS/DataManagement/Cleanup/CleanupService/CMakeLists.txt @@ -1,6 +1,6 @@ # $Id$ -lofar_package(CleanupService 1.0 DEPENDS PyMessaging DataManagementCommon CleanupCommon) +lofar_package(CleanupService 1.0 DEPENDS PyMessaging DataManagementCommon CleanupCommon CleanupClient TMSSClient) lofar_find_package(Python 3.4 REQUIRED) include(PythonInstall) diff --git a/SAS/DataManagement/Cleanup/CleanupService/service.py b/SAS/DataManagement/Cleanup/CleanupService/service.py index 243fc0a5c9679ad6932e619b2134316eeb2a0770..fbe3288b9ebcd923679bbdd42f109f464c1f3e98 100644 --- a/SAS/DataManagement/Cleanup/CleanupService/service.py +++ b/SAS/DataManagement/Cleanup/CleanupService/service.py @@ -22,6 +22,10 @@ from lofar.sas.datamanagement.cleanup.config import DEFAULT_CLEANUP_SERVICENAME from lofar.sas.datamanagement.common.config import DEFAULT_DM_NOTIFICATION_PREFIX from lofar.sas.datamanagement.storagequery.rpc import StorageQueryRPC +from lofar.sas.datamanagement.cleanup.rpc import CleanupRPC + +from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession +from lofar.sas.tmss.client.tmssbuslistener import * logger = logging.getLogger(__name__) @@ -30,7 +34,7 @@ logger = logging.getLogger(__name__) pinfile = os.path.join(os.environ.get('LOFARROOT', '.'), 'var', 'run', 'auto_cleanup_pinned_tasks.py') #TODO: this local method is a temporary solution to store the pins in until it can be specified and stored for each task in mom/radb -def _setTaskDataPinned(otdb_id, pinned=True): +def _setOTDBTaskDataPinned(otdb_id, pinned=True): try: pins = {} @@ -50,8 +54,8 @@ def _setTaskDataPinned(otdb_id, pinned=True): logger.error(str(e)) return False -#TODO: this local method is a temporary solution to store the pins in until it can be specified and stored for each task in mom/radb -def _isTaskDataPinned(otdb_id): +#TODO: this local method was a temporary solution to store the pins for otdb tasks. The method can be removed once we use TMSS only. +def _isOTDBTaskDataPinned(otdb_id): try: if os.path.exists(pinfile): with open(pinfile) as f: @@ -62,8 +66,8 @@ def _isTaskDataPinned(otdb_id): return False -#TODO: this local method is a temporary solution to store the pins in until it can be specified and stored for each task in mom/radb -def _getPinnedStatuses(): +#TODO: this local method was a temporary solution to store the pins for otdb tasks. The method can be removed once we use TMSS only. 
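+# note: reads the pinned otdb tasks from the local pin-file on disk; TMSS tasks instead store their pinned state in the task's 'output_pinned' flag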
+def _getOTDBPinnedStatuses(): try: if os.path.exists(pinfile): with open(pinfile) as f: @@ -76,16 +80,18 @@ def _getPinnedStatuses(): class CleanupHandler(ServiceMessageHandler): - def __init__(self, mountpoint=CEP4_DATA_MOUNTPOINT): + def __init__(self, mountpoint=CEP4_DATA_MOUNTPOINT, tmss_dbcreds_id: str=None): super().__init__() self.mountpoint = mountpoint self.path_resolver = None self._sqrpc = None + self._tmss_client = TMSSsession.create_from_dbcreds_for_ldap(tmss_dbcreds_id) def init_service_handler(self, service_name: str): super().init_service_handler(service_name) self.register_service_method('GetPathForOTDBId', self.path_resolver.getPathForOTDBId) + self.register_service_method('GetPathForTMSSId', self.path_resolver.getPathForTMSSId) self.register_service_method('RemovePath', self._removePath) self.register_service_method('RemoveTaskData', self._removeTaskData) self.register_service_method('SetTaskDataPinned', self._setTaskDataPinned) @@ -99,26 +105,82 @@ class CleanupHandler(ServiceMessageHandler): self._sqrpc = StorageQueryRPC.create(exchange=exchange, broker=broker) def start_handling(self): - super().start_handling() + self._tmss_client.open() self.path_resolver.open() self._sqrpc.open() + super().start_handling() logger.info("%s started with projects_path=%s", self, self.path_resolver.projects_path) def stop_handling(self): + super().stop_handling() self.path_resolver.close() self._sqrpc.close() - super().stop_handling() + self._tmss_client.close() - def _setTaskDataPinned(self, otdb_id, pinned=True): - logger.info('setTaskDataPinned(otdb_id=%s, pinned=%s)', otdb_id, pinned) - _setTaskDataPinned(otdb_id, pinned) - self._sendNotification(subject='TaskDataPinned', content={ 'otdb_id':otdb_id, 'pinned': _isTaskDataPinned(otdb_id) }) + def _setTaskDataPinned(self, otdb_id:int=None, tmss_id:int=None, pinned: bool=True): + logger.info('setTaskDataPinned(otdb_id=%s, tmss_id=%s, pinned=%s)', otdb_id, tmss_id, pinned) + if otdb_id is not None: + _setOTDBTaskDataPinned(otdb_id, pinned) + elif tmss_id is not None: + subtask = self._tmss_client.get_subtask(tmss_id) + self._tmss_client.session.patch(subtask['task_blueprint'], json={'output_pinned': pinned}) - def _isTaskDataPinned(self, otdb_id): - return { str(otdb_id): _isTaskDataPinned(otdb_id) } + self._sendNotification(subject='TaskDataPinned', content={ 'otdb_id':otdb_id, 'tmss_id':tmss_id, 'pinned': self._isTaskDataPinned(otdb_id, tmss_id) }) + + def _isTaskDataPinned(self, otdb_id:int, tmss_id:int): + # TODO: otdb handling can be removed once we use TMSS only. + if otdb_id is not None: + return _isOTDBTaskDataPinned(otdb_id) + + subtask = self._tmss_client.get_subtask(tmss_id) + task = self._tmss_client.get_url_as_json_object(subtask['task_blueprint']) + return task['output_pinned'] def _getPinnedStatuses(self): - return _getPinnedStatuses() + # TODO: otdb handling can be removed once we use TMSS only. + # This method is currently only used in the web-scheduler for otdb/mom tasks. No need to TMSS-ify it. + return _getOTDBPinnedStatuses() + + def _has_unfinished_non_cleanup_successors(self, otdb_id: int, tmss_id: int) -> bool: + # TODO: otdb handling can be removed once we use TMSS only. 
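+        # legacy path: for otdb tasks, look up the task and its successors via the RADB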
+        if otdb_id is not None:
+            radbrpc = self.path_resolver.radbrpc
+            task = radbrpc.getTask(otdb_id=otdb_id)
+            if task:
+                suc_tasks = radbrpc.getTasks(task_ids=task['successor_ids'])
+                unfinished_suc_tasks = [t for t in suc_tasks if not (t['status'] == 'finished' or t['status'] == 'obsolete')]
+                return len(unfinished_suc_tasks)>0
+
+        successors = self._tmss_client.get_subtask_successors(tmss_id)
+        unfinished_successors = [x for x in successors
+                                 if x['state_value'] not in ('finished', 'cancelled')
+                                 and x['subtask_type'] != 'cleanup']
+        return len(unfinished_successors) > 0
+
+    def _has_uningested_output_dataproducts(self, otdb_id: int, tmss_id: int) -> bool:
+        # TODO: otdb/mom handling can be removed once we use TMSS only.
+        if otdb_id is not None:
+            radbrpc = self.path_resolver.radbrpc
+            task = radbrpc.getTask(otdb_id=otdb_id)
+            if task:
+                momrpc = self.path_resolver.momrpc
+                dataproducts = momrpc.getDataProducts(task['mom_id']).get(task['mom_id'])
+                ingestable_dataproducts = [dp for dp in dataproducts if dp['status'] not in [None, 'has_data', 'no_data', 'populated'] ]
+                ingested_dataproducts = [dp for dp in ingestable_dataproducts if dp['status'] == 'ingested']
+
+                if len(ingestable_dataproducts) > 0 and len(ingested_dataproducts) < len(ingestable_dataproducts):
+                    uningested_dataproducts = [dp for dp in ingestable_dataproducts if dp['status'] != 'ingested']
+                    return len(uningested_dataproducts) > 0
+            return False
+
+        subtask = self._tmss_client.get_subtask(tmss_id)
+        subtasks = self._tmss_client.get_subtasks_in_same_scheduling_unit(subtask)
+        for subtask in subtasks:
+            if subtask['subtask_type'] == 'ingest':
+                # TMSS keeps track per dataproduct if it's ingested or not, and translates that into a progress value 0.0 <= p <= 1.0
+                return self._tmss_client.get_subtask_progress(subtask['id'])['progress'] < 1.0
+
+        return False

     def _sendNotification(self, subject, content):
         try:
@@ -128,53 +190,49 @@ class CleanupHandler(ServiceMessageHandler):
         except Exception as e:
             logger.error(str(e))

-    def _removeTaskData(self, otdb_id, delete_is=True, delete_cs=True, delete_uv=True, delete_im=True, delete_img=True, delete_pulp=True, delete_scratch=True, force=False):
-        logger.info("Remove task data for otdb_id %s, force=%s" % (otdb_id, force))
+    def _removeTaskData(self, otdb_id, tmss_id, delete_is=True, delete_cs=True, delete_uv=True, delete_im=True, delete_img=True, delete_pulp=True, delete_scratch=True, force=False):
+        logger.info("Remove task data for otdb_id=%s, tmss_id=%s force=%s" % (otdb_id, tmss_id, force))

-        if not isinstance(otdb_id, int):
+        if otdb_id is not None and not isinstance(otdb_id, int):
             message = "Provided otdb_id is not an int"
             logger.error(message)
             return {'deleted': False, 'message': message}

-        self._sendNotification(subject='TaskDeleting', content={ 'otdb_id': otdb_id })
+        if tmss_id is not None and not isinstance(tmss_id, int):
+            message = "Provided tmss_id is not an int"
+            logger.error(message)
+            return {'deleted': False, 'message': message}

-        if _isTaskDataPinned(otdb_id):
-            message = "Task otdb_id=%s is pinned. Not deleting data." % (otdb_id)
+        self._sendNotification(subject='TaskDeleting', content={ 'otdb_id': otdb_id, 'tmss_id': tmss_id })
+
+        if self._isTaskDataPinned(otdb_id, tmss_id):
+            message = "Task otdb_id=%s tmss_id=%s is pinned. Not deleting data." 
% (otdb_id, tmss_id) logger.error(message) self._sendNotification(subject='TaskDeleted', content={'deleted': False, 'otdb_id': otdb_id, + 'tmss_id': tmss_id, 'message': message}) return {'deleted': False, 'message': message} - radbrpc = self.path_resolver.radbrpc - task = radbrpc.getTask(otdb_id=otdb_id) - if task: - suc_tasks = radbrpc.getTasks(task_ids=task['successor_ids']) - unfinished_scu_tasks = [t for t in suc_tasks if not (t['status'] == 'finished' or t['status'] == 'obsolete')] - if unfinished_scu_tasks: - message = "Task otdb_id=%s has unfinished successor tasks (otdb_ids: %s). Not deleting data." % (task['otdb_id'], [t['otdb_id'] for t in unfinished_scu_tasks]) - logger.error(message) - self._sendNotification(subject='TaskDeleted', content={'deleted': False, - 'otdb_id': otdb_id, - 'message': message}) - return {'deleted': False, 'message': message} + if self._has_unfinished_non_cleanup_successors(otdb_id, tmss_id): + message = "Task otdb_id=%s tmss_id=%s has unfinished successor tasks. Not deleting data." % (otdb_id, tmss_id) + logger.error(message) + self._sendNotification(subject='TaskDeleted', content={'deleted': False, + 'otdb_id': otdb_id, + 'tmss_id': tmss_id, + 'message': message}) + return {'deleted': False, 'message': message} - momrpc = self.path_resolver.momrpc - dataproducts = momrpc.getDataProducts(task['mom_id']).get(task['mom_id']) - ingestable_dataproducts = [dp for dp in dataproducts if dp['status'] not in [None, 'has_data', 'no_data', 'populated'] ] - ingested_dataproducts = [dp for dp in ingestable_dataproducts if dp['status'] == 'ingested'] + if not force and self._has_uningested_output_dataproducts(otdb_id, tmss_id): + message = "Task otdb_id=%s tmss_id=%s has un-ingested dataproducts. Not deleting data." % (otdb_id, tmss_id) + logger.error(message) + self._sendNotification(subject='TaskDeleted', content={'deleted': False, + 'otdb_id': otdb_id, + 'tmss_id': tmss_id, + 'message': message}) + return {'deleted': False, 'message': message} - if not force: - if len(ingestable_dataproducts) > 0 and len(ingested_dataproducts) < len(ingestable_dataproducts): - uningested_dataproducts = [dp for dp in ingestable_dataproducts if dp['status'] != 'ingested'] - message = "Task otdb_id=%s has un-ingested dataproducts. Not deleting data." 
% (task['otdb_id'],)
-                    logger.error(message)
-                    self._sendNotification(subject='TaskDeleted', content={'deleted': False,
-                                                                           'otdb_id': otdb_id,
-                                                                           'message': message})
-                    return {'deleted': False, 'message': message}
-
-        path_result = self.path_resolver.getPathForOTDBId(otdb_id)
+        path_result = self.path_resolver.getPathForOTDBId(otdb_id) if otdb_id is not None else self.path_resolver.getPathForTMSSId(tmss_id)
         if path_result['found']:
             rm_results = []
             if delete_is and delete_cs and delete_uv and delete_im and delete_img and delete_pulp:
@@ -205,29 +263,37 @@ class CleanupHandler(ServiceMessageHandler):
                 combined_message = '\n'.join(x.get('message','') for x in rm_results)

                 if rm_result['deleted'] and not 'does not exist' in combined_message:
-                    task_type = task.get('type', 'task') if task else 'task'
-                    rm_result['message'] = 'Deleted %s of data from disk for %s with otdb_id %s\n' % (humanreadablesize(rm_result['size']), task_type, otdb_id)
+                    task_type = path_result.get('task',{}).get('type', 'task') if otdb_id else self._tmss_client.get_subtask(tmss_id).get('subtask_type', 'task')
+                    rm_result['message'] = 'Deleted %s of data from disk for %s with otdb_id=%s tmss_id=%s\n' % (humanreadablesize(rm_result['size']), task_type, otdb_id, tmss_id)
                     rm_result['message'] += combined_message

                 self._sendNotification(subject='TaskDeleted', content={'deleted':rm_result['deleted'],
                                                                        'otdb_id':otdb_id,
+                                                                       'tmss_id':tmss_id,
                                                                        'paths': rm_result['paths'],
                                                                        'message': rm_result['message'],
                                                                        'size': rm_result['size'],
                                                                        'size_readable': humanreadablesize(rm_result['size'])})

-                self._endStorageResourceClaim(otdb_id)
+                if rm_result['deleted']:
+                    self._endStorageResourceClaim(otdb_id=otdb_id, tmss_id=tmss_id)
+
+                if tmss_id is not None:
+                    # annotate the dataproducts in tmss that they are deleted
+                    dataproducts = self._tmss_client.get_subtask_output_dataproducts(tmss_id)
+                    for dp in dataproducts:
+                        self._tmss_client.session.patch(dp['url'], json={'deleted_since': datetime.utcnow().isoformat()})

                 return rm_result

         return {'deleted': False, 'message': path_result['message']}

-    def _endStorageResourceClaim(self, otdb_id):
+    def _endStorageResourceClaim(self, otdb_id=None, tmss_id=None):
         try:
             #check if all data has actually been removed,
             #and adjust end time of claim on storage
-            path_result = self.path_resolver.getPathForOTDBId(otdb_id)
+            path_result = self.path_resolver.getPathForTask(otdb_id=otdb_id, tmss_id=tmss_id)

             if path_result['found']:
                 path = path_result['path']
@@ -237,7 +303,7 @@ class CleanupHandler(ServiceMessageHandler):
                 radbrpc = self.path_resolver.radbrpc
                 storage_resources = radbrpc.getResources(resource_types='storage')
                 cep4_storage_resource = next(x for x in storage_resources if 'CEP4' in x['name'])
-                task = radbrpc.getTask(otdb_id=otdb_id)
+                task = radbrpc.getTask(otdb_id=otdb_id, tmss_id=tmss_id)
                 if task:
                     claims = radbrpc.getResourceClaims(task_ids=task['id'], resource_type='storage')
                     cep4_storage_claim_ids = [c['id'] for c in claims if c['resource_id'] == cep4_storage_resource['id']]
@@ -341,18 +407,161 @@ class CleanupHandler(ServiceMessageHandler):
                                  'message': 'Failed to delete (part of) %s' % path,
                                  'path': path }
-
-
-
-def createService(exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER,
-                  mountpoint=CEP4_DATA_MOUNTPOINT):
+def create_rpc_service(exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER,
+                       mountpoint=CEP4_DATA_MOUNTPOINT,
+                       tmss_dbcreds_id: str=None):
     return RPCService(DEFAULT_CLEANUP_SERVICENAME,
                       handler_type=CleanupHandler,
-                      handler_kwargs={'mountpoint': mountpoint},
+                      handler_kwargs={'mountpoint': 
mountpoint,
+                                      'tmss_dbcreds_id': tmss_dbcreds_id},
                       exchange=exchange,
                       broker=broker,
                       num_threads=4)
+
+class TMSSEventMessageHandlerForCleanup(TMSSEventMessageHandler):
+    def __init__(self, tmss_dbcreds_id: str="TMSSClient", exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER):
+        super().__init__(log_event_messages=False)
+        self._tmss_client = TMSSsession.create_from_dbcreds_for_ldap(tmss_dbcreds_id)
+        self._cleanup_rpc = CleanupRPC.create(exchange=exchange, broker=broker)
+
+    def start_handling(self):
+        self._cleanup_rpc.open()
+        self._tmss_client.open()
+        super().start_handling()
+
+    def stop_handling(self):
+        super().stop_handling()
+        self._tmss_client.close()
+        self._cleanup_rpc.close()
+
+    def onSubTaskStatusChanged(self, id: int, status: str):
+        logger.info("onSubTaskStatusChanged: id=%s status=%s", id, status)
+
+        if status in ('scheduled', 'queued', 'started', 'finished'):
+            subtask = self._tmss_client.get_subtask(id)
+
+            if subtask['subtask_type'] == 'cleanup':
+                if status == 'scheduled':
+                    # a scheduled cleanup subtask should "just be startable",
+                    # but we also need to check if the dataproducts are ingested.
+                    # So, we change the state to queued,
+                    # as a result this method onSubTaskStatusChanged will be called again for the queued status,
+                    # and we can check the prerequisites before starting it
+                    self.queue_cleanup_subtask_if_prerequisites_met(subtask)
+                elif status == 'queued':
+                    self.start_cleanup_subtask_if_prerequisites_met(subtask)
+                elif status == 'started':
+                    self.run_cleanup_subtask_if_prerequisites_met(subtask)
+
+            elif subtask['subtask_type'] == 'ingest':
+                if status == 'finished':
+                    # when an ingest subtask finishes, then it is safe for the related cleanup subtask(s) to be started
+                    subtasks = self._tmss_client.get_subtasks_in_same_scheduling_unit(subtask)
+                    cleanup_subtasks = [s for s in subtasks if s['subtask_type'] == 'cleanup' and s['state_value']=='scheduled']
+                    for cleanup_subtask in cleanup_subtasks:
+                        self.queue_cleanup_subtask_if_prerequisites_met(cleanup_subtask)
+
+    def queue_cleanup_subtask_if_prerequisites_met(self, subtask: dict):
+        logger.debug("queue_cleanup_subtask_if_prerequisites_met: subtask id=%s type=%s status=%s", subtask['id'], subtask['subtask_type'], subtask['state_value'])
+
+        # check prerequisites
+        if subtask['subtask_type'] != 'cleanup':
+            # skip non-cleanup subtasks
+            return
+
+        if subtask['state_value'] != 'scheduled':
+            # skip cleanup subtasks which are not scheduled
+            return
+
+        # when an ingest subtask finishes, then it is safe for the related cleanup subtask(s) to be started
+        subtasks = self._tmss_client.get_subtasks_in_same_scheduling_unit(subtask)
+        ingest_subtasks = [s for s in subtasks if s['subtask_type'] == 'ingest']
+        unfinished_ingest_subtasks = [s for s in ingest_subtasks if s['state_value'] != 'finished']
+
+        if len(unfinished_ingest_subtasks) > 0:
+            logger.info("cleanup subtask id=%s is scheduled, but waiting for ingest id=%s to finish before queueing the cleanup subtask...",
+                        subtask['id'], [s['id'] for s in unfinished_ingest_subtasks])
+            return
+
+        logger.info("cleanup subtask id=%s is scheduled, and all ingest subtasks id=%s are finished. queueing the cleanup subtask...",
+                    subtask['id'], [s['id'] for s in ingest_subtasks])
+
+        self._tmss_client.set_subtask_status(subtask['id'], 'queueing')
+        self._tmss_client.set_subtask_status(subtask['id'], 'queued')
+        # as a result of setting the queued state, start_cleanup_subtask_if_prerequisites_met is called in onSubTaskStatusChanged
+
+
+    def start_cleanup_subtask_if_prerequisites_met(self, subtask: dict):
+        logger.debug("start_cleanup_subtask_if_prerequisites_met: subtask id=%s type=%s status=%s", subtask['id'], subtask['subtask_type'], subtask['state_value'])
+
+        # check prerequisites
+        if subtask['subtask_type'] != 'cleanup':
+            # skip non-cleanup subtasks
+            return
+
+        if subtask['state_value'] != 'queued':
+            # skip cleanup subtasks which are not queued
+            return
+
+        # prerequisites are met. Proceed.
+        logger.info("starting cleanup subtask id=%s...", subtask['id'])
+        self._tmss_client.set_subtask_status(subtask['id'], 'starting')
+        self._tmss_client.set_subtask_status(subtask['id'], 'started')
+
+        predecessors = self._tmss_client.get_subtask_predecessors(subtask['id'])
+        results = []
+
+        for predecessor in predecessors:
+            logger.info("cleanup subtask id=%s removing output data for subtask id=%s ...", subtask['id'], predecessor['id'])
+            result = self._cleanup_rpc.removeTaskData(tmss_id=predecessor['id'])
+            results.append(result)
+            logger.info("cleanup subtask id=%s: %s", subtask['id'], result.get('message',""))
+
+        if any([not r['deleted'] for r in results]):
+            self._tmss_client.set_subtask_status(subtask['id'], 'error')
+        else:
+            self._tmss_client.set_subtask_status(subtask['id'], 'finishing')
+            self._tmss_client.set_subtask_status(subtask['id'], 'finished')
+
+    def run_cleanup_subtask_if_prerequisites_met(self, subtask: dict):
+        logger.debug("run_cleanup_subtask_if_prerequisites_met: subtask id=%s type=%s status=%s", subtask['id'], subtask['subtask_type'], subtask['state_value'])
+
+        # check prerequisites
+        if subtask['subtask_type'] != 'cleanup':
+            # skip non-cleanup subtasks
+            return
+
+        if subtask['state_value'] != 'started':
+            # skip cleanup subtasks which are not started
+            return
+
+        # prerequisites are met. Proceed.
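+        # note: performs the same data removal as start_cleanup_subtask_if_prerequisites_met, but for a cleanup subtask which is already in the 'started' state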
+ logger.info("running cleanup subtask id=%s...", subtask['id']) + + predecessors = self._tmss_client.get_subtask_predecessors(subtask['id']) + results = [] + + for predecessor in predecessors: + logger.info("cleanup subtask id=%s removing output data for subtask id=%s ...", subtask['id'], predecessor['id']) + result = self._cleanup_rpc.removeTaskData(tmss_id=predecessor['id']) + results.append(result) + logger.info("cleanup subtask id=%s: %s", subtask['id'], result.get('message',"")) + + if any([not r['deleted'] for r in results]): + self._tmss_client.set_subtask_status(subtask['id'], 'error') + else: + self._tmss_client.set_subtask_status(subtask['id'], 'finishing') + self._tmss_client.set_subtask_status(subtask['id'], 'finished') + +def create_tmss_buslistener(exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER, tmss_dbcreds_id: str="TMSSClient"): + return TMSSBusListener(handler_type=TMSSEventMessageHandlerForCleanup, + handler_kwargs={'tmss_dbcreds_id': tmss_dbcreds_id, + 'exchange': exchange, + 'broker': broker}, + exchange=exchange, broker=broker) + + def main(): # make sure we run in UTC timezone import os @@ -366,15 +575,16 @@ def main(): parser.add_option("-e", "--exchange", dest="exchange", type="string", default=DEFAULT_BUSNAME, help="Name of the bus exchange on the broker, [default: %default]") parser.add_option("--mountpoint", dest="mountpoint", type="string", default=CEP4_DATA_MOUNTPOINT, help="path of local cep4 mount point, default: %default") + parser.add_option('-R', '--rest_credentials', dest='rest_credentials', type='string', default='TMSSClient', help='TMSS django REST API credentials name, default: %default') parser.add_option('-V', '--verbose', dest='verbose', action='store_true', help='verbose logging') (options, args) = parser.parse_args() logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG if options.verbose else logging.INFO) - with createService(exchange=options.exchange, - broker=options.broker): - waitForInterrupt() + with create_rpc_service(exchange=options.exchange, broker=options.broker, tmss_dbcreds_id=options.rest_credentials, mountpoint=options.mountpoint): + with create_tmss_buslistener(exchange=options.exchange, broker=options.broker, tmss_dbcreds_id=options.rest_credentials): + waitForInterrupt() if __name__ == '__main__': main() diff --git a/SAS/DataManagement/Cleanup/CleanupService/test/CMakeLists.txt b/SAS/DataManagement/Cleanup/CleanupService/test/CMakeLists.txt index 70fad49899886146924be3dc84ad2081fb47209b..f1124d403bd3dbe7c483dee67f49f41f9a7866a9 100644 --- a/SAS/DataManagement/Cleanup/CleanupService/test/CMakeLists.txt +++ b/SAS/DataManagement/Cleanup/CleanupService/test/CMakeLists.txt @@ -3,3 +3,8 @@ include(LofarCTest) lofar_add_test(test_cleanup_service_and_rpc) +IF(BUILD_TMSSBackend) + lofar_add_test(t_cleanup_tmss_integration_test) +ELSE() + message(WARNING "Skipping t_cleanup_tmss_integration_test because it depends on the TMSSBackend package which is not included in the build") +ENDIF(BUILD_TMSSBackend) diff --git a/SAS/DataManagement/Cleanup/CleanupService/test/t_cleanup_tmss_integration_test.py b/SAS/DataManagement/Cleanup/CleanupService/test/t_cleanup_tmss_integration_test.py new file mode 100755 index 0000000000000000000000000000000000000000..fc33cc56e106133760f46e89aa2a64374b6febe2 --- /dev/null +++ b/SAS/DataManagement/Cleanup/CleanupService/test/t_cleanup_tmss_integration_test.py @@ -0,0 +1,175 @@ +#!/usr/bin/env python3 + +import unittest + +import logging +logger = 
logging.getLogger('lofar.'+__name__) +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + +from lofar.messaging.messagebus import TemporaryExchange, BusListenerJanitor +from lofar.common.test_utils import integration_test + +from datetime import datetime +from uuid import uuid4 +import threading +import os +from unittest import mock + +@integration_test +class TestCleanupTMSSIntegration(unittest.TestCase): + def setUp(self) -> None: + self.TEST_DIR = '/tmp/cleanup_tmss_integration_test/' + str(uuid4()) + os.makedirs(self.TEST_DIR) + + # mockpatch the ssh calls which are issued from the cleanup subtask normally to cep4. + # in this test we just keep the original command without the ssh + ssh_cmd_list_patcher1 = mock.patch('lofar.common.ssh_utils.ssh_cmd_list') + self.addCleanup(ssh_cmd_list_patcher1.stop) + self.ssh_cmd_list_mock1 = ssh_cmd_list_patcher1.start() + self.ssh_cmd_list_mock1.side_effect = lambda host, user: [] + + def tearDown(self) -> None: + import shutil + shutil.rmtree(self.TEST_DIR, ignore_errors=True) + + def test(self): + with TemporaryExchange("TestCleanupTMSSIntegration") as tmp_exchange: + # override DEFAULT_BUSNAME + import lofar + lofar.messaging.config.DEFAULT_BUSNAME = tmp_exchange.address + + from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment + with TMSSTestEnvironment(exchange=tmp_exchange.address, + populate_schemas=True, start_ra_test_environment=True, start_postgres_listener=True, + populate_test_data=False, enable_viewflow=False, start_dynamic_scheduler=False, + start_subtask_scheduler=True, start_workflow_service=False) as tmss_test_env: + + from lofar.sas.tmss.tmss.tmssapp import models + from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprints_and_subtasks_from_scheduling_unit_draft, schedule_independent_subtasks_in_scheduling_unit_blueprint + from lofar.sas.tmss.test.test_utils import create_scheduling_unit_blueprint_simulator + from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingUnitDraft_test_data, SchedulingSet_test_data + from lofar.common.json_utils import add_defaults_to_json_object_for_schema + from lofar.sas.tmss.client.tmssbuslistener import TMSSEventMessageHandler, TMSSBusListener + + scheduling_set = models.SchedulingSet.objects.create(**SchedulingSet_test_data()) + scheduling_set.project.auto_ingest = False # for user granting permission (in this test the simulator does that for us) + scheduling_set.project.save() + + strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="Short Test Observation - Pipeline - Ingest") + scheduling_unit_spec = add_defaults_to_json_object_for_schema(strategy_template.template, strategy_template.scheduling_unit_template.schema) + scheduling_unit_spec['tasks']['Observation']['specifications_doc']['SAPs'][0]['subbands'] = [0,1] #limit nr of subbands for readability + scheduling_unit_spec['tasks']['Observation']['specifications_doc']['QA']['plots']['enabled'] = False + scheduling_unit_spec['tasks']['Observation']['specifications_doc']['QA']['file_conversion']['enabled'] = False + + scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data(template=strategy_template.scheduling_unit_template, + requirements_doc=scheduling_unit_spec, + scheduling_set=scheduling_set)) + + scheduling_unit = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + schedule_independent_subtasks_in_scheduling_unit_blueprint(scheduling_unit, datetime.utcnow()) + + 
# make sure each dataproduct uses TEST_DATA_DIR as root
+                for task in scheduling_unit.task_blueprints.all():
+                    for subtask in task.subtasks.all():
+                        if subtask.state.value == models.SubtaskState.Choices.SCHEDULED.value:
+                            for output_dp in subtask.output_dataproducts.all():
+                                output_dp.directory = output_dp.directory.replace('/data', self.TEST_DIR)
+                                output_dp.save()
+
+                # ensure/check the data dir is empty at the start
+                self.assertEqual([], os.listdir(self.TEST_DIR))
+
+                class TestEventhandler(TMSSEventMessageHandler):
+                    """This test-TMSSEventMessageHandler tracks the interesting subtask status changes and determines
+                    if the dataproducts were first written by the obs/pipeline and then deleted by the cleanuptask"""
+                    def __init__(self, sync_object:{}):
+                        self._sync_object = sync_object
+                        super().__init__()
+
+                    def onSubTaskStatusChanged(self, id: int, status: str):
+                        if status=='starting':
+                            subtask = models.Subtask.objects.get(id=id)
+                            if subtask.specifications_template.type.value == models.SubtaskType.Choices.CLEANUP.value:
+                                logger.info("subtask %s %s starting", id, subtask.specifications_template.type.value)
+
+                                self._sync_object['cleanup_sees_written_files'] = subtask.input_dataproducts.count() > 0 and \
+                                                                                  all(os.path.exists(dp.filepath) and os.path.getsize(dp.filepath) > 0
+                                                                                      for dp in subtask.input_dataproducts.all())
+                        elif status=='finished':
+                            subtask = models.Subtask.objects.get(id=id)
+                            logger.info("subtask %s %s finished", id, subtask.specifications_template.type.value)
+
+                            subtask_did_write_files = all(os.path.exists(dp.filepath) and os.path.getsize(dp.filepath) > 0
+                                                          for dp in subtask.output_dataproducts.all())
+
+                            if subtask.specifications_template.type.value == models.SubtaskType.Choices.OBSERVATION.value:
+                                self._sync_object['observation_did_write_files'] = subtask_did_write_files
+                            elif subtask.specifications_template.type.value == models.SubtaskType.Choices.PIPELINE.value:
+                                self._sync_object['pipeline_did_write_files'] = subtask_did_write_files
+                            elif subtask.specifications_template.type.value == models.SubtaskType.Choices.CLEANUP.value:
+                                self._sync_object['cleanup_deleted_written_files'] = not any(os.path.exists(dp.filepath) and os.path.getsize(dp.filepath) > 0
+                                                                                             for dp in subtask.input_dataproducts.all())
+                                # signal simulator and test-method that we are done
+                                self._sync_object['stop_event'].set()
+
+                # helper object to communicate events/results
+                sync_object = {'observation_did_write_files': False,
+                               'pipeline_did_write_files': False,
+                               'cleanup_sees_written_files': False,
+                               'cleanup_deleted_written_files': False,
+                               'stop_event': threading.Event()}
+
+                with BusListenerJanitor(TMSSBusListener(handler_type=TestEventhandler, exchange=tmp_exchange.address, handler_kwargs={'sync_object': sync_object})):
+                    # start a simulator, forcing the scheduling_unit to "run" the observations, pipelines, ingest....
+                    # and let the cleanup server act on the eventmessages.
+                    # as a result, the scheduling_unit should be finished at the end, and the dataproducts should be "cleaned up"
+
+                    # check that the cleanup task is defined and ready to be used
+                    cleanup_subtask = models.Subtask.objects.get(task_blueprint__scheduling_unit_blueprint__id=scheduling_unit.id, specifications_template__type__value=models.SubtaskType.Choices.CLEANUP.value)
+                    self.assertEqual("defined", cleanup_subtask.state.value)
+
+                    # check that the sync-results are in initial state.
+                    # nobody wrote any files yet, and nothing was deleted yet. 
+ self.assertFalse(sync_object['observation_did_write_files']) + self.assertFalse(sync_object['pipeline_did_write_files']) + self.assertFalse(sync_object['cleanup_sees_written_files']) + self.assertFalse(sync_object['cleanup_deleted_written_files']) + + # start the objects-under-test: the cleanup service + # this service should respond to subtask events, and take care of the cleanup at the right moment. + from lofar.sas.datamanagement.cleanup.service import create_tmss_buslistener, create_rpc_service + with create_rpc_service(exchange=tmp_exchange.address, tmss_dbcreds_id=tmss_test_env.client_credentials.dbcreds_id, mountpoint=self.TEST_DIR): + with create_tmss_buslistener(exchange=tmp_exchange.address, tmss_dbcreds_id=tmss_test_env.client_credentials.dbcreds_id): + # simulate the obs/pipeline/ingest... + # allowing the cleanup service to handle the events and cleanup the obs/pipeline output + with create_scheduling_unit_blueprint_simulator(scheduling_unit.id, + sync_object['stop_event'], + handle_cleanup=False, handle_ingest=True, + handle_observations=True, handle_QA=True, + handle_pipelines=True, + create_output_dataproducts=True, + auto_grant_ingest_permission=True, + delay=0, duration=0, + exchange=tmp_exchange.address): + + # wait until scheduling_unit including the cleanup task is done + # the actual tests are done in the TestEventhandler above, setting their results in the sync_object + self.assertTrue(sync_object['stop_event'].wait(300)) + + # check states + cleanup_subtask.refresh_from_db() + self.assertEqual("finished", cleanup_subtask.state.value) + scheduling_unit.refresh_from_db() + self.assertEqual("finished", scheduling_unit.status) + + # check that the files were written and deleted + self.assertTrue(sync_object['observation_did_write_files']) + self.assertTrue(sync_object['pipeline_did_write_files']) + self.assertTrue(sync_object['cleanup_sees_written_files']) + self.assertTrue(sync_object['cleanup_deleted_written_files']) + +if __name__ == '__main__': + unittest.main() diff --git a/SAS/DataManagement/Cleanup/CleanupService/test/t_cleanup_tmss_integration_test.run b/SAS/DataManagement/Cleanup/CleanupService/test/t_cleanup_tmss_integration_test.run new file mode 100755 index 0000000000000000000000000000000000000000..909e0b819d34e37e6205d6369c8cb0df1107436d --- /dev/null +++ b/SAS/DataManagement/Cleanup/CleanupService/test/t_cleanup_tmss_integration_test.run @@ -0,0 +1,4 @@ +#!/bin/bash + +python3 t_cleanup_tmss_integration_test.py + diff --git a/SAS/DataManagement/Cleanup/CleanupService/test/t_cleanup_tmss_integration_test.sh b/SAS/DataManagement/Cleanup/CleanupService/test/t_cleanup_tmss_integration_test.sh new file mode 100755 index 0000000000000000000000000000000000000000..413a9673c1dba3c644bc04b2badeac2f5e7c8094 --- /dev/null +++ b/SAS/DataManagement/Cleanup/CleanupService/test/t_cleanup_tmss_integration_test.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_cleanup_tmss_integration_test diff --git a/SAS/DataManagement/Cleanup/CleanupService/test/test_cleanup_service_and_rpc.py b/SAS/DataManagement/Cleanup/CleanupService/test/test_cleanup_service_and_rpc.py index 5e28031cec8909f04098279fd098750f79c9a1c6..0142d880842209912b03eeb3f0c4f4fe850d1e67 100755 --- a/SAS/DataManagement/Cleanup/CleanupService/test/test_cleanup_service_and_rpc.py +++ b/SAS/DataManagement/Cleanup/CleanupService/test/test_cleanup_service_and_rpc.py @@ -41,7 +41,7 @@ class CleanupServiceRpcTest(unittest.TestCase): #mock_momrpc.getObjectDetails.return_value = {'1000042': {'project_name': 'my_project'}} 
## now that we have a mocked the external dependencies, import cleanupservice - #from lofar.sas.datamanagement.cleanup.service import createService + #from lofar.sas.datamanagement.cleanup.service import create_rpc_service #from lofar.sas.datamanagement.cleanup.rpc import CleanupRPC #class TestCleanupServiceAndRPC(unittest.TestCase): @@ -102,7 +102,7 @@ class CleanupServiceRpcTest(unittest.TestCase): #self.assertTrue('Could not find task' in result['message']) ## create and run the service - #with createService(busname=busname): + #with create_rpc_service(busname=busname): ## and run all tests #unittest.main() diff --git a/SAS/DataManagement/DataManagementCommon/CMakeLists.txt b/SAS/DataManagement/DataManagementCommon/CMakeLists.txt index 5c160faa9b105d0325130a1f10e2f6ff86b433e5..5e0c0554e1ef45dcdab16bcbcda2d331c336a8b3 100644 --- a/SAS/DataManagement/DataManagementCommon/CMakeLists.txt +++ b/SAS/DataManagement/DataManagementCommon/CMakeLists.txt @@ -1,6 +1,6 @@ # $Id$ -lofar_package(DataManagementCommon 1.0 DEPENDS PyMessaging ResourceAssignmentService MoMQueryServiceClient) +lofar_package(DataManagementCommon 1.0 DEPENDS PyMessaging ResourceAssignmentService MoMQueryServiceClient TMSSClient) lofar_find_package(Python 3.4 REQUIRED) include(PythonInstall) diff --git a/SAS/DataManagement/DataManagementCommon/getPathForTask b/SAS/DataManagement/DataManagementCommon/getPathForTask old mode 100644 new mode 100755 diff --git a/SAS/DataManagement/DataManagementCommon/path.py b/SAS/DataManagement/DataManagementCommon/path.py index 36c15d93513d97b9ce8310cc47c5196370ad50a3..6bdcae38744c1420eaa9799a3a40ac6df0d13af8 100644 --- a/SAS/DataManagement/DataManagementCommon/path.py +++ b/SAS/DataManagement/DataManagementCommon/path.py @@ -17,6 +17,7 @@ from lofar.sas.datamanagement.common.config import CEP4_DATA_MOUNTPOINT from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC from lofar.mom.momqueryservice.momqueryrpc import MoMQueryRPC +from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession logger = logging.getLogger(__name__) @@ -24,7 +25,8 @@ class PathResolver: def __init__(self, mountpoint=CEP4_DATA_MOUNTPOINT, exchange=DEFAULT_BUSNAME, - broker=DEFAULT_BROKER): + broker=DEFAULT_BROKER, + tmss_dbcreds_id: str=None): self.mountpoint = mountpoint self.projects_path = os.path.join(self.mountpoint, 'projects' if isProductionEnvironment() else 'test-projects') @@ -33,14 +35,17 @@ class PathResolver: self.radbrpc = RADBRPC.create(exchange=exchange, broker=broker) self.momrpc = MoMQueryRPC.create(exchange=exchange, broker=broker) + self._tmss_client = TMSSsession.create_from_dbcreds_for_ldap(tmss_dbcreds_id) def open(self): self.radbrpc.open() self.momrpc.open() + self._tmss_client.open() def close(self): self.radbrpc.close() self.momrpc.close() + self._tmss_client.close() def __enter__(self): self.open() @@ -61,47 +66,76 @@ class PathResolver: logger.debug("Get path for otdb_id %s" % (otdb_id,)) return self.getPathForTask(otdb_id=otdb_id) - def getPathForTask(self, radb_id=None, mom_id=None, otdb_id=None, include_scratch_paths=True): - logger.info("getPathForTask(radb_id=%s, mom_id=%s, otdb_id=%s)", radb_id, mom_id, otdb_id) - '''get the path for a task for either the given radb_id, or for the given mom_id, or for the given otdb_id''' - result = self._getProjectPathAndDetails(radb_id=radb_id, mom_id=mom_id, otdb_id=otdb_id) + def getPathForTMSSId(self, tmss_id): + logger.debug("Get path for tmss_id %s" % (tmss_id,)) + return self.getPathForTask(tmss_id=tmss_id) + + def 
getPathForTask(self, radb_id=None, mom_id=None, otdb_id=None, tmss_id=None, include_scratch_paths=True): + logger.info("getPathForTask(radb_id=%s, mom_id=%s, otdb_id=%s, tmss_id=%s)", radb_id, mom_id, otdb_id, tmss_id) + '''get the path for a task for either the given radb_id, or for the given mom_id, or for the given otdb_id, or for the given tmss_id''' + result = self._getProjectPathAndDetails(radb_id=radb_id, mom_id=mom_id, otdb_id=otdb_id, tmss_id=tmss_id) if result['found']: project_path = result['path'] - task = result['task'] - task_data_path = os.path.join(project_path, 'L%s' % task['otdb_id']) - logger.info("constructed path '%s' for otdb_id=%s mom_id=%s radb_id=%s" % (task_data_path, task['otdb_id'], task['mom_id'], task['id'])) - path_result = {'found': True, 'message': '', 'path': task_data_path, - 'radb_id': task.get('id'), 'mom_id': task.get('mom_id'), 'otdb_id': task.get('otdb_id')} + if 'task' in result: + task = result['task'] + task_data_path = os.path.join(project_path, 'L%s' % task['otdb_id']) + elif tmss_id is not None: + task_data_path = os.path.join(project_path, 'L%s' % tmss_id) + else: + task_data_path = None - if include_scratch_paths and task['type'] == 'pipeline': - path_result['scratch_paths'] = [] + path_result = {'found': task_data_path is not None, 'message': '', 'path': task_data_path, + 'radb_id': radb_id, 'mom_id': mom_id, 'otdb_id': otdb_id, 'tmss_id': tmss_id} - scratch_path = os.path.join(self.scratch_path, 'Observation%s' % task['otdb_id']) - share_path = os.path.join(self.share_path, 'Observation%s' % task['otdb_id']) - logger.info("Checking scratch paths %s %s for otdb_id=%s mom_id=%s radb_id=%s" % (scratch_path, share_path, task['otdb_id'], task['mom_id'], task['id'])) + logger.info("constructed path '%s' for otdb_id=%s mom_id=%s radb_id=%s tmss_id=%s" % (task_data_path, otdb_id, mom_id, radb_id, tmss_id)) + + if include_scratch_paths: + path_result['scratch_paths'] = [] - if self.pathExists(scratch_path): - path_result['scratch_paths'].append(scratch_path) + if 'task' in result and task['type'] == 'pipeline': + task = result['task'] + path_result['scratch_paths'].append(os.path.join(self.scratch_path, 'Observation%s' % task['otdb_id'])) + path_result['scratch_paths'].append(os.path.join(self.share_path, 'Observation%s' % task['otdb_id'])) + elif tmss_id is not None: + subtask = self._tmss_client.get_subtask(tmss_id) + if subtask['subtask_type'].lower() == 'pipeline': + path_result['scratch_paths'].append(os.path.join(self.scratch_path, 'Observation%s' % tmss_id)) + path_result['scratch_paths'].append(os.path.join(self.share_path, 'Observation%s' % tmss_id)) - if self.pathExists(share_path): - path_result['scratch_paths'].append(share_path) + logger.info("Checking scratch paths %s for otdb_id=%s mom_id=%s radb_id=%s tmss_id=%s" % (path_result['scratch_paths'], otdb_id, mom_id, radb_id, tmss_id)) + path_result['scratch_paths'] = [path for path in path_result['scratch_paths'] if self.pathExists(path)] - logger.info("result for getPathForTask(radb_id=%s, mom_id=%s, otdb_id=%s): %s", radb_id, mom_id, otdb_id, path_result) + logger.info("result for getPathForTask(radb_id=%s, mom_id=%s, otdb_id=%s, tmss_id=%s): %s", radb_id, mom_id, otdb_id, tmss_id, path_result) return path_result result = {'found': False, 'message': result.get('message', ''), 'path': '', - 'radb_id': radb_id, 'mom_id': mom_id, 'otdb_id': otdb_id} - logger.warn("result for getPathForTask(radb_id=%s, mom_id=%s, otdb_id=%s): %s", radb_id, mom_id, otdb_id, result) + 'radb_id': radb_id, 
'mom_id': mom_id, 'otdb_id': otdb_id, 'tmss_id': tmss_id} + logger.warning("result for getPathForTask(radb_id=%s, mom_id=%s, otdb_id=%s, tmss_id=%s): %s", radb_id, mom_id, otdb_id, tmss_id, result) return result - def _getProjectPathAndDetails(self, radb_id=None, mom_id=None, otdb_id=None): - '''get the project path and details of a task for either the given radb_id, or for the given mom_id, or for the given otdb_id''' - ids = [radb_id, mom_id, otdb_id] + def _getProjectPathAndDetails(self, radb_id=None, mom_id=None, otdb_id=None, tmss_id=None): + '''get the project path and details of a task for either the given radb_id, or for the given mom_id, or for the given otdb_id, or for the given tmss_id''' + ids = [radb_id, mom_id, otdb_id, tmss_id] validIds = [x for x in ids if x != None and isinstance(x, int)] if len(validIds) != 1: - raise KeyError("Provide one and only one id: radb_id=%s, mom_id=%s, otdb_id=%s" % (radb_id, mom_id, otdb_id)) + raise KeyError("Provide one and only one id: radb_id=%s, mom_id=%s, otdb_id=%s, tmss_id=%s" % (radb_id, mom_id, otdb_id, tmss_id)) + + if tmss_id is not None: + output_dataproducts = self._tmss_client.get_subtask_output_dataproducts(tmss_id) + directories = set([dp['directory'] for dp in output_dataproducts]) + subtask_dir_name = 'L%s' % (tmss_id,) + # extract the project path + project_paths = [dir[:dir.find(subtask_dir_name)] for dir in directories] + + if len(project_paths) != 1: + message = "Could not determine project path for tmss_id=%s" % (tmss_id,) + logger.error(message) + return {'found': False, 'message': message, 'path': None} + + project_path = project_paths[0] + return {'found': True, 'path': project_path} task = self.radbrpc.getTask(id=radb_id, mom_id=mom_id, otdb_id=otdb_id) @@ -125,22 +159,24 @@ class PathResolver: project_path = os.path.join(self.projects_path, "_".join(project_name.split())) return {'found': True, 'path': project_path, 'mom_details':mom_details, 'task':task} - def getProjectPath(self, radb_id=None, mom_id=None, otdb_id=None): - result = self._getProjectPathAndDetails(radb_id=radb_id, mom_id=mom_id, otdb_id=otdb_id) + def getProjectPath(self, radb_id=None, mom_id=None, otdb_id=None, tmss_id=None): + result = self._getProjectPathAndDetails(radb_id=radb_id, mom_id=mom_id, otdb_id=otdb_id, tmss_id=tmss_id) if result['found']: - del result['mom_details'] - del result['task'] + if 'mom_details' in result: + del result['mom_details'] + if 'task' in result: + del result['task'] return result - def getProjectDirAndSubDirectories(self, radb_id=None, mom_id=None, otdb_id=None, project_name=None): + def getProjectDirAndSubDirectories(self, radb_id=None, mom_id=None, otdb_id=None, tmss_id=None, project_name=None): '''get the project directory and its subdirectories of either the project_name, or the task's project for either the given radb_id, or for the given mom_id, or for the given otdb_id''' if project_name: project_path = os.path.join(self.projects_path, "_".join(project_name.split())) return self.getSubDirectories(project_path) - result = self.getProjectPath(radb_id=radb_id, mom_id=mom_id, otdb_id=otdb_id) + result = self.getProjectPath(radb_id=radb_id, mom_id=mom_id, otdb_id=otdb_id, tmss_id=tmss_id) if result['found']: return self.getSubDirectories(result['path']) return result @@ -154,8 +190,11 @@ class PathResolver: def getSubDirectoriesForRADBId(self, radb_id): return self.getSubDirectoriesForTask(radb_id=radb_id) - def getSubDirectoriesForTask(self, radb_id=None, mom_id=None, otdb_id=None): - result = 
self.getPathForTask(radb_id=radb_id, mom_id=mom_id, otdb_id=otdb_id) + def getSubDirectoriesForTMSSId(self, tmss_id): + return self.getSubDirectoriesForTask(tmss_id=tmss_id) + + def getSubDirectoriesForTask(self, radb_id=None, mom_id=None, otdb_id=None, tmss_id=None): + result = self.getPathForTask(radb_id=radb_id, mom_id=mom_id, otdb_id=otdb_id, tmss_id=tmss_id) if result['found']: return self.getSubDirectories(result['path']) return result @@ -214,13 +253,15 @@ def main(): parser.add_option('-o', '--otdb_id', dest='otdb_id', type='int', default=None, help='otdb_id of task to get the path for') parser.add_option('-m', '--mom_id', dest='mom_id', type='int', default=None, help='mom_id of task to get the path for') parser.add_option('-r', '--radb_id', dest='radb_id', type='int', default=None, help='radb_id of task to get the path for') + parser.add_option('-t', '--tmss_id', dest='tmss_id', type='int', default=None, help='tmss_id of the TMSS subtask to get the path for') parser.add_option('-q', '--broker', dest='broker', type='string', default=DEFAULT_BROKER, help='Address of the broker, default: localhost') parser.add_option("--mountpoint", dest="mountpoint", type="string", default=CEP4_DATA_MOUNTPOINT, help="path of local cep4 mount point, default: %default") parser.add_option("--exchange", dest="exchange", type="string", default=DEFAULT_BUSNAME, help="Name of the exchange on which the services listen, default: %default") + parser.add_option('-R', '--rest_credentials', dest='rest_credentials', type='string', default='TMSSClient', help='TMSS django REST API credentials name, default: %default') parser.add_option('-V', '--verbose', dest='verbose', action='store_true', help='verbose logging') (options, args) = parser.parse_args() - if not (options.otdb_id or options.mom_id or options.radb_id): + if not (options.otdb_id or options.mom_id or options.radb_id or options.tmss_id): parser.print_help() exit(1) @@ -230,7 +271,7 @@ def main(): with PathResolver(exchange=options.exchange, broker=options.broker) as path_resolver: if options.path: - result = path_resolver.getPathForTask(otdb_id=options.otdb_id, mom_id=options.mom_id, radb_id=options.radb_id) + result = path_resolver.getPathForTask(otdb_id=options.otdb_id, mom_id=options.mom_id, radb_id=options.radb_id, tmss_id=options.tmss_id) if result['found']: print("path: %s" % (result['path'])) else: @@ -238,7 +279,7 @@ def main(): exit(1) if options.project: - result = path_resolver.getProjectDirAndSubDirectories(otdb_id=options.otdb_id, mom_id=options.mom_id, radb_id=options.radb_id) + result = path_resolver.getProjectDirAndSubDirectories(otdb_id=options.otdb_id, mom_id=options.mom_id, radb_id=options.radb_id, tmss_id=options.tmss_id) if result['found']: print("projectpath: %s" % (result['path'])) print("subdirectories: %s" % (' '.join(result['sub_directories']))) @@ -247,7 +288,7 @@ def main(): exit(1) if options.subdirs: - result = path_resolver.getSubDirectoriesForTask(otdb_id=options.otdb_id, mom_id=options.mom_id, radb_id=options.radb_id) + result = path_resolver.getSubDirectoriesForTask(otdb_id=options.otdb_id, mom_id=options.mom_id, radb_id=options.radb_id, tmss_id=options.tmss_id) if result['found']: print("path: %s" % (result['path'])) print("subdirectories: %s" % (' '.join(result['sub_directories']))) diff --git a/SAS/TMSS/backend/services/scheduling/lib/subtask_scheduling.py b/SAS/TMSS/backend/services/scheduling/lib/subtask_scheduling.py index d6909516fe3c6bf2417c382ec7f1322923b8744c..4ca2887f4bc7ce9c82fa6068964db11081cb4e85 100644 --- 
a/SAS/TMSS/backend/services/scheduling/lib/subtask_scheduling.py +++ b/SAS/TMSS/backend/services/scheduling/lib/subtask_scheduling.py @@ -96,8 +96,9 @@ class TMSSSubTaskSchedulingEventMessageHandler(TMSSEventMessageHandler): if subtask['state_value'] == 'defined': subtask_template = self.tmss_client.get_url_as_json_object(subtask['specifications_template']) if subtask_template['type_value'] == 'ingest': - logger.info("trying to schedule ingest subtask id=%s for scheduling_unit_blueprint id=%s...", subtask['id'], id) - self.tmss_client.schedule_subtask(subtask['id']) + if all(pred['state_value'] == 'finished' for pred in self.tmss_client.get_subtask_predecessors(subtask['id'])): + logger.info("trying to schedule ingest subtask id=%s for scheduling_unit_blueprint id=%s...", subtask['id'], id) + self.tmss_client.schedule_subtask(subtask['id']) def create_subtask_scheduling_service(exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER, tmss_client_credentials_id: str=None): diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/reports.py b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/reports.py index 43a699ad5c8e4529d3e711cc46248132dba3cb13..0c0684ea018102e796393bcdf1bafe2bcd6f9456 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/reports.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/reports.py @@ -12,7 +12,7 @@ def create_project_report(request: Request, project: models.Project) -> {}: """ result = {'project': project.pk} result['quota'] = _get_quotas_from_project(request, project.pk) - result['durations'] = _get_subs_and_durations_from_project(project.pk) + result['SUBs'], result['durations'] = _get_subs_and_durations_from_project(project.pk) result['LTA dataproducts'] = _get_lta_dataproducts(project.name) result['SAPs'] = _get_saps(project.pk) @@ -29,7 +29,7 @@ def _get_quotas_from_project(request: Request, project_pk: int) -> []: return quotas -def _get_subs_and_durations_from_project(project_pk: int) -> {}: +def _get_subs_and_durations_from_project(project_pk: int) -> ({}, {}): """ Help function to retrieve durations and scheduling_units distinguished by success/fail. 
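+    Returns a 2-tuple (subs, durations): 'subs' holds the 'finished' and 'failed' scheduling unit blueprints, 'durations' the aggregated 'total', 'total_succeeded', 'total_not_cancelled' and 'total_failed' durations (reported in seconds).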
""" @@ -52,9 +52,10 @@ def _get_subs_and_durations_from_project(project_pk: int) -> {}: total_not_cancelled = total_duration - total_failed_duration # Calculate not_cancelled duration durations = {'total': total_duration.total_seconds(), 'total_succeeded': total_succeeded_duration.total_seconds(), - 'total_not_cancelled': total_not_cancelled.total_seconds(), 'total_failed': total_failed_duration.total_seconds(), - 'scheduling_unit_blueprints_finished': subs_succeeded, 'scheduling_unit_blueprints_failed': subs_failed} - return durations + 'total_not_cancelled': total_not_cancelled.total_seconds(), 'total_failed': total_failed_duration.total_seconds()} + subs = {'finished': subs_succeeded, 'failed': subs_failed} + + return subs, durations def _get_lta_dataproducts(project_name: str) -> {}: diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py index 570e8b9c7663a7c5878bdaf89f154235cf04e1a9..f647a9a9caada1b2c7b4e8a044ce5e15a6d22619 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/adapters/sip.py @@ -193,7 +193,7 @@ def create_sip_representation_for_subtask(subtask: Subtask): frequencyintegrationstep=spec['demixer']['frequency_steps'] if spec['demixer']['enabled'] else 1, timeintegrationstep=spec['demixer']['time_steps'] if spec['demixer']['enabled'] else 1, flagautocorrelations=spec['preflagger1']['enabled'] and spec['preflagger1']['corrtype'] == 'auto', - demixing=spec['demixer']['enabled'] and (spec['demixer']['demix_always'] or spec['demixer']['demix_if_needed']) + demixing=spec['demixer']['enabled'] and (len(spec['demixer']['demix_always']) > 0 or len(spec['demixer']['demix_if_needed']) > 0) ) # todo: distinguish and create other pipeline types. Probably most of these can be filled in over time as needed, # but they are not required for UC1. Here are stubs to start from for the other types the LTA supports: diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py index 8cbbf14c8d50013c84e344d5459036911854d707..72d9c2a2c79fd4483bbc8201aa6561bb2848073e 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 3.0.9 on 2021-04-14 07:47 +# Generated by Django 3.0.9 on 2021-04-08 14:57 from django.conf import settings import django.contrib.postgres.fields @@ -124,6 +124,7 @@ class Migration(migrations.Migration): ('size', models.BigIntegerField(help_text='Dataproduct size, in bytes. Used for accounting purposes. 
NULL if size is (yet) unknown (NULLable).', null=True)), ('feedback_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Dataproduct properties, as reported by the producing process.')), ], + bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin), ), migrations.CreateModel( name='DataproductArchiveInfo', @@ -472,6 +473,7 @@ class Migration(migrations.Migration): options={ 'abstract': False, }, + bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin), ), migrations.CreateModel( name='ReservationStrategyTemplate', @@ -539,6 +541,7 @@ class Migration(migrations.Migration): options={ 'abstract': False, }, + bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin), ), migrations.CreateModel( name='SAPTemplate', @@ -595,6 +598,7 @@ class Migration(migrations.Migration): options={ 'abstract': False, }, + bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin), ), migrations.CreateModel( name='SchedulingUnitBlueprint', @@ -609,7 +613,6 @@ class Migration(migrations.Migration): ('do_cancel', models.BooleanField()), ('ingest_permission_required', models.BooleanField(default=False, help_text='Explicit permission is needed before the task.')), ('ingest_permission_granted_since', models.DateTimeField(help_text='The moment when ingest permission was granted.', null=True)), - ('output_data_allowed_to_be_ingested', models.BooleanField(default=False, help_text='boolean (default FALSE), which blocks Ingest Tasks from starting if OFF. When toggled ON, backend must scan for startable Ingest Tasks.')), ('output_pinned', models.BooleanField(default=False, help_text='boolean (default FALSE), which blocks deleting unpinned dataproducts. When toggled ON, backend must pick SUB up for deletion. It also must when dataproducts are unpinned.')), ('results_accepted', models.BooleanField(default=False, help_text='boolean (default NULL), which records whether the results were accepted, allowing the higher-level accounting to be adjusted.')), ('priority_rank', models.FloatField(default=0.0, help_text='Priority of this scheduling unit w.r.t. 
other scheduling units within the same queue and project.')), @@ -617,7 +620,7 @@ class Migration(migrations.Migration): options={ 'abstract': False, }, - bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, models.Model), + bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin, models.Model), ), migrations.CreateModel( name='SchedulingUnitDraft', @@ -637,7 +640,7 @@ class Migration(migrations.Migration): options={ 'abstract': False, }, - bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, models.Model), + bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin), ), migrations.CreateModel( name='SchedulingUnitObservingStrategyTemplate', @@ -715,6 +718,7 @@ class Migration(migrations.Migration): options={ 'abstract': False, }, + bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin), ), migrations.CreateModel( name='SubtaskAllowedStateTransitions', @@ -734,6 +738,7 @@ class Migration(migrations.Migration): options={ 'abstract': False, }, + bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin), ), migrations.CreateModel( name='SubtaskOutput', @@ -826,7 +831,7 @@ class Migration(migrations.Migration): ('do_cancel', models.BooleanField(help_text='Cancel this task.')), ('output_pinned', models.BooleanField(default=False, help_text='True if the output of this task is pinned to disk, that is, forbidden to be removed.')), ], - bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, models.Model), + bases=(lofar.sas.tmss.tmss.tmssapp.models.common.RefreshFromDbInvalidatesCachedPropertiesMixin, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin, models.Model), ), migrations.CreateModel( name='TaskConnectorType', @@ -852,7 +857,7 @@ class Migration(migrations.Migration): ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Specifications for this task.')), ('output_pinned', models.BooleanField(default=False, help_text='True if the output of this task is pinned to disk, that is, forbidden to be removed.')), ], - bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.specification.ProjectPropertyMixin), + bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.specification.ProjectPropertyMixin, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin), ), migrations.CreateModel( name='TaskRelationBlueprint', @@ -863,6 +868,7 @@ class Migration(migrations.Migration): ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')), ], + bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin), ), migrations.CreateModel( name='TaskRelationDraft', @@ -873,6 +879,7 @@ class Migration(migrations.Migration): ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')), ], + bases=(models.Model, lofar.sas.tmss.tmss.tmssapp.models.common.TemplateSchemaMixin), ), migrations.CreateModel( name='TaskRelationSelectionTemplate', @@ -965,11 +972,6 @@ class Migration(migrations.Migration): name='consumer', 
field=models.ForeignKey(help_text='Task Draft that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskDraft'), ), - migrations.AddField( - model_name='taskrelationdraft', - name='dataformat', - field=models.ForeignKey(help_text='Selected data format to use. One of (MS, HDF5).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat'), - ), migrations.AddField( model_name='taskrelationdraft', name='input_role', @@ -995,11 +997,6 @@ class Migration(migrations.Migration): name='consumer', field=models.ForeignKey(help_text='Task Blueprint that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskBlueprint'), ), - migrations.AddField( - model_name='taskrelationblueprint', - name='dataformat', - field=models.ForeignKey(help_text='Selected data format to use.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat'), - ), migrations.AddField( model_name='taskrelationblueprint', name='draft', @@ -1047,8 +1044,8 @@ class Migration(migrations.Migration): ), migrations.AddField( model_name='taskconnectortype', - name='dataformats', - field=models.ManyToManyField(blank=True, to='tmssapp.Dataformat'), + name='dataformat', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat'), ), migrations.AddField( model_name='taskconnectortype', diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/common.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/common.py index 80a9fb61594cbe8996f45fe0b0b35a1c842fe319..4eeeb68e1a42963aeabbd1111c7dcd509f0eb781 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/models/common.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/common.py @@ -13,6 +13,7 @@ from lofar.sas.tmss.tmss.exceptions import SchemaValidationException from django.urls import reverse as reverse_url import json import jsonschema +from datetime import timedelta class RefreshFromDbInvalidatesCachedPropertiesMixin(): """Helper Mixin class which invalidates all 'cached_property' attributes on a model upon refreshing from the db""" @@ -174,36 +175,41 @@ class Tags(Model): description = CharField(max_length=255) -# methods - -def annotate_validate_add_defaults_to_doc_using_template(model: Model, document_attr:str, template_attr:str) -> None: - ''' - annotate, validate and add defaults to the JSON document in the model instance using the schema of the given template. - ''' - try: - # fetch the actual JSON document and template-model-instance - document = getattr(model, document_attr) - template = getattr(model, template_attr) - - if document is not None and template is not None: - try: - if isinstance(document, str): - document = json.loads(document) - - # always annotate the json data document with a $schema URI to the schema that it is based on. 
- # this enables all users using this document (inside or outside of TMSS) to do their own validation and usage of editors which use the schema as UI template - document['$schema'] = template.schema['$id'] - except (KeyError, TypeError, AttributeError) as e: - raise SchemaValidationException("Cannot set $schema in json_doc to the schema's $id.\nError: %s \njson_doc: %s\nschema: %s" % (str(e), document, template.schema)) - - # add defaults for missing properies, and validate on the fly - document = add_defaults_to_json_object_for_schema(document, template.schema) - - # update the model instance with the updated and validated document - setattr(model, document_attr, document) - except AttributeError: - pass - except json.JSONDecodeError as e: - raise SchemaValidationException("Invalid JSON.\nError: %s \ndata: %s" % (str(e), document)) - except jsonschema.ValidationError as e: - raise SchemaValidationException(str(e)) \ No newline at end of file +class TemplateSchemaMixin(): + '''The TemplateSchemaMixin class can be mixed into models which validate and add defaults to json documents given a json-schema. + It uses an internal cache with a max age to minimize the number of requests to schemas, subschemas or referenced (sub)schemas.''' + _schema_cache = {} + _MAX_SCHEMA_CACHE_AGE = timedelta(minutes=1) + + def annotate_validate_add_defaults_to_doc_using_template(self, document_attr:str, template_attr:str) -> None: + ''' + annotate, validate and add defaults to the JSON document in the model instance using the schema of the given template. + ''' + try: + # fetch the actual JSON document and template-model-instance + document = getattr(self, document_attr) + template = getattr(self, template_attr) + + if document is not None and template is not None: + try: + if isinstance(document, str): + document = json.loads(document) + + # always annotate the json data document with a $schema URI to the schema that it is based on. 
+ # this enables all users using this document (inside or outside of TMSS) to do their own validation and usage of editors which use the schema as UI template + document['$schema'] = template.schema['$id'] + except (KeyError, TypeError, AttributeError) as e: + raise SchemaValidationException("Cannot set $schema in json_doc to the schema's $id.\nError: %s \njson_doc: %s\nschema: %s" % (str(e), document, template.schema)) + + # add defaults for missing properties, and validate on the fly + # use the class's _schema_cache + document = add_defaults_to_json_object_for_schema(document, template.schema, self._schema_cache) + + # update the model instance with the updated and validated document + setattr(self, document_attr, document) + except AttributeError: + pass + except json.JSONDecodeError as e: + raise SchemaValidationException("Invalid JSON.\nError: %s \ndata: %s" % (str(e), document)) + except jsonschema.ValidationError as e: + raise SchemaValidationException(str(e)) \ No newline at end of file diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py index 4f31d2841833933db4eca3afc7b9b024ce60cd2d..3fa4cc2134aa7b636f5a8809f0483fc749c2c229 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/scheduling.py @@ -12,7 +12,7 @@ from django.db.models import Model, ForeignKey, OneToOneField, CharField, DateTi ManyToManyField, CASCADE, SET_NULL, PROTECT, QuerySet, BigAutoField, UniqueConstraint from django.contrib.postgres.fields import ArrayField, JSONField from django.contrib.auth.models import User -from .common import AbstractChoice, BasicCommon, Template, NamedCommon, annotate_validate_add_defaults_to_doc_using_template +from .common import AbstractChoice, BasicCommon, Template, NamedCommon, TemplateSchemaMixin from enum import Enum from django.db.models.expressions import RawSQL from django.core.exceptions import ValidationError @@ -62,7 +62,7 @@ class SubtaskType(AbstractChoice): INSPECTION = "inspection" QA_FILES = "qa_files" # task which creates "adder" QA h5 file(s) from a MeasurementSet of beamformed data QA_PLOTS = "qa_plots" # task which creates "adder" QA plots from an "adder" QA h5 file h5 - DELETION = "deletion" + CLEANUP = "cleanup" MANUAL = 'manual' OTHER = 'other' @@ -138,7 +138,7 @@ class SIPidentifier(Model): # # Instance Objects # -class Subtask(BasicCommon): +class Subtask(BasicCommon, TemplateSchemaMixin): """ Represents a low-level task, which is an atomic unit of execution, such as running an observation, running inspection plots on the observed data, etc. 
Each task has a specific configuration, will have resources allocated @@ -277,7 +277,7 @@ class Subtask(BasicCommon): def save(self, force_insert=False, force_update=False, using=None, update_fields=None): creating = self._state.adding # True on create, False on update - annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template') + self.annotate_validate_add_defaults_to_doc_using_template('specifications_doc', 'specifications_template') SIPidentifier.assign_new_id_to_model(self) # check for uniqueness of SAP names: @@ -354,7 +354,7 @@ class SubtaskStateLog(BasicCommon): new_state = ForeignKey('SubtaskState', null=False, editable=False, on_delete=PROTECT, related_name='is_new_state_of', help_text='Subtask state after update (see Subtask State Machine).') -class SubtaskInput(BasicCommon): +class SubtaskInput(BasicCommon, TemplateSchemaMixin): subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, related_name='inputs', help_text='Subtask to which this input specification refers.') task_relation_blueprint = ForeignKey('TaskRelationBlueprint', null=True, on_delete=SET_NULL, help_text='Task Relation Blueprint which this Subtask Input implements (NULLable).') producer = ForeignKey('SubtaskOutput', on_delete=PROTECT, related_name='consumers', help_text='The SubtaskOutput producing the input dataproducts for this SubtaskInput.') @@ -363,7 +363,7 @@ class SubtaskInput(BasicCommon): selection_template = ForeignKey('TaskRelationSelectionTemplate', on_delete=PROTECT, help_text='Schema used for selection_doc.') def save(self, force_insert=False, force_update=False, using=None, update_fields=None): - annotate_validate_add_defaults_to_doc_using_template(self, 'selection_doc', 'selection_template') + self.annotate_validate_add_defaults_to_doc_using_template('selection_doc', 'selection_template') super().save(force_insert, force_update, using, update_fields) @@ -372,19 +372,19 @@ class SubtaskOutput(BasicCommon): task_blueprint = ForeignKey('TaskBlueprint', null=False, on_delete=CASCADE, related_name='outputs', help_text='Task to which this output specification refers.') -class SAP(BasicCommon): +class SAP(BasicCommon, TemplateSchemaMixin): specifications_doc = JSONField(help_text='SAP properties.') specifications_template = ForeignKey('SAPTemplate', null=False, on_delete=CASCADE, help_text='Schema used for specifications_doc.') global_identifier = OneToOneField('SIPidentifier', null=False, editable=False, on_delete=PROTECT, help_text='The global unique identifier for LTA SIP.') def save(self, force_insert=False, force_update=False, using=None, update_fields=None): - annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template') + self.annotate_validate_add_defaults_to_doc_using_template('specifications_doc', 'specifications_template') SIPidentifier.assign_new_id_to_model(self) super().save(force_insert, force_update, using, update_fields) -class Dataproduct(BasicCommon): +class Dataproduct(BasicCommon, TemplateSchemaMixin): """ A data product represents an atomic dataset, produced and consumed by subtasks. The consumed dataproducts are those resulting from interpreting the Subtask Connector filters of the inputs. 
These links are explicitly saved, should @@ -411,8 +411,8 @@ class Dataproduct(BasicCommon): constraints = [UniqueConstraint(fields=['directory', 'filename'], name='%(class)s_unique_path')] def save(self, force_insert=False, force_update=False, using=None, update_fields=None): - annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template') - annotate_validate_add_defaults_to_doc_using_template(self, 'feedback_doc', 'feedback_template') + self.annotate_validate_add_defaults_to_doc_using_template('specifications_doc', 'specifications_template') + self.annotate_validate_add_defaults_to_doc_using_template('feedback_doc', 'feedback_template') SIPidentifier.assign_new_id_to_model(self) super().save(force_insert, force_update, using, update_fields) diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py index 222653d1fe21c9d6b064ba5525b885efc0477155..b7e4c3b56074d6d6b21d6bec481ef603f562895b 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py @@ -10,7 +10,7 @@ from django.contrib.postgres.fields import JSONField from enum import Enum from django.db.models.expressions import RawSQL from django.db.models.deletion import ProtectedError -from .common import AbstractChoice, BasicCommon, Template, NamedCommon, annotate_validate_add_defaults_to_doc_using_template, NamedCommonPK, RefreshFromDbInvalidatesCachedPropertiesMixin +from .common import AbstractChoice, BasicCommon, Template, NamedCommon, TemplateSchemaMixin, NamedCommonPK, RefreshFromDbInvalidatesCachedPropertiesMixin from lofar.common.json_utils import validate_json_against_schema, validate_json_against_its_schema, add_defaults_to_json_object_for_schema from lofar.sas.tmss.tmss.exceptions import * from django.core.exceptions import ValidationError @@ -45,7 +45,8 @@ class ProjectPropertyMixin(RefreshFromDbInvalidatesCachedPropertiesMixin): class Role(AbstractChoice): """Defines the model and predefined list of possible Role's for TaskConnectorType. - The items in the Choises class below are automagically populated into the database via a data migration.""" + The items in the Choices class below are automagically populated into the database via a data migration. + When changing/adding/removing items to the Choices, please update the common json schema for tasks as well.""" class Choices(Enum): CORRELATOR = "correlator" BEAMFORMER = "beamformer" @@ -66,7 +67,8 @@ class IOType(AbstractChoice): class Datatype(AbstractChoice): """Defines the model and predefined list of possible Datatype's for TaskConnectorType. - The items in the Choises class below are automagically populated into the database via a data migration.""" + The items in the Choices class below are automagically populated into the database via a data migration. + When changing/adding/removing items to the Choices, please update the common json schema for tasks as well.""" class Choices(Enum): VISIBILITIES = "visibilities" TIME_SERIES = "time series" @@ -76,8 +78,9 @@ class Datatype(AbstractChoice): class Dataformat(AbstractChoice): - """Defines the model and predefined list of possible Dataformat's for TaskRelationDraft and TaskRelationBlueprint. - The items in the Choises class below are automagically populated into the database via a data migration.""" + """Defines the model and predefined list of possible Dataformat's for TaskConnectorType. 
+ The items in the Choices class below are automagically populated into the database via a data migration. + When changing/adding/removing items to the Choices, please update the common json schema for tasks as well.""" class Choices(Enum): MEASUREMENTSET = "MeasurementSet" BEAMFORMED = "Beamformed" @@ -152,6 +155,7 @@ class TaskType(AbstractChoice): OBSERVATION = "observation" PIPELINE = "pipeline" INGEST = "ingest" + CLEANUP = 'cleanup' MAINTENANCE = "maintenance" OTHER = 'other' @@ -177,7 +181,7 @@ class TaskConnectorType(BasicCommon): TARGET roles.''' role = ForeignKey('Role', null=False, on_delete=PROTECT) datatype = ForeignKey('Datatype', null=False, on_delete=PROTECT) - dataformats = ManyToManyField('Dataformat', blank=True) + dataformat = ForeignKey('Dataformat', null=False, on_delete=PROTECT) task_template = ForeignKey("TaskTemplate", related_name='output_connector_types', null=False, on_delete=CASCADE) iotype = ForeignKey('IOType', null=False, on_delete=PROTECT, help_text="Is this connector an input or output") @@ -372,18 +376,18 @@ class ResourceType(NamedCommonPK): quantity = ForeignKey('Quantity', null=False, on_delete=PROTECT, help_text='The quantity of this resource type.') -class SchedulingSet(NamedCommon): +class SchedulingSet(NamedCommon, TemplateSchemaMixin): generator_doc = JSONField(null=True, help_text='Parameters for the generator (NULLable).') generator_template = ForeignKey('GeneratorTemplate', on_delete=SET_NULL, null=True, help_text='Generator for the scheduling units in this set (NULLable).') generator_source = ForeignKey('SchedulingUnitDraft', on_delete=SET_NULL, null=True, help_text='Reference for the generator to an existing collection of specifications (NULLable).') project = ForeignKey('Project', related_name="scheduling_sets", on_delete=PROTECT, help_text='Project to which this scheduling set belongs.') # protected to avoid accidents def save(self, force_insert=False, force_update=False, using=None, update_fields=None): - annotate_validate_add_defaults_to_doc_using_template(self, 'generator_doc', 'generator_template') + self.annotate_validate_add_defaults_to_doc_using_template('generator_doc', 'generator_template') super().save(force_insert, force_update, using, update_fields) -class SchedulingUnitDraft(RefreshFromDbInvalidatesCachedPropertiesMixin, NamedCommon): +class SchedulingUnitDraft(NamedCommon, TemplateSchemaMixin): requirements_doc = JSONField(help_text='Scheduling and/or quality requirements for this run.') copies = ForeignKey('SchedulingUnitDraft', related_name="copied_from", on_delete=SET_NULL, null=True, help_text='Source reference, if we are a copy (NULLable).') copy_reason = ForeignKey('CopyReason', null=True, on_delete=PROTECT, help_text='Reason why source was copied (NULLable).') @@ -409,11 +413,11 @@ class SchedulingUnitDraft(RefreshFromDbInvalidatesCachedPropertiesMixin, NamedCo # This code only happens if the objects is not in the database yet. 
self._state.adding is True creating if self._state.adding and hasattr(self, 'scheduling_set') and self.scheduling_set.project.auto_ingest is False: - #When auto_ingest=False, the scheduling units will be created with ingest_permission_required = True + # When project.auto_ingest=False, the scheduling units will be created with ingest_permission_required = True self.ingest_permission_required=True - annotate_validate_add_defaults_to_doc_using_template(self, 'requirements_doc', 'requirements_template') - annotate_validate_add_defaults_to_doc_using_template(self, 'scheduling_constraints_doc', 'scheduling_constraints_template') + self.annotate_validate_add_defaults_to_doc_using_template('requirements_doc', 'requirements_template') + self.annotate_validate_add_defaults_to_doc_using_template('scheduling_constraints_doc', 'scheduling_constraints_template') super().save(force_insert, force_update, using, update_fields) @cached_property @@ -449,7 +453,7 @@ class SchedulingUnitDraft(RefreshFromDbInvalidatesCachedPropertiesMixin, NamedCo return self.scheduling_set.project -class SchedulingUnitBlueprint(RefreshFromDbInvalidatesCachedPropertiesMixin, NamedCommon): +class SchedulingUnitBlueprint(RefreshFromDbInvalidatesCachedPropertiesMixin, TemplateSchemaMixin, NamedCommon): class Status(Enum): DEFINED = "defined" FINISHED = "finished" @@ -469,20 +473,17 @@ class SchedulingUnitBlueprint(RefreshFromDbInvalidatesCachedPropertiesMixin, Nam ingest_permission_granted_since = DateTimeField(auto_now_add=False, null=True, help_text='The moment when ingest permission was granted.') requirements_template = ForeignKey('SchedulingUnitTemplate', on_delete=CASCADE, help_text='Schema used for requirements_doc (IMMUTABLE).') draft = ForeignKey('SchedulingUnitDraft', related_name='scheduling_unit_blueprints', on_delete=PROTECT, help_text='Scheduling Unit Draft which this run instantiates.') - output_data_allowed_to_be_ingested = BooleanField(default=False, help_text='boolean (default FALSE), which blocks Ingest Tasks from starting if OFF. When toggled ON, backend must scan for startable Ingest Tasks.') output_pinned = BooleanField(default=False, help_text='boolean (default FALSE), which blocks deleting unpinned dataproducts. When toggled ON, backend must pick SUB up for deletion. It also must when dataproducts are unpinned.') results_accepted = BooleanField(default=False, help_text='boolean (default NULL), which records whether the results were accepted, allowing the higher-level accounting to be adjusted.') priority_rank = FloatField(null=False, default=0.0, help_text='Priority of this scheduling unit w.r.t. other scheduling units within the same queue and project.') priority_queue = ForeignKey('PriorityQueueType', null=False, on_delete=PROTECT, default="A", help_text='Priority queue of this scheduling unit. Queues provide a strict ordering between scheduling units.') def save(self, force_insert=False, force_update=False, using=None, update_fields=None): - annotate_validate_add_defaults_to_doc_using_template(self, 'requirements_doc', 'requirements_template') + self.annotate_validate_add_defaults_to_doc_using_template('requirements_doc', 'requirements_template') # This code only happens if the objects is not in the database yet. 
self._state.adding is True creating - if self._state.adding and hasattr(self, 'draft') and self.draft.scheduling_set.project.auto_ingest is False: - #When auto_ingest=False, the scheduling units will be created with ingest_permission_required = True - self.ingest_permission_required=True - + if self._state.adding and hasattr(self, 'draft'): + self.ingest_permission_required = self.draft.ingest_permission_required super().save(force_insert, force_update, using, update_fields) @@ -719,7 +720,7 @@ class ProjectPropertyMixin(): return obj -class TaskDraft(NamedCommon, ProjectPropertyMixin): +class TaskDraft(NamedCommon, ProjectPropertyMixin, TemplateSchemaMixin): specifications_doc = JSONField(help_text='Specifications for this task.') copies = ForeignKey('TaskDraft', related_name="copied_from", on_delete=SET_NULL, null=True, help_text='Source reference, if we are a copy (NULLable).') copy_reason = ForeignKey('CopyReason', on_delete=PROTECT, null=True, help_text='Reason why source was copied (NULLable).') @@ -733,7 +734,7 @@ class TaskDraft(NamedCommon, ProjectPropertyMixin): constraints = [UniqueConstraint(fields=['name', 'scheduling_unit_draft'], name='TaskDraft_unique_name_in_scheduling_unit')] def save(self, force_insert=False, force_update=False, using=None, update_fields=None): - annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template') + self.annotate_validate_add_defaults_to_doc_using_template('specifications_doc', 'specifications_template') if self._state.adding: # True on create, False on update, needs to be checked before super().save() super().save(force_insert, force_update, using, update_fields) if self.scheduling_unit_draft.scheduling_set.project.auto_pin: @@ -850,7 +851,7 @@ class TaskDraft(NamedCommon, ProjectPropertyMixin): # return None -class TaskBlueprint(RefreshFromDbInvalidatesCachedPropertiesMixin, NamedCommon): +class TaskBlueprint(RefreshFromDbInvalidatesCachedPropertiesMixin, TemplateSchemaMixin, NamedCommon): specifications_doc = JSONField(help_text='Schedulings for this task (IMMUTABLE).') do_cancel = BooleanField(help_text='Cancel this task.') @@ -864,7 +865,7 @@ class TaskBlueprint(RefreshFromDbInvalidatesCachedPropertiesMixin, NamedCommon): constraints = [UniqueConstraint(fields=['name', 'scheduling_unit_blueprint'], name='TaskBlueprint_unique_name_in_scheduling_unit')] def save(self, force_insert=False, force_update=False, using=None, update_fields=None): - annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template') + self.annotate_validate_add_defaults_to_doc_using_template('specifications_doc', 'specifications_template') super().save(force_insert, force_update, using, update_fields) @cached_property @@ -998,10 +999,9 @@ class TaskBlueprint(RefreshFromDbInvalidatesCachedPropertiesMixin, NamedCommon): return "schedulable" -class TaskRelationDraft(BasicCommon): +class TaskRelationDraft(BasicCommon, TemplateSchemaMixin): selection_doc = JSONField(help_text='Filter for selecting dataproducts from the output role.') selection_template = ForeignKey('TaskRelationSelectionTemplate', on_delete=CASCADE, help_text='Schema used for selection_doc.') - dataformat = ForeignKey('Dataformat', null=False, on_delete=PROTECT, help_text='Selected data format to use. One of (MS, HDF5).') # caveat: it might look like producer has an incorrect related_name='consumed_by'. 
But it really is correct, depends on the way you look at it producer = ForeignKey('TaskDraft', related_name='consumed_by', on_delete=CASCADE, help_text='Task Draft that has the output connector. NOTE: The producer does typically, but not necessarily, belong to the same Scheduling Unit (or even the same Project) as the consumer.') @@ -1023,13 +1023,12 @@ class TaskRelationDraft(BasicCommon): # output_role.output = True def save(self, force_insert=False, force_update=False, using=None, update_fields=None): - annotate_validate_add_defaults_to_doc_using_template(self, 'selection_doc', 'selection_template') + self.annotate_validate_add_defaults_to_doc_using_template('selection_doc', 'selection_template') super().save(force_insert, force_update, using, update_fields) -class TaskRelationBlueprint(BasicCommon): +class TaskRelationBlueprint(BasicCommon, TemplateSchemaMixin): selection_doc = JSONField(help_text='Filter for selecting dataproducts from the output role.') - dataformat = ForeignKey('Dataformat', null=False, on_delete=PROTECT, help_text='Selected data format to use.') # caveat: it might look like producer has an incorrect related_name='consumed_by'. But it really is correct, depends on the way you look at it producer = ForeignKey('TaskBlueprint', related_name='consumed_by', on_delete=CASCADE, help_text='Task Blueprint that has the output connector.') @@ -1046,7 +1045,7 @@ class TaskRelationBlueprint(BasicCommon): constraints = [UniqueConstraint(fields=['producer', 'consumer', 'input_role', 'output_role'], name='TaskRelationBlueprint_unique_relation')] def save(self, force_insert=False, force_update=False, using=None, update_fields=None): - annotate_validate_add_defaults_to_doc_using_template(self, 'selection_doc', 'selection_template') + self.annotate_validate_add_defaults_to_doc_using_template('selection_doc', 'selection_template') super().save(force_insert, force_update, using, update_fields) @@ -1086,7 +1085,7 @@ class TaskSchedulingRelationDraft(BasicCommon): super().save(force_insert, force_update, using, update_fields) -class Reservation(NamedCommon): +class Reservation(NamedCommon, TemplateSchemaMixin): project = ForeignKey('Project', null=True, related_name='reservations', on_delete=CASCADE, help_text='Reservation will be accounted for this project.') description = CharField(max_length=255, help_text='Short description for this reservation, used in overviews') start_time = DateTimeField(help_text='Start of this reservation.') @@ -1104,6 +1103,6 @@ class Reservation(NamedCommon): return None def save(self, force_insert=False, force_update=False, using=None, update_fields=None): - annotate_validate_add_defaults_to_doc_using_template(self, 'specifications_doc', 'specifications_template') + self.annotate_validate_add_defaults_to_doc_using_template('specifications_doc', 'specifications_template') super().save(force_insert, force_update, using, update_fields) diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/populate.py b/SAS/TMSS/backend/src/tmss/tmssapp/populate.py index 9916e128ae38bb9ee11d9f7e0433fd8bcb774585..e07b02aaa9d6f2c20295a13aa9c3661da3f0bb64 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/populate.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/populate.py @@ -121,7 +121,8 @@ def populate_test_data(): constraints_spec = get_default_json_object_for_schema(constraints_template.schema) uc1_strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines") - simple_strategy_template = 
models.SchedulingUnitObservingStrategyTemplate.objects.get(name="Simple Observation") + simple_obs_strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="Simple Observation") + short_obs_pl_ingest_strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="Short Test Observation - Pipeline - Ingest") simple_beamforming_strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="Simple Beamforming Observation") projects = models.Project.objects.order_by('-priority_rank').all() @@ -144,7 +145,7 @@ def populate_test_data(): for scheduling_set in tmss_project.scheduling_sets.all(): for unit_nr in range(2): - for strategy_template in [simple_beamforming_strategy_template, uc1_strategy_template, simple_strategy_template]: + for strategy_template in [short_obs_pl_ingest_strategy_template, simple_obs_strategy_template, simple_beamforming_strategy_template, uc1_strategy_template]: # the 'template' in the strategy_template is a predefined json-data blob which validates against the given scheduling_unit_template # a user might 'upload' a partial json-data blob, so add all the known defaults scheduling_unit_spec = add_defaults_to_json_object_for_schema(strategy_template.template, strategy_template.scheduling_unit_template.schema) @@ -154,7 +155,7 @@ def populate_test_data(): scheduling_unit_spec['tasks']['Calibrator Observation 1']['specifications_doc']['duration'] = 2*60 scheduling_unit_spec['tasks']['Target Observation']['specifications_doc']['duration'] = 2*3600 scheduling_unit_spec['tasks']['Calibrator Observation 2']['specifications_doc']['duration'] = 2*60 - elif strategy_template == simple_strategy_template: + elif strategy_template == simple_obs_strategy_template: scheduling_unit_spec['tasks']['Observation']['specifications_doc']['duration'] = 5*60 # set some constraints, so the dynamic scheduler has something to chew on. @@ -403,41 +404,40 @@ def populate_connectors(): # NOTE: This is an explicit list of each possible link between tasks. This model suffices # until the number of connectors throw too large. By then, we could consider introducing # wild cards, like output_of=NULL meaning "any". 
- logger.info("POPULATING CONNECTORS") + logger.info("Populating TaskConnectorType's") # calibrator observation TaskConnectorType.objects.create(role=Role.objects.get(value=Role.Choices.CORRELATOR.value), datatype=Datatype.objects.get(value=Datatype.Choices.VISIBILITIES.value), + dataformat=Dataformat.objects.get(value=Dataformat.Choices.MEASUREMENTSET.value), task_template=TaskTemplate.objects.get(name='calibrator observation'), iotype=IOType.objects.get(value=IOType.Choices.OUTPUT.value)) # target observation TaskConnectorType.objects.create(role=Role.objects.get(value=Role.Choices.CORRELATOR.value), - datatype=Datatype.objects.get(value=Datatype.Choices.VISIBILITIES.value), - task_template=TaskTemplate.objects.get(name='target observation'), - iotype=IOType.objects.get(value=IOType.Choices.OUTPUT.value)) + datatype=Datatype.objects.get(value=Datatype.Choices.VISIBILITIES.value), + dataformat=Dataformat.objects.get(value=Dataformat.Choices.MEASUREMENTSET.value), + task_template=TaskTemplate.objects.get(name='target observation'), + iotype=IOType.objects.get(value=IOType.Choices.OUTPUT.value)) # preprocessing pipeline - TaskConnectorType.objects.create(role=Role.objects.get(value=Role.Choices.ANY.value), - datatype=Datatype.objects.get(value=Datatype.Choices.VISIBILITIES.value), - task_template=TaskTemplate.objects.get(name='preprocessing pipeline'), - iotype=IOType.objects.get(value=IOType.Choices.INPUT.value)) - - TaskConnectorType.objects.create(role=Role.objects.get(value=Role.Choices.ANY.value), - datatype=Datatype.objects.get(value=Datatype.Choices.VISIBILITIES.value), - task_template=TaskTemplate.objects.get(name='preprocessing pipeline'), - iotype=IOType.objects.get(value=IOType.Choices.OUTPUT.value)) - - # ingest - TaskConnectorType.objects.create(role=Role.objects.get(value=Role.Choices.ANY.value), - datatype=Datatype.objects.get(value=Datatype.Choices.VISIBILITIES.value), - task_template=TaskTemplate.objects.get(name='ingest'), - iotype=IOType.objects.get(value=IOType.Choices.INPUT.value)) - - TaskConnectorType.objects.create(role=Role.objects.get(value=Role.Choices.ANY.value), - datatype=Datatype.objects.get(value=Datatype.Choices.TIME_SERIES.value), - task_template=TaskTemplate.objects.get(name='ingest'), - iotype=IOType.objects.get(value=IOType.Choices.INPUT.value)) + for iotype_value in (IOType.Choices.INPUT.value, IOType.Choices.OUTPUT.value): + TaskConnectorType.objects.create(role=Role.objects.get(value=Role.Choices.ANY.value), + datatype=Datatype.objects.get(value=Datatype.Choices.VISIBILITIES.value), + dataformat=Dataformat.objects.get(value=Dataformat.Choices.MEASUREMENTSET.value), + task_template=TaskTemplate.objects.get(name='preprocessing pipeline'), + iotype=IOType.objects.get(value=iotype_value)) + + # ingest and cleanup + for task_template_name in ('ingest', 'cleanup'): + for datatype_value in (Datatype.Choices.VISIBILITIES.value, Datatype.Choices.TIME_SERIES.value): + for dataformat_value in [choice.value for choice in Dataformat.Choices]: + for role_value in [choice.value for choice in Role.Choices]: + TaskConnectorType.objects.create(role=Role.objects.get(value=role_value), + datatype=Datatype.objects.get(value=datatype_value), + dataformat=Dataformat.objects.get(value=dataformat_value), + task_template=TaskTemplate.objects.get(name=task_template_name), + iotype=IOType.objects.get(value=IOType.Choices.INPUT.value)) def populate_permissions(): diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json 
b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json index fc409bf145881ef9dac3db69189dc2bce35f23b5..9a7a4fe7b836db4579a9111af512f2d31b6e4a9c 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json @@ -200,15 +200,16 @@ "producer": "Calibrator Observation 1", "consumer": "Pipeline 1", "tags": [], - "input": { - "role": "any", - "datatype": "visibilities" - }, "output": { "role": "correlator", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" + }, + "input": { + "role": "any", + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, - "dataformat": "MeasurementSet", "selection_doc": {}, "selection_template": "all" }, @@ -216,15 +217,16 @@ "producer": "Calibrator Observation 2", "consumer": "Pipeline 2", "tags": [], - "input": { - "role": "any", - "datatype": "visibilities" - }, "output": { "role": "correlator", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" + }, + "input": { + "role": "any", + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, - "dataformat": "MeasurementSet", "selection_doc": {}, "selection_template": "all" }, @@ -232,15 +234,16 @@ "producer": "Target Observation", "consumer": "Pipeline target1", "tags": [], - "input": { - "role": "any", - "datatype": "visibilities" - }, "output": { "role": "correlator", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" + }, + "input": { + "role": "any", + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, - "dataformat": "MeasurementSet", "selection_doc": { "sap": [ "target1" @@ -252,15 +255,16 @@ "producer": "Target Observation", "consumer": "Pipeline target2", "tags": [], - "input": { - "role": "any", - "datatype": "visibilities" - }, "output": { "role": "correlator", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" + }, + "input": { + "role": "any", + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, - "dataformat": "MeasurementSet", "selection_doc": { "sap": [ "target2" @@ -272,15 +276,16 @@ "producer": "Pipeline 1", "consumer": "Ingest", "tags": [], - "input": { + "output": { "role": "any", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, - "output": { + "input": { "role": "any", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, - "dataformat": "MeasurementSet", "selection_doc": {}, "selection_template": "all" }, @@ -288,15 +293,16 @@ "producer": "Pipeline 2", "consumer": "Ingest", "tags": [], - "input": { + "output": { "role": "any", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, - "output": { + "input": { "role": "any", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, - "dataformat": "MeasurementSet", "selection_doc": {}, "selection_template": "all" }, @@ -304,15 +310,16 @@ "producer": "Pipeline target1", "consumer": "Ingest", "tags": [], - "input": { + "output": { "role": "any", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, - "output": { + "input": { "role": "any", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, - "dataformat": "MeasurementSet", "selection_doc": {}, 
"selection_template": "all" }, @@ -320,15 +327,16 @@ "producer": "Pipeline target2", "consumer": "Ingest", "tags": [], - "input": { + "output": { "role": "any", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, - "output": { + "input": { "role": "any", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, - "dataformat": "MeasurementSet", "selection_doc": {}, "selection_template": "all" } diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-tasks-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-tasks-1.json index 01c7c91fdb8cccbc94aae63ac1539fb006d136e3..398542538b828ae57a2d392dffcb79e8259ac87e 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-tasks-1.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-tasks-1.json @@ -8,21 +8,33 @@ "definitions": { "task_connector": { "type": "object", + "description": "A task connector describes what a task can take as input and produces as output.", "additionalProperties": false, "default": {}, "properties": { "role": { "type": "string", - "title": "Role" + "title": "Role", + "description": "The role of a task connector describes its intended use.", + "enum": ["correlator", "beamformer", "inspection plots", "calibrator", "target", "any"] }, "datatype": { "type": "string", - "title": "Data Type" + "title": "Data Type", + "description": "The data type of a task connector describes its what kind of data is produced/consumed.", + "enum": ["visibilities", "time series", "instrument model", "image", "quality"] + }, + "dataformat": { + "type": "string", + "title": "Data Format", + "description": "The data type of a task connector describes in which format the data is produced/consumed.", + "enum": ["MeasurementSet", "Beamformed", "QA_HDF5", "QA_Plots"] } }, "required": [ "role", - "datatype" + "datatype", + "dataformat" ] } } diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_template-scheduling_unit-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_template-scheduling_unit-1.json index 00af272aa1318b9628e974edd49baed3be4ec25a..f92347892c9a0b3dcf67268e15f4b00ea85fe0c9 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_template-scheduling_unit-1.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/scheduling_unit_template-scheduling_unit-1.json @@ -90,10 +90,6 @@ "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/tasks/1/#/definitions/task_connector", "default": {} }, - "dataformat": { - "type": "string", - "title": "Data Format" - }, "selection_doc": { "type": "object", "title": "Filter selection", diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-observation-pipeline-ingest-scheduling-unit-observation-strategy.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-observation-pipeline-ingest-scheduling-unit-observation-strategy.json index 0c5ba135fd763e1fa4f82633b7df6688e05ebbe9..6ae834740335d9474e7351d58c3739b1bf154a2f 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-observation-pipeline-ingest-scheduling-unit-observation-strategy.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-observation-pipeline-ingest-scheduling-unit-observation-strategy.json @@ -75,6 +75,12 @@ "tags": [], "specifications_doc": {}, "specifications_template": "ingest" + }, + "Cleanup": { + "description": "Cleanup all dataproducts from disk", + "tags": [], + "specifications_doc": {}, + 
"specifications_template": "cleanup" } }, "task_relations": [ @@ -84,13 +90,14 @@ "tags": [], "input": { "role": "any", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, "output": { "role": "correlator", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, - "dataformat": "MeasurementSet", "selection_doc": {}, "selection_template": "all" }, @@ -100,13 +107,48 @@ "tags": [], "input": { "role": "any", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" + }, + "output": { + "role": "any", + "datatype": "visibilities", + "dataformat": "MeasurementSet" + }, + "selection_doc": {}, + "selection_template": "all" + }, + { + "producer": "Observation", + "consumer": "Cleanup", + "tags": [], + "input": { + "role": "any", + "datatype": "visibilities", + "dataformat": "MeasurementSet" + }, + "output": { + "role": "correlator", + "datatype": "visibilities", + "dataformat": "MeasurementSet" + }, + "selection_doc": {}, + "selection_template": "all" + }, + { + "producer": "Pipeline", + "consumer": "Cleanup", + "tags": [], + "input": { + "role": "any", + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, "output": { "role": "any", - "datatype": "visibilities" + "datatype": "visibilities", + "dataformat": "MeasurementSet" }, - "dataformat": "MeasurementSet", "selection_doc": {}, "selection_template": "all" } diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/subtask_template-cleanup-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/subtask_template-cleanup-1.json new file mode 100644 index 0000000000000000000000000000000000000000..b0244ed9f921709d7a16176a3afe887e0b24d2a9 --- /dev/null +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/subtask_template-cleanup-1.json @@ -0,0 +1,12 @@ +{ + "$id":"http://tmss.lofar.org/api/schemas/subtasktemplate/cleanup/1#", + "$schema": "http://json-schema.org/draft-06/schema#", + "title":"cleanup", + "description":"This schema defines the parameters to setup and control a dataproducts cleanup subtask.", + "version":1, + "type": "object", + "properties": { + }, + "required": [ + ] +} diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/task_template-cleanup-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/task_template-cleanup-1.json new file mode 100644 index 0000000000000000000000000000000000000000..993e48bf6386e887f9ead7cb9b448e72fe7bdace --- /dev/null +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/task_template-cleanup-1.json @@ -0,0 +1,12 @@ +{ + "$id": "http://tmss.lofar.org/api/schemas/tasktemplate/cleanup/1#", + "$schema": "http://json-schema.org/draft-06/schema#", + "title": "cleanup", + "description": "This schema defines the parameters to setup a dataproduct(s) cleanup task.", + "version": 1, + "type": "object", + "properties": { + }, + "required": [ + ] +} diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/templates.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/templates.json index e03990777545d78c5493574a707cbf328c369058..644405c2243aa00b45ea54d58bb696c767ebc1ac 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/templates.json +++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/templates.json @@ -176,11 +176,21 @@ "template": "subtask_template", "type": "ingest" }, + { + "file_name": "subtask_template-cleanup-1.json", + "template": "subtask_template", + "type": "cleanup" + }, { "file_name": "task_template-ingest-1.json", "template": "task_template", "type": "ingest" }, + { + "file_name": 
"task_template-cleanup-1.json", + "template": "task_template", + "type": "cleanup" + }, { "file_name": "reservation_template-reservation-1.json", "template": "reservation_template" diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py index 0711865e0fcec308b7ae86bd170fc882fe105b0c..d0660311e04974f7bec4cc8c2f24b49c51d115e4 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/serializers/scheduling.py @@ -79,8 +79,8 @@ class DataproductFeedbackTemplateSerializer(AbstractTemplateSerializer): class SubtaskSerializer(DynamicRelationalHyperlinkedModelSerializer): # If this is OK then we can extend API with NO url ('flat' values) on more places if required - cluster_value = serializers.StringRelatedField(source='cluster', label='cluster_value', read_only=True) - subtask_type = serializers.StringRelatedField(source='specifications_template.type', label='subtask_type', read_only=True, help_text='The subtask type as defined in the specifications template.') + cluster_name = serializers.StringRelatedField(source='cluster', label='cluster_name', read_only=True, help_text='The cluster name as defined in the specifications template, provided here to safe an addition lookup.') + subtask_type = serializers.StringRelatedField(source='specifications_template.type', label='subtask_type', read_only=True, help_text='The subtask type as defined in the specifications template, provided here to safe an addition lookup.') specifications_doc = JSONEditorField(schema_source='specifications_template.schema') duration = FloatDurationField(read_only=True) diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py index 5ade4eb7604b230a6324a3a2538ec0b485b73da9..d02ea21feeb351530421a95f27e7f305b6c2cb06 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py @@ -62,7 +62,8 @@ def create_subtasks_from_task_blueprint(task_blueprint: TaskBlueprint) -> [Subta create_qafile_subtask_from_task_blueprint, create_qaplots_subtask_from_task_blueprint], 'preprocessing pipeline': [create_preprocessing_subtask_from_task_blueprint], - 'ingest': [create_ingest_subtask_from_task_blueprint]} + 'ingest': [create_ingest_subtask_from_task_blueprint], + 'cleanup': [create_cleanup_subtask_from_task_blueprint]} generators_mapping['calibrator observation'] = generators_mapping['target observation'] generators_mapping['beamforming observation'] = [create_observation_control_subtask_from_task_blueprint] @@ -742,6 +743,49 @@ def create_ingest_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> return subtask +def create_cleanup_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> Subtask: + ''' Create a subtask for a cleanup job + This method implements "Instantiate subtasks" step from the "Specification Flow" + https://support.astron.nl/confluence/display/TMSS/Specification+Flow + ''' + # step 0: check pre-requisites + check_prerequities_for_subtask_creation(task_blueprint) + + # step 1: create subtask in defining state, with filled-in subtask_template + subtask_template = SubtaskTemplate.objects.get(name='cleanup') + subtask_specs = get_default_json_object_for_schema(subtask_template.schema) + cluster_name = task_blueprint.specifications_doc.get("storage_cluster", "CEP4") + subtask_data = {"start_time": None, + "stop_time": None, + "state": 
SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value), + "task_blueprint": task_blueprint, + "specifications_template": subtask_template, + "specifications_doc": subtask_specs, + "cluster": Cluster.objects.get(name=cluster_name)} + subtask = Subtask.objects.create(**subtask_data) + + # step 2: create and link subtask input + # for this cleanup subtask an 'input' seems a bit weird, but it actually makes sense! + # this cleanup subtask will clean up the output data of all linked input predecessors. + for task_relation_blueprint in task_blueprint.produced_by.all(): + producing_task_blueprint = task_relation_blueprint.producer + + predecessor_subtasks = [st for st in producing_task_blueprint.subtasks.filter(specifications_template__type__value__in=(SubtaskType.Choices.OBSERVATION.value, SubtaskType.Choices.PIPELINE.value)).order_by('id').all()] + for predecessor_subtask in predecessor_subtasks: + for predecessor_subtask_output in predecessor_subtask.outputs.all(): + SubtaskInput.objects.create(subtask=subtask, + producer=predecessor_subtask_output, + selection_doc=task_relation_blueprint.selection_doc, + selection_template=task_relation_blueprint.selection_template) + + # step 3: set state to DEFINED + subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) + subtask.save() + + # done, now return the subtask, and allow the system to wait for the predecessors to be finished before we schedule this cleanup + return subtask + + # ==== various schedule* methods to schedule a Subtasks (if possible) ==== def schedule_subtask(subtask: Subtask) -> Subtask: @@ -769,6 +813,9 @@ if subtask.specifications_template.type.value == SubtaskType.Choices.INGEST.value: return schedule_ingest_subtask(subtask) + if subtask.specifications_template.type.value == SubtaskType.Choices.CLEANUP.value: + return schedule_cleanup_subtask(subtask) + if subtask.specifications_template.type.value == SubtaskType.Choices.COPY.value: return schedule_copy_subtask(subtask) @@ -1566,6 +1613,54 @@ def schedule_ingest_subtask(ingest_subtask: Subtask): return ingest_subtask + +def schedule_cleanup_subtask(cleanup_subtask: Subtask): + ''' Schedule the given cleanup_subtask + This method should typically be called upon the event of a predecessor (pipeline or observation) subtask being finished. + This method implements "Scheduling subtasks" step from the "Specification Flow" + https://support.astron.nl/confluence/display/TMSS/Specification+Flow + ''' + # step 0: check pre-requisites + check_prerequities_for_scheduling(cleanup_subtask) + + if cleanup_subtask.specifications_template.type.value != SubtaskType.Choices.CLEANUP.value: + raise SubtaskSchedulingSpecificationException("Cannot schedule subtask id=%d type=%s but type should be %s" % (cleanup_subtask.pk, + cleanup_subtask.specifications_template.type, + SubtaskType.Choices.CLEANUP.value)) + + # step 1: set state to SCHEDULING + cleanup_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULING.value) + cleanup_subtask.save() + + # step 1a: set start/stop times + # not very relevant for cleanup subtasks, but it's nice for the user to see when the cleanup task was scheduled. + # please note that a cleanup subtask may idle for some time while it is in the cleanup queue. + # the actual start/stop times are set by the cleanup service when the subtask starts and stops.
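+ # illustrative example: with predecessors stopping at 10:00 and 11:00 UTC and 'now' at 09:00, the start_time below becomes 11:00; + # if all predecessors already stopped in the past, the cleanup simply starts 'now'. + # the stop_time of start_time+6h is only a rough placeholder window, not a deadline.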
+ cleanup_subtask.start_time = max([pred.stop_time for pred in cleanup_subtask.predecessors] + [datetime.utcnow()]) + cleanup_subtask.stop_time = cleanup_subtask.start_time + timedelta(hours=6) + + # step 2: link input dataproducts + if cleanup_subtask.inputs.count() == 0: + raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s because it has no input(s)" % (cleanup_subtask.pk, + cleanup_subtask.specifications_template.type)) + + # iterate over all inputs + for cleanup_subtask_input in cleanup_subtask.inputs.all(): + # select and set input dataproducts that meet the filter defined in selection_doc + input_dataproducts = [dataproduct for dataproduct in cleanup_subtask_input.producer.dataproducts.all() + if specifications_doc_meets_selection_doc(dataproduct.specifications_doc, cleanup_subtask_input.selection_doc)] + cleanup_subtask_input.dataproducts.set(input_dataproducts) + + # step 3: cleanup has no outputs, so there are no output dataproducts to create or link + + # skip step 4: cleanup does not need to have resources assigned + + # step 5: set state to SCHEDULED (so that the cleanup_service picks this subtask up and runs it) + cleanup_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value) + cleanup_subtask.save() + + return cleanup_subtask + + def schedule_copy_subtask(copy_subtask: Subtask): ''' Schedule the given copy_subtask This method should typically be called upon the event of an predecessor (pipeline or observation) subtask being finished. diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py b/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py index eceb99e688cd3b78173648004d023424dde01bd7..256128032bb3aa75343dbf05d9c0442df28d471a 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py @@ -1,13 +1,13 @@ from lofar.sas.tmss.tmss.exceptions import * from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.tmss.tmssapp.subtasks import unschedule_subtasks_in_task_blueprint -from lofar.sas.tmss.tmss.tmssapp.models.specification import TaskBlueprint, SchedulingUnitBlueprint +from lofar.sas.tmss.tmss.tmssapp.models.specification import TaskBlueprint, SchedulingUnitBlueprint, IOType, TaskTemplate, TaskType, TaskRelationSelectionTemplate from lofar.sas.tmss.tmss.tmssapp.subtasks import create_and_schedule_subtasks_from_task_blueprint, create_subtasks_from_task_blueprint, schedule_independent_subtasks_in_task_blueprint, update_subtasks_start_times_for_scheduling_unit from lofar.common.datetimeutils import round_to_minute_precision from functools import cmp_to_key import os from copy import deepcopy -from lofar.common.json_utils import add_defaults_to_json_object_for_schema +from lofar.common.json_utils import add_defaults_to_json_object_for_schema, get_default_json_object_for_schema import logging from datetime import datetime, timedelta from django.db.utils import IntegrityError @@ -179,19 +179,25 @@ def create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft: models.
try: producer_task_draft = scheduling_unit_draft.task_drafts.get(name=task_relation_definition["producer"]) consumer_task_draft = scheduling_unit_draft.task_drafts.get(name=task_relation_definition["consumer"]) - dataformat = models.Dataformat.objects.get(value=task_relation_definition["dataformat"]) - input_role = models.TaskConnectorType.objects.get(task_template=consumer_task_draft.specifications_template, role=task_relation_definition["input"]["role"], datatype=task_relation_definition["input"]["datatype"], iotype=models.IOType.objects.get(value=models.IOType.Choices.INPUT.value)) - output_role = models.TaskConnectorType.objects.get(task_template=producer_task_draft.specifications_template, role=task_relation_definition["output"]["role"], datatype=task_relation_definition["output"]["datatype"], iotype=models.IOType.objects.get(value=models.IOType.Choices.OUTPUT.value)) + input_role = models.TaskConnectorType.objects.get(task_template=consumer_task_draft.specifications_template, + role=task_relation_definition["input"]["role"], + datatype=task_relation_definition["input"]["datatype"], + dataformat=task_relation_definition["input"]["dataformat"], + iotype=models.IOType.Choices.INPUT.value) + output_role = models.TaskConnectorType.objects.get(task_template=producer_task_draft.specifications_template, + role=task_relation_definition["output"]["role"], + datatype=task_relation_definition["output"]["datatype"], + dataformat=task_relation_definition["output"]["dataformat"], + iotype=models.IOType.Choices.OUTPUT.value) selection_template = models.TaskRelationSelectionTemplate.objects.get(name=task_relation_definition["selection_template"]) except Exception as e: - logger.error("Cannot create task_relation from spec '%s'. Error: %s", task_relation_definition, e) + logger.error("Could not determine Task Relations for %s. Error: %s", task_relation_definition, e) raise try: with transaction.atomic(): task_relation = models.TaskRelationDraft.objects.create(tags=task_relation_definition.get("tags",[]), selection_doc=task_relation_definition["selection_doc"], - dataformat=dataformat, producer=producer_task_draft, consumer=consumer_task_draft, input_role=input_role, @@ -287,8 +293,7 @@ def create_task_blueprint_from_task_draft(task_draft: models.TaskDraft) -> model producer=producing_task_blueprint, consumer=consuming_task_blueprint, selection_doc=task_relation_draft.selection_doc, - selection_template=task_relation_draft.selection_template, - dataformat=task_relation_draft.dataformat) + selection_template=task_relation_draft.selection_template) logger.info("created task_relation_blueprint id=%s which connects task_blueprints producer_id=%s and consumer_id=%s", task_relation_blueprint.pk, producing_task_blueprint.pk, consuming_task_blueprint.pk) except IntegrityError as e: @@ -422,3 +427,90 @@ def unschedule_subtasks_in_scheduling_unit_blueprint(scheduling_unit_blueprint: scheduling_unit_blueprint.refresh_from_db() return scheduling_unit_blueprint +def create_cleanuptask_for_scheduling_unit_blueprint(scheduling_unit_blueprint: SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint: + '''create a cleanuptask for the given scheduling_unit which will cleanup all output dataproducts from tasks in this scheduling_unit which aren't already cleaned up''' + + # Rationale: + # adding a cleanup task(blueprint) to a scheduling_unit_blueprint adds a task to the graph (which breaks the immutable blueprint concept), + # but it does not modify observation/pipeline behaviour, hence we allow it. 
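+ # (Example: an experimental scheduling unit was run without a cleanup task and its dataproducts linger on disk; + # this function lets the user attach a cleanup task to the already-running/finished blueprint afterwards.)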
+ # Regard this as a convenience function to allow users to clean up after themselves if they forgot to specify a cleanup task. + # + # Note: We do modify the graph (both in draft and blueprint), + # but we do NOT update the specifications_doc because that doc (blueprint) is immutable, and shows the user what was specified. + # The fact that the graph in the specifications_doc and the graph of real instances differ (with an added cleanup task) shows the users that cleanup + # was apparently forgotten at specification time, and added later, which is explainable. + # + # Maybe we want to split this function in the future into an "add cleanup to draft" and/or "add cleanup to blueprint" + # For now, we present it as a friendly convenience function to clean up after yourself once the blueprint is already running / has already run with experimental scheduling units. + # In practice we will instantiate most scheduling units from properly defined observation_strategy_templates which include cleanup. + + with transaction.atomic(): + # create a cleanup task draft and blueprint.... + cleanup_template = models.TaskTemplate.objects.get(name="cleanup") + cleanup_spec_doc = get_default_json_object_for_schema(cleanup_template.schema) + + cleanup_task_draft = models.TaskDraft.objects.create( + name="Cleanup", + description="Cleaning up all output dataproducts for this scheduling unit", + scheduling_unit_draft=scheduling_unit_blueprint.draft, + specifications_doc=cleanup_spec_doc, + specifications_template=cleanup_template) + + cleanup_task_blueprint = TaskBlueprint.objects.create( + description=cleanup_task_draft.description, + name=cleanup_task_draft.name, + do_cancel=False, + draft=cleanup_task_draft, + scheduling_unit_blueprint=scheduling_unit_blueprint, + specifications_doc=cleanup_task_draft.specifications_doc, + specifications_template=cleanup_task_draft.specifications_template, + output_pinned=False) + + logger.info("Created Cleanup Task id=%d for scheduling_unit id=%s, adding the outputs of all producing tasks in the scheduling unit to the cleanup...", cleanup_task_blueprint.id, scheduling_unit_blueprint.id) + + # ... and connect the outputs of the producing tasks to the cleanup, so the cleanup task knows what to remove.
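+ # use the "all" selection template with its default (empty) selection_doc, so that every output + # dataproduct of each producing task is eligible for cleanup.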
+ selection_template = TaskRelationSelectionTemplate.objects.get(name="all") + selection_doc = get_default_json_object_for_schema(selection_template.schema) + + for producer_task_blueprint in scheduling_unit_blueprint.task_blueprints.exclude(specifications_template__type=TaskType.Choices.CLEANUP).exclude(specifications_template__type=TaskType.Choices.INGEST).all(): + for connector_type in producer_task_blueprint.specifications_template.output_connector_types.filter(iotype__value=IOType.Choices.OUTPUT.value).all(): + # define what the producer_task_blueprint is producing + output_role = models.TaskConnectorType.objects.get(task_template=producer_task_blueprint.specifications_template, + role=connector_type.role, + datatype=connector_type.datatype, + iotype=models.IOType.objects.get(value=models.IOType.Choices.OUTPUT.value)) + + # define what the cleanup task accepts/consumes + input_role = models.TaskConnectorType.objects.filter(dataformat=connector_type.dataformat).get(task_template=cleanup_task_draft.specifications_template, + role=models.Role.objects.get(value=models.Role.Choices.ANY.value), + datatype=connector_type.datatype, + iotype=models.IOType.objects.get(value=models.IOType.Choices.INPUT.value)) + + # connect the two (in draft and blueprint) + task_relation_draft = models.TaskRelationDraft.objects.create(producer=producer_task_blueprint.draft, + consumer=cleanup_task_draft, + input_role=input_role, + output_role=output_role, + selection_doc=selection_doc, + selection_template=selection_template) + + logger.info("created task_relation id=%s between task draft id=%s name='%s' and id=%s name='%s", + task_relation_draft.pk, task_relation_draft.producer.id, task_relation_draft.producer.name, task_relation_draft.consumer.id, task_relation_draft.consumer.name) + + task_relation_blueprint = models.TaskRelationBlueprint.objects.create(draft=task_relation_draft, + producer=producer_task_blueprint, + consumer=cleanup_task_blueprint, + input_role=input_role, + output_role=output_role, + selection_doc=selection_doc, + selection_template=selection_template) + + logger.info("created task_relation id=%s between task blueprint id=%s name='%s' and id=%s name='%s", + task_relation_blueprint.pk, task_relation_blueprint.producer.id, task_relation_blueprint.producer.name, task_relation_blueprint.consumer.id, task_relation_blueprint.consumer.name) + + # and finally also create the executable subtask for the cleanup_task_blueprint, so it can actually run. + create_subtasks_from_task_blueprint(cleanup_task_blueprint) + + # return the modified scheduling_unit + scheduling_unit_blueprint.refresh_from_db() + return scheduling_unit_blueprint \ No newline at end of file diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/permissions.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/permissions.py index 8e6b095a4fe79fd8680080065e5c82d7903f1325..291e602d5832032000e0db6a09771e2238e69d78 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/permissions.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/permissions.py @@ -67,6 +67,8 @@ class IsProjectMember(drf_permissions.DjangoObjectPermissions): # GET detail, PATCH, and DELETE # we always have permission as superuser (e.g. in test environment, where a regular user is created to test permission specifically) if request.user.is_superuser: + logger.info("IsProjectMember: User=%s is superuser. Not enforcing project permissions!" 
% request.user) + logger.info('### IsProjectMember.has_object_permission %s %s True' % (request._request, request.method)) return True # todo: do we want to restrict access for that as well? Then we add it to the ProjectPermission model, but it seems cumbersome...? @@ -98,9 +100,10 @@ class IsProjectMember(drf_permissions.DjangoObjectPermissions): related_project = obj.project if project_role['project'] == obj.project.name and \ models.ProjectRole.objects.get(value=project_role['role']) in permitted_project_roles: + logger.info('user=%s is permitted to access object=%s' % (request.user, obj)) + logger.info('### IsProjectMember.has_object_permission %s %s True' % (request._request, request.method)) return True else: - related_project = None logger.error("No project property on object %s, so cannot check project permission." % obj) # todo: how to deal with objects that do not have a unique project associated to them? # Do need users need the required role in all of them? Or just one? @@ -110,6 +113,8 @@ class IsProjectMember(drf_permissions.DjangoObjectPermissions): logger.warning("'%s' is a Template and action is '%s' so granting object access nonetheless." % (obj, view.action)) return True + logger.info('User=%s is not permitted to access object=%s with related project=%s since it requires one of project_roles=%s' % (request.user, obj, related_project, permitted_project_roles)) + logger.info('### IsProjectMember.has_object_permission %s False' % (request._request)) return False def has_permission(self, request, view): diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py index b8eefe04f4f4a96b6f8969059b58fa2806f27708..93bbbfbd1bcd71efed23a276fe675b1278060432 100644 --- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py +++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/specification.py @@ -825,6 +825,31 @@ class SchedulingUnitBlueprintViewSet(LOFARViewSet): # result is list of dict so thats why return JsonResponse(result, safe=False) + @swagger_auto_schema(responses={200: "All Subtasks in this SchedulingUnitBlueprint", + 403: 'forbidden'}, + operation_description="Get all subtasks for this scheduling_unit") + @action(methods=['get'], detail=True, url_name="subtasks", name="all subtasks in this scheduling_unit") + def subtasks(self, request, pk=None): + subtasks = models.Subtask.objects.all().filter(task_blueprint__scheduling_unit_blueprint_id=pk). 
\ + select_related('state', 'specifications_template', 'specifications_template__type', 'cluster', 'created_or_updated_by_user').all() + + # return a response with the serialized subtasks of this scheduling_unit_blueprint + return Response(serializers.SubtaskSerializer(subtasks, many=True, context={'request':request}).data, + status=status.HTTP_200_OK) + + + @swagger_auto_schema(responses={201: "This SchedulingUnitBlueprint, with references to the created Cleanup TaskBlueprints.", + 403: 'forbidden'}, + operation_description="Create a cleanup task for this scheduling unit.") + @action(methods=['get'], detail=True, url_name="create_cleanuptask", name="Create a cleanup task for this scheduling unit") + def create_cleanuptask_for_scheduling_unit_blueprint(self, request, pk=None): + scheduling_unit_blueprint = get_object_or_404(models.SchedulingUnitBlueprint, pk=pk) + scheduling_unit_blueprint = create_cleanuptask_for_scheduling_unit_blueprint(scheduling_unit_blueprint) + + # return a response with the new serialized scheduling_unit_blueprint (with references to the created task_blueprint and subtask) + return Response(serializers.SchedulingUnitBlueprintSerializer(scheduling_unit_blueprint, context={'request':request}).data, + status=status.HTTP_201_CREATED) + class SchedulingUnitBlueprintExtendedViewSet(SchedulingUnitBlueprintViewSet): serializer_class = serializers.SchedulingUnitBlueprintExtendedSerializer diff --git a/SAS/TMSS/backend/src/tmss/workflowapp/flows/schedulingunitflow.py b/SAS/TMSS/backend/src/tmss/workflowapp/flows/schedulingunitflow.py index 5ab934891e8e4358fbf19719cb1d82972de9587b..437d82c871b96c9492828f6505b13eba8d4f70ad 100644 --- a/SAS/TMSS/backend/src/tmss/workflowapp/flows/schedulingunitflow.py +++ b/SAS/TMSS/backend/src/tmss/workflowapp/flows/schedulingunitflow.py @@ -272,7 +272,7 @@ class SchedulingUnitFlow(Flow): logger.info("granting ingest permission for scheduling unit blueprint id=%s", activation.process.su.id) activation.process.su.ingest_permission_granted_since = round_to_second_precision(datetime.utcnow()) - activation.process.su.output_data_allowed_to_be_ingested = True + activation.process.su.ingest_permission_required = True activation.process.su.save() activation.process.save() diff --git a/SAS/TMSS/backend/src/tmss/workflowapp/tests/t_workflow_qaworkflow.py b/SAS/TMSS/backend/src/tmss/workflowapp/tests/t_workflow_qaworkflow.py index 744773b026fc4ea04d334a92ba4145c17785f024..67980972ba4c0f352cc187ca9309351dfce1d909 100755 --- a/SAS/TMSS/backend/src/tmss/workflowapp/tests/t_workflow_qaworkflow.py +++ b/SAS/TMSS/backend/src/tmss/workflowapp/tests/t_workflow_qaworkflow.py @@ -335,10 +335,6 @@ class SchedulingUnitFlowTest(unittest.TestCase): ingest_permission_granted_since = models.SchedulingUnitBlueprint.objects.get(pk=scheduling_unit_process_id).ingest_permission_granted_since self.assertEqual(True,isinstance(ingest_permission_granted_since, datetime)) - #verify that output_data_allowed_to_be_ingested is now True - output_data_allowed_to_be_ingested = models.SchedulingUnitBlueprint.objects.get(pk=scheduling_unit_process_id).output_data_allowed_to_be_ingested - self.assertEqual(True,output_data_allowed_to_be_ingested) - self.assertEqual(Task.objects.filter(process=scheduling_unit_process_id).order_by('id')[11].flow_task.name, 'ingest_done') self.assertEqual(Task.objects.filter(process=scheduling_unit_process_id).order_by('id')[11].status, 'DONE') @@ -1394,10 +1390,6 @@ class
SchedulingUnitFlowTest(unittest.TestCase): ingest_permission_granted_since = models.SchedulingUnitBlueprint.objects.get(pk=scheduling_unit_process_id).ingest_permission_granted_since self.assertEqual(True,isinstance(ingest_permission_granted_since, datetime)) - #verify that output_data_allowed_to_be_ingested is now True - output_data_allowed_to_be_ingested = models.SchedulingUnitBlueprint.objects.get(pk=scheduling_unit_process_id).output_data_allowed_to_be_ingested - self.assertEqual(True,output_data_allowed_to_be_ingested) - self.assertEqual(Task.objects.filter(process=scheduling_unit_process_id).order_by('id')[11].flow_task.name, 'ingest_done') self.assertEqual(Task.objects.filter(process=scheduling_unit_process_id).order_by('id')[11].status, 'DONE') diff --git a/SAS/TMSS/backend/test/t_adapter.py b/SAS/TMSS/backend/test/t_adapter.py index e886bd4dd488524c7238f62d2132fd983bf894fb..7e6584db476356ab17752eec2328b5958fcb2396 100755 --- a/SAS/TMSS/backend/test/t_adapter.py +++ b/SAS/TMSS/backend/test/t_adapter.py @@ -578,9 +578,9 @@ class ProjectReportTest(unittest.TestCase): self.assertAlmostEqual(result['durations']['total_failed'], total_failed) # There is only one finished SUB - self.assertEqual(result['durations']['scheduling_unit_blueprints_finished'][0]['id'], succeeded_sub.pk) + self.assertEqual(result['SUBs']['finished'][0]['id'], succeeded_sub.pk) # There is only one cancelled SUB - self.assertEqual(result['durations']['scheduling_unit_blueprints_failed'][0]['id'], cancelled_sub.pk) + self.assertEqual(result['SUBs']['failed'][0]['id'], cancelled_sub.pk) # There are just two dataproducts self.assertEqual(result['LTA dataproducts']['size__sum'], dataproduct1.size + dataproduct2.size) diff --git a/SAS/TMSS/backend/test/t_scheduling.py b/SAS/TMSS/backend/test/t_scheduling.py index 4f03350406cf7c7914d6ca8ae7b6bedeb78cba5b..be283562973fef76d10e6eb7bb94c6357ac9a830 100755 --- a/SAS/TMSS/backend/test/t_scheduling.py +++ b/SAS/TMSS/backend/test/t_scheduling.py @@ -422,9 +422,8 @@ class SchedulingTest(unittest.TestCase): # connect obs to pipeline scheduling_unit_doc['task_relations'].append({"producer": "Observation", "consumer": "Pipeline", - "input": { "role": "any", "datatype": "visibilities" }, - "output": { "role": "correlator", "datatype": "visibilities" }, - "dataformat": "MeasurementSet", + "input": { "role": "any", "datatype": "visibilities", "dataformat": "MeasurementSet"}, + "output": { "role": "correlator", "datatype": "visibilities", "dataformat": "MeasurementSet"}, "selection_doc": {}, "selection_template": "all" }) diff --git a/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py b/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py index 7248ff73a2c58498048cec1df10da470800907d2..d7515c0afdd7169c391097f628cff0248a99bf1c 100755 --- a/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py +++ b/SAS/TMSS/backend/test/t_tmssapp_specification_REST_API.py @@ -615,9 +615,9 @@ class TaskConnectorTestCase(unittest.TestCase): # POST a new item with invalid choice test_data_invalid = dict(test_data_creator.TaskConnectorType(task_template_url=self.task_template_url)) - test_data_invalid['dataformats'] = [BASE_URL + '/dataformat/forbidden/'] + test_data_invalid['dataformat'] = BASE_URL + '/dataformat/forbidden/' r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector_type/', test_data_invalid, 400, {}) - self.assertTrue('Invalid hyperlink' in str(r_dict['dataformats'])) + self.assertTrue('Invalid hyperlink' in str(r_dict['dataformat'])) def 
test_task_connector_POST_nonexistant_task_template_raises_error(self): @@ -665,8 +665,7 @@ class TaskConnectorTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) test_patch = {"role": BASE_URL + '/role/calibrator', - "dataformats": [BASE_URL + '/dataformat/Beamformed', - BASE_URL + '/dataformat/MeasurementSet']} + "dataformat": BASE_URL + '/dataformat/Beamformed'} # PATCH item and verify PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) diff --git a/SAS/TMSS/backend/test/tmss_test_data_django_models.py b/SAS/TMSS/backend/test/tmss_test_data_django_models.py index fb0d9a88c0b541baf613d8626b6d0f514536ceb5..9b7024f59cb7d6f0f06e429dc72ffb08fd231ef2 100644 --- a/SAS/TMSS/backend/test/tmss_test_data_django_models.py +++ b/SAS/TMSS/backend/test/tmss_test_data_django_models.py @@ -106,6 +106,7 @@ def TaskRelationSelectionTemplate_test_data(name="my_TaskRelationSelectionTempla def TaskConnectorType_test_data() -> dict: return {"role": models.Role.objects.get(value='calibrator'), "datatype": models.Datatype.objects.get(value='instrument model'), + "dataformat": models.Dataformat.objects.get(value='Beamformed'), "task_template": models.TaskTemplate.objects.create(**TaskTemplate_test_data()), "iotype": models.IOType.objects.get(value=models.IOType.Choices.OUTPUT.value), "tags": []} @@ -243,7 +244,6 @@ def TaskRelationDraft_test_data(producer: models.TaskDraft = None, consumer: mod return {"tags": [], "selection_doc": {}, - "dataformat": models.Dataformat.objects.get(value='Beamformed'), "producer": producer, "consumer": consumer, "input_role": models.TaskConnectorType.objects.create(**TaskConnectorType_test_data()), @@ -307,7 +307,6 @@ def TaskRelationBlueprint_test_data(producer: models.TaskBlueprint = None, consu return {"tags": [], "selection_doc": {}, - "dataformat": models.Dataformat.objects.get(value='Beamformed'), "input_role": models.TaskConnectorType.objects.create(**TaskConnectorType_test_data()), "output_role": models.TaskConnectorType.objects.create(**TaskConnectorType_test_data()), "draft": models.TaskRelationDraft.objects.create(**TaskRelationDraft_test_data()), diff --git a/SAS/TMSS/backend/test/tmss_test_data_rest.py b/SAS/TMSS/backend/test/tmss_test_data_rest.py index 3ac9952f3b0a98efd8caef3b36b1a90deff60e19..4b74a99f08e150ac3dd61c17157696fb048bf5c9 100644 --- a/SAS/TMSS/backend/test/tmss_test_data_rest.py +++ b/SAS/TMSS/backend/test/tmss_test_data_rest.py @@ -227,7 +227,7 @@ class TMSSRESTTestDataCreator(): return {"role": self.django_api_url + '/role/%s'%role, "datatype": self.django_api_url + '/datatype/image', - "dataformats": [self.django_api_url + '/dataformat/Beamformed'], + "dataformat": self.django_api_url + '/dataformat/Beamformed', "task_template": task_template_url, "iotype": self.django_api_url + '/iotype/%s'%iotype, "tags": []} @@ -439,7 +439,6 @@ class TMSSRESTTestDataCreator(): return {"tags": [], "selection_doc": selection_doc, - "dataformat": self.django_api_url + "/dataformat/Beamformed", "producer": producer_url, "consumer": consumer_url, "input_role": input_role_url, @@ -539,7 +538,6 @@ class TMSSRESTTestDataCreator(): # test data return {"tags": [], "selection_doc": selection_doc, - "dataformat": self.django_api_url + '/dataformat/MeasurementSet', "input_role": input_role_url, "output_role": output_role_url, "draft": draft_url, diff --git a/SAS/TMSS/client/lib/tmss_http_rest_client.py b/SAS/TMSS/client/lib/tmss_http_rest_client.py index 
59474ebfe38de4b4dde767329fc2a04754f20fde..61872b924712a8d3a7b875c52f79fec5536039ba 100644 --- a/SAS/TMSS/client/lib/tmss_http_rest_client.py +++ b/SAS/TMSS/client/lib/tmss_http_rest_client.py @@ -344,6 +344,14 @@ class TMSSsession(object): returns a dict with the 'id' and 'progress', or raises.""" return self.get_path_as_json_object('subtask/%s/get_progress' % subtask_id) + def get_subtasks_in_same_scheduling_unit(self, subtask: dict) -> []: + """get all subtasks in the same scheduling_unit for the given subtask. + returns a list of subtask-dicts upon success, or raises.""" + task_blueprint = self.get_url_as_json_object(subtask['task_blueprint']) + scheduling_unit_blueprint = self.get_url_as_json_object(task_blueprint['scheduling_unit_blueprint']) + subtasks = self.get_url_as_json_object(full_url=scheduling_unit_blueprint['url'].rstrip('/') + '/subtasks') + return subtasks + def get_setting(self, setting_name: str) -> {}: """get the value of a TMSS setting. returns the setting value upon success, or raises.""" diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js b/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js index fce56dec489c12171a738f814635207a3dc123fc..df3e6659276fc8b78c97288612fb751a5366d73d 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js @@ -150,6 +150,14 @@ function Jeditor(props) { message: 'Not a valid input for Subband List' }); } + } else if (schema.validationType === "subband_list_optional") { + if (value && !subbandValidator(value)) { + errors.push({ + path: path, + property: 'validationType', + message: 'Not a valid input for Subband List' + }); + } } else if (schema.validationType === "time") { if (!timeValidator(value)) { errors.push({ @@ -288,14 +296,15 @@ function Jeditor(props) { function getCustomProperties(properties) { for (const propertyKey in properties) { const propertyValue = properties[propertyKey]; - if (propertyKey === 'subbands') { + if ((propertyKey === 'subbands' && propertyValue.type=== 'array') || + propertyKey === 'list' && propertyValue.type=== 'array') { let newProperty = {}; newProperty.additionalItems = false; newProperty.title = propertyValue.title; newProperty.type = 'string'; newProperty.default = ''; newProperty.description = "For Range enter Start and End seperated by 2 dots. Mulitple ranges can be separated by comma. Minimum should be 0 and maximum should be 511. 
For exmaple 11..20, 30..50"; - newProperty.validationType = 'subband_list'; + newProperty.validationType = propertyKey === 'subbands'?'subband_list':'subband_list_optional'; properties[propertyKey] = newProperty; } else if (propertyKey.toLowerCase() === 'duration') { let newProperty = { @@ -367,7 +376,8 @@ function Jeditor(props) { if (_.indexOf(pointingProps, inputKey) >= 0) { inputValue.angle1 = UnitConverter.getAngleInput(inputValue.angle1); inputValue.angle2 = UnitConverter.getAngleInput(inputValue.angle2, true); - } else if (inputKey === 'subbands') { + } else if ((inputKey === 'subbands' && inputValue instanceof Array) || + (inputKey === 'list' && inputValue instanceof Array)) { editorInput[inputKey] = getSubbandInput(inputValue); } else { updateInput(inputValue); @@ -393,7 +403,8 @@ function Jeditor(props) { } else { updateOutput(outputValue); } - } else if (outputKey === 'subbands') { + } else if ((outputKey === 'subbands' && typeof(outputValue) === 'string') || + (outputKey === 'list' && typeof(outputValue) === 'string')) { editorOutput[outputKey] = getSubbandOutput(outputValue); } else if (outputKey.toLowerCase() === 'duration') { const splitOutput = outputValue.split(':'); @@ -481,7 +492,7 @@ function Jeditor(props) { * @param {String} prpOutput */ function getSubbandOutput(prpOutput) { - const subbandArray = prpOutput.split(","); + const subbandArray = prpOutput?prpOutput.split(","):[]; let subbandList = []; for (const subband of subbandArray ) { const subbandRange = subband.split('..'); diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/Beamformer.js b/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/Beamformer.js new file mode 100644 index 0000000000000000000000000000000000000000..ce67c36e25cfe4ce75f2b5ab6033d5942a4c9c36 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/Beamformer.js @@ -0,0 +1,286 @@ +import React, { Component } from 'react'; +import { Dialog } from 'primereact/dialog'; +import { Button } from 'primereact/button'; +import $RefParser from "@apidevtools/json-schema-ref-parser"; +import UtilService from '../../services/util.service'; +import Jeditor from '../JSONEditor/JEditor'; +import _ from 'lodash'; + +export default class Beamformer extends Component { + constructor(props) { + super(props); + this.tmpRowData = []; + this.state = { + showDialog: false, + dialogTitle: 'Beamformer - Specification', + validEditor: false, // For JSON editor validation + validFields: {}, // For Form Validation + }; + + this.formRules = {}; // Form validation rules + this.previousValue = [{}]; + + this.copyBeamformersValue = this.copyBeamformersValue.bind(this); + this.setEditorFunction = this.setEditorFunction.bind(this); + this.setEditorOutput = this.setEditorOutput.bind(this); + this.validateForm = this.validateForm.bind(this); + this.doCancel = this.doCancel.bind(this); + this.keyEventHandler = this.keyEventHandler.bind(this); + } + + isPopup() { + return true; + } + + /** + * Get beamformer details if exists + */ + async componentDidMount(){ + let parentRows = this.props.agGridReact.props.rowData[this.props.node.rowIndex]; + let parentCellData = parentRows[this.props.colDef.field]; + let observStrategy = this.props.context.componentParent.state.observStrategy; + this.changeStrategy(observStrategy) + await this.setState({ + showDialog: true, + parentCellData: parentCellData, + }); + this.previousValue= parentCellData; + } + + /** Prepare data for JEditor */ + async changeStrategy(observStrategy) { + if(observStrategy) { + 
const tasks = observStrategy.template.tasks; + let paramsOutput = {}; + let schema = { type: 'object', additionalProperties: false, + properties: {}, definitions:{} + }; + for (const taskName of _.keys(tasks)) { + const task = tasks[taskName]; + //Resolve task from the strategy template + const $taskRefs = await $RefParser.resolve(task); + + // Identify the task specification template of every task in the strategy template + const taskTemplate = _.find(this.props.context.componentParent.taskTemplates, {'name': task['specifications_template']}); + schema['$id'] = taskTemplate.schema['$id']; + schema['$schema'] = taskTemplate.schema['$schema']; + let index = 0; + let param = _.find(observStrategy.template.parameters, function(o) { return o.name === 'Beamformers' || o.name === 'beamformers' ;}); + if(param) { + if (param.refs[0].indexOf(`/tasks/${taskName}`) > 0) { + // Resolve the identified template + const $templateRefs = await $RefParser.resolve(taskTemplate); + let property = { }; + let tempProperty = null; + const taskPaths = param.refs[0].split("/"); + // Get the property type from the template and create new property in the schema for the parameters + try { + const parameterRef = param.refs[0]; + tempProperty = $templateRefs.get(parameterRef); + + } catch(error) { + tempProperty = _.cloneDeep(taskTemplate.schema.properties[taskPaths[4]]); + if (tempProperty['$ref']) { + tempProperty = await UtilService.resolveSchema(tempProperty); + if (tempProperty.definitions && tempProperty.definitions[taskPaths[4]]) { + schema.definitions = {...schema.definitions, ...tempProperty.definitions}; + tempProperty = tempProperty.definitions[taskPaths[4]]; + } else if (tempProperty.properties && tempProperty.properties[taskPaths[4]]) { + tempProperty = tempProperty.properties[taskPaths[4]]; + } + } + if (tempProperty.type === 'array' && taskPaths.length>6) { + tempProperty = tempProperty.items.properties[taskPaths[6]]; + } + property = tempProperty; + } + property.title = param.name; + property.default = $taskRefs.get(param.refs[0].replace(`#/tasks/${taskName}`, '#')); + paramsOutput[`param_${index}`] = property.default; + schema.properties[`param_${index}`] = property; + // Set property definitions taken from the task template in the new schema + for (const definitionName in taskTemplate.schema.definitions) { + schema.definitions[definitionName] = taskTemplate.schema.definitions[definitionName]; + + } + } + index++; + } + } + if(this.state.parentCellData && JSON.stringify(this.state.parentCellData) !== '[{}]') { + if(this.state.parentCellData['param_0']) { + paramsOutput = this.state.parentCellData; + } else { + paramsOutput = {'param_0': this.state.parentCellData}; + } + } + await this.setState({observStrategy: observStrategy, paramsSchema: schema, paramsOutput: paramsOutput,}); + } + } + + /** + * Resolve JSON Schema + */ + async resolveSchema(schema){ + let properties = schema.properties; + schema.definitions = schema.definitions?schema.definitions:{}; + if (properties) { + for (const propertyKey in properties) { + let property = properties[propertyKey]; + if (property["$ref"] && !property["$ref"].startsWith("#")) { // 1st level reference of the object + const refUrl = property["$ref"]; + let newRef = refUrl.substring(refUrl.indexOf("#")); + if (refUrl.endsWith("/pointing")) { // For type pointing + schema.definitions["pointing"] = (await $RefParser.resolve(refUrl)).get(newRef); + property["$ref"] = newRef; + } else { // General object to resolve if any reference in child level + property = await
this.resolveSchema((await $RefParser.resolve(refUrl)).get(newRef)); + } + } else if (property["type"] === "array") { // reference in array items definition + let resolvedItems = await this.resolveSchema(property["items"]); + schema.definitions = {...schema.definitions, ...resolvedItems.definitions}; + delete resolvedItems['definitions']; + property["items"] = resolvedItems; + } + properties[propertyKey] = property; + } + } else if (schema["oneOf"]) { // Reference in OneOf array + let resolvedOneOfList = []; + for (const oneOfProperty of schema["oneOf"]) { + const resolvedOneOf = await this.resolveSchema(oneOfProperty); + resolvedOneOfList.push(resolvedOneOf); + } + schema["oneOf"] = resolvedOneOfList; + } else if (schema["$ref"] && !schema["$ref"].startsWith("#")) { //reference in oneOf list item + const refUrl = schema["$ref"]; + let newRef = refUrl.substring(refUrl.indexOf("#")); + if (refUrl.endsWith("/pointing")) { + schema.definitions["pointing"] = (await $RefParser.resolve(refUrl)).get(newRef); + schema["$ref"] = newRef; + } else { + schema = await this.resolveSchema((await $RefParser.resolve(refUrl)).get(newRef)); + } + } + return schema; + } + + /** + * Copy JEditor value to AG Grid cell + */ + async copyBeamformersValue(){ + this.previousValue = this.state.paramsOutput; + await this.props.context.componentParent.updateCell( + this.props.node.rowIndex,this.props.colDef.field, this.state.paramsOutput + ); + this.setState({ showDialog: false}); + } + + /** + * On cancel, retain the existing value + */ + async doCancel(){ + await this.props.context.componentParent.updateCell( + this.props.node.rowIndex,this.props.colDef.field, this.previousValue + ); + this.setState({paramsOutput: this.previousValue, showDialog: false}); + } + + /** + * JEditor's function to be called when the parent wants to trigger a change in the JSON Editor + * @param {Function} editorFunction + */ + setEditorFunction(editorFunction) { + this.setState({editorFunction: editorFunction}); + } + + /** + * This is the callback method to be passed to the JSON editor. + * JEditor will call this function when there is a change in the editor. + * @param {Object} jsonOutput + * @param {Array} errors + */ + setEditorOutput(jsonOutput, errors) { + this.paramsOutput = jsonOutput; + this.validEditor = errors.length === 0; + this.setState({ paramsOutput: jsonOutput, + validEditor: errors.length === 0, + validForm: this.validateForm()}); + } + + /** + * Validation function to validate the form or field based on the form rules. + * If no argument is passed for fieldName, validates all fields in the form. + * @param {string} fieldName + */ + validateForm(fieldName) { + let validForm = false; + let errors = this.state.errors; + let validFields = this.state.validFields; + if (fieldName) { + delete errors[fieldName]; + delete validFields[fieldName]; + if (this.formRules[fieldName]) { + const rule = this.formRules[fieldName]; + const fieldValue = this.state.schedulingUnit[fieldName]; + if (rule.required) { + if (!fieldValue) { + errors[fieldName] = rule.message?rule.message:`${fieldName} is required`; + } else { + validFields[fieldName] = true; + } + } + } + } + this.setState({errors: errors, validFields: validFields}); + if (Object.keys(validFields).length === Object.keys(this.formRules).length) { + validForm = true; + } + return validForm && !this.state.missingStationFieldsErrors; + } + /** + * Handle Tab key event in Beamformers Editor.
It is invoked when the Tab key is pressed in the Beamformers editor + * @param {*} e + */ + keyEventHandler(e){ + var key = e.which || e.keyCode; + if(key === 9) { + this.copyBeamformersValue(); + } + } + + render() { + const schema = this.state.paramsSchema; + let jeditor = null; + if (schema) { + jeditor = React.createElement(Jeditor, {title: "Beamformer Specification", + schema: schema, + initValue: this.state.paramsOutput, + callback: this.setEditorOutput, + parentFunction: this.setEditorFunction + }); + } + return ( + <div onKeyDown={this.keyEventHandler}> + <Dialog header={_.startCase(this.state.dialogTitle)} style={{width: '60vw', height: '80vh'}} visible={this.state.showDialog} maximized={false} + onHide={() => {this.doCancel()}} inputId="confirm_dialog" + footer={<div> + <Button label="OK" icon="pi pi-check" onClick={() => {this.copyBeamformersValue()}} disabled={!this.state.validEditor} style={{width: '6em'}} /> + <Button className="p-button-danger" icon="pi pi-times" label="Cancel" onClick={() => {this.doCancel()}} /> + + </div> + } + > + <div className="ag-theme-balham" style={{ height: '65vh' }}> + <div className="p-fluid"> + <div className="p-grid"> + <div className="p-col-12"> + {this.state.paramsSchema?jeditor:""} + </div> + </div> + </div> + </div> + </Dialog> + </div> + ); + } +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/BeamformerRenderer.js b/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/BeamformerRenderer.js new file mode 100644 index 0000000000000000000000000000000000000000..7990622fff174c6b95e0eb89052f9a4873ae4ce1 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/BeamformerRenderer.js @@ -0,0 +1,34 @@ +import React, { Component } from 'react'; + +export default class BeamformersRenderer extends Component { + constructor(props) { + super(props); + } + + /** + Show cell value in grid + */ + render() { + let row = []; + let value = ''; + if (this.props.colDef.field.startsWith('gdef_')) { + row = this.props.agGridReact.props.context.componentParent.state.commonRowData[0]; + value = row[this.props.colDef.field]; + } + else { + row = this.props.agGridReact.props.rowData[this.props.node.rowIndex]; + value = row[this.props.colDef.field]; + } + if(value && value['param_0']) { + value = JSON.stringify(value['param_0']); + } else { + value = JSON.stringify(value); + } + + return <> + {value && + value + } + </>; + } +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/CustomDateComp.js b/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/CustomDateComp.js index 50623578335782048c11ba4ff25bb4f182370119..abfc5dc2078638524d304475bd0de3f9b873a147 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/CustomDateComp.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/Spreadsheet/CustomDateComp.js @@ -34,31 +34,12 @@ export default class CustomDateComp extends Component { } isCancelAfterEnd(){ - let date = (this.state.date !== '' && this.state.date !== 'undefined')? moment(this.state.date).format(UIConstants.CALENDAR_DATETIME_FORMAT) :''; + let date = (this.state.date !== '' && this.state.date !== undefined)?
moment(this.state.date).format(UIConstants.CALENDAR_DATETIME_FORMAT) :''; this.props.context.componentParent.updateTime( this.props.node.rowIndex,this.props.colDef.field, date ); } - render() { - return this.state.systemTime?( - <Flatpickr - data-enable-time - options={{ - "inline": true, - "enableSeconds": true, - "time_24hr": true, - "defaultDate": this.state.systemTime?this.state.systemTime.format(UIConstants.CALENDAR_DEFAULTDATE_FORMAT):"", - "defaultHour": this.state.systemTime?this.state.systemTime.hours():12, - "defaultMinute": this.state.systemTime?this.state.systemTime.minutes():0 - }} - value={this.state.date} - onChange= {value => {this.updateDateChanges(value[0]?value[0]:this.state.date)}} - /> - ):""; - } - - updateDateChanges(e){ this.setState({date : e || ''}); } @@ -91,4 +72,29 @@ export default class CustomDateComp extends Component { this.props.node.rowIndex,this.props.colDef.field,selectedDates[0] ); }; + + render() { + return this.state.systemTime?( + <> + <button className="p-button p-component p-button-icon-only" onClick={() => {this.updateDateChanges(null)}} + title="Clear" style={{left: '190px'}}> + <i className="fas fa-times"></i> + </button> + <Flatpickr + data-enable-time + options={{ + "inline": true, + "enableSeconds": true, + "time_24hr": true, + "defaultDate": this.state.systemTime?this.state.systemTime.format(UIConstants.CALENDAR_DEFAULTDATE_FORMAT):"", + "defaultHour": this.state.systemTime?this.state.systemTime.hours():12, + "defaultMinute": this.state.systemTime?this.state.systemTime.minutes():0 + }} + value={this.state.date?this.state.date:''} + onChange= {value => {this.updateDateChanges(value[0]?value[0]:this.state.date)}} + /> + </> + ):""; + } + } \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js b/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js index ba59f506ee48ddee32e91bc4d2b3cc6d58dc7800..c5ef309c4d87d25f27b2e86cdae473001669a361 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js @@ -266,7 +266,7 @@ export class CalendarTimeline extends Component { monthDuration = `(${startMonth}-${endMonth})`; } return (<div {...getRootProps()} className="sidebar-header" - style={{width: `${this.state.sidebarWidth}px`}}> + style={{width: `${this.props.sidebarWidth?this.props.sidebarWidth:this.state.sidebarWidth}px`}}> <div className="sidebar-header-row">{this.state.viewType===UIConstants.timeline.types.NORMAL?
(this.state.dayHeaderVisible?`Day${monthDuration}`:`Week${monthDuration}`) :`Week (${this.state.timelineStartDate.week()}) / Day`}</div> @@ -1466,7 +1466,7 @@ export class CalendarTimeline extends Component { minZoom={this.state.minZoom} maxZoom={this.state.maxZoom} lineHeight={this.props.rowHeight || 50} itemHeightRatio={0.95} - sidebarWidth={this.state.sidebarWidth} + sidebarWidth={this.props.sidebarWidth?this.props.sidebarWidth:this.state.sidebarWidth} timeSteps={this.state.timeSteps} onZoom={this.onZoom} onBoundsChange={this.onBoundsChange} diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss index 573ce55702ebf05f50fd2d7fe384da36dc6b03b3..131ad0bc18321312059034f9309c8e74c181c962 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss @@ -435,3 +435,11 @@ body .p-multiselect-panel .p-multiselect-header .p-multiselect-filter-container height: 1.75em; // top: -3px; } +.toggle-btn { + height: 20px; + font-size: 12px !important; + bottom: 8px !important; +} +.toggle-btn>span { + padding: 0em 0.25em !important; +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js index 58add662c42b12bdc0d882f4e7852dfa334dc3c7..ba52802355ed2324dc7ef6a1ea2c7f6c8a15c37f 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js @@ -414,8 +414,13 @@ class ViewSchedulingUnit extends Component{ this.setState({dialogVisible: false, showSpinner: true}); ScheduleService.createSchedulingUnitBlueprintTree(this.state.scheduleunit.id) .then(blueprint => { - appGrowl.show({severity: 'success', summary: 'Success', detail: 'Blueprint created successfully!'}); - this.setState({showSpinner: false, redirect: `/schedulingunit/view/blueprint/${blueprint.id}`, isLoading: true}); + if (blueprint) { + appGrowl.show({severity: 'success', summary: 'Success', detail: 'Blueprint created successfully!'}); + this.setState({showSpinner: false, redirect: `/schedulingunit/view/blueprint/${blueprint.id}`, isLoading: true}); + } else { + appGrowl.show({severity: 'error', summary: 'Failed', detail: 'Unable to create blueprint!'}); + this.setState({showSpinner: false}); + } }); } diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/excelview.schedulingset.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/excelview.schedulingset.js index 75faba38ba5ed5dfa0a117e2a49150b4faf5b164..ff028172ae69ebe0768d5451823edda91486d744 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/excelview.schedulingset.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/excelview.schedulingset.js @@ -1,65 +1,61 @@ import React, { Component } from 'react'; -import { Redirect } from 'react-router-dom'; +import { Redirect } from 'react-router-dom'; import { Dropdown } from 'primereact/dropdown'; import { Button } from 'primereact/button'; import { Growl } from 'primereact/components/growl/Growl'; import { Checkbox } from 'primereact/checkbox'; import { Accordion, AccordionTab } from 'primereact/accordion'; -import { AgGridReact } from 'ag-grid-react'; -import { AllCommunityModules } from '@ag-grid-community/all-modules'; -import $RefParser from "@apidevtools/json-schema-ref-parser"; +import { DataTable } from 
'primereact/datatable'; +import { Column } from 'primereact/column'; + import TimeInputmask from '../../components/Spreadsheet/TimeInputmask' import DegreeInputmask from '../../components/Spreadsheet/DegreeInputmask' import NumericEditor from '../../components/Spreadsheet/numericEditor'; import BetweenEditor from '../../components/Spreadsheet/BetweenEditor'; import BetweenRenderer from '../../components/Spreadsheet/BetweenRenderer'; +import BeamformersRenderer from '../../components/Spreadsheet/BeamformerRenderer'; import MultiSelector from '../../components/Spreadsheet/MultiSelector'; +import CustomDateComp from '../../components/Spreadsheet/CustomDateComp'; +import StationEditor from '../../components/Spreadsheet/StationEditor'; +import Beamformer from '../../components/Spreadsheet/Beamformer'; +import { CustomPageSpinner } from '../../components/CustomPageSpinner'; + import AppLoader from '../../layout/components/AppLoader'; import PageHeader from '../../layout/components/PageHeader'; import { publish } from '../../App'; +import { CustomDialog } from '../../layout/components/CustomDialog'; +import SchedulingSet from './schedulingset.create'; import ProjectService from '../../services/project.service'; import ScheduleService from '../../services/schedule.service'; import TaskService from '../../services/task.service'; -import CustomDateComp from '../../components/Spreadsheet/CustomDateComp'; +import UtilService from '../../services/util.service'; import Validator from '../../utils/validator'; import UnitConverter from '../../utils/unit.converter' import UIConstants from '../../utils/ui.constants'; -import UnitConversion from '../../utils/unit.converter'; -import StationEditor from '../../components/Spreadsheet/StationEditor'; -import SchedulingSet from './schedulingset.create'; + import moment from 'moment'; import _ from 'lodash'; +import $RefParser from "@apidevtools/json-schema-ref-parser"; +import { AgGridReact } from 'ag-grid-react'; +import { AllCommunityModules } from '@ag-grid-community/all-modules'; import 'ag-grid-community/dist/styles/ag-grid.css'; import 'ag-grid-community/dist/styles/ag-theme-alpine.css'; -import { CustomPageSpinner } from '../../components/CustomPageSpinner'; -import { CustomDialog } from '../../layout/components/CustomDialog'; -import UtilService from '../../services/util.service'; -// const DATE_TIME_FORMAT = 'YYYY-MM-DD HH:mm:ss'; const BG_COLOR = '#f878788f'; - /** * Component to create / update Scheduling Unit Drafts using Spreadsheet */ export class SchedulingSetCreate extends Component { constructor(props) { super(props); - this.gridApi = ''; - this.gridColumnApi = ''; - this.topGridApi = ''; - this.topGridColumnApi = ''; - this.rowData = []; - this.tmpRowData = []; - this.daily = []; - this.dailyOption = []; - this.isNewSet = false; - //this.dialogMsg = ''; - //this.dialogType = ''; - //this.callBackFunction = ''; - this.state = { + this.state= { + redirect: null, + errors: [], + validFields: {}, + observStrategy: {}, selectedProject: {}, copyHeader: false, // Copy Table Header to clipboard applyEmptyValue: false, @@ -68,23 +64,33 @@ export class SchedulingSetCreate extends Component { isLoading: true, isAGLoading: false, // Flag for loading spinner dialog: { header: '', detail: ''}, // Dialog properties - redirect: null, // URL to redirect - errors: [], // Form Validation errors - clipboard: [], // Maintaining grid data while Ctrl+C/V - schedulingUnit: { - project: (props.match?props.match.params.project:null) || null, - }, - schedulingSets: [], - 
schedulingUnitList: [], - selectedSchedulingSetId: null, - observStrategy: {}, + clipboard: [], totalCount: 0, validEditor: false, - validFields: {}, noOfSU: 10, - //ag-grid + defaultCellValues: {}, + showDefault: false, + confirmDialogVisible: false, + isDirty: false, + schedulingUnit: { + name: '', + description: '', + project: (props.match?props.match.params.project:null) || null, + }, columnMap: [], columnDefs: [], + columnTypes: { + numberValueColumn: { + editable: true, + valueParser: function numberParser(params) { + return Number(params.newValue); + }, + } + }, + defaultColDef: { + editable: true, flex: 1, sortable: true, minWidth: 100, resizable: true, + }, + rowSelection: 'multiple', context: { componentParent: this }, modules: AllCommunityModules, frameworkComponents: { @@ -96,20 +102,9 @@ export class SchedulingSetCreate extends Component { multiselector: MultiSelector, agDateInput: CustomDateComp, station: StationEditor, + beamformer: Beamformer, + beamformersRenderer: BeamformersRenderer, }, - columnTypes: { - numberValueColumn: { - editable: true, - valueParser: function numberParser(params) { - return Number(params.newValue); - }, - } - }, - defaultColDef: { - editable: true, flex: 1, sortable: true, minWidth: 100, resizable: true, - }, - rowSelection: 'multiple', - // ag grid to show row index components: { rowIdRenderer: function (params) { return 1 + params.rowIndex; @@ -117,7 +112,6 @@ export class SchedulingSetCreate extends Component { validCount: 0, inValidCount: 0, }, - //ag-gird - No of rows list noOfSUOptions: [ { label: '10', value: '10' }, { label: '50', value: '50' }, @@ -128,110 +122,114 @@ export class SchedulingSetCreate extends Component { customSelectedStations: [], selectedStations: [], defaultStationGroups: [], - //saveDialogVisible: false, - defaultCellValues: {}, - showDefault: false, - confirmDialogVisible: false, - isDirty: false + selectedSchedulingSetId: null, + rowData: [], }; + + this.gridApi = ''; + this.gridColumnApi = ''; + this.topGridApi = ''; + this.topGridColumnApi = ''; + this.rowData = []; + this.tmpRowData = []; + this.daily = []; + this.dailyOption = []; + this.isNewSet = false; + this.constraintSchema = []; this.showIcon = true; + this.fieldProperty = {}; + + this.applyToAllRow = false; + this.callBackFunction = ""; + this.onClose = this.close; + this.onCancel =this.close; + this.applyToEmptyRowOnly = false; + + this.dialogWidth = "40vw"; this.dialogType = "confirmation"; this.dialogHeight = 'auto'; this.dialogHeader = ""; this.dialogMsg = ""; this.dialogContent = ""; - this.applyToAllRow = false; - this.callBackFunction = ""; - this.onClose = this.close; - this.onCancel =this.close; - this.applyToEmptyRowOnly = false; // A SU Row not exists and the Name & Desc are empty + this.projects = []; // All projects to load project dropdown + this.schedulingSets = []; // All scheduling sets to be filtered for project + this.observStrategies = []; // All Observing strategy templates + this.taskTemplates = []; // All task templates to be filtered based on tasks in selected strategy template + this.constraintTemplates = []; + this.agSUWithDefaultValue = {'id': 0, 'suname': '', 'sudesc': ''}; + this.emptyAGSU = {}; - this.applyToAll = this.applyToAll.bind(this); - this.applyToSelected = this.applyToSelected.bind(this); - this.applyToEmptyRows = this.applyToEmptyRows.bind(this); - this.resetCommonData = this.resetCommonData.bind(this); - this.reload = this.reload.bind(this); - this.applyChanges = this.applyChanges.bind(this); - this.onTopGridReady 
= this.onTopGridReady.bind(this); + this.onProjectChange = this.onProjectChange.bind(this); + this.setSchedulingSetParams = this.setSchedulingSetParams.bind(this); + this.onStrategyChange = this.onStrategyChange.bind(this); + this.setNoOfSUint = this.setNoOfSUint.bind(this); + this.showAddSchedulingSet = this.showAddSchedulingSet.bind(this); + this.isNotEmpty = this.isNotEmpty.bind(this); this.onGridReady = this.onGridReady.bind(this); - this.validateForm = this.validateForm.bind(this); - this.validateEditor = this.validateEditor.bind(this); + this.onTopGridReady = this.onTopGridReady.bind(this); this.saveSchedulingUnit = this.saveSchedulingUnit.bind(this); - this.cancelCreate = this.cancelCreate.bind(this); - this.checkIsDirty = this.checkIsDirty.bind(this); - this.clipboardEvent = this.clipboardEvent.bind(this); - this.topAGGridEvent = this.topAGGridEvent.bind(this); - this.reset = this.reset.bind(this); - this.close = this.close.bind(this); - this.saveSU = this.saveSU.bind(this); this.validateGridAndSave = this.validateGridAndSave.bind(this); this.showDialogContent = this.showDialogContent.bind(this); - this.isNotEmpty = this.isNotEmpty.bind(this); - this.setDefaultCellValue = this.setDefaultCellValue.bind(this); - this.copyHeader = this.copyHeader.bind(this); - this.copyOnlyHeader = this.copyOnlyHeader.bind(this); + this.saveSU = this.saveSU.bind(this); + this.reset = this.reset.bind(this); + this.refreshSchedulingSet = this.refreshSchedulingSet.bind(this); + this.close = this.close.bind(this); + this.cancelCreate = this.cancelCreate.bind(this); + this.checkIsDirty = this.checkIsDirty.bind(this); this.cellValueChageEvent = this.cellValueChageEvent.bind(this); - this.onProjectChange = this.onProjectChange.bind(this); this.showWarning = this.showWarning.bind(this); - this.onSchedulingSetChange = this.onSchedulingSetChange.bind(this); - this.onStrategyChange = this.onStrategyChange.bind(this); - this.refreshSchedulingSet = this.refreshSchedulingSet.bind(this); - this.showAddSchedulingSet = this.showAddSchedulingSet.bind(this); + this.copyHeader = this.copyHeader.bind(this); + this.copyOnlyHeader = this.copyOnlyHeader.bind(this); + this.clipboardEvent = this.clipboardEvent.bind(this); + this.applyToAll = this.applyToAll.bind(this); + this.applyToSelected = this.applyToSelected.bind(this); + this.applyToEmptyRows = this.applyToEmptyRows.bind(this); + this.resetCommonData = this.resetCommonData.bind(this); + this.reload = this.reload.bind(this); + this.applyChanges = this.applyChanges.bind(this); + this.getSchedulingDialogContent = this.getSchedulingDialogContent.bind(this); + //this.setCurrentSUSet = this.setCurrentSUSet.bind(this); - this.projects = []; // All projects to load project dropdown - this.schedulingSets = []; // All scheduling sets to be filtered for project - this.observStrategies = []; // All Observing strategy templates - this.taskTemplates = []; // All task templates to be filtered based on tasks in selected strategy template - this.tooltipOptions = UIConstants.tooltipOptions; - this.nameInput = React.createRef(); // Ref to Name field for auto focus this.formRules = { // Form validation rules project: {required: true, message: "Select project to get Scheduling Sets"}, scheduling_set_id: {required: true, message: "Select the Scheduling Set"}, }; } + + async onTopGridReady (params) { + await this.setState({ + topGridApi:params.api, + topGridColumnApi:params.columnApi, + }) + this.state.topGridApi.hideOverlay(); + } - componentDidMount() { - const promises = [ 
ProjectService.getProjectList(), - ScheduleService.getSchedulingSets(), - ScheduleService.getObservationStrategies(), - TaskService.getTaskTemplates()]; - Promise.all(promises).then(responses => { - this.projects = responses[0]; - this.schedulingSets = responses[1]; - this.observStrategies = responses[2]; - this.taskTemplates = responses[3]; - if (this.state.schedulingUnit.project) { - const projectSchedluingSets = _.filter(this.schedulingSets, {'project_id': this.state.schedulingUnit.project}); - this.setState({isLoading: false, schedulingSets: projectSchedluingSets, allSchedulingSets: this.schedulingSets}); - } else { - this.setState({isLoading: false}); - } - }); + async onGridReady (params) { + await this.setState({ + gridApi:params.api, + gridColumnApi:params.columnApi, + }) + this.state.gridApi.hideOverlay(); } - + /** - * Show warning messgae if any changes not saved when the AG grid reload or cancel the page - * @param {*} functionName + * Check is empty string + * @param {*} value */ - showWarning (functionName) { - this.showIcon = true; - this.dialogType = "confirmation"; - this.dialogHeader = "Add Multiple Scheduling Unit(s)"; - this.dialogMsg = "Do you want to leave the changes? Your changes may not be saved."; - this.dialogContent = ""; - this.callBackFunction = functionName; - this.onClose = this.close; - this.onCancel = this.close; - this.setState({ - confirmDialogVisible: true, - }); + isNotEmpty(value){ + if ( value === null || value === undefined || value.length === 0 ){ + return false; + } else { + return true; + } } + /** * Trigger when the project drop down get changed and check isDirty * @param {*} projectName */ - onProjectChange(projectName) { + onProjectChange(projectName) { if (this.state.isDirty) { this.showWarning(() =>{ this. changeProject(projectName); @@ -240,112 +238,233 @@ export class SchedulingSetCreate extends Component { this.changeProject(projectName); } } - + /** * Function to call on change of project and reload scheduling set dropdown * @param {string} projectName */ - changeProject(projectName) { + changeProject(projectName) { const projectSchedluingSets = _.filter(this.schedulingSets, {'project_id': projectName}); let schedulingUnit = this.state.schedulingUnit; schedulingUnit.project = projectName; - /* this.setState({confirmDialogVisible: false, isDirty: false, schedulingUnit: schedulingUnit, - schedulingSets: projectSchedluingSets, validForm: this.validateForm('project'), rowData: [], - observStrategy: {}, copyHeader: false, isDirty: false}); */ - const selectedProject = _.filter(this.projects, {'name': projectName}); this.setState({confirmDialogVisible: false, isDirty: false, selectedProject: selectedProject, schedulingUnit: schedulingUnit, schedulingSets: projectSchedluingSets, validForm: this.validateForm('project'), rowData: [],observStrategy: {}, copyHeader: false}); publish('edit-dirty', false); } - - /** - * Trigger when the Scheduling Set drop down get changed and check isDirty - * @param {*} key - * @param {*} value - */ - onSchedulingSetChange(key, value) { - if (this.state.isDirty) { - this.showWarning(() =>{ - this.setSchedulingSetParams(key, value); - }); - } else { - this. 
setSchedulingSetParams(key, value);
-        }
-    }
 
     /**
      * Function to set form values to the SU object
      * @param {string} key
      * @param {object} value
      */
+    async setSchedulingSetParams(key, value) {
         this.setState({isAGLoading: true, copyHeader: false, confirmDialogVisible: false, isDirty: false});
         publish('edit-dirty', false);
         let schedulingUnit = this.state.schedulingUnit;
         schedulingUnit[key] = value;
+        this.setState({schedulingUnit, selectedSchedulingSetId: value, copyHeader: false, confirmDialogVisible: false, isDirty: false, rowData: []});
+        if(this.state.observStrategy && this.state.observStrategy.id) {
+            this.onStrategyChange(this.state.observStrategy.id);
+        }
+    }
+
+    /**
+     * Set the number of Scheduling Units (max. 500) to load/show in the excel view table
+     * @param {*} value
+     */
+    async setNoOfSUint(value){
+        this.setState({isDirty: true, isAGLoading: true});
+        publish('edit-dirty', true);
+        if (value >= 0 && value < 501){
+            await this.setState({noOfSU: value});
+        } else {
+            await this.setState({noOfSU: 500});
+        }
 
-        let schedulingUnitList = await ScheduleService.getSchedulingBySet(value);
-        if (schedulingUnitList) {
-            const schedulingSetIds = _.uniq(_.map(schedulingUnitList, 'observation_strategy_template_id'));
-            if (schedulingSetIds.length === 1) {
-                const observStrategy = _.find(this.observStrategies, {'id': schedulingUnitList[0].observation_strategy_template_id});
-                this.setDefaultStationGroup(observStrategy);
+        let noOfSU = this.state.noOfSU;
+        this.tmpRowData = [];
+        if (this.state.rowData && this.state.rowData.length > 0 && this.state.emptyRow) {
+            if (this.state.totalCount <= noOfSU) {
+                for (var count = 0; count < noOfSU; count++) {
+                    if(this.state.rowData.length > count ) {
+                        this.tmpRowData.push(_.cloneDeep(this.state.rowData[count]));
+                    } else {
+                        this.tmpRowData.push(_.cloneDeep(this.state.agSUWithDefaultValue));
+                    }
+                }
                 this.setState({
-                    schedulingUnit: schedulingUnit, validForm: this.validateForm(key), validEditor: this.validateEditor(),
-                    schedulingUnitList: schedulingUnitList, schedulingSetId: value, selectedSchedulingSetId: value, observStrategy: observStrategy,
+                    rowData: this.tmpRowData,
+                    noOfSU: noOfSU,
+                    isAGLoading: false
                 });
-                this.isNewSet = false;
-                await this.prepareScheduleUnitListForGrid();
-            } else {
-                /* Let user to select Observation Strategy */
+            } else {
                 this.setState({
-                    rowData:[], schedulingUnit: schedulingUnit, validForm: this.validateForm(key), validEditor: this.validateEditor(),
-                    schedulingUnitList:schedulingUnitList, selectedSchedulingSetId: value, observStrategy: {}
-                });
+                    isAGLoading: false
+                })
             }
-        } else {
-            this.setState({schedulingUnit: schedulingUnit, validForm: this.validateForm(key), validEditor: this.validateEditor(),
-                selectedSchedulingSetId: value});
+        } else {
+            this.setState({
+                isAGLoading: false
+            });
         }
-        this.setState({isAGLoading: false});
     }
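[Editor's note] setNoOfSUint (and several other methods in this file) use `await this.setState(...)`. React's class-component setState does not return a promise, so the await only defers to the next microtask rather than reliably waiting for the state update. Where sequential updates really matter, the dependable pattern is a small callback-based wrapper (setStateAsync is illustrative, not part of this change):

    // resolves once React has applied the state update and re-rendered
    setStateAsync(stateChange) {
        return new Promise(resolve => this.setState(stateChange, resolve));
    }

    // usage inside an async method:
    // await this.setStateAsync({ noOfSU: value });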
 
     /**
-     * Set default value for Station group when filter change
+     * Dialog to add Scheduling Set
      */
-    async setDefaultStationGroup(observStrategy) {
-        let station_group = [];
-        const tasks = observStrategy.template.tasks;
-        for (const taskName of _.keys(tasks)) {
-            const task = tasks[taskName];
-            //Resolve task from the strategy template
-            await $RefParser.resolve(task);
-            // Identify the task specification template of every task in the strategy template
-            const taskTemplate = _.find(this.taskTemplates, {'name': task['specifications_template']});
-            if (taskTemplate.type_value === 'observation' && task.specifications_doc.station_groups) {
-                station_group = task.specifications_doc.station_groups;
-            }
-        }
-        await this.setState({
-            defaultStationGroups: station_group,
-        })
+    showAddSchedulingSet() {
+        this.dialogType = "success";
+        this.dialogHeader = "Add Scheduling Set";
+        this.dialogMsg = <SchedulingSet project={this.state.selectedProject[0]} onCancel={this.refreshSchedulingSet} />;
+        this.dialogContent = "";
+        this.showIcon = false;
+        this.callBackFunction = this.refreshSchedulingSet;
+        this.onClose = this.refreshSchedulingSet;
+        this.onCancel = this.refreshSchedulingSet;
+        this.setState({confirmDialogVisible: true});
     }
 
     /**
-     * Trigger when the Strategy drop down get changed and check isDirty
-     * @param {*} strategyId
+     * Update isDirty when a cell value is updated in the AG grid
+     * @param {*} params
      */
-    onStrategyChange(strategyId) {
-        if (this.state.isDirty) {
-            this.showWarning(() =>{
-                this.changeStrategy(strategyId);
-            });
+    cellValueChageEvent(params) {
+        if( params.value && !_.isEqual(params.value, params.oldValue)) {
+            this.setState({isDirty: true});
+            publish('edit-dirty', true);
+        }
+    }
+
+    /**
+     * If any changes are detected, warn before leaving the page
+     */
+    checkIsDirty() {
+        if( this.state.isDirty ){
+            this.showIcon = true;
+            this.dialogType = "confirmation";
+            this.dialogHeader = "Add Multiple Scheduling Unit(s)";
+            this.dialogMsg = "Do you want to leave this page? Your changes may not be saved.";
+            this.dialogContent = "";
+            this.dialogHeight = '5em';
+            this.callBackFunction = this.cancelCreate;
+            this.onClose = this.close;
+            this.onCancel = this.close;
+            this.setState({confirmDialogVisible: true});
+        } else {
+            this.cancelCreate();
+        }
+    }
+
+    /**
+     * Set the new Set created in drop down
+     */
+    /*async setCurrentSUSet(id) {
+        this.refreshSchedulingSet();
+        if(id) {
+            let currentSU = this.state.schedulingUnit;
+            currentSU.scheduling_set_id = id;
+            this.setState({schedulingUnit: currentSU});
+        }
+
+    }*/
+
+    /** After adding new Scheduling Set, refresh the Scheduling Set list */
+    async refreshSchedulingSet(){
+        this.schedulingSets = await ScheduleService.getSchedulingSets();
+        const filteredSchedulingSets = _.filter(this.schedulingSets, {'project_id': this.state.schedulingUnit.project});
+        this.setState({saveDialogVisible: false, confirmDialogVisible: false, schedulingSets: filteredSchedulingSets});
+    }
+
+    close(){
+        this.setState({confirmDialogVisible: false});
+    }
+
+    validateForm(fieldName) {
+        let validForm = false;
+        let errors = this.state.errors;
+        let validFields = this.state.validFields;
+        if (fieldName) {
+            delete errors[fieldName];
+            delete validFields[fieldName];
+            if (this.formRules[fieldName]) {
+                const rule = this.formRules[fieldName];
+                const fieldValue = this.state.schedulingUnit[fieldName];
+                if (rule.required) {
+                    if (!fieldValue) {
+                        errors[fieldName] = rule.message?rule.message:`${fieldName} is required`;
+                    } else {
+                        validFields[fieldName] = true;
+                    }
+                }
+            }
+        } else {
+            errors = {};
+            validFields = {};
+            for (const fieldName in this.formRules) {
+                const rule = this.formRules[fieldName];
+                const fieldValue = this.state.schedulingUnit[fieldName];
+                if (rule.required) {
+                    if (!fieldValue) {
+                        errors[fieldName] = rule.message?rule.message:`${fieldName} is required`;
+                    } else {
+                        validFields[fieldName] = true;
+                    }
+                }
+            }
+        }
+        this.setState({errors: errors, validFields: validFields});
+        if (Object.keys(validFields).length === Object.keys(this.formRules).length) {
+            validForm = true;
+        }
+        return validForm;
+    }
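[Editor's note] validateForm above is driven entirely by the formRules table declared in the constructor, so adding a validated field is a one-line change to that table. A self-contained sketch of the same pattern (the rule set and values are illustrative):

    const formRules = {
        project: { required: true, message: "Select project to get Scheduling Sets" },
        scheduling_set_id: { required: true, message: "Select the Scheduling Set" }
    };
    const values = { project: 'TMSS-Commissioning', scheduling_set_id: null };

    const errors = {};
    for (const field in formRules) {
        const rule = formRules[field];
        if (rule.required && !values[field]) {
            errors[field] = rule.message || `${field} is required`;
        }
    }
    // errors -> { scheduling_set_id: "Select the Scheduling Set" }
    const validForm = Object.keys(errors).length === 0;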
+
+    /**
+     * This function is mainly added for Unit Tests. If this function is removed, Unit Tests will fail.
+     */
+    validateEditor() {
+        return this.validEditor?true:false;
+    }
+
+    async componentDidMount() {
+        const promises = [
+            ProjectService.getProjectList(),
+            ScheduleService.getSchedulingSets(),
+            ScheduleService.getObservationStrategies(),
+            TaskService.getTaskTemplates(),
+            ScheduleService.getSchedulingConstraintTemplates(),
+        ];
+        await Promise.all(promises).then(responses => {
+            this.projects = responses[0];
+            this.schedulingSets = responses[1];
+            this.observStrategies = responses[2];
+            this.taskTemplates = responses[3];
+            this.constraintTemplates = responses[4];
+            if (this.state.schedulingUnit.project) {
+                const projectSchedulingSets = _.filter(this.schedulingSets, {'project_id': this.state.schedulingUnit.project});
+                this.setState({isLoading: false, schedulingSets: projectSchedulingSets, allSchedulingSets: this.schedulingSets});
+            } else {
+                this.setState({isLoading: false});
+            }
+        });
+    }
+
+    /**
+     * Triggered when the Strategy dropdown is changed; checks isDirty
+     * @param {*} strategyId
+     */
+    onStrategyChange(strategyId) {
+        if (this.state.isDirty) {
+            this.showWarning(() =>{
+                this.changeStrategy(strategyId);
+            });
+        } else {
+            this.changeStrategy(strategyId);
+        }
+    }
+
     /**
      * Function called when observation strategy template is changed.
      *
@@ -355,50 +474,88 @@ export class SchedulingSetCreate extends Component {
         await this.setState({noOfSU: 10, isAGLoading: true, copyHeader: false, rowData: [], confirmDialogVisible: false, isDirty: false});
         publish('edit-dirty', false);
         const observStrategy = _.find(this.observStrategies, {'id': strategyId});
-        let schedulingUnitList= await ScheduleService.getSchedulingBySet(this.state.selectedSchedulingSetId);
-        schedulingUnitList = _.filter(schedulingUnitList,{'observation_strategy_template_id': strategyId}) ;
-        this.setDefaultStationGroup(observStrategy);
-        if(schedulingUnitList.length === 0) {
-            schedulingUnitList = await this.getEmptySchedulingUnit(strategyId);
-            this.isNewSet = true;
-        }
-        else {
-            this.isNewSet = false;
-        }
-        await this.setState({
-            schedulingUnitList: schedulingUnitList,
-            observStrategy: observStrategy,
-        });
-
-        if (schedulingUnitList && schedulingUnitList.length >0){
-            await this.prepareScheduleUnitListForGrid();
-        } else {
-            this.setState({
-                rowData: []
-            });
+        this.setState({observStrategy: observStrategy, noOfSU: 10, isAGLoading: true, copyHeader: false, rowData: [], agSUWithDefaultValue: {}, confirmDialogVisible: false, isDirty: false});
+        await this.getTaskSchema(observStrategy);
+
+        if(this.state.schedulingUnit.project && this.state.schedulingUnit.scheduling_set_id) {
+            this.prepareScheduleUnitListForGrid();
         }
-        this.setState({isAGLoading: false,commonRowData: []});
     }
-
-    // TODO: This function should be modified or removed
-    async getEmptySchedulingUnit(strategyId){
-        // let suList = await ScheduleService.getSchedulingUnitDraft();
-        // return [_.find(suList.data.results, {'observation_strategy_template_id': strategyId})];
-        let emptySU = {name: "", description: ""};
-        let constraintTemplates = await ScheduleService.getSchedulingConstraintTemplates();
-        let constraintTemplate = constraintTemplates.length>0?constraintTemplates[0]:null;
-        emptySU['scheduling_constraints_template_id'] = constraintTemplate?constraintTemplate.id:null;
-        emptySU['scheduling_constraints_doc'] = {};
-        let strategy = _.find(this.observStrategies, ['id', strategyId]);
-        emptySU['requirements_doc'] = strategy?strategy.template:{};
-        emptySU['observation_strategy_template_id'] = strategyId;
-        return [emptySU];
-    }
+
+    async getTaskSchema(observStrategy) {
+        let station_group = [];
+        let tasksToUpdate = {};
+        if(observStrategy) {
+            const tasks = observStrategy.template.tasks;
+            let paramsOutput = {};
+            let schema = { type: 'object', additionalProperties: false,
+                            properties: {}, definitions:{}
+                        };
+            for (const taskName of _.keys(tasks)) {
+                const task = tasks[taskName];
+                //Resolve task from the strategy template
+                const $taskRefs = await $RefParser.resolve(task);
+
+                // Identify the task specification template of every task in the strategy template
+                const taskTemplate = _.find(this.taskTemplates, {'name': task['specifications_template']});
+                schema['$id'] = taskTemplate.schema['$id'];
+                schema['$schema'] = taskTemplate.schema['$schema'];
+
+                if (taskTemplate.type_value==='observation' && task.specifications_doc.station_groups) {
+                    station_group = task.specifications_doc.station_groups;
+                    tasksToUpdate[taskName] = taskName;
+                }
+                let index = 0;
+                for (const param of observStrategy.template.parameters) {
+                    if (param.refs[0].indexOf(`/tasks/${taskName}`) > 0) {
+                        tasksToUpdate[taskName] = taskName;
+                        // Resolve the identified template
+                        const $templateRefs = await $RefParser.resolve(taskTemplate);
+                        let property = { };
+                        let tempProperty = null;
+                        const taskPaths = param.refs[0].split("/");
+                        // Get the property type from the template and create new property in the schema for the parameters
+                        try {
+                            const parameterRef = param.refs[0];//.replace(`#/tasks/${taskName}/specifications_doc`, '#/schema/properties');
+                            tempProperty = $templateRefs.get(parameterRef);
+                            // property = _.cloneDeep(taskTemplate.schema.properties[taskPaths[4]]);
+
+                        }   catch(error) {
+                            tempProperty = _.cloneDeep(taskTemplate.schema.properties[taskPaths[4]]);
+                            if (tempProperty['$ref']) {
+                                tempProperty = await UtilService.resolveSchema(tempProperty);
+                                if (tempProperty.definitions && tempProperty.definitions[taskPaths[4]]) {
+                                    schema.definitions = {...schema.definitions, ...tempProperty.definitions};
+                                    tempProperty = tempProperty.definitions[taskPaths[4]];
+                                }   else if (tempProperty.properties && tempProperty.properties[taskPaths[4]]) {
+                                    tempProperty = tempProperty.properties[taskPaths[4]];
+                                }
+                            }
+                            if (tempProperty.type === 'array' && taskPaths.length>6) {
+                                tempProperty = tempProperty.items.properties[taskPaths[6]];
+                            }
+                            property = tempProperty;
+                        }
+                        property.title = param.name;
+                        property.default = $taskRefs.get(param.refs[0].replace(`#/tasks/${taskName}`, '#'));
+                        paramsOutput[`param_${index}`] = property.default;
+                        schema.properties[`param_${index}`] = property;
+                        // Set property definitions taken from the task template in new schema
+                        for (const definitionName in taskTemplate.schema.definitions) {
+                            schema.definitions[definitionName] = taskTemplate.schema.definitions[definitionName];
+                        }
+                    }
+                    index++;
+                }
+            }
+            await this.setState({observStrategy: observStrategy, paramsSchema: schema, paramsOutput: paramsOutput, defaultStationGroups: station_group, tasksToUpdate: tasksToUpdate});
+        }
+    }
 
     /**
      * Resolve JSON Schema
      */
-    async resolveSchema(schema){
+    async resolveSchema(schema){
         let properties = schema.properties;
         schema.definitions = schema.definitions?schema.definitions:{};
         if (properties) {
@@ -440,630 +597,183 @@ export class SchedulingSetCreate extends Component {
         }
         return schema;
     }
-
-    /**
-     * return constraint
-     * @param {*} scheduleUnit
-     */
-    async getConstraintSchema(scheduleUnit){
-        let constraintSchema = await ScheduleService.getSchedulingConstraintTemplate(scheduleUnit.scheduling_constraints_template_id);
-        return constraintSchema;
-    }
-
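[Editor's note] getTaskSchema and resolveSchema both lean on @apidevtools/json-schema-ref-parser: $RefParser.resolve() returns a $Refs map whose get() accepts a JSON pointer, which is how param.refs[0] is dereferenced above. A minimal sketch of that API (the schema content is illustrative):

    import $RefParser from "@apidevtools/json-schema-ref-parser";

    const schema = {
        definitions: { pointing: { type: "object", properties: { angle1: { type: "number" } } } },
        properties: { beam: { $ref: "#/definitions/pointing" } }
    };

    async function demo() {
        const $refs = await $RefParser.resolve(schema);
        // fetch a subschema by JSON pointer, as done for param.refs[0] above
        const pointing = $refs.get("#/definitions/pointing");
        console.log(pointing.properties.angle1.type); // "number"
    }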
     /**
-     * Create AG Grid column properties
+     * Function to prepare row data for ag-grid.
      */
-    createAGGridAngelColumnsProperty(schema) {
-        let cellProps = [];
-        cellProps['angle1'] = {isgroup: true, type:'numberValueColumn', cellRenderer: 'timeInputMask',cellEditor: 'timeInputMask', valueSetter: 'valueSetter', cellStyle: function(params) {
-            if (params.value && !Validator.validateTime(params.value)) {
-                return { backgroundColor: BG_COLOR};
-            }   else {
-                return { backgroundColor: ''};
-            }
-        },};
-        cellProps['angle2'] = {isgroup: true, type:'numberValueColumn', cellRenderer: 'degreeInputMask',cellEditor: 'degreeInputMask', valueSetter: 'valueSetter' , cellStyle: function(params) {
-            if (params.value && !Validator.validateAngle(params.value)) {
-                return { backgroundColor: BG_COLOR};
-            }   else {
-                return { backgroundColor: ''};
-            }
-        }, };
-        cellProps['angle3'] = {isgroup: true, cellEditor: 'numericEditor',cellStyle: function(params) {
-            // console.log(params);
-            // if (params.value){
-            //     console.log("params value - ", params.value);
-            //     console.log(Number(params.value));
-            //     if (!params.colDef.field.startsWith('gdef') && isNaN(params.value)) {
-            //         return { backgroundColor: BG_COLOR};
-            //     }
-            //     else{
-            //         return { backgroundColor: ''};
-            //     }
-            // }   else {
-            //     console.log("No Params value");
-            //     return (!params.colDef.field.startsWith('gdef')) ?{ backgroundColor: BG_COLOR} : { backgroundColor: ''}
-            // }
-            if (isNaN(params.value)) {
-                return { backgroundColor: BG_COLOR};
-            }   else {
-                return { backgroundColor: ''};
-            }
-        }};
-        cellProps['direction_type'] = {isgroup: true, cellEditor: 'agSelectCellEditor',default: schema.definitions.pointing.properties.direction_type.default,
-            cellEditorParams: {
-                values: schema.definitions.pointing.properties.direction_type.enum,
-            },
-        };
-        cellProps['duration'] = { type:'numberValueColumn', cellEditor:'numericEditor', cellStyle: function(params) {
-            if (params.value){
-                if ( !Number(params.value)){
-                    return { backgroundColor: BG_COLOR};
+    async prepareScheduleUnitListForGrid(){
+        this.agSUWithDefaultValue = {'id': 0, 'suname': '', 'sudesc': ''};
+        let schedulingUnitList = await ScheduleService.getSchedulingBySet(this.state.selectedSchedulingSetId);
+        schedulingUnitList = _.filter(schedulingUnitList,{'observation_strategy_template_id': this.state.observStrategy.id}) ;
+        /** Get Column details */
+        await this.createGridCellDetails();
+        let observationPropsList = [];
+        this.tmpRowData = [];
+        let totalSU = this.state.noOfSU;
+        let lastRow = {};
+        let hasSameValue = true;
+        if(schedulingUnitList && schedulingUnitList.length > 0) {
+            for(const scheduleunit of schedulingUnitList){
+                let observationProps = {
+                    id: scheduleunit.id,
+                    suname: scheduleunit.name,
+                    sudesc: scheduleunit.description,
+                    // default TRUE; it is reset while validating the row, and invalid rows are skipped when saving the row data
+                    isValid: true,
+                };
+
+                if (scheduleunit.observation_strategy_template_id) {
+                    let parameters = await this.getObservationValueFromTask(scheduleunit);
+                    let parametersName = Object.keys(parameters);
+                    for(const parameter of parametersName){
+                        let valueItem = parameters[parameter];
+                        let excelColumns = this.state.columnMap[parameter];
+                        if (excelColumns) {
+                            let excelColumnsKeys = Object.keys(excelColumns);
+                            for(const eColKey of excelColumnsKeys){
+                                if (eColKey === 'angle1') {
+                                    observationProps[excelColumns[eColKey]] = UnitConverter.getAngleInput(valueItem[eColKey], false);
+                                }
+                                else if (eColKey === 'angle2') {
+                                    observationProps[excelColumns[eColKey]] = UnitConverter.getAngleInput(valueItem[eColKey], true);
+                                }
+                                else {
+                                    let keys = Object.keys(valueItem);
+                                    if(_.includes(keys, eColKey)) {
+                                        observationProps[excelColumns[eColKey]] = valueItem[eColKey];
+                                    } else {
+                                        observationProps[excelColumns[eColKey]] = valueItem;
+                                    }
+                                }
+                            }
+                        }
+                    }
+                } else {
+                    let parameters = scheduleunit['requirements_doc'].parameters;
+                    for(const parameter of parameters){
+                        let refUrl = parameter['refs'];
+                        let valueItem = (await $RefParser.resolve( scheduleunit['requirements_doc'])).get(refUrl[0]);
+                        let excelColumns = this.state.columnMap[parameter.name];
+                        if (excelColumns) {
+                            let excelColumnsKeys = Object.keys(excelColumns);
+                            for(const eColKey of excelColumnsKeys){
+                                if (eColKey === 'angle1') {
+                                    observationProps[excelColumns[eColKey]] = UnitConverter.getAngleInput(valueItem[eColKey], false);
+                                }
+                                else if (eColKey === 'angle2') {
+                                    observationProps[excelColumns[eColKey]] = UnitConverter.getAngleInput(valueItem[eColKey], true);
+                                }
+                                else {
+                                    observationProps[excelColumns[eColKey]] = valueItem[eColKey];
+                                }
+                            }
+                        }
+                    }
+                }
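[Editor's note] The angle1/angle2 branches above convert stored radian values into the grid's input formats via UnitConverter.getAngleInput(value, isDegree); judging from its use here, the second argument appears to select degree-style formatting (true, declination) versus time-style formatting (false, right ascension). Assumed behaviour, inferred from the calls above and illustrative only:

    UnitConverter.getAngleInput(0.5236, false); // -> time-formatted string for angle1 (e.g. "02:00:00")
    UnitConverter.getAngleInput(0.5236, true);  // -> degree-formatted string for angle2 (e.g. "30:00:00")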
-            else if ( Number(params.value) < 1) {
-                return { backgroundColor: BG_COLOR};
-            }   else{
-                return { backgroundColor: ''};
+                // Get Station details
+                observationProps['stations'] = await this.getStationGrops(scheduleunit);
+                let constraint = scheduleunit.id?scheduleunit.scheduling_constraints_doc:null;
+                if (constraint){
+                    if (constraint.scheduler){
+                        observationProps['scheduler'] = constraint.scheduler;
+                    }
+                    observationProps['timeat'] = this.isNotEmpty(constraint.time.at)?moment.utc(constraint.time.at).format(UIConstants.CALENDAR_DATETIME_FORMAT): '';
+                    observationProps['timeafter'] = this.isNotEmpty(constraint.time.after)?moment.utc(constraint.time.after).format(UIConstants.CALENDAR_DATETIME_FORMAT):'';
+                    observationProps['timebefore'] = this.isNotEmpty(constraint.time.before)?moment.utc(constraint.time.before).format(UIConstants.CALENDAR_DATETIME_FORMAT):'';
+                    if (constraint.time.between){
+                        observationProps['between'] = this.getBetweenStringValue(constraint.time.between);
+                    }
+                    if (constraint.time.not_between){
+                        observationProps['notbetween'] = this.getBetweenStringValue(constraint.time.not_between);
+                    }
+
+                    observationProps['daily'] = this.fetchDailyFieldValue(constraint.daily);
+                    UnitConverter.radiansToDegree(constraint.sky);
+                    observationProps['min_target_elevation'] = constraint.sky.min_target_elevation;
+                    observationProps['min_calibrator_elevation'] = constraint.sky.min_calibrator_elevation;
+                    if ( constraint.sky.transit_offset ){
+                        observationProps['offset_from'] = constraint.sky.transit_offset.from;
+                        observationProps['offset_to'] = constraint.sky.transit_offset.to;
+                    }
+
+                    if (constraint.sky.min_distance){
+                        observationProps['md_sun'] = constraint.sky.min_distance.sun;
+                        observationProps['md_moon'] = constraint.sky.min_distance.moon;
+                        observationProps['md_jupiter'] = constraint.sky.min_distance.jupiter;
+                    }
+                }
+                observationPropsList.push(observationProps);
+                //Set values for the global row if all rows have the same value
+                if (_.isEmpty(lastRow)) {
+                    lastRow = observationProps;
+                } else if (!_.isEqual(
+                    _.omit(lastRow, ['id']),
+                    _.omit(observationProps, ['id'])
+                )) {
+                    hasSameValue = false;
+                }
             }
-        }, };
-
-        return cellProps;
-    }
-
-    /**
-     * Function to generate AG-Grid column definition.
- * @param {number} strategyId - */ - async createGridColumns(scheduleUnit){ - let defaultCellValues = {}; - let schema = await this.getTaskSchema(scheduleUnit, false); - schema = await this.resolveSchema(schema); - let constraintSchema = await this.getConstraintSchema(scheduleUnit); - constraintSchema = await this.resolveSchema(constraintSchema); - // AG Grid Cell Specific Properties - let dailyProps = Object.keys( constraintSchema.schema.properties.daily.properties); - this.daily = []; - this.dailyOption = []; - dailyProps.forEach(prop => { - this.dailyOption.push({'name':prop, 'value':prop}); - this.daily.push(prop); - }) - this.setState({ - dailyOption: this.dailyOption, - schedulingConstraintsDoc: scheduleUnit.scheduling_constraints_doc, - constraintUrl: scheduleUnit.scheduling_constraints_template, - constraintId: scheduleUnit.scheduling_constraints_template_id, - daily: this.daily, - }); - - let cellProps = this.createAGGridAngelColumnsProperty(schema); - //Ag-grid Colums definition - // Column order to use clipboard copy - let colKeyOrder = []; - colKeyOrder.push("suname"); - colKeyOrder.push("sudesc"); - let columnMap = []; - let colProperty = {}; - let columnDefs = [ - { // Row Index - headerName: '#', - editable: false, - maxWidth: 60, - cellRenderer: 'rowIdRenderer', - pinned: 'left', - lockPosition: true, - suppressSizeToFit: true, - }, - { - headerName: 'Scheduling Unit', - children: [ - {headerName: 'Name',field: 'suname'}, - {headerName: 'Description',field: 'sudesc', cellStyle: function(params) { - if (params.data.suname && (params.data.suname !== '' && (!params.value || params.value === ''))) { - return { backgroundColor: BG_COLOR}; - } else { return { backgroundColor: ''};} - }, - } - ], - }, - - { headerName: 'Scheduler',field: 'scheduler',cellEditor: 'agSelectCellEditor',default: constraintSchema.schema.properties.scheduler.default, - cellEditorParams: { - values: constraintSchema.schema.properties.scheduler.enum, - }, - }, - { headerName: 'Time', - children: [ - { headerName: 'At', field:'timeat', editable: true, cellRenderer: 'betweenRenderer',cellEditor: 'agDateInput', valueSetter: 'newValueSetter'}, - { headerName: 'After', field:'timeafter', editable: true, cellRenderer: 'betweenRenderer',cellEditor: 'agDateInput', valueSetter: 'newValueSetter'}, - { headerName: 'Before', field:'timebefore', editable: true, cellRenderer: 'betweenRenderer',cellEditor: 'agDateInput', valueSetter: 'newValueSetter'}, - ], - }, - - {headerName: 'Between',field: 'between',cellRenderer: 'betweenRenderer',cellEditor: 'betweenEditor',valueSetter: 'newValueSetter'}, - {headerName: 'Not Between',field: 'notbetween',cellRenderer: 'betweenRenderer',cellEditor: 'betweenEditor',valueSetter: 'newValueSetter'}, - {headerName: 'Daily',field: 'daily',cellEditor: 'multiselector', valueSetter: function(params) {}}, - { - headerName: 'Sky', - children: [ - {headerName: 'Min Target Elevation',field: 'min_target_elevation', cellEditor: 'numericEditor', cellStyle: function(params) { - if (params.value){ - if (params.value === undefined || params.value === null || isNaN(params.value)){ - return { backgroundColor: BG_COLOR}; - } - else if ( Number(params.value) < 0|| Number(params.value) > 90) { - return { backgroundColor: BG_COLOR}; - } else{ - return { backgroundColor: ''}; - } - } - }, }, - {headerName: 'Min Calibrator Elevation',field: 'min_calibrator_elevation', cellEditor: 'numericEditor', cellStyle: function(params) { - if (params.value){ - if (params.value === undefined || params.value === null || 
isNaN(params.value)){ - return { backgroundColor: BG_COLOR}; - } - else if ( Number(params.value) < 0|| Number(params.value) > 90) { - return { backgroundColor: BG_COLOR}; - } else{ - return { backgroundColor: ''}; - } - } - }, }, - {headerName: 'Offset Window From',field: 'offset_from', cellEditor: 'numericEditor',cellStyle: function(params) { - - if (params.value){ - if (params.value === 'undefined' || params.value === ''){ - return { backgroundColor: ''}; - } - if(params.value === "0"){ - return { backgroundColor: ''}; - } - if (!Number(params.value)){ - return { backgroundColor: BG_COLOR}; - } - else if ( Number(params.value) < -0.20943951 || Number(params.value) > 0.20943951) { - return { backgroundColor: BG_COLOR}; - } else{ - return { backgroundColor: ''}; - } - } else { - return { backgroundColor: ''}; - } - }, }, - {headerName: 'Offset Window To',field: 'offset_to', cellEditor: 'numericEditor', cellStyle: function(params) { - if (params.value){ - if (params.value === 'undefined' || params.value === ''){ - return { backgroundColor: ''}; - } - if(params.value === "0"){ - return { backgroundColor: ''}; - } - if ( !Number(params.value)){ - return { backgroundColor: BG_COLOR}; - } - else if ( Number(params.value) < -0.20943951 || Number(params.value) > 0.20943951) { - return { backgroundColor: BG_COLOR}; - } else{ - return { backgroundColor: ''}; - } - } else { - return { backgroundColor: ''}; - } - }, }, - ], - }, - { - headerName: 'Min_distance', - children: [ - {headerName: 'Sun',field: 'md_sun', cellEditor: 'numericEditor',cellStyle: function(params) { - if (params.value){ - if (params.value === undefined || params.value === null || isNaN(params.value)){ - return { backgroundColor: BG_COLOR}; - } - else if ( Number(params.value) < 0 || Number(params.value) > 180) { - return { backgroundColor: BG_COLOR}; - } else{ - return { backgroundColor: ''}; - } - } - } - }, - {headerName: 'Moon',field: 'md_moon', cellEditor: 'numericEditor', cellStyle: function(params) { - if (params.value){ - if (params.value === undefined || params.value === null || isNaN(params.value)){ - return { backgroundColor: BG_COLOR}; - } - else if ( Number(params.value) < 0 || Number(params.value) > 180) { - return { backgroundColor: BG_COLOR}; - } else{ - return { backgroundColor: ''}; - } - } - } - }, - {headerName: 'Jupiter',field: 'md_jupiter', cellEditor: 'numericEditor', cellStyle: function(params) { - if (params.value){ - if (params.value === undefined || params.value === null || isNaN(params.value)){ - return { backgroundColor: BG_COLOR}; - } - else if ( Number(params.value) < 0 || Number(params.value) > 180) { - return { backgroundColor: BG_COLOR}; - } else{ - return { backgroundColor: ''}; - } - } - } - }, - ], - }, - ]; - // Column order in excel to clipboard and vice versa - // TODO: Based on the fields available in the constraint schema, these columns should be added. 
- colKeyOrder.push('scheduler'); - colKeyOrder.push('timeat'); - colKeyOrder.push('timeafter'); - colKeyOrder.push('timebefore'); - colKeyOrder.push('between'); - colKeyOrder.push('notbetween'); - colKeyOrder.push('daily'); - colKeyOrder.push('min_target_elevation'); - colKeyOrder.push('min_calibrator_elevation'); - colKeyOrder.push('offset_from'); - colKeyOrder.push('offset_to'); - colKeyOrder.push('md_sun'); - colKeyOrder.push('md_moon'); - colKeyOrder.push('md_jupiter'); - defaultCellValues['scheduler'] = constraintSchema.schema.properties.scheduler.default; - // TODO: The radian coonversion should call a function in UnitConverter.js - defaultCellValues['min_target_elevation'] = (constraintSchema.schema.properties.sky.properties.min_target_elevation.default * 180) / Math.PI; - defaultCellValues['min_calibrator_elevation'] =(constraintSchema.schema.properties.sky.properties.min_calibrator_elevation.default * 180) / Math.PI; - defaultCellValues['offset_from'] = 0; - defaultCellValues['offset_to'] = 0; - defaultCellValues['md_sun'] = (constraintSchema.schema.properties.sky.properties.min_distance.properties.sun.default * 180) / Math.PI; - defaultCellValues['md_moon'] = (constraintSchema.schema.properties.sky.properties.min_distance.properties.moon.default * 180) / Math.PI; - defaultCellValues['md_jupiter'] = (constraintSchema.schema.properties.sky.properties.min_distance.properties.jupiter.default) / Math.PI; - - if(this.state.defaultStationGroups){ - let stationValue = ''; - this.state.defaultStationGroups.map(stationGroup =>{ - stationValue += stationGroup.stations+':'+ (stationGroup.max_nr_missing || 0)+"|"; - }) - defaultCellValues['stations'] = stationValue; + } + let defaultCommonRowData = {}; + if (hasSameValue) { + defaultCommonRowData = observationPropsList[observationPropsList.length-1]; } - colProperty = {'ID':'id', 'Name':'suname', 'Description':'sudesc'}; - columnMap['Scheduling Unit'] = colProperty; - - let defaultSchema = await this.getTaskTemplateSchema(scheduleUnit, 'Target Observation'); - defaultSchema = await this.resolveSchema(defaultSchema); - let definitions = defaultSchema.definitions.pointing.properties; - let properties = defaultSchema.properties; - const propsKeys = Object.keys(properties); - for(const propKey of propsKeys){ - let property = properties[propKey]; - let childern = []; - let colProperty = {}; - if (property.title === 'Duration'){ - let cellAttr = {}; - cellAttr['headerName'] = 'Duration'; - cellAttr['field'] = 'duration'; - let cellKeys = Object.keys(cellProps['duration']); - for(const cellKey of cellKeys){ - cellAttr[cellKey] = cellProps['duration'][cellKey]; - }; - - colKeyOrder.push('duration'); - childern.push(cellAttr); - colProperty[propKey] = 'duration'; - defaultCellValues['duration'] = property.default; + this.tmpRowData = observationPropsList; + // find No. of rows filled in array + let totalCount = this.tmpRowData.length; + // Prepare No. 
Of SU for rows for UI + if (this.tmpRowData && this.tmpRowData.length > 0){ + const paramsOutputKey = Object.keys(this.tmpRowData[0]); + let availableCount = this.tmpRowData.length; + if(this.isNewSet) { + availableCount = 0; + this.tmpRowData = []; } - else { - let childalias = property.title; - childalias = _.lowerCase(childalias).split(' ').map(x => x[0]).join(''); - const paramKeys = Object.keys(property.default); - paramKeys.forEach(key =>{ - if (key === 'angle1'){ - defaultCellValues[childalias+key] = UnitConverter.getAngleInput(property.default[key], false); - } else if (key === 'angle2') { - defaultCellValues[childalias+key] = UnitConverter.getAngleInput(property.default[key], true); - } else { - defaultCellValues[childalias+key] = property.default[key]; - } - colProperty[key] = childalias+key; - let cellAttr = {}; - cellAttr['headerName'] = definitions[key].title; - cellAttr['field'] = childalias+key; - colKeyOrder.push(childalias+key); - let cellKeys = Object.keys(cellProps[key]); - for(const cellKey of cellKeys){ - cellAttr[cellKey] = cellProps[key][cellKey]; - }; - childern.push(cellAttr); - }); - } - - columnDefs.push({ - headerName:property.title, - children:childern - }) - columnMap[property.title] = colProperty; - } - columnDefs.push({headerName: 'Stations', field: 'stations', cellRenderer: 'betweenRenderer', cellEditor: 'station', valueSetter: 'newValueSetter'}); - colKeyOrder.push('stations'); - let globalColmunDef =_.cloneDeep(columnDefs); - globalColmunDef = await this.createGlobalColumnDefs(globalColmunDef, schema, constraintSchema); - - this.setState({ - columnDefs: columnDefs, - globalColmunDef: globalColmunDef, - columnMap: columnMap, - colKeyOrder: colKeyOrder, - defaultCellValues: defaultCellValues, - }); - } - - /** - * Create AG Grid column definition - * @param {*} globalColmunDef - * @param {*} schema - * @param {*} constraintSchema - */ - createGlobalColumnDefs(globalColmunDef, schema, constraintSchema) { - let schedulerValues = [...' ', ...constraintSchema.schema.properties.scheduler.enum]; - let direction_type_Values = [...' 
', ...schema.definitions.pointing.properties.direction_type.enum]; - globalColmunDef.forEach(colDef => { - if (colDef.children) { - colDef.children.forEach(childColDef => { - if (childColDef.field) { - if(childColDef.field.endsWith('direction_type')) { - childColDef.cellEditorParams.values = direction_type_Values; - } - childColDef.field = 'gdef_'+childColDef.field; - if (childColDef.default) { - childColDef.default = ''; - } - } - }); - } else { - if(colDef.headerName === '#') { - colDef['hide'] = true; - } - if(colDef.field) { - if ( colDef.field.endsWith('scheduler')) { - colDef.cellEditorParams.values = schedulerValues; - } - colDef.field = 'gdef_'+colDef.field; - if (colDef.default) { - colDef.default = ''; - } - } - } - }); - return globalColmunDef; - } - - async getTaskTemplateSchema(scheduleUnit, taskName) { - let strategyId = scheduleUnit.observation_strategy_template_id; - let templates = await ScheduleService.getObservationStrategies(); - const observStrategy = _.find(templates, {'id': strategyId}); - const tasks = observStrategy.template.tasks; - - let schema = { type: 'object', additionalProperties: false, - properties: {}, definitions:{} - }; - let paramsOutput = {}; - // TODo: This schema reference resolving code has to be moved to common file and needs to rework - for (const taskName in tasks) { - const task = tasks[taskName]; - if (task['specifications_template'] === 'target observation') { - //Resolve task from the strategy template - const $taskRefs = await $RefParser.resolve(task); - // Identify the task specification template of every task in the strategy template - const taskTemplate = _.find(this.taskTemplates, {'name': task['specifications_template']}); - schema['$id'] = taskTemplate.schema['$id']; - schema['$schema'] = taskTemplate.schema['$schema']; - let index = 0; - for (const param of observStrategy.template.parameters) { - if (param.refs[0].indexOf(`/tasks/${taskName}`) > 0) { - // Resolve the identified template - const $templateRefs = await $RefParser.resolve(taskTemplate); - let property = { }; - let tempProperty = null; - const taskPaths = param.refs[0].split("/"); - // Get the property type from the template and create new property in the schema for the parameters - try { - const parameterRef = param.refs[0]; - tempProperty = $templateRefs.get(parameterRef); - } catch(error) { - tempProperty = _.cloneDeep(taskTemplate.schema.properties[taskPaths[4]]); - if (tempProperty['$ref']) { - tempProperty = await UtilService.resolveSchema(tempProperty); - if (tempProperty.definitions && tempProperty.definitions[taskPaths[4]]) { - schema.definitions = {...schema.definitions, ...tempProperty.definitions}; - tempProperty = tempProperty.definitions[taskPaths[4]]; - } else if (tempProperty.properties && tempProperty.properties[taskPaths[4]]) { - tempProperty = tempProperty.properties[taskPaths[4]]; - } - } - if (tempProperty.type === 'array' && taskPaths.length>6) { - tempProperty = tempProperty.items.properties[taskPaths[6]]; - } - property = tempProperty; - } - property.title = param.name; - property.default = $taskRefs.get(param.refs[0].replace(`#/tasks/${taskName}`, '#')); - paramsOutput[`param_${index}`] = property.default; - schema.properties[`param_${index}`] = property; - // Set property defintions taken from the task template in new schema - for (const definitionName in taskTemplate.schema.definitions) { - schema.definitions[definitionName] = taskTemplate.schema.definitions[definitionName]; - } - } - index++; - } - } - } - return schema; - } - - async 
getTaskSchema(scheduleUnit) { - let strategyId = scheduleUnit.observation_strategy_template_id; - let tasksToUpdate = {}; - const observStrategy = _.find(this.observStrategies, {'id': strategyId}); - const tasks = observStrategy.template.tasks; - let paramsOutput = {}; - let schema = { type: 'object', additionalProperties: false, - properties: {}, definitions:{} - }; - let taskDrafts = []; - if (scheduleUnit.id) { - await ScheduleService.getTasksDraftBySchedulingUnitId(scheduleUnit.id).then(response =>{ - taskDrafts = response.data.results; - }); - } - - for (const taskName in tasks) { - const task = tasks[taskName]; - const taskDraft = taskDrafts.find(taskD => taskD.name === taskName); - if (taskDraft) { - task.specifications_doc = taskDraft.specifications_doc; + if (availableCount >= totalSU){ + totalSU = availableCount+1; } - //Resolve task from the strategy template - const $taskRefs = await $RefParser.resolve(task); - // TODo: This schema reference resolving code has to be moved to common file and needs to rework - // Identify the task specification template of every task in the strategy template - const taskTemplate = _.find(this.taskTemplates, {'name': task['specifications_template']}); - schema['$id'] = taskTemplate.schema['$id']; - schema['$schema'] = taskTemplate.schema['$schema']; - let index = 0; - for (const param of observStrategy.template.parameters) { - if (param.refs[0].indexOf(`/tasks/${taskName}`) > 0) { - tasksToUpdate[taskName] = taskName; - // Resolve the identified template - const $templateRefs = await $RefParser.resolve(taskTemplate); - let property = { }; - let tempProperty = null; - const taskPaths = param.refs[0].split("/"); - // Get the property type from the template and create new property in the schema for the parameters - try { - const parameterRef = param.refs[0]; - tempProperty = $templateRefs.get(parameterRef); - } catch(error) { - tempProperty = _.cloneDeep(taskTemplate.schema.properties[taskPaths[4]]); - if (tempProperty['$ref']) { - tempProperty = await UtilService.resolveSchema(tempProperty); - if (tempProperty.definitions && tempProperty.definitions[taskPaths[4]]) { - schema.definitions = {...schema.definitions, ...tempProperty.definitions}; - tempProperty = tempProperty.definitions[taskPaths[4]]; - } else if (tempProperty.properties && tempProperty.properties[taskPaths[4]]) { - tempProperty = tempProperty.properties[taskPaths[4]]; - } - } - if (tempProperty.type === 'array' && taskPaths.length>6) { - tempProperty = tempProperty.items.properties[taskPaths[6]]; - } - property = tempProperty; - } - property.title = param.name; - property.default = $taskRefs.get(param.refs[0].replace(`#/tasks/${taskName}`, '#')); - paramsOutput[`param_${index}`] = property.default; - schema.properties[`param_${index}`] = property; - // Set property defintions taken from the task template in new schema - for (const definitionName in taskTemplate.schema.definitions) { - schema.definitions[definitionName] = taskTemplate.schema.definitions[definitionName]; + for(var i = availableCount; i<totalSU; i++){ + let emptyRow = {}; + paramsOutputKey.forEach(key =>{ + if (key === 'id'){ + emptyRow[key] = 0; + } else { + emptyRow[key] = ''; } - } - index++; - } - if (taskTemplate.type_value === 'observation' && task.specifications_doc.station_groups) { - tasksToUpdate[taskName] = taskName; - } - this.setState({ paramsOutput: paramsOutput, tasksToUpdate: tasksToUpdate}); - } - return schema; - } - - /** - * CallBack Function : update time value in master grid - */ - async 
updateTime(rowIndex, field, value) { - let row = {}; - let tmpRowData = []; - if ( field.startsWith('gdef_')) { - row = this.state.commonRowData[0]; - row[field] = value; - tmpRowData =this.state.commonRowData; - tmpRowData[0] = row; - await this.setState({ - commonRowData: tmpRowData - }); - this.state.topGridApi.setRowData(this.state.commonRowData); - this.state.topGridApi.redrawRows(); - } - else { - row = this.state.rowData[rowIndex]; - row[field] = value; - tmpRowData = this.state.rowData; - tmpRowData[rowIndex] = row; - await this.setState({ - rowData: tmpRowData, - isDirty: true - }); - publish('edit-dirty', true); - this.state.gridApi.setRowData(this.state.rowData); - this.state.gridApi.redrawRows(); + }) + this.tmpRowData.push(_.cloneDeep(this.agSUWithDefaultValue));//emptyRow); + } + } else { + let availableCount = this.tmpRowData.length; + for(var i = availableCount; i<totalSU; i++){ + this.tmpRowData.push(_.cloneDeep(this.agSUWithDefaultValue));//emptyRow); + } } - } - - /** - * Update the Daily/Station column value from external component - * @param {*} rowIndex - * @param {*} field - * @param {*} value - */ - async updateCell(rowIndex, field, value) { - let row = {}; - let tmpRowData = []; - if ( field.startsWith('gdef_')) { - row = this.state.commonRowData[0]; - row[field] = value; - tmpRowData = this.state.commonRowData; - tmpRowData[0] = row; - await this.setState({ - commonRowData: tmpRowData - }); - if(field !== 'gdef_daily') { - this.state.topGridApi.stopEditing(); - var focusedCell = this.state.topGridColumnApi.getColumn(field) - this.state.topGridApi.ensureColumnVisible(focusedCell); - this.state.topGridApi.setFocusedCell(rowIndex, focusedCell); - } + if(this.isNewSet) { + defaultCommonRowData = this.tmpRowData[this.tmpRowData.length-1]; } - else { - row = this.state.rowData[rowIndex]; - row[field] = value; - tmpRowData = this.state.rowData; - tmpRowData[rowIndex] = row; - await this.setState({ - rowData: tmpRowData, - isDirty: true - }); - publish('edit-dirty', true); - if(field !== 'daily') { - this.state.gridApi.stopEditing(); - var focusedCell = this.state.gridColumnApi.getColumn(field) - this.state.gridApi.ensureColumnVisible(focusedCell); - this.state.gridApi.setFocusedCell(rowIndex, focusedCell); - } + this.setState({ + schedulingUnitList: schedulingUnitList, + rowData: this.tmpRowData, + totalCount: totalCount, + noOfSU: this.tmpRowData.length, + emptyRow: this.tmpRowData[this.tmpRowData.length-1], + isAGLoading: false, + commonRowData: [defaultCommonRowData], + defaultCommonRowData: defaultCommonRowData, + hasSameValue: hasSameValue + }); + {this.state.gridApi && + this.state.gridApi.setRowData(this.state.rowData); } } - + + /** - * Get Station details + * Get Station details from Scheduling Unit * @param {*} schedulingUnit */ - async getStationGrops(schedulingUnit){ + async getStationGrops(schedulingUnit){ let stationValue = ''; if (schedulingUnit && schedulingUnit.id>0) { const promises = await [ @@ -1085,17 +795,21 @@ export class SchedulingSetCreate extends Component { targetObservation = taskDrafts.data.results.find(task => {return task.specifications_doc.station_groups?true:false}); stationGroups = targetObservation?targetObservation.specifications_doc.station_groups:[]; } - if (stationGroups) { stationGroups.map(stationGroup =>{ stationValue += stationGroup.stations+':'+stationGroup.max_nr_missing+"|"; - }) + }); } }); } return stationValue; } + /** + * Get Observation details from Scheduling->Task + * @param {Object} scheduleunit - Scheduling Unit + * 
@returns + */ async getObservationValueFromTask(scheduleunit) { let taskDrafts = []; if (scheduleunit.id) { @@ -1141,197 +855,415 @@ export class SchedulingSetCreate extends Component { } property = tempProperty; } - property.title = param.name; + if(property) { + property.title = param.name; + } else { + property = {}; + property.title = param.name; + } + property.default = $taskRefs.get(param.refs[0].replace(`#/tasks/${taskName}`, '#')); - if ( param.name === 'Duration') { - paramsOutput[param.name] = {'param_0': property.default}; - } else { + //if ( param.name === 'Duration') { + // paramsOutput[param.name] = property.default; + // } else { paramsOutput[param.name] = property.default; - } + // } } + this.setState({tasksToUpdate: tasksToUpdate}); } } return paramsOutput; } /** - * Function to prepare ag-grid row data. + * Define AG Grid column properties */ - async prepareScheduleUnitListForGrid(){ - if (this.state.schedulingUnitList.length === 0) { - return; - } - this.tmpRowData = []; - let totalSU = this.state.noOfSU; - let lastRow = {}; - let hasSameValue = true; - //refresh column header - await this.createGridColumns(this.state.schedulingUnitList[0]); - let observationPropsList = []; - for(const scheduleunit of this.state.schedulingUnitList){ - let observationProps = { - id: scheduleunit.id, - suname: scheduleunit.name, - sudesc: scheduleunit.description, - //set default TRUE and it will reset this value while validating the row and will skip the invalid rows when save the row data - isValid: true, + getAGGridAngelColumnsDefinition(schema) { + let cellProps = []; + cellProps['angle1'] = {isgroup: true, type:'numberValueColumn', cellRenderer: 'timeInputMask',cellEditor: 'timeInputMask', valueSetter: 'valueSetter', cellStyle: function(params) { + if (params.value && !Validator.validateTime(params.value)) { + return { backgroundColor: BG_COLOR}; + } else { + return { backgroundColor: ''}; + } + },}; + cellProps['angle2'] = {isgroup: true, type:'numberValueColumn', cellRenderer: 'degreeInputMask',cellEditor: 'degreeInputMask', valueSetter: 'valueSetter' , cellStyle: function(params) { + if (params.value && !Validator.validateAngle(params.value)) { + return { backgroundColor: BG_COLOR}; + } else { + return { backgroundColor: ''}; + } + }, }; + cellProps['angle3'] = {isgroup: true, cellEditor: 'numericEditor',cellStyle: function(params) { + if (isNaN(params.value)) { + return { backgroundColor: BG_COLOR}; + } else { + return { backgroundColor: ''}; + } + }}; + cellProps['direction_type'] = {isgroup: true, cellEditor: 'agSelectCellEditor',default: schema.definitions.pointing.properties.direction_type.default, + cellEditorParams: { + values: schema.definitions.pointing.properties.direction_type.enum, + }, + }; + cellProps['duration'] = { type:'numberValueColumn', cellEditor:'numericEditor', cellStyle: function(params) { + if (params.value){ + if ( !Number(params.value)){ + return { backgroundColor: BG_COLOR}; + } + else if ( Number(params.value) < 1) { + return { backgroundColor: BG_COLOR}; + } else{ + return { backgroundColor: ''}; + } + } + }, }; + cellProps['beamformers'] = { cellRenderer: 'beamformersRenderer', cellEditor:'beamformer' }; + return cellProps; + } + + /** + * + * @param {*} predefineCellProps + * @param {*} childCellProps + * @param {*} cellName + * @returns + */ + getAGGridAngelColumnsProperty(predefineCellProps, childCellProps, cellName) { + //cellName = _.lowerCase(cellName); + let cellProperty = predefineCellProps[cellName]; + if(cellProperty) { + let cellKeys = 
Object.keys(cellProperty); + for(const cellKey of cellKeys){ + childCellProps[cellKey] = predefineCellProps[cellName][cellKey]; }; + } else { + // let defaultProp = {editable: true, cellRenderer: 'betweenRenderer',cellEditor: 'agDateInput', valueSetter: 'newValueSetter'}; + // childCellProps = Object.assign(childCellProps, defaultProp); + } + return childCellProps; + } + + async createGridCellDetails() { + let columnMap = []; + let colProperty = {}; + this.colKeyOrder = []; + let columnDefs = [ + { // Row Index + headerName: '#', + editable: false, + maxWidth: 60, + cellRenderer: 'rowIdRenderer', + pinned: 'left', + lockPosition: true, + suppressSizeToFit: true, + }, + {headerName: 'Scheduling Unit', children: [ + {headerName: 'Name', field: 'suname'}, + {headerName: 'Description', field: 'sudesc', cellStyle: function(params) { + if (params.data && params.data.suname && (params.data.suname !== '' && (!params.value || params.value === ''))) { + return { backgroundColor: BG_COLOR}; + } else { return { backgroundColor: ''};} + },},] + } + ]; + colProperty = {'ID':'id', 'Name':'suname', 'Description':'sudesc'}; + columnMap['Scheduling Unit'] = colProperty; + this.colKeyOrder.push("suname"); + this.colKeyOrder.push("sudesc"); + // Create Constraint Column for AG Grid + columnDefs = await this.getConstraintColumns(columnDefs); + let cellProps = {}; + //Observation Schema + const schema = this.state.paramsSchema; + if(schema.properties) { + // let definitions = schema.definitions.pointing; + let predefineCellProps = this.getAGGridAngelColumnsDefinition(schema); + let propKeys = Object.keys(schema.properties); + for(const prop of propKeys) { + colProperty = {}; + cellProps = {}; + let property = schema.properties[prop]; + if(property && property.$ref) { + cellProps['headerName'] = property.title; + let defaultKeys = Object.keys(property.default); + let children = []; + for(const defaultKey of defaultKeys) { + this.colKeyOrder.push(prop+"~"+defaultKey); + if(defaultKey === 'angle1') { + this.agSUWithDefaultValue[prop+"~"+defaultKey] = UnitConverter.getAngleInput( property.default[defaultKey], false); + } else if(defaultKey === 'angle2') { + this.agSUWithDefaultValue[prop+"~"+defaultKey] = UnitConverter.getAngleInput( property.default[defaultKey], true); + } else{ + this.agSUWithDefaultValue[prop+"~"+defaultKey] = property.default[defaultKey]; + } + let childCellProps = { headerName : _.startCase(defaultKey), field : prop+"~"+defaultKey}; + childCellProps = this.getAGGridAngelColumnsProperty(predefineCellProps, childCellProps, defaultKey); + colProperty[defaultKey] = prop+"~"+defaultKey; + children.push(childCellProps); + } + columnMap[property.title] = colProperty; + cellProps['children'] = children; + columnDefs.push(cellProps); + } else { + colProperty ={}; + cellProps['headerName'] = property.title; + this.colKeyOrder.push(prop+"~"+property.title); + this.agSUWithDefaultValue[prop+"~"+property.title] = property.default; + cellProps['field'] = prop+"~"+property.title; + cellProps = this.getAGGridAngelColumnsProperty(predefineCellProps, cellProps, _.lowerCase(property.title)); + colProperty[property.title] = prop+"~"+property.title; + columnMap[property.title] = colProperty; + columnDefs.push(cellProps); + } + } + } + this.colKeyOrder.push('stations'); + let stationValue = ''; + this.state.defaultStationGroups.map(stationGroup =>{ + let missingStation = (stationGroup.max_nr_missing)?stationGroup.max_nr_missing:0; + stationValue += stationGroup.stations+':'+missingStation+"|"; + }) + 
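Editor's note: the station cell flattens each station group into a 'stations:max_nr_missing|' token (built just above from defaultStationGroups, and by getStationGrops for existing units); saveSU() later splits it back apart on '|', ':' and ','. A self-contained sketch of that round trip, with function names that are mine, not the component's:

import _ from 'lodash';

function encodeStationGroups(groups) {
    // [{stations: ['CS001','CS002'], max_nr_missing: 1}] -> 'CS001,CS002:1|'
    return groups.map(g => `${g.stations.join(',')}:${g.max_nr_missing || 0}|`).join('');
}

function decodeStationGroups(value) {
    return _.compact(value.split('|')).map(token => {
        const [stations, missing] = token.split(':');
        return { stations: stations.split(','), max_nr_missing: Number(missing) || 0 };
    });
}

console.log(decodeStationGroups(encodeStationGroups([{ stations: ['CS001', 'CS002'], max_nr_missing: 1 }])));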
this.agSUWithDefaultValue['stations'] = stationValue;
+        columnDefs.push({headerName: 'Stations', field: 'stations', cellRenderer: 'betweenRenderer', cellEditor: 'station', valueSetter: 'newValueSetter'});
+        this.getEmptyRow();
+
+        let globalColmunDef = _.cloneDeep(columnDefs);
+        globalColmunDef = await this.createGlobalColumnDefs(globalColmunDef, schema);
+
+        this.setState({colKeyOrder: this.colKeyOrder, globalColmunDef: globalColmunDef, columnDefs: columnDefs, columnMap: columnMap, agSUWithDefaultValue: this.agSUWithDefaultValue});
+    }
+
+    /**
+     * Create AG Grid column definitions for the top (global default) table
+     * @param {*} globalColmunDef
+     * @param {*} schema
+     */
+    createGlobalColumnDefs(globalColmunDef, schema) {
+        let schedulerValues = [...' ', ...this.constraintSchema.schema.properties.scheduler.enum];
+        let direction_type_Values = [...' ', ...schema.definitions.pointing.properties.direction_type.enum];
+        globalColmunDef.forEach(colDef => {
+            if (colDef.children) {
+                colDef.children.forEach(childColDef => {
+                    if (childColDef.field) {
+                        if(childColDef.field.endsWith('direction_type')) {
+                            childColDef.cellEditorParams.values = direction_type_Values;
+                        }
+                        childColDef.field = 'gdef_'+childColDef.field;
+                        if (childColDef.default) {
+                            childColDef.default = '';
+                        }
+                    }
+                });
+            } else {
+                if(colDef.headerName === '#') {
+                    colDef['hide'] = true;
+                }
+                if(colDef.field) {
+                    if ( colDef.field.endsWith('scheduler')) {
+                        colDef.cellEditorParams.values = schedulerValues;
+                    }
+                    colDef.field = 'gdef_'+colDef.field;
+                    if (colDef.default) {
+                        colDef.default = '';
+                    }
+                }
+            }
+        });
+        return globalColmunDef;
+    }
+
+    /**
+     * Build an empty AG grid row with the same keys as the default-value row
+     */
+    getEmptyRow() {
+        this.emptyAGSU = {};
+        let keys = Object.keys(this.agSUWithDefaultValue);
+        for(const key of keys) {
+            if (key === 'id'){
+                this.emptyAGSU[key] = 0;
+            } else {
+                this.emptyAGSU[key] = '';
+            }
+        }
+    }
+
+    /**
+     * Create Constraint columns for AG Grid
+     * @param {*} columnDefs
+     * @returns
+     */
+    async getConstraintColumns(columnDefs) {
+        // currently only one constraint schema is available and no UI is provided to choose constraints, so assign it directly
+        this.constraintSchema = this.constraintTemplates[0];
+        this.constraintSchema = await this.resolveSchema(this.constraintSchema);
+
+        /** AG Grid Cell Specific Properties
+            In Excel View - expected column order is ['scheduler', 'time', 'daily', 'sky'] */
+        let dailyProps = Object.keys( this.constraintSchema.schema.properties.daily.properties);
+        this.daily = [];
+        this.dailyOption = [];
+        dailyProps.forEach(prop => {
+            this.dailyOption.push({'name':prop, 'value':prop});
+            this.daily.push(prop);
+        });
+        this.setState({dailyOption: this.dailyOption, daily: this.daily});
-        if (scheduleunit.observation_strategy_template_id) {
-            let parameters = await this.getObservationValueFromTask(scheduleunit);
-            let parametersName = Object.keys(parameters);
-            for(const parameter of parametersName){
-                let valueItem = parameters[parameter];
-                let excelColumns = this.state.columnMap[parameter];
-                if (excelColumns) {
-                    let excelColumnsKeys = Object.keys(excelColumns);
-                    for(const eColKey of excelColumnsKeys){
-                        if (eColKey === 'angle1') {
-                            observationProps[excelColumns[eColKey]] = UnitConverter.getAngleInput(valueItem[eColKey], false);
+        // move this variable to class variable
+        // Ag-grid Columns definition
+        // Column order to use clipboard copy
+        this.colKeyOrder.push('scheduler');
+        this.agSUWithDefaultValue['scheduler'] = this.constraintSchema.schema.properties.scheduler.default;
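Editor's note: the sky-constraint defaults that follow are stored in the template in radians but edited in the grid in degrees; saveSU() converts them back with degreeToRadians before persisting. A minimal round-trip sketch, with helper names that are mine rather than the component's:

const toDegrees = rad => (rad * 180) / Math.PI;
const toRadians = deg => (deg * Math.PI) / 180;

const stored = 0.5235987755982988;   // 30 degrees, as persisted in the constraints doc
const shown  = toDegrees(stored);    // 30 -- the value the grid displays and edits
const saved  = toRadians(shown);     // back to radians on save
console.log(shown, saved);           // 30 0.5235987755982988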
this.agSUWithDefaultValue['min_target_elevation'] = (this.constraintSchema.schema.properties.sky.properties.min_target_elevation.default * 180) / Math.PI;
+        this.agSUWithDefaultValue['min_calibrator_elevation'] = (this.constraintSchema.schema.properties.sky.properties.min_calibrator_elevation.default * 180) / Math.PI;
+        this.agSUWithDefaultValue['offset_from'] = 0;
+        this.agSUWithDefaultValue['offset_to'] = 0;
+        this.agSUWithDefaultValue['md_sun'] = (this.constraintSchema.schema.properties.sky.properties.min_distance.properties.sun.default * 180) / Math.PI;
+        this.agSUWithDefaultValue['md_moon'] = (this.constraintSchema.schema.properties.sky.properties.min_distance.properties.moon.default * 180) / Math.PI;
+        this.agSUWithDefaultValue['md_jupiter'] = (this.constraintSchema.schema.properties.sky.properties.min_distance.properties.jupiter.default * 180) / Math.PI;
+
+        columnDefs.push({headerName: 'Scheduler',field: 'scheduler',cellEditor: 'agSelectCellEditor',default: this.constraintSchema.schema.properties.scheduler.default,
+            cellEditorParams: {values: this.constraintSchema.schema.properties.scheduler.enum,}, });
+        columnDefs.push({ headerName: 'Time',
+            children: [
+                { headerName: 'At', field:'timeat', editable: true, cellRenderer: 'betweenRenderer',cellEditor: 'agDateInput', valueSetter: 'newValueSetter'},
+                { headerName: 'After', field:'timeafter', editable: true, cellRenderer: 'betweenRenderer',cellEditor: 'agDateInput', valueSetter: 'newValueSetter'},
+                { headerName: 'Before', field:'timebefore', editable: true, cellRenderer: 'betweenRenderer',cellEditor: 'agDateInput', valueSetter: 'newValueSetter'},
+            ],});
+        this.colKeyOrder.push('timeat');
+        this.colKeyOrder.push('timeafter');
+        this.colKeyOrder.push('timebefore');
+        this.colKeyOrder.push('between');
+        this.colKeyOrder.push('notbetween');
+        this.colKeyOrder.push('daily');
+        columnDefs.push({headerName: 'Between',field: 'between',cellRenderer: 'betweenRenderer',cellEditor: 'betweenEditor',valueSetter: 'newValueSetter'});
+        columnDefs.push({headerName: 'Not Between',field: 'notbetween',cellRenderer: 'betweenRenderer',cellEditor: 'betweenEditor',valueSetter: 'newValueSetter'});
+        this.colKeyOrder.push('min_target_elevation');
+        this.colKeyOrder.push('min_calibrator_elevation');
+        this.colKeyOrder.push('offset_from');
+        this.colKeyOrder.push('offset_to');
+        columnDefs.push({headerName: 'Daily',field: 'daily',cellEditor: 'multiselector', valueSetter: function(params) {}},
+            {headerName: 'Sky',
+                children: [
+                    {headerName: 'Min Target Elevation',field: 'min_target_elevation', cellEditor: 'numericEditor', cellStyle: function(params) {
+                        if (params.value){
+                            if (params.value === undefined || params.value === null || isNaN(params.value)){
+                                return { backgroundColor: BG_COLOR};
+                            }
+                            else if ( Number(params.value) < 0 || Number(params.value) > 90) {
+                                return { backgroundColor: BG_COLOR};
+                            } else{
+                                return { backgroundColor: ''};
+                            }
+                        }
+                    }, },
+                    {headerName: 'Min Calibrator Elevation',field: 'min_calibrator_elevation', cellEditor: 'numericEditor', cellStyle: function(params) {
+                        if (params.value){
+                            if (params.value === undefined || params.value === null || isNaN(params.value)){
+                                return { backgroundColor: BG_COLOR};
+                            }
+                            else if ( Number(params.value) < 0 || Number(params.value) > 90) {
+                                return { backgroundColor: BG_COLOR};
+                            } else{
+                                return { backgroundColor: ''};
+                            }
+                        }
+                    }, },
+                    {headerName: 'Offset Window From',field: 'offset_from', cellEditor: 'numericEditor',cellStyle: function(params) {
+
+                        if (params.value){
+                            if (params.value === 
'undefined' || params.value === ''){ + return { backgroundColor: ''}; + } + if(params.value === "0"){ + return { backgroundColor: ''}; + } + if (!Number(params.value)){ + return { backgroundColor: BG_COLOR}; + } + else if ( Number(params.value) < -0.20943951 || Number(params.value) > 0.20943951) { + return { backgroundColor: BG_COLOR}; + } else{ + return { backgroundColor: ''}; + } + } else { + return { backgroundColor: ''}; + } + }, }, + {headerName: 'Offset Window To',field: 'offset_to', cellEditor: 'numericEditor', cellStyle: function(params) { + if (params.value){ + if (params.value === 'undefined' || params.value === ''){ + return { backgroundColor: ''}; + } + if(params.value === "0"){ + return { backgroundColor: ''}; } - else if (eColKey === 'angle2') { - observationProps[excelColumns[eColKey]] = UnitConverter.getAngleInput(valueItem[eColKey], true); + if ( !Number(params.value)){ + return { backgroundColor: BG_COLOR}; } - else { - observationProps[excelColumns[eColKey]] = valueItem[eColKey]; + else if ( Number(params.value) < -0.20943951 || Number(params.value) > 0.20943951) { + return { backgroundColor: BG_COLOR}; + } else{ + return { backgroundColor: ''}; } + } else { + return { backgroundColor: ''}; + } + }, }, + ], + }); + this.colKeyOrder.push('md_sun'); + this.colKeyOrder.push('md_moon'); + this.colKeyOrder.push('md_jupiter'); + columnDefs.push({headerName: 'Min_distance',children: [ + {headerName: 'Sun',field: 'md_sun', cellEditor: 'numericEditor',cellStyle: function(params) { + if (params.value){ + if (params.value === undefined || params.value === null || isNaN(params.value)){ + return { backgroundColor: BG_COLOR}; + } + else if ( Number(params.value) < 0 || Number(params.value) > 180) { + return { backgroundColor: BG_COLOR}; + } else{ + return { backgroundColor: ''}; + } + } + } + }, + {headerName: 'Moon',field: 'md_moon', cellEditor: 'numericEditor', cellStyle: function(params) { + if (params.value){ + if (params.value === undefined || params.value === null || isNaN(params.value)){ + return { backgroundColor: BG_COLOR}; + } + else if ( Number(params.value) < 0 || Number(params.value) > 180) { + return { backgroundColor: BG_COLOR}; + } else{ + return { backgroundColor: ''}; } } } - } else { - let parameters = scheduleunit['requirements_doc'].parameters; - for(const parameter of parameters){ - let refUrl = parameter['refs']; - let valueItem = (await $RefParser.resolve( scheduleunit['requirements_doc'])).get(refUrl[0]); - let excelColumns = this.state.columnMap[parameter.name]; - if (excelColumns) { - let excelColumnsKeys = Object.keys(excelColumns); - for(const eColKey of excelColumnsKeys){ - if (eColKey === 'angle1') { - observationProps[excelColumns[eColKey]] = UnitConverter.getAngleInput(valueItem[eColKey], false); - } - else if (eColKey === 'angle2') { - observationProps[excelColumns[eColKey]] = UnitConverter.getAngleInput(valueItem[eColKey], true); - } - else { - observationProps[excelColumns[eColKey]] = valueItem[eColKey]; - } + }, + {headerName: 'Jupiter',field: 'md_jupiter', cellEditor: 'numericEditor', cellStyle: function(params) { + if (params.value){ + if (params.value === undefined || params.value === null || isNaN(params.value)){ + return { backgroundColor: BG_COLOR}; + } + else if ( Number(params.value) < 0 || Number(params.value) > 180) { + return { backgroundColor: BG_COLOR}; + } else{ + return { backgroundColor: ''}; } } } - } - - observationProps['stations'] = await this.getStationGrops(scheduleunit); - let constraint = 
scheduleunit.id?scheduleunit.scheduling_constraints_doc:null; - if (constraint){ - if (constraint.scheduler){ - observationProps['scheduler'] = constraint.scheduler; - } - observationProps['timeat'] = this.isNotEmpty(constraint.time.at)?moment.utc(constraint.time.at).format(UIConstants.CALENDAR_DATETIME_FORMAT): ''; - observationProps['timeafter'] = this.isNotEmpty(constraint.time.after)?moment.utc(constraint.time.after).format(UIConstants.CALENDAR_DATETIME_FORMAT):''; - observationProps['timebefore'] = this.isNotEmpty(constraint.time.before)?moment.utc(constraint.time.before).format(UIConstants.CALENDAR_DATETIME_FORMAT):''; - if (constraint.time.between){ - observationProps['between'] = this.getBetweenStringValue(constraint.time.between); - } - if (constraint.time.between){ - observationProps['notbetween'] = this.getBetweenStringValue(constraint.time.not_between); - } - - observationProps['daily'] = this.fetchDailyFieldValue(constraint.daily); - //console.log("SU id:", scheduleunit.id, "Connstraint:", constraint.sky); - UnitConversion.radiansToDegree(constraint.sky); - observationProps['min_target_elevation'] = constraint.sky.min_target_elevation; - observationProps['min_calibrator_elevation'] = constraint.sky.min_calibrator_elevation; - if ( constraint.sky.transit_offset ){ - observationProps['offset_from'] = constraint.sky.transit_offset.from ;//constraint.sky.transit_offset.from:''; - observationProps['offset_to'] = constraint.sky.transit_offset.to ; //constraint.sky.transit_offset.to:''; - } - - if (constraint.sky.min_distance){ - observationProps['md_sun'] = constraint.sky.min_distance.sun;//constraint.sky.min_distance.sun:0; - observationProps['md_moon'] = constraint.sky.min_distance.moon; //constraint.sky.min_distance.moon:0; - observationProps['md_jupiter'] = constraint.sky.min_distance.jupiter;//constraint.sky.min_distance.jupiter:0; - } - } - observationPropsList.push(observationProps); - //Set values for global row if all rows has same value - if (_.isEmpty(lastRow)) { - lastRow = observationProps; - } - else if (!_.isEqual( - _.omit(lastRow, ['id']), - _.omit(observationProps, ['id']) - )) { - hasSameValue = false; - } - - } - let defaultCommonRowData = {}; - if (hasSameValue) { - defaultCommonRowData = observationPropsList[observationPropsList.length-1]; - } - this.tmpRowData = observationPropsList; - // find No. of rows filled in array - let totalCount = this.tmpRowData.length; - // Prepare No. 
Of SU for rows for UI - if (this.tmpRowData && this.tmpRowData.length > 0){ - const paramsOutputKey = Object.keys( this.tmpRowData[0]); - let availableCount = this.tmpRowData.length; - if(this.isNewSet) { - availableCount = 0; - this.tmpRowData = []; - } - if (availableCount >= totalSU){ - totalSU = availableCount+1; - } - for(var i = availableCount; i<totalSU; i++){ - let emptyRow = {}; - paramsOutputKey.forEach(key =>{ - if (key === 'id'){ - emptyRow[key] = 0; - } else { - emptyRow[key] = ''; - } - }) - this.tmpRowData.push(emptyRow); - } - } - if(this.isNewSet) { - defaultCommonRowData = this.tmpRowData[this.tmpRowData.length-1]; - } - this.setState({ - rowData: this.tmpRowData, - totalCount: totalCount, - noOfSU: this.tmpRowData.length, - emptyRow: this.tmpRowData[this.tmpRowData.length-1], - isAGLoading: false, - commonRowData: [defaultCommonRowData], - defaultCommonRowData: defaultCommonRowData, - hasSameValue: hasSameValue - }); + }, + ], + }); - this.setDefaultCellValue(); - } - - /** - * Get Daily column value - * @param {*} daily - */ - fetchDailyFieldValue(daily){ - let returnValue = []; - if (daily.require_day === true){ - returnValue.push('require_day'); - } - if (daily.require_night === true){ - returnValue.push('require_night'); - } - if (daily.avoid_twilight === true){ - returnValue.push('avoid_twilight'); - } - return returnValue; + return columnDefs; } - /** + + /** * Function called back from Degree/Time Input Mask to set value in row data. * * @param {Stirng} cell -> contains Row ID, Column Name, Value, isDegree @@ -1343,222 +1275,109 @@ export class SchedulingSetCreate extends Component { row = this.state.commonRowData[0]; row[field] = value; row['isValid'] = isValid; + /* - this field is nolonger row[field+'value'] = UnitConverter.parseAngle(value); + */ tmpRowData = this.state.commonRowData; tmpRowData[0] = row; - await this.setState({ - commonRowData: tmpRowData - }); + await this.setState({commonRowData: tmpRowData}); } else { row = this.state.rowData[rowIndex]; row[field] = value; row['isValid'] = isValid; + /* row[field+'value'] = UnitConverter.parseAngle(value); + */ tmpRowData = this.state.rowData; tmpRowData[rowIndex] = row; - await this.setState({ - rowData: tmpRowData, - isDirty: true - }); - publish('edit-dirty', true); - } - } - - /** - * Read Data from clipboard - */ - async readClipBoard(){ - try{ - const queryOpts = { name: 'clipboard-read', allowWithoutGesture: true }; - await navigator.permissions.query(queryOpts); - let data = await navigator.clipboard.readText(); - return data; - }catch(err){ - console.log("Error",err); - } - } - - async topAGGridEvent(e) { - var key = e.which || e.keyCode; - var ctrl = e.ctrlKey ? e.ctrlKey : ((key === 17) ? 
true : false); - if ( ctrl && (key === 67 || key === 86)) { - this.showIcon = true; - this.dialogType = "warning"; - this.dialogHeader = "Warning"; - this.dialogMsg = "Copy / Paste is restricted in this grid"; - this.dialogContent = ""; - this.callBackFunction = this.close; - this.onClose = this.close; - this.onCancel = this.close; - this.setState({ - confirmDialogVisible: true, - }); - } - } - - /** - * Function to copy the data to clipboard - */ - async copyToClipboard(){ - var columnsName = this.state.gridColumnApi.getAllGridColumns(); - var selectedRows = this.state.gridApi.getSelectedRows(); - let clipboardData = ''; - if ( this.state.copyHeader ) { - var line = ''; - columnsName.map( column => { - if ( column.colId !== '0'){ - line += column.colDef.headerName + '\t'; - } - }) - line = _.trim(line); - clipboardData += line + '\r\n'; - } - for(const rowData of selectedRows){ - var line = ''; - for(const key of this.state.colKeyOrder){ - line += rowData[key] + '\t'; - } - line = _.trim(line); - clipboardData += line + '\r\n'; + await this.setState({rowData: tmpRowData,isDirty: true}); + publish('edit-dirty', true); } - clipboardData = _.trim(clipboardData); - - const queryOpts = { name: 'clipboard-write', allowWithoutGesture: true }; - await navigator.permissions.query(queryOpts); - await navigator.clipboard.writeText(clipboardData); - const headerText = (this.state.copyHeader) ?'with Header' : ''; - this.growl.show({severity: 'success', summary: '', detail: selectedRows.length+' row(s) copied to clipboard '+headerText }); } - /** - * Function to copy the data from clipboard - */ - async copyFromClipboard(){ - try { - var selectedRows = this.state.gridApi.getSelectedNodes(); - this.tmpRowData = this.state.rowData; - let dataRowCount = this.state.totalCount; - //Read Clipboard Data - let clipboardData = await this.readClipBoard(); - let selectedRowIndex = 0; - if (selectedRows){ - await selectedRows.map(selectedRow =>{ - selectedRowIndex = selectedRow.rowIndex; - if (clipboardData){ - clipboardData = _.trim(clipboardData); - let suGridRowData = this.state.emptyRow; - clipboardData = _.trim(clipboardData); - let suRows = clipboardData.split("\n"); - suRows.forEach(line => { - suGridRowData = {}; - suGridRowData['id'] = 0; - suGridRowData['isValid'] = true; - - if ( this.tmpRowData.length <= selectedRowIndex ) { - this.tmpRowData.push(this.state.emptyRow); - } - - let colCount = 0; - let suRow = line.split("\t"); - for(const key of this.state.colKeyOrder){ - suGridRowData[key] = suRow[colCount]; - colCount++; - } - if (this.tmpRowData[selectedRowIndex].id > 0 ) { - suGridRowData['id'] = this.tmpRowData[selectedRowIndex].id; - } - this.tmpRowData[selectedRowIndex] = (suGridRowData); - selectedRowIndex++ - }) - } - }); - dataRowCount = selectedRowIndex; - let emptyRow = this.state.emptyRow; - let tmpNoOfSU = this.state.noOfSU; - if (dataRowCount >= tmpNoOfSU){ - tmpNoOfSU = dataRowCount; - //Create additional empty row at the end - for(let i= this.tmpRowData.length; i<= tmpNoOfSU; i++){ - this.tmpRowData.push(emptyRow); - } - } - await this.setState({ - rowData: this.tmpRowData, - noOfSU: this.tmpRowData.length, - totalCount: dataRowCount, - isDirty: true - }) - publish('edit-dirty', true); - this.state.gridApi.setRowData(this.state.rowData); - this.state.gridApi.redrawRows(); - } - } - catch (err) { - console.error('Error: ', err); - } - } - - /** - * Copy data to/from clipboard - * @param {*} e - */ - async clipboardEvent(e){ - var key = e.which || e.keyCode; - var ctrl = e.ctrlKey ? 
e.ctrlKey : ((key === 17) ? true : false); - if ( key === 67 && ctrl ) { - //Ctrl+C - this.copyToClipboard(); - } - else if ( key === 86 && ctrl ) { - // Ctrl+V - this.copyFromClipboard(); - } - } - - /** - * Copy the table header to clipboard + /** + * CallBack Function : update time value in master grid */ - async copyOnlyHeader() { - this.setState({ fade: true }); - let clipboardData = ''; - if (this.state.gridColumnApi) { - var columnsName = this.state.gridColumnApi.getAllGridColumns(); - var line = ''; - if( columnsName ) { - columnsName.map( column => { - if ( column.colId !== '0'){ - line += column.colDef.headerName + '\t'; - } - }); - } - line = _.trim(line); - clipboardData += line + '\r\n'; - clipboardData = _.trim(clipboardData); - const queryOpts = { name: 'clipboard-write', allowWithoutGesture: true }; - await navigator.permissions.query(queryOpts); - await navigator.clipboard.writeText(clipboardData); - this.growl.show({severity: 'success', summary: '', detail: 'Header copied to clipboard '}); + async updateTime(rowIndex, field, value) { + let row = {}; + let tmpRowData = []; + if ( field.startsWith('gdef_')) { + row = this.state.commonRowData[0]; + row[field] = value; + tmpRowData =this.state.commonRowData; + tmpRowData[0] = row; + await this.setState({commonRowData: tmpRowData}); + this.state.topGridApi.setRowData(this.state.commonRowData); + this.state.topGridApi.redrawRows(); + } + else { + row = this.state.rowData[rowIndex]; + row[field] = value; + tmpRowData = this.state.rowData; + tmpRowData[rowIndex] = row; + await this.setState({rowData: tmpRowData,isDirty: true}); + publish('edit-dirty', true); + this.state.gridApi.setRowData(this.state.rowData); + this.state.gridApi.redrawRows(); } } /** - * Set state to copy the table header to clipboard + * Update the Daily/Station column value from external component + * @param {*} rowIndex + * @param {*} field * @param {*} value */ - async copyHeader(value) { - await this.setState({'copyHeader': value}); + async updateCell(rowIndex, field, value) { + let row = {}; + let tmpRowData = []; + if ( field.startsWith('gdef_')) { + row = this.state.commonRowData[0]; + row[field] = value; + tmpRowData = this.state.commonRowData; + tmpRowData[0] = row; + await this.setState({commonRowData: tmpRowData}); + if(field !== 'gdef_daily') { + this.state.topGridApi.stopEditing(); + var focusedCell = this.state.topGridColumnApi.getColumn(field) + this.state.topGridApi.ensureColumnVisible(focusedCell); + this.state.topGridApi.setFocusedCell(rowIndex, focusedCell); + } + } + else { + row = this.state.rowData[rowIndex]; + row[field] = value; + tmpRowData = this.state.rowData; + tmpRowData[rowIndex] = row; + await this.setState({rowData: tmpRowData,isDirty: true}); + publish('edit-dirty', true); + if(field !== 'daily') { + this.state.gridApi.stopEditing(); + var focusedCell = this.state.gridColumnApi.getColumn(field) + this.state.gridApi.ensureColumnVisible(focusedCell); + this.state.gridApi.setFocusedCell(rowIndex, focusedCell); + } + } } + + /** + * Save Scheduling Unit(s) form Excel table + */ + async saveSchedulingUnit() { + this.validateGridAndSave(); + } + /** * Validate Grid values on click Save button from UI */ - async validateGridAndSave(){ + async validateGridAndSave(){ let validCount = 0; let inValidCount = 0; let isValidRow = true; let errorDisplay = []; - const mandatoryKeys = 
['suname','sudesc','scheduler','min_target_elevation','min_calibrator_elevation','offset_from','offset_to','md_sun','md_moon','md_jupiter','tp1angle1','tp1angle2','tp1angle3','tp1direction_type','tp2angle1','tp2angle2','tp2angle3','tp2direction_type','tbangle1','tbangle2','tbangle3','tbdirection_type']; + const mandatoryKeys = ['suname','sudesc','scheduler','min_target_elevation','min_calibrator_elevation','offset_from','offset_to','md_sun','md_moon','md_jupiter','param_0~angle1','param_0~angle2','param_0~direction_type','param_1~angle1','param_1~angle2','param_1~direction_type','param_2~angle1','param_2~angle2','param_2~direction_type']; let tmpMandatoryKeys = []; let tmpRowData = this.state.rowData; this.state.gridApi.forEachNode(function (node) { @@ -1594,7 +1413,6 @@ export class SchedulingSetCreate extends Component { for (var i = 0; i< node.columnController.gridColumns.length; i++) { let column = node.columnController.gridColumns[i]; if (column.colId === '0'){ - // rowNoColumn = column; } else { if (_.includes(tmpMandatoryKeys, column.colId)){ isValidRow = false; @@ -1615,7 +1433,7 @@ export class SchedulingSetCreate extends Component { // rowNoColumn.colDef.cellStyle = { backgroundColor: BG_COLOR}; } } else if (column.colId === 'offset_from' || column.colId === 'offset_to'){ - if ( Number(rowData[column.colId] < 0)){ + if ( typeof rowData[column.colId] === 'undefined' || (rowData[column.colId] && Number(rowData[column.colId] < 0))){ isValidRow = false; errorMsg += column.colDef.headerName+", "; // column.colDef.cellStyle = { backgroundColor: BG_COLOR}; @@ -1685,19 +1503,13 @@ export class SchedulingSetCreate extends Component { this.showIcon = true; this.dialogMsg = 'No valid Scheduling Unit found !'; this.dialogType = 'warning'; - this.onClose = () => { - this.setState({confirmDialogVisible: false}); - }; - this.setState({ - confirmDialogVisible: true, - }); - + this.onClose = () => {this.setState({confirmDialogVisible: false});}; + this.setState({confirmDialogVisible: true}); } else { this.setState({ validCount: validCount, inValidCount: inValidCount, tmpRowData: tmpRowData, - //saveDialogVisible: true, errorDisplay: errorDisplay, confirmDialogVisible: true, }); @@ -1718,68 +1530,177 @@ export class SchedulingSetCreate extends Component { } /** - * Function to create Scheduling unit + * Show the content in custom dialog */ - async saveSchedulingUnit(){ - this.validateGridAndSave(); + showDialogContent(){ + if (typeof this.state.errorDisplay === 'undefined' || this.state.errorDisplay.length === 0 ){ + return ""; + } else { + return <> <br/>Invalid Rows:- Row # and Invalid columns <br/>{this.state.errorDisplay && this.state.errorDisplay.length>0 && + this.state.errorDisplay.map((msg, index) => ( + <React.Fragment key={index+10} > + <span key={'label1-'+ index}>{msg}</span> <br /> + </React.Fragment> + ))} </> + } } + /** + * Prepare Scheduling Unit from Excel table + * @param {*} suRow + * @returns + */ + async prepareObservStrategyFromExcelValue(suRow) { + let colKeys = Object.keys(suRow); + let paramsOutput = {}; + for(const colKey of colKeys) { + let prefix = colKey.split("~"); + if(colKey.startsWith('param_') && prefix.length > 1) { + var res = Object.keys(suRow).filter(v => v.startsWith(prefix[0])); + if(res && res.length > 1) { + let res = paramsOutput[prefix[0]]; + if(prefix[1] === 'angle1' || prefix[1] === 'angle2') { + suRow[colKey] = UnitConverter.parseAngle(suRow[colKey]); + } + if(res) { + res[prefix[1]] = suRow[colKey]; + } else { + res = {}; + res[prefix[1]] = 
suRow[colKey]; + paramsOutput[prefix[0]] = res; + } + } else { + if(colKey.endsWith('Beamformers')){ + let result = suRow[colKey]; + if(result['param_0']) { + paramsOutput[prefix[0]] = result['param_0']; + } else { + paramsOutput[prefix[0]] = result; + } + } else if(colKey.endsWith('Duration')){ + paramsOutput[prefix[0]] = Number(suRow[colKey]); + } else { + paramsOutput[prefix[0]] = suRow[colKey]; + } + } + } else { + paramsOutput[prefix[0]] = suRow[colKey]; + } + } + this.setState({paramsOutput : paramsOutput}) + let observStrategy = _.cloneDeep(this.state.observStrategy); + const $refs = await $RefParser.resolve(observStrategy.template); + observStrategy.template.parameters.forEach(async(param, index) => { + $refs.set(observStrategy.template.parameters[index]['refs'][0], this.state.paramsOutput['param_' + index]); + }); + return observStrategy; + } /** - * Save/Update Scheduling Unit + * Prepare Constraint from Excel table + * @param {*} suRow + * @returns + */ + async prepareConstraintFromExcelValue(suRow) { + let between = this.getBetweenDateValue(suRow.between); + let notbetween = this.getBetweenDateValue(suRow.notbetween); + let constraint = null; + if (suRow.id > 0){ + let schedulingUnit = _.find(this.state.schedulingUnitList, {'id': suRow.id}); + constraint = schedulingUnit.scheduling_constraints_doc; + } + if ( constraint === null || constraint === 'undefined' || constraint === {}){ + constraint = this.state.schedulingConstraintsDoc; + } + if(!constraint) { + let schedulingUnit = await ScheduleService.getSchedulingUnitDraftById(1); + constraint = (schedulingUnit)? schedulingUnit.scheduling_constraints_doc : {}; + } + //If No SU Constraint create default ( maintain default struc) + constraint['scheduler'] = suRow.scheduler; + if (suRow.scheduler === 'dynamic' || suRow.scheduler === 'online'){ + if (this.isNotEmpty(suRow.timeat)) { + delete constraint.time.at; + } + + if (!this.isNotEmpty(suRow.timeafter)) { + delete constraint.time.after; + } + + if (!this.isNotEmpty(suRow.timebefore)) { + delete constraint.time.before; + } + } + else { + //mandatory + constraint.time.at = `${moment(suRow.timeat).format(UIConstants.UTC_DATE_TIME_MS_FORMAT, { trim: false })}Z`; + //optional + if (!this.isNotEmpty(suRow.timeafter)) { + delete constraint.time.after; + } else { + constraint.time.after = `${moment(suRow.timeafter).format(UIConstants.UTC_DATE_TIME_MS_FORMAT, { trim: false })}Z`; + } + + if (!this.isNotEmpty(suRow.timebefore)) { + delete constraint.time.before; + } else { + constraint.time.before = `${moment(suRow.timebefore).format(UIConstants.UTC_DATE_TIME_MS_FORMAT, { trim: false })}Z`; + } + } + + if (this.isNotEmpty(between)){ + constraint.time.between = between; + } + if (this.isNotEmpty(notbetween)){ + constraint.time.not_between = notbetween; + } + let dailyValueSelected = _.split(suRow.daily, ","); + this.state.daily.forEach(daily => { + if (_.includes(dailyValueSelected, daily)){ + constraint.daily[daily] = true; + } else { + constraint.daily[daily] = false; + } + }) + let min_distance_res = {}; + min_distance_res['sun'] = suRow.md_sun; + min_distance_res['moon'] = suRow.md_moon; + min_distance_res['jupiter'] = suRow.md_jupiter; + constraint.sky.min_distance = min_distance_res; + + let transit_offset_res = {}; + transit_offset_res['from'] = +suRow.offset_from; + transit_offset_res['to'] = +suRow.offset_to; + if (transit_offset_res){ + constraint.sky.transit_offset= transit_offset_res; + } + + constraint.sky.min_target_elevation = suRow.min_target_elevation; + 
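Editor's note: a regularized sketch of the time-window rules applied a few lines above (function and parameter names are mine, and fmt stands in for the moment(...).format(...) calls): dynamic/online scheduling never carries time.at, fixed-time scheduling requires it, and after/before survive only when their cells are filled in.

function applyTimeWindow(time, scheduler, row, isNotEmpty, fmt) {
    if (scheduler === 'dynamic' || scheduler === 'online') {
        delete time.at;                               // 'at' is meaningless for dynamic scheduling
    } else {
        time.at = fmt(row.timeat);                    // mandatory for fixed-time scheduling
    }
    if (isNotEmpty(row.timeafter)) { time.after = fmt(row.timeafter); } else { delete time.after; }
    if (isNotEmpty(row.timebefore)) { time.before = fmt(row.timebefore); } else { delete time.before; }
    return time;
}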
constraint.sky.min_calibrator_elevation = suRow.min_calibrator_elevation; + + return constraint; + } + + /** + * Save/Update Scheduling Unit(s) */ - async saveSU() { + async saveSU() { let newSUCount = 0; let existingSUCount = 0; let isUpdated = true; try{ this.setState({ - // saveDialogVisible: false, confirmDialogVisible: false, showSpinner: true }); let newSU = this.state.schedulingUnit; - let parameters = this.state.schedulingUnitList[0]['requirements_doc'].parameters; - let columnMap = this.state.columnMap; - + let suStatus = []; for(const suRow of this.state.rowData){ if (!suRow['isValid']){ continue; } - let validRow = true; - let paramsOutput = {}; - let index = 0; - for(const parameter of parameters){ - let paramOutput = {}; - let result = columnMap[parameter.name]; - let resultKeys = Object.keys(result); - resultKeys.forEach(key => { - if (key === 'angle1') { - if (!Validator.validateTime(suRow[result[key]])) { - validRow = false; - return; - } - paramOutput[key] = UnitConverter.parseAngle(suRow[result[key]]); - } else if (key === 'angle2'){ - if (!Validator.validateAngle(suRow[result[key]])){ - validRow = false; - return; - } - paramOutput[key] = UnitConverter.parseAngle(suRow[result[key]]); - } else if (key === 'angle3'){ - paramOutput[key] = Number(suRow[result[key]]); - - } else { - paramOutput[key] = suRow[result[key]]; - } - }) - paramsOutput['param_'+index] = paramOutput; - index++; - } - if (!validRow){ - continue; - } + let observStrategy = await this.prepareObservStrategyFromExcelValue(suRow); //Stations let sgCellValue = suRow.stations; @@ -1792,19 +1713,12 @@ export class SchedulingSetCreate extends Component { if (sgValue && sgValue[0].length>0){ let stationArray = _.split(sgValue[0], ","); tmpStationGroup['stations'] = stationArray; - tmpStationGroup['max_nr_missing'] = Number(sgValue[1]); + let missingStation = (sgValue[1])?sgValue[1]:0; + tmpStationGroup['max_nr_missing'] = Number(missingStation); tmpStationGroups.push(tmpStationGroup); } - }) - - let observStrategy = _.cloneDeep(this.state.observStrategy); - const $refs = await $RefParser.resolve(observStrategy.template); - observStrategy.template.parameters.forEach(async(param, index) => { - let key = observStrategy.template.parameters[index]['refs'][0]; - let fieldValue = paramsOutput['param_' + index]; - let value = (key.endsWith('duration'))? 
parseInt(fieldValue['param_' + index]) : fieldValue; - $refs.set(observStrategy.template.parameters[index]['refs'][0], value); }); + if ( suRow.id === 0) { for (const taskName in observStrategy.template.tasks) { let task = observStrategy.template.tasks[taskName]; @@ -1813,95 +1727,17 @@ export class SchedulingSetCreate extends Component { } } } - - let between = this.getBetWeenDateValue(suRow.between); - let notbetween = this.getBetWeenDateValue(suRow.notbetween); - let isNewConstraint = false; let newConstraint = {}; - let constraint = null; - if (suRow.id > 0){ - newSU = _.find(this.state.schedulingUnitList, {'id': suRow.id}); - constraint = newSU.scheduling_constraints_doc; - } - - if ( constraint === null || constraint === 'undefined' || constraint === {}){ - constraint = this.state.schedulingConstraintsDoc; + let constraint = await this.prepareConstraintFromExcelValue(suRow); + if (suRow.id === 0){ isNewConstraint = true; } - - //If No SU Constraint create default ( maintain default struc) - constraint['scheduler'] = suRow.scheduler; - if (suRow.scheduler === 'dynamic' || suRow.scheduler === 'online'){ - if (this.isNotEmpty(suRow.timeat)) { - delete constraint.time.at; - } /*else { - constraint.time.at = `${moment(suRow.timeat).format("YYYY-MM-DDTHH:mm:ss.SSSSS", { trim: false })}Z`; - }*/ - - if (!this.isNotEmpty(suRow.timeafter)) { - delete constraint.time.after; - } /*else { - constraint.time.after = `${moment(suRow.timeafter).format("YYYY-MM-DDTHH:mm:ss.SSSSS", { trim: false })}Z`; - }*/ - - if (!this.isNotEmpty(suRow.timebefore)) { - delete constraint.time.before; - } /*else { - constraint.time.before = `${moment(suRow.timebefore).format("YYYY-MM-DDTHH:mm:ss.SSSSS", { trim: false })}Z`; - }*/ - } - else { - //mandatory - constraint.time.at = `${moment(suRow.timeat).format(UIConstants.UTC_DATE_TIME_MS_FORMAT, { trim: false })}Z`; - //optional - if (!this.isNotEmpty(suRow.timeafter)) { - delete constraint.time.after; - } else { - constraint.time.after = `${moment(suRow.timeafter).format(UIConstants.UTC_DATE_TIME_MS_FORMAT, { trim: false })}Z`; - } - - if (!this.isNotEmpty(suRow.timebefore)) { - delete constraint.time.before; - } else { - constraint.time.before = `${moment(suRow.timebefore).format(UIConstants.UTC_DATE_TIME_MS_FORMAT, { trim: false })}Z`; - } - } - - if (this.isNotEmpty(between)){ - constraint.time.between = between; - } - if (this.isNotEmpty(notbetween)){ - constraint.time.not_between = notbetween; - } - let dailyValueSelected = _.split(suRow.daily, ","); - this.state.daily.forEach(daily => { - if (_.includes(dailyValueSelected, daily)){ - constraint.daily[daily] = true; - } else { - constraint.daily[daily] = false; - } - }) - let min_distance_res = {}; - min_distance_res['sun'] = suRow.md_sun; - min_distance_res['moon'] = suRow.md_moon; - min_distance_res['jupiter'] = suRow.md_jupiter; - constraint.sky.min_distance = min_distance_res; - - let transit_offset_res = {}; - transit_offset_res['from'] = +suRow.offset_from; - transit_offset_res['to'] = +suRow.offset_to; - if (transit_offset_res){ - constraint.sky.transit_offset= transit_offset_res; - } - constraint.sky.min_target_elevation = suRow.min_target_elevation; - constraint.sky.min_calibrator_elevation = suRow.min_calibrator_elevation; - - UnitConversion.degreeToRadians(constraint.sky); + UnitConverter.degreeToRadians(constraint.sky); if (isNewConstraint){ - newSU.scheduling_constraints_doc = constraint; + newSU['scheduling_constraints_doc'] = constraint; } if (suRow.id === 0){ @@ -1910,7 +1746,7 @@ export class 
SchedulingSetCreate extends Component { newConstraint['constraint'] = {'url':''}; newConstraint.constraint.url = this.state.constraintUrl; } - + let suUpdateStatus = {}; if (suRow.id > 0 && this.isNotEmpty(suRow.suname) && this.isNotEmpty(suRow.sudesc)){ newSU = _.find(this.state.schedulingUnitList, {'id': suRow.id}); newSU['name'] = suRow.suname; @@ -1920,10 +1756,17 @@ export class SchedulingSetCreate extends Component { if(taskdata){ taskDrafts = taskdata.data.results; } + suUpdateStatus['suName'] = suRow.suname; + suUpdateStatus['action'] = 'Update'; let updateSu = await ScheduleService.updateSUDraftFromObservStrategy(observStrategy, newSU, taskDrafts, this.state.tasksToUpdate, tmpStationGroups); + suUpdateStatus['suStatus']= "Success"; + suUpdateStatus['taskName']= updateSu.taskName; if (updateSu && !updateSu.isSUUpdated) { isUpdated = false; - } + suUpdateStatus['taskStatus']= "Failed"; + } else { + suUpdateStatus['taskStatus']= "Success"; + } existingSUCount++; } else if (suRow.id === 0 && this.isNotEmpty(suRow.suname) && this.isNotEmpty(suRow.sudesc)){ @@ -1933,20 +1776,28 @@ export class SchedulingSetCreate extends Component { scheduling_constraints_template_id: newSU['scheduling_constraints_template_id'], scheduling_set_id: newSU['scheduling_set_id'] } + suUpdateStatus['suName'] = suRow.suname; + suUpdateStatus['action'] = 'Create'; let updateSu = await ScheduleService.saveSUDraftFromObservStrategy(observStrategy, newSchedulueUnit, newConstraint, tmpStationGroups); + suUpdateStatus['suStatus']= "Success"; + suUpdateStatus['taskName']= updateSu.taskName; if (updateSu && !updateSu.isSUUpdated) { isUpdated = false; + suUpdateStatus['taskStatus']= "Failed"; + } else { + suUpdateStatus['taskStatus']= "Success"; } newSUCount++; } + suStatus.push(suUpdateStatus); } if ((newSUCount+existingSUCount) > 0){ - //const dialog = {header: 'Success', detail: '['+newSUCount+'] Scheduling Units are created & ['+existingSUCount+'] Scheduling Units are updated successfully.'}; - // this.setState({ showSpinner: false, dialogVisible: true, dialog: dialog, isAGLoading: true, copyHeader: false, rowData: []}); + this.setState({suStatus:suStatus}); this.dialogType = "success"; this.dialogHeader = "Success"; this.showIcon = true; + this.dialogWidth = "60vw"; if (isUpdated) { this.dialogMsg = '['+newSUCount+'] Scheduling Units are created & ['+existingSUCount+'] Scheduling Units are updated successfully.'; } else { @@ -1954,9 +1805,9 @@ export class SchedulingSetCreate extends Component { this.dialogMsg = '['+newSUCount+'] Scheduling Units are created & ['+existingSUCount+'] Scheduling Units are updated successfully, and there are some Schedule Unit/Task failed to create/update'; } - this.dialogContent = ""; - this.onCancel = this.close; - this.onClose = this.close; + this.dialogContent = this.getSchedulingDialogContent; + this.onCancel = this.reset; + this.onClose = this.reset; this.callBackFunction = this.reset; this.setState({isDirty : false, showSpinner: false, confirmDialogVisible: true, /*dialog: dialog,*/ isAGLoading: true, copyHeader: false, rowData: []}); publish('edit-dirty', false); @@ -1965,43 +1816,69 @@ export class SchedulingSetCreate extends Component { publish('edit-dirty', false); this.growl.show({severity: 'error', summary: 'Warning', detail: 'No Scheduling Units create/update '}); } - }catch(err){ + } catch(err){ this.growl.show({severity: 'error', summary: 'Error Occured', detail: 'Unable to create/update Scheduling Units'}); this.setState({showSpinner: false}); } } - + /** - * Check is 
empty string - * @param {*} value + * Prepare Scheduling Unit(s) details to show on confirmation dialog */ - isNotEmpty(value){ - if ( value === null || value === undefined || value.length === 0 ){ - return false; - } else { - return true; - } + getSchedulingDialogContent() { + let suStatus = this.state.suStatus; + return <> + {suStatus.length > 0 && + <div style={{marginTop: '1em'}}> + <b>Scheduling Unit(s) & Task(s) status</b> + <DataTable value={suStatus} resizableColumns columnResizeMode="expand" className="card" style={{paddingLeft: '0em'}}> + <Column field="suName" header="Scheduling Unit Name"></Column> + <Column field="action" header="Action"></Column> + <Column field="suStatus" header="Scheduling Unit Status"></Column> + <Column field="taskStatus" header="Task(s) Status"></Column> + </DataTable> + </div> + } + </> } /** * Convert the date to string value for Between And Not-Between Columns * @param {*} dates */ - getBetweenStringValue(dates){ + getBetweenStringValue(dates){ let returnDate = ''; if (dates){ dates.forEach(utcDateArray => { returnDate += moment.utc(utcDateArray.from).format(UIConstants.CALENDAR_DATETIME_FORMAT)+","; returnDate += moment.utc(utcDateArray.to).format(UIConstants.CALENDAR_DATETIME_FORMAT)+"|"; - }) + }); } return returnDate; } + /** + * Get Daily column value + * @param {*} daily + */ + fetchDailyFieldValue(daily){ + let returnValue = []; + if (daily.require_day === true){ + returnValue.push('require_day'); + } + if (daily.require_night === true){ + returnValue.push('require_night'); + } + if (daily.avoid_twilight === true){ + returnValue.push('avoid_twilight'); + } + return returnValue; + } + /** * convert String to Date value for Between And Not-Between Columns */ - getBetWeenDateValue(betweenValue){ + getBetweenDateValue(betweenValue){ let returnDate = []; if (betweenValue){ let rowDateArray = _.split(betweenValue, "|"); @@ -2019,9 +1896,31 @@ export class SchedulingSetCreate extends Component { } /** + * warn before cancel the page if any changes detected + */ + checkIsDirty() { + if( this.state.isDirty ){ + this.showIcon = true; + this.dialogType = "confirmation"; + this.dialogHeader = "Add Multiple Scheduling Unit(s)"; + this.dialogMsg = "Do you want to leave this page? 
Your changes may not be saved."; + this.dialogContent = ""; + this.dialogHeight = '5em'; + this.callBackFunction = this.cancelCreate; + this.onClose = this.close; + this.onCancel = this.close; + this.setState({ + confirmDialogVisible: true, + }); + } else { + this.cancelCreate(); + } + } + + /** * Refresh the grid with updated data */ - async reset() { + async reset() { let schedulingUnitList = await ScheduleService.getSchedulingBySet(this.state.selectedSchedulingSetId); schedulingUnitList = _.filter(schedulingUnitList,{'observation_strategy_template_id': this.state.observStrategy.id}) ; this.setState({ @@ -2035,167 +1934,221 @@ export class SchedulingSetCreate extends Component { this.state.gridApi.setRowData(this.state.rowData); this.state.gridApi.redrawRows(); } - - /** + + /** * Cancel SU creation and redirect */ - cancelCreate() { + cancelCreate() { + publish('edit-dirty', false); this.setState({redirect: '/schedulingunit'}); } - async onGridReady (params) { - await this.setState({ - gridApi:params.api, - gridColumnApi:params.columnApi, - }) - this.state.gridApi.hideOverlay(); - } - - async onTopGridReady (params) { - await this.setState({ - topGridApi:params.api, - topGridColumnApi:params.columnApi, - }) - this.state.topGridApi.hideOverlay(); + /** + * Set state to copy the table header to clipboard + * @param {*} value + */ + async copyHeader(value) { + await this.setState({'copyHeader': value}); } - async setNoOfSUint(value){ - this.setState({isDirty: true, isAGLoading: true}); - publish('edit-dirty', true); - if (value >= 0 && value < 501){ - await this.setState({ - noOfSU: value - }) - } else { - await this.setState({ - noOfSU: 500 - }) - } - - let noOfSU = this.state.noOfSU; - this.tmpRowData = []; - if (this.state.rowData && this.state.rowData.length >0 && this.state.emptyRow) { - if (this.state.totalCount <= noOfSU) { - for (var count = 0; count < noOfSU; count++) { - if(this.state.rowData.length > count ) { - this.tmpRowData.push(_.cloneDeep(this.state.rowData[count])); - } else { - this.tmpRowData.push(_.cloneDeep(this.state.emptyRow)); - } - } - this.setState({ - rowData: this.tmpRowData, - noOfSU: noOfSU, - isAGLoading: false + + /** + * Copy the table header to clipboard + */ + async copyOnlyHeader() { + this.setState({ fade: true }); + let clipboardData = ''; + if (this.state.gridColumnApi) { + var columnsName = this.state.gridColumnApi.getAllGridColumns(); + var line = ''; + if( columnsName ) { + columnsName.map( column => { + if ( column.colId !== '0'){ + line += column.colDef.headerName + '\t'; + } }); - } else { - this.setState({ - isAGLoading: false - }) } - - } else { - this.setState({ - isAGLoading: false - }); + line = _.trim(line); + clipboardData += line + '\r\n'; + clipboardData = _.trim(clipboardData); + const queryOpts = { name: 'clipboard-write', allowWithoutGesture: true }; + await navigator.permissions.query(queryOpts); + await navigator.clipboard.writeText(clipboardData); + this.growl.show({severity: 'success', summary: '', detail: 'Header copied to clipboard '}); } } - validateForm(fieldName) { - let validForm = false; - let errors = this.state.errors; - let validFields = this.state.validFields; - if (fieldName) { - delete errors[fieldName]; - delete validFields[fieldName]; - if (this.formRules[fieldName]) { - const rule = this.formRules[fieldName]; - const fieldValue = this.state.schedulingUnit[fieldName]; - if (rule.required) { - if (!fieldValue) { - errors[fieldName] = rule.message?rule.message:`${fieldName} is required`; - } else { - 
validFields[fieldName] = true; - } - } - } - } else { - errors = {}; - validFields = {}; - for (const fieldName in this.formRules) { - const rule = this.formRules[fieldName]; - const fieldValue = this.state.schedulingUnit[fieldName]; - if (rule.required) { - if (!fieldValue) { - errors[fieldName] = rule.message?rule.message:`${fieldName} is required`; - } else { - validFields[fieldName] = true; - } - } - } - } - this.setState({errors: errors, validFields: validFields}); - if (Object.keys(validFields).length === Object.keys(this.formRules).length) { - validForm = true; + /** + * Read Data from clipboard + */ + async readClipBoard(){ + try{ + const queryOpts = { name: 'clipboard-read', allowWithoutGesture: true }; + await navigator.permissions.query(queryOpts); + let data = await navigator.clipboard.readText(); + return data; + }catch(err){ + console.log("Error",err); } - return validForm; - } + } /** - * This function is mainly added for Unit Tests. If this function is removed Unit Tests will fail. + * Copy data to/from clipboard + * @param {*} e */ - validateEditor() { - return this.validEditor?true:false; + async clipboardEvent(e){ + var key = e.which || e.keyCode; + var ctrl = e.ctrlKey ? e.ctrlKey : ((key === 17) ? true : false); + if ( key === 67 && ctrl ) { + //Ctrl+C + this.copyToClipboard(); + } + else if ( key === 86 && ctrl ) { + // Ctrl+V + this.copyFromClipboard(); + } } - + /** - * Show the content in custom dialog + * Function to copy the data to clipboard */ - showDialogContent(){ - if (typeof this.state.errorDisplay === 'undefined' || this.state.errorDisplay.length === 0 ){ - return ""; + async copyToClipboard(){ + var columnsName = this.state.gridColumnApi.getAllGridColumns(); + var selectedRows = this.state.gridApi.getSelectedRows(); + let clipboardData = ''; + if ( this.state.copyHeader ) { + var line = ''; + columnsName.map( column => { + if ( column.colId !== '0'){ + line += column.colDef.headerName + '\t'; + } + }) + line = _.trim(line); + clipboardData += line + '\r\n'; } - else { - return <> <br/>Invalid Rows:- Row # and Invalid columns <br/>{this.state.errorDisplay && this.state.errorDisplay.length>0 && - this.state.errorDisplay.map((msg, index) => ( - <React.Fragment key={index+10} > - <span key={'label1-'+ index}>{msg}</span> <br /> - </React.Fragment> - ))} </> + for(const rowData of selectedRows){ + var line = ''; + for(const key of this.state.colKeyOrder){ + let value = ' '; + if(key.endsWith('Beamformers')) { + let tmp = rowData[key]; + if(tmp['param_0']) { + value = JSON.stringify(tmp['param_0']); + } else { + value = JSON.stringify(tmp); + } + } else { + value = rowData[key]; + } + if(value === undefined) { + value = ' '; + } + line += value+ '\t'; + } + line = line.slice(0, -2); + clipboardData += line + '\r\n'; } + clipboardData = clipboardData.slice(0, -4); + + const queryOpts = { name: 'clipboard-write', allowWithoutGesture: true }; + await navigator.permissions.query(queryOpts); + await navigator.clipboard.writeText(clipboardData); + const headerText = (this.state.copyHeader) ?'with Header' : ''; + this.growl.show({severity: 'success', summary: '', detail: selectedRows.length+' row(s) copied to clipboard '+headerText }); } /** - * Set default value for empty rows + * Function to copy the data from clipboard */ - async setDefaultCellValue(){ - if(this.state.rowData && this.state.rowData.length > 0){ - if (!this.state.showDefault){ - let tmpRowData = this.state.rowData; - let defaultValueColumns = Object.keys(this.state.defaultCellValues); - await 
+    /**
+     * Copy the data from the clipboard into the grid rows
+     */
+    async copyFromClipboard(){
+        try {
+            var selectedRows = this.state.gridApi.getSelectedNodes();
+            this.tmpRowData = this.state.rowData;
+            let dataRowCount = this.state.totalCount;
+            // Read clipboard data
+            let clipboardData = await this.readClipBoard();
+            let selectedRowIndex = 0;
+            if (selectedRows){
+                // forEach instead of an awaited map: the callbacks are synchronous
+                selectedRows.forEach(selectedRow => {
+                    selectedRowIndex = selectedRow.rowIndex;
+                    if (clipboardData){
+                        let suRows = clipboardData.split("\n");
+                        suRows.forEach(line => {
+                            if (!line) {
+                                return;     // skip empty (e.g. trailing) lines
+                            }
+                            let suGridRowData = {};
+                            suGridRowData['id'] = 0;
+                            suGridRowData['isValid'] = true;
+                            if ( this.tmpRowData.length <= selectedRowIndex ) {
+                                // clone so that new rows do not share one object
+                                this.tmpRowData.push(_.cloneDeep(this.state.emptyRow));
+                            }
+                            let colCount = 0;
+                            let suRow = line.split("\t");
+                            for(const key of this.state.colKeyOrder){
+                                if(key === 'param_3~Beamformers') {
+                                    let cellValue = {};
+                                    cellValue['param_0'] = JSON.parse(suRow[colCount]);
+                                    suGridRowData[key] = cellValue;
+                                } else {
+                                    suGridRowData[key] = suRow[colCount];
+                                }
+                                colCount++;
+                            }
+                            if (this.tmpRowData[selectedRowIndex].id > 0 ) {
+                                suGridRowData['id'] = this.tmpRowData[selectedRowIndex].id;
+                            }
+                            this.tmpRowData[selectedRowIndex] = suGridRowData;
+                            selectedRowIndex++;
+                        })
+                    }
+                });
+                dataRowCount = selectedRowIndex;
+                let tmpNoOfSU = this.state.noOfSU;
+                if (dataRowCount >= tmpNoOfSU){
+                    tmpNoOfSU = dataRowCount;
+                    // Create additional empty rows at the end (cloned, not shared)
+                    for(let i = this.tmpRowData.length; i <= tmpNoOfSU; i++){
+                        this.tmpRowData.push(_.cloneDeep(this.state.emptyRow));
+                    }
+                }
+                await this.setState({
+                    rowData: this.tmpRowData,
+                    noOfSU: this.tmpRowData.length,
+                    totalCount: dataRowCount,
+                    isDirty: true
+                });
+                publish('edit-dirty', true);
+                this.state.gridApi.setRowData(this.state.rowData);
+                this.state.gridApi.redrawRows();
+            }
+        }
+        catch (err) {
+            console.error('Error: ', err);
+        }
+    }
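Editor's note: the per-line mapping inside `copyFromClipboard()` is easier to test as a pure function. The sketch below mirrors it; the `param_3~Beamformers` special case and the `colKeyOrder` contract come from this patch, while the function name itself is hypothetical.

```js
// Pure helper equivalent to the per-line mapping above, for clarity and unit testing.
function tsvLineToRow(line, colKeyOrder) {
    const cells = line.split('\t');
    const row = { id: 0, isValid: true };
    colKeyOrder.forEach((key, index) => {
        row[key] = (key === 'param_3~Beamformers')
            ? { param_0: JSON.parse(cells[index]) }   // beamformer cells travel as JSON
            : cells[index];
    });
    return row;
}

// Example: tsvLineToRow('mysu\tmy description', ['suname', 'sudesc'])
// -> { id: 0, isValid: true, suname: 'mysu', sudesc: 'my description' }
```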
Your changes may not be saved."; + this.dialogContent = ""; + this.callBackFunction = functionName; + this.onClose = this.close; + this.onCancel = this.close; + this.setState({ + confirmDialogVisible: true, + }); } /** * Reset the top table values */ - resetCommonData(){ + resetCommonData(){ let tmpData = [this.state.defaultCommonRowData]; //[...[this.state.emptyRow]]; let gRowData = {}; for (const key of _.keys(tmpData[0])) { @@ -2211,7 +2164,7 @@ export class SchedulingSetCreate extends Component { this.setState({commonRowData: [gRowData]}); } - /** + /** * Reload the data from API */ reload(){ @@ -2233,7 +2186,7 @@ export class SchedulingSetCreate extends Component { } if (!this.state.applyEmptyValue && isNotEmptyRow ) { this.growl.show({severity: 'warn', summary: 'Warning', detail: 'Please enter value in the column(s) above to apply'}); - } else { + } else { this.dialogType = "confirmation"; this.dialogHeader = "Warning"; this.showIcon = true; @@ -2245,7 +2198,7 @@ export class SchedulingSetCreate extends Component { this.onClose = this.close; this.onCancel =this.close; this.setState({confirmDialogVisible: true}); - } + } } /** @@ -2333,7 +2286,7 @@ export class SchedulingSetCreate extends Component { if (this.applyToEmptyRowOnly && (row['id'] > 0 || (row['suname'] !== '' && row['sudesc'] !== '') ) ){ continue; } - Object.keys(row).forEach(key => { + this.colKeyOrder.forEach(key => { if (key !== 'id') { let value = grow['gdef_'+key]; if( this.state.applyEmptyValue) { @@ -2349,64 +2302,6 @@ export class SchedulingSetCreate extends Component { } } - /** - * Update isDirty when ever cell value updated in AG grid - * @param {*} params - */ - cellValueChageEvent(params) { - if( params.value && !_.isEqual(params.value, params.oldValue)) { - this.setState({isDirty: true}); - publish('edit-dirty', true); - } - } - - /** - * warn before cancel the page if any changes detected - */ - checkIsDirty() { - if( this.state.isDirty ){ - this.showIcon = true; - this.dialogType = "confirmation"; - this.dialogHeader = "Add Multiple Scheduling Unit(s)"; - this.dialogMsg = "Do you want to leave this page? 
Your changes may not be saved."; - this.dialogContent = ""; - this.dialogHeight = '5em'; - this.callBackFunction = this.cancelCreate; - this.onClose = this.close; - this.onCancel = this.close; - this.setState({ - confirmDialogVisible: true, - }); - } else { - this.cancelCreate(); - } - } - - async refreshSchedulingSet(){ - this.schedulingSets = await ScheduleService.getSchedulingSets(); - const filteredSchedluingSets = _.filter(this.schedulingSets, {'project_id': this.state.schedulingUnit.project}); - this.setState({saveDialogVisible: false, confirmDialogVisible: false, schedulingSets: filteredSchedluingSets}); - } - - close(){ - this.setState({confirmDialogVisible: false}); - } - - showAddSchedulingSet() { - this.showIcon = false; - this.dialogType = "success"; - this.dialogHeader = "Add Scheduling Set’"; - this.dialogMsg = <SchedulingSet project={this.state.selectedProject[0]} onCancel={this.refreshSchedulingSet} />; - this.dialogContent = ""; - this.showIcon = false; - this.callBackFunction = this.refreshSchedulingSet; - this.onClose = this.refreshSchedulingSet; - this.onCancel = this.refreshSchedulingSet; - this.setState({ - confirmDialogVisible: true, - }); - } - render() { if (this.state.redirect) { return <Redirect to={ {pathname: this.state.redirect} }></Redirect> @@ -2596,7 +2491,7 @@ export class SchedulingSetCreate extends Component { </div> </> } - <CustomDialog type={this.dialogType} visible={this.state.confirmDialogVisible} width="40vw" height={this.dialogHeight} + <CustomDialog type={this.dialogType} visible={this.state.confirmDialogVisible} width={this.dialogWidth} height={this.dialogHeight} header={this.dialogHeader} message={this.dialogMsg} content={this.dialogContent} onClose={this.onClose} onCancel={this.onCancel} onSubmit={this.callBackFunction} showIcon={this.showIcon} actions={this.actions}> diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/schedulingset.create.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/schedulingset.create.js index 2b34d370565a6284dde6d7b40b222befe78a564c..7bba8ad5d41b755e689d533c046bf9ce7315192d 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/schedulingset.create.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/schedulingset.create.js @@ -4,8 +4,7 @@ import { InputTextarea } from 'primereact/inputtextarea'; import UIConstants from '../../utils/ui.constants'; import { CustomDialog } from '../../layout/components/CustomDialog'; import ScheduleService from '../../services/schedule.service'; -import { Growl } from 'primereact/components/growl/Growl'; -import { appGrowl } from './../../layout/components/AppGrowl'; +import { appGrowl } from '../../layout/components/AppGrowl'; export class SchedulingSet extends Component { @@ -28,18 +27,18 @@ export class SchedulingSet extends Component { this.actions = [ {id:"yes", title: 'Save', callback: async ()=>{ let schedulingSet = this.state.schedulingSet; if (!this.isNotEmpty(schedulingSet.name) || !this.isNotEmpty(schedulingSet.description)){ - this.growl.show({severity: 'error', summary: 'Error Occured', detail: 'Name and Description are mandatory'}); + appGrowl.show({severity: 'error', summary: 'Error Occured', detail: 'Name and Description are mandatory'}); } else { schedulingSet['generator_doc'] = {}; schedulingSet['scheduling_unit_drafts'] = []; const suSet = await ScheduleService.saveSchedulingSet(schedulingSet); - if (suSet.id && suSet.id !== null) { + if (suSet.id !== null) { appGrowl.show({severity: 'success', summary: 'Success', detail: 
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/schedulingset.create.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/schedulingset.create.js
index 2b34d370565a6284dde6d7b40b222befe78a564c..7bba8ad5d41b755e689d533c046bf9ce7315192d 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/schedulingset.create.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/schedulingset.create.js
@@ -4,8 +4,7 @@ import { InputTextarea } from 'primereact/inputtextarea';
 import UIConstants from '../../utils/ui.constants';
 import { CustomDialog } from '../../layout/components/CustomDialog';
 import ScheduleService from '../../services/schedule.service';
-import { Growl } from 'primereact/components/growl/Growl';
-import { appGrowl } from './../../layout/components/AppGrowl';
+import { appGrowl } from '../../layout/components/AppGrowl';

 export class SchedulingSet extends Component {

@@ -28,18 +27,18 @@ export class SchedulingSet extends Component {
     this.actions = [ {id:"yes", title: 'Save', callback: async ()=>{
         let schedulingSet = this.state.schedulingSet;
         if (!this.isNotEmpty(schedulingSet.name) || !this.isNotEmpty(schedulingSet.description)){
-            this.growl.show({severity: 'error', summary: 'Error Occured', detail: 'Name and Description are mandatory'});
+            appGrowl.show({severity: 'error', summary: 'Error Occurred', detail: 'Name and Description are mandatory'});
         }   else {
             schedulingSet['generator_doc'] = {};
             schedulingSet['scheduling_unit_drafts'] = [];
             const suSet = await ScheduleService.saveSchedulingSet(schedulingSet);
-            if (suSet.id && suSet.id !== null) {
+            if (suSet.id) {
                 appGrowl.show({severity: 'success', summary: 'Success', detail: 'Scheduling Set is created successfully.'});
                 this.setState({suSet: suSet, dialogVisible: true, });
                 this.props.onCancel();
-            } /* else {
-                this.growl.show({severity: 'error', summary: 'Error Occured', detail: schedulingSet.message || 'Unable to save Scheduling Set'});
-            } */
+            } else {
+                appGrowl.show({severity: 'error', summary: 'Error Occurred', detail: schedulingSet.message || 'Unable to save Scheduling Set'});
+            }
         }
     }},
     {id:"no", title: 'Cancel', callback: this.props.onCancel} ];
@@ -79,21 +78,7 @@ export class SchedulingSet extends Component {
                 }
             }
         }
-        } /* else {
-            errors = {};
-            validFields = {};
-            for (const fieldName in this.formRules) {
-                const rule = this.formRules[fieldName];
-                const fieldValue = this.state.schedulingSet[fieldName];
-                if (rule.required) {
-                    if (!fieldValue) {
-                        errors[fieldName] = rule.message?rule.message:`${fieldName} is required`;
-                    } else {
-                        validFields[fieldName] = true;
-                    }
-                }
-            }
-        }*/
+        }
         this.setState({errors: errors, validFields: validFields});
         if (Object.keys(validFields).length === Object.keys(this.formRules).length) {
             validForm = true;
@@ -112,7 +97,6 @@ export class SchedulingSet extends Component {
         let schedulingSet = this.state.schedulingSet;
         schedulingSet[key] = value;
         let isValid = this.validateForm(key);
-        // isValid= this.validateForm('project');
         this.setState({schedulingSet: schedulingSet, validForm: isValid});
     }

@@ -129,7 +113,7 @@ export class SchedulingSet extends Component {
             const dialog = {header: 'Success', detail: 'Scheduling Set is created successfully.'};
             this.setState({suSet: suSet, dialogVisible: false, dialog: dialog});
         } else {
-            this.growl.show({severity: 'error', summary: 'Error Occured', detail: schedulingSet.message || 'Unable to save Scheduling Set'});
+            appGrowl.show({severity: 'error', summary: 'Error Occurred', detail: schedulingSet.message || 'Unable to save Scheduling Set'});
         }
     }

@@ -148,10 +132,10 @@ export class SchedulingSet extends Component {
             return true;
         }
     }
+
     render() {
         return (
             <>
-                <Growl ref={(el) => this.growl = el} />
                 <CustomDialog type="success" visible={this.state.dialogVisible} width="60vw"
                     header={'Add Scheduling Set'}
                     message= {
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Task/dataproduct.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Task/dataproduct.js
index c4ff0f2b0f63db9291702c4e72c206593119cd31..11ef48d543dcce7b4443e9eef226e42f8a50d810 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Task/dataproduct.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Task/dataproduct.js
@@ -64,7 +64,7 @@ export class DataProduct extends Component{
         for(const id of subTaskIds){
             let storageLocation = '';
             await DataProductService.getSubtask(id).then( subtask =>{
-                storageLocation = subtask.data.cluster_value;
+                storageLocation = subtask.data.cluster_name;
             })
             //Fetch data product for Input Subtask and Output Subtask
             await DataProductService.getSubtaskInputDataproduct(id).then(async inputdata =>{
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js
index 09e3008e61628bff292911ff8e50ea8bb3f6a330..ab2a1b29b545745debd413a76238af94760b0525 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js
@@ -27,6 +27,7 @@ import { OverlayPanel } from 'primereact/overlaypanel';
 import { RadioButton } from 'primereact/radiobutton';
 import { TieredMenu } from 'primereact/tieredmenu';
 import { MultiSelect } from 'primereact/multiselect';
+import { Button } from 
'primereact/button'; //import { TRUE } from 'node-sass'; @@ -71,7 +72,8 @@ export class TimelineView extends Component { selectedStationGroup: [], //Station Group(core,international,remote) reservationFilter: null, showSUs: true, - showTasks: false + showTasks: false, + groupByProject: false } this.STATUS_BEFORE_SCHEDULED = ['defining', 'defined', 'schedulable']; // Statuses before scheduled to get station_group this.allStationsGroup = []; @@ -147,7 +149,8 @@ export class TimelineView extends Component { moment.utc(suBlueprint.stop_time).isSameOrAfter(defaultEndTime)))) { items.push(this.getTimelineItem(suBlueprint)); if (!_.find(group, {'id': suDraft.id})) { - group.push({'id': suDraft.id, title: suDraft.name}); + group.push({'id': this.state.groupByProject?suBlueprint.project:suDraft.id, + title: this.state.groupByProject?suBlueprint.project:suDraft.name}); } suList.push(suBlueprint); } @@ -213,7 +216,7 @@ export class TimelineView extends Component { } } let item = { id: suBlueprint.id, - group: suBlueprint.suDraft.id, + group: this.state.groupByProject?suBlueprint.project:suBlueprint.suDraft.id, //title: `${suBlueprint.project} - ${suBlueprint.suDraft.name} - ${(suBlueprint.durationInSec/3600).toFixed(2)}Hrs`, title: "", project: suBlueprint.project, type: 'SCHEDULE', @@ -256,7 +259,7 @@ export class TimelineView extends Component { suId: suBlueprint.id, taskId: task.id, controlId: controlId, - group: `${suBlueprint.suDraft.id}_${task.draft_id}`, + group: `${this.state.groupByProject?suBlueprint.project:suBlueprint.suDraft.id}_${this.state.groupByProject?'observations':task.draft_id}`, // group: `${suBlueprint.suDraft.id}_Tasks`, // For single row task grouping title: '', project: suBlueprint.project, type: 'TASK', @@ -272,8 +275,10 @@ export class TimelineView extends Component { status: task.status.toLowerCase()}; items.push(item); if (!_.find(itemGroup, ['id', `${suBlueprint.suDraft.id}_${task.draft_id}`])) { - itemGroup.push({'id': `${suBlueprint.suDraft.id}_${task.draft_id}`, parent: suBlueprint.suDraft.id, - start: start_time, title: `${!this.state.showSUs?suBlueprint.suDraft.name:""} -- ${task.name}`}); + itemGroup.push({'id': `${this.state.groupByProject?suBlueprint.project:suBlueprint.suDraft.id}_${this.state.groupByProject?'observations':task.draft_id}`, + parent: this.state.groupByProject?suBlueprint.project:suBlueprint.suDraft.id, + start: start_time, + title: `${!this.state.showSUs?(this.state.groupByProject?suBlueprint.project:suBlueprint.suDraft.name):""} -- ${this.state.groupByProject?'observations':task.name}`}); } /* >>>>>> If all tasks should be shown in single row remove the above 2 lines and uncomment these lines if (!_.find(itemGroup, ['id', `${suBlueprint.suDraft.id}_Tasks`])) { @@ -458,8 +463,10 @@ export class TimelineView extends Component { items.push(timelineItem); if (!_.find(group, {'id': suBlueprint.suDraft.id})) { /* parent and start properties are added to order and display task rows below the corresponding SU row */ - group.push({'id': suBlueprint.suDraft.id, parent: suBlueprint.suDraft.id, - start: moment.utc("1900-01-01", "YYYY-MM-DD"), title: suBlueprint.suDraft.name}); + group.push({'id': this.state.groupByProject?suBlueprint.project:suBlueprint.suDraft.id, + parent: this.state.groupByProject?suBlueprint.project:suBlueprint.suDraft.id, + start: moment.utc("1900-01-01", "YYYY-MM-DD"), + title: this.state.groupByProject?suBlueprint.project:suBlueprint.suDraft.name}); } } // Add task item only in timeline view and when show task is enabled @@ -665,8 
+672,10 @@ export class TimelineView extends Component { items.push(timelineItem); if (!_.find(group, {'id': suBlueprint.suDraft.id})) { /* parent and start properties are added to order and list task rows below the SU row */ - group.push({'id': suBlueprint.suDraft.id, parent: suBlueprint.suDraft.id, - start: moment.utc("1900-01-01", "YYYY-MM-DD"), title: suBlueprint.suDraft.name}); + group.push({'id': this.state.groupByProject?suBlueprint.project:suBlueprint.suDraft.id, + parent: this.state.groupByProject?suBlueprint.project:suBlueprint.suDraft.id, + start: moment.utc("1900-01-01", "YYYY-MM-DD"), + title: this.state.groupByProject?suBlueprint.project:suBlueprint.suDraft.name}); } } if (this.state.showTasks && !this.state.stationView) { @@ -1005,6 +1014,13 @@ export class TimelineView extends Component { <label htmlFor="suOnly">Only Task</label> <RadioButton value="suTask" name="Both" inputId="bothSuTask" onChange={(e) => this.showTimelineItems(e.value)} checked={this.state.showSUs && this.state.showTasks} /> <label htmlFor="suOnly">Both</label> + + <div className="sub-header"> + {this.state.groupByProject && + <Button className="p-button-rounded toggle-btn" label="Group By SU" onClick={e => this.setState({groupByProject: false})} /> } + {!this.state.groupByProject && + <Button className="p-button-rounded toggle-btn" label="Group By Project" onClick={e => this.setState({groupByProject: true})} /> } + </div> </> } </div> @@ -1014,6 +1030,7 @@ export class TimelineView extends Component { items={this.state.items} currentUTC={this.state.currentUTC} rowHeight={this.state.stationView?50:50} + sidebarWidth={!this.state.showSUs?250:200} itemClickCallback={this.onItemClick} itemMouseOverCallback={this.onItemMouseOver} itemMouseOutCallback={this.onItemMouseOut} diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js index eb1d3364f5d71f6eb31b17ba27803daafdd6f057..b8e3ac5893333751b0e68b5113cebb0893776483 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js @@ -474,10 +474,13 @@ const ScheduleService = { // Create task drafts with updated requirement_doc schedulingUnit = await this.createSUTaskDrafts(schedulingUnit); if (schedulingUnit && schedulingUnit.task_drafts.length > 0) { + schedulingUnit['isSUUpdated'] = true; + schedulingUnit['taskName'] = '(Tasks)'; return schedulingUnit; } } return { + taskName: '(Tasks)', error: true, message: 'Unable to Create Task Drafts' }; @@ -491,6 +494,7 @@ const ScheduleService = { }, updateSUDraftFromObservStrategy: async function(observStrategy,schedulingUnit,tasks,tasksToUpdate,station_groups) { + let taskName = ''; try { delete schedulingUnit['duration']; schedulingUnit['isSUUpdated'] = false; @@ -499,25 +503,37 @@ const ScheduleService = { schedulingUnit['isSUUpdated'] = true; for (const taskToUpdate in tasksToUpdate) { let task = tasks.find(task => { return task.name === taskToUpdate}); - task.specifications_doc = observStrategy.template.tasks[taskToUpdate].specifications_doc; - if (task.specifications_doc.station_groups) { - task.specifications_doc.station_groups = station_groups; - } - delete task['duration']; - delete task['relative_start_time']; - delete task['relative_stop_time']; - task = await TaskService.updateTask('draft', task); - if (task.error) { - schedulingUnit = task; + taskName = taskToUpdate; + if(task) { + task.specifications_doc = 
observStrategy.template.tasks[taskToUpdate].specifications_doc;
+                    if (task.specifications_doc.station_groups) {
+                        task.specifications_doc.station_groups = station_groups;
+                    }
+                    delete task['duration'];
+                    delete task['relative_start_time'];
+                    delete task['relative_stop_time'];
+                    task = await TaskService.updateTask('draft', task);
+                    if (task.error) {
+                        schedulingUnit = task;
+                    }
+                } else {
+                    return {
+                        taskName: taskName,
+                        error: true,
+                        message: 'Unable to Update Task Drafts'
+                    }
+                }
             }
-
+        } else {
+            schedulingUnit['isSUUpdated'] = false;
         }
+        schedulingUnit['taskName'] = taskName;
         return schedulingUnit;
     }   catch(error) {
         console.error(error);
         schedulingUnit['isSUUpdated'] = false;
         return {
+            taskName: taskName,
             error: true,
             message: 'Unable to Update Task Drafts'
         }
@@ -539,7 +555,8 @@ const ScheduleService = {
             return suCreateTaskResponse.data;
         } catch(error) {
             console.error(error);
-            return null;
+            schedulingUnit['isSUUpdated'] = false;
+            return schedulingUnit;
         }
     },
     getSchedulingListByProject: async function(project){
@@ -595,6 +612,7 @@ const ScheduleService = {
             return response.data;
         } catch(error) {
             console.error(error);
+            console.error(error.response);     // also log the server response body for diagnosis
         }
     },
     getStationGroup: async function() {
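Editor's note: with the service changes above, `updateSUDraftFromObservStrategy()` and `createSchedulingUnit()` now flag failures with `error: true` and report the offending task via `taskName`, instead of returning `null`. The caller sketch below is an assumption based only on that contract; the `saveDraft` helper itself is hypothetical, while `ScheduleService` and `appGrowl` are this patch's own modules.

```js
// Hypothetical caller consuming the new error contract of ScheduleService.
async function saveDraft(observStrategy, schedulingUnit, tasks, tasksToUpdate, stationGroups) {
    const result = await ScheduleService.updateSUDraftFromObservStrategy(
        observStrategy, schedulingUnit, tasks, tasksToUpdate, stationGroups);
    if (result.error) {
        // result.taskName identifies the task draft that could not be updated
        appGrowl.show({severity: 'error', summary: 'Error Occurred',
                       detail: `${result.message}: ${result.taskName}`});
        return null;
    }
    return result;   // result.isSUUpdated tells whether the SU itself changed
}
```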
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/utils/ui.constants.js b/SAS/TMSS/frontend/tmss_webapp/src/utils/ui.constants.js
index b2cdb71562603a663bddc1420395566d4a823afb..cc6a2efc08744512b14c86df9e416d56d8426455 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/utils/ui.constants.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/utils/ui.constants.js
@@ -4,13 +4,13 @@ const UIConstants = {
         types: { NORMAL: "NORMAL", WEEKVIEW:"WEEKVIEW"}
     },
     httpStatusMessages: {
-        400: {severity: 'error', summary: 'Error', sticky: true, detail: 'Error while process request, please contact system admin'},
-        401: {severity: 'error', summary: 'Error', sticky: true, detail: 'Not authenticated, Please retry to login with valid credential'},
-        403: {severity: 'error', summary: 'Error', sticky: true, detail: 'Unknown request, please contact system admin'},
+        400: {severity: 'error', summary: 'Error', sticky: true, detail: 'Request data may be incorrect. Please try again or contact the system admin'},
+        401: {severity: 'error', summary: 'Error', sticky: true, detail: 'Not authenticated. Please log in again with valid credentials'},
+        403: {severity: 'error', summary: 'Error', sticky: true, detail: "You don't have permission to perform this action. Please contact the system admin"},
         404: {severity: 'error', summary: 'Error', sticky: true, detail: 'URL is not recognized, please contact system admin'},
-        408: {severity: 'error', summary: 'Error', sticky: true, detail: 'Request is taking more time to response, please contact system admin'},
-        500: {severity: 'error', summary: 'Error', sticky: true, detail: 'Internal Server Error, URL may not exists, please contact system admin'},
-        503: {severity: 'error', summary: 'Error', sticky: true, detail: 'Server not available, please check system admin'},
+        408: {severity: 'error', summary: 'Error', sticky: true, detail: 'Request timed out. Please try again or contact the system admin'},
+        500: {severity: 'error', summary: 'Error', sticky: true, detail: 'Server could not process the request. Please check that the submitted data is correct or contact the system admin'},
+        503: {severity: 'error', summary: 'Error', sticky: true, detail: 'Server is not available. Please try again or contact the system admin'},
     },
     CALENDAR_DATE_FORMAT: 'yy-mm-dd',
     CALENDAR_DATETIME_FORMAT : 'YYYY-MM-DD HH:mm:ss',
diff --git a/SAS/TMSS/frontend/tmss_webapp/src/utils/validator.js b/SAS/TMSS/frontend/tmss_webapp/src/utils/validator.js
index f7612d15d63f4c2a6d625b74aa4badc3afb585d9..61e87c9adbb4aafb58a2ae5af4ea2dca271b6a85 100644
--- a/SAS/TMSS/frontend/tmss_webapp/src/utils/validator.js
+++ b/SAS/TMSS/frontend/tmss_webapp/src/utils/validator.js
@@ -21,6 +21,7 @@ const Validator = {
         }
         return false;
     },
+
    /**
     * Validates whether any of the given property values is modified comparing the old and new object.
     * @param {Object} oldObject - old object that is already existing in the state list