diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index b7cedc3c91918494814c46a19eb98f2165d6ff87..1773ab9edb8fe586f6662bfa4d7d2bd48d2ad83e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -211,8 +211,6 @@ dockerize_TMSS:
   needs:
     - job: build_SCU
       artifacts: true
-    - job: integration_test_SCU
-      artifacts: false
 
 #
 # INTEGRATION TEST STAGE
@@ -317,6 +315,8 @@ deploy-tmss-ua:
     - ssh lofarsys@tmss-ua.control.lofar "docker-compose -f docker-compose-ua.yml up -d"
   needs:
     - dockerize_TMSS
+    - job: integration_test_SCU
+      artifacts: false
   when: manual
   only:
     - "master"
@@ -330,6 +330,8 @@ deploy-tmss-dockerhub:
     - docker logout
   needs:
     - dockerize_TMSS
+    - job: integration_test_SCU
+      artifacts: false
   when: manual
   only:
    - "master"
diff --git a/LCS/PyCommon/json_utils.py b/LCS/PyCommon/json_utils.py
index 963e397174ee5943fa038d869af8c78edcaae33e..6a40f670614a047e0107b7c21059e1dce1fb2d99 100644
--- a/LCS/PyCommon/json_utils.py
+++ b/LCS/PyCommon/json_utils.py
@@ -16,6 +16,8 @@
 # with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
 
 import json
+import time
+
 import jsonschema
 from copy import deepcopy
 import requests
@@ -159,19 +161,31 @@ def get_referenced_subschema(ref_url, cache: dict=None, max_cache_age: timedelta
     '''fetch the schema given by the ref_url, and get the sub-schema given by the #/ path in the ref_url'''
     # deduct referred schema name and version from ref-value
     head, anchor, tail = ref_url.partition('#')
+
+    def _fetch_url_and_update_cache_entry_if_needed():
+        # try to fetch the url a few times (jsonschema.org is down quite often, but only for a brief moment)
+        for attempt_nr in range(5):
+            try:
+                response = requests.get(ref_url)
+                if response.status_code == 200:
+                    referenced_schema = json.loads(response.text)
+                    if isinstance(cache, dict):
+                        cache[head] = referenced_schema, datetime.utcnow()
+                    return referenced_schema
+            except requests.exceptions.RequestException as e:
+                time.sleep(2)  # retry after a little sleep
+        raise Exception("Could not get: %s" % (ref_url,))
+
     if isinstance(cache, dict) and head in cache:
         # use cached value
         referenced_schema, last_update_timestamp = cache[head]
 
         # refresh cache if outdated
         if datetime.utcnow() - last_update_timestamp > max_cache_age:
-            referenced_schema = json.loads(requests.get(ref_url).text)
-            cache[head] = referenced_schema, datetime.utcnow()
+            referenced_schema = _fetch_url_and_update_cache_entry_if_needed()
     else:
         # fetch url, and store in cache
-        referenced_schema = json.loads(requests.get(ref_url).text)
-        if isinstance(cache, dict):
-            cache[head] = referenced_schema, datetime.utcnow()
+        referenced_schema = _fetch_url_and_update_cache_entry_if_needed()
 
     # extract sub-schema
     tail = tail.strip('/')
@@ -222,13 +236,12 @@ def get_refs(schema) -> set:
     return refs
 
 
-def validate_json_against_its_schema(json_object: dict):
+def validate_json_against_its_schema(json_object: dict, cache: dict=None, max_cache_age: timedelta=DEFAULT_MAX_SCHEMA_CACHE_AGE):
     '''validate the give json object against its own schema (the URI/URL that its propery $schema points to)'''
     schema_url = json_object['$schema']
-    response = requests.get(schema_url, headers={"Accept":"application/json"})
-    if response.status_code == 200:
-        return validate_json_against_schema(json_object, response.text)
-    raise jsonschema.exceptions.ValidationError("Could not get schema from '%s'\n%s" % (schema_url, str(response.text)))
+    schema_object = get_referenced_subschema(schema_url, cache=cache, max_cache_age=max_cache_age)
+    return validate_json_against_schema(json_object, schema_object)
+
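Reviewer note on the `json_utils.py` change above: the patch replaces two inline `requests.get(...)` calls with one nested helper that retries and refreshes the cache. Below is the same pattern isolated as a minimal standalone sketch; the name `fetch_json_with_retries` and the 5-attempt/2-second values are illustrative only, and unlike the patch this sketch also pauses after a non-200 reply.

```python
import json
import time
from datetime import datetime, timedelta

import requests

def fetch_json_with_retries(url: str, cache: dict = None,
                            max_cache_age: timedelta = timedelta(minutes=5),
                            attempts: int = 5, retry_delay: float = 2.0) -> dict:
    '''illustrative sketch (not part of json_utils): fetch a JSON document,
    retrying a few times, and memoize the result in a caller-supplied dict.'''
    if isinstance(cache, dict) and url in cache:
        document, last_update = cache[url]
        if datetime.utcnow() - last_update <= max_cache_age:
            return document  # cache entry is still fresh, no network round-trip

    for _ in range(attempts):
        try:
            response = requests.get(url)
            if response.status_code == 200:
                document = json.loads(response.text)
                if isinstance(cache, dict):
                    cache[url] = document, datetime.utcnow()  # (re)fill the cache
                return document
        except requests.exceptions.RequestException:
            pass  # transient network error: fall through and retry
        time.sleep(retry_delay)  # brief pause before the next attempt
    raise Exception("Could not get: %s" % (url,))

# usage: share one dict across calls so each schema is fetched at most once per max_cache_age
# schema_cache = {}
# schema = fetch_json_with_retries("https://json-schema.org/draft-06/schema", cache=schema_cache)
```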
 def validate_json_against_schema(json_string: str, schema: str):
     '''validate the given json_string against the given schema.
@@ -260,13 +273,13 @@ def validate_json_against_schema(json_string: str, schema: str):
         raise jsonschema.exceptions.ValidationError(str(e))
 
 
-def get_default_json_object_for_schema(schema: str) -> dict:
+def get_default_json_object_for_schema(schema: str, cache: dict=None, max_cache_age: timedelta=DEFAULT_MAX_SCHEMA_CACHE_AGE) -> dict:
     """
     TMSS wrapper for TMSS 'add_defaults_to_json_object_for_schema'
     :param schema:
     :return: json_object with default values of the schema
     """
-    data = add_defaults_to_json_object_for_schema({}, schema)
+    data = add_defaults_to_json_object_for_schema({}, schema, cache=cache, max_cache_age=max_cache_age)
     if '$id' in schema:
         data['$schema'] = schema['$id']
     return data
diff --git a/LTA/LTACommon/CMakeLists.txt b/LTA/LTACommon/CMakeLists.txt
index 590e81909a57a76557c620d7509923ac90238782..fed7330d6f64b4791f34c11cc9d06d317a011080 100644
--- a/LTA/LTACommon/CMakeLists.txt
+++ b/LTA/LTACommon/CMakeLists.txt
@@ -1,5 +1,5 @@
 lofar_package(LTACommon 1.0)
 
-set(etc_files LTA-SIP.xsd)
-lofar_add_sysconf_files(${etc_files} DESTINATION lta)
+set(share_files LTA-SIP.xsd)
+lofar_add_data_files(${share_files} DESTINATION lta)
diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingesttmssadapter.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingesttmssadapter.py
index 7fd829007bf08bc58122d8ba8b1ad33e8f62c1ff..0e6210ef221fac0d7b2564f0845f0cd41aa3f7bf 100644
--- a/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingesttmssadapter.py
+++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestAdminServer/lib/ingesttmssadapter.py
@@ -27,14 +27,14 @@ from lofar.lta.ingest.server.config import MAX_NR_OF_RETRIES
 from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession
 from lofar.messaging.messagebus import ToBus, DEFAULT_BROKER, DEFAULT_BUSNAME, UsingToBusMixin
 from lofar.messaging.messages import CommandMessage, EventMessage
-from lofar.sas.tmss.client.tmssbuslistener import TMSSBusListener, TMSSEventMessageHandler, TMSS_ALL_EVENTS_FILTER
+from lofar.sas.tmss.client.tmssbuslistener import TMSSBusListener, TMSSEventMessageHandler, TMSS_SUBTASK_STATUS_EVENT_PREFIX
 from lofar.common.datetimeutils import totalSeconds
 from lofar.common.dbcredentials import DBCredentials
 from lofar.common.util import waitForInterrupt
 from threading import Thread
 import time
-from datetime import datetime
+from datetime import datetime, timedelta
 from typing import Union
 import logging
@@ -62,7 +62,25 @@ class IngestEventMessageHandlerForIngestTMSSAdapter(UsingToBusMixin, IngestEvent
 
     def onJobStarted(self, job_dict):
         if self.is_tmss_job(job_dict):
-            self.tmss_client.set_subtask_status(job_dict['export_id'], 'started')
+            subtask_id = job_dict['export_id']
+            subtask = self.tmss_client.get_subtask(subtask_id)
+
+            if subtask['state_value'] == 'started':
+                pass  # the ingest subtask was already started
+            else:
+                # wait until the subtask is fully queued (or in error/cancelled)
+                start_wait_timestamp = datetime.utcnow()
+                while subtask['state_value'] not in ('queued', 'cancelled', 'error'):
+                    if datetime.utcnow() - start_wait_timestamp > timedelta(seconds=60):
+                        raise TimeoutError("Timeout while waiting for ingest subtask id=%s to get status queued/cancelled/error. Current status is %s" % (subtask_id, subtask['state_value']))
+                    time.sleep(1)
+                    subtask = self.tmss_client.get_subtask(subtask_id)
+
+                if subtask['state_value'] == 'queued':
+                    # the ingest subtask was fully queued, and this is the first ingest transfer job that started
+                    # so, set the ingest subtask to starting->started
+                    self.tmss_client.set_subtask_status(subtask_id, 'starting')
+                    self.tmss_client.set_subtask_status(subtask_id, 'started')
 
     def onJobFailed(self, job_dict):
         if self.is_tmss_job(job_dict):
@@ -150,13 +168,15 @@ class TMSSEventMessageHandlerForIngestTMSSAdapter(UsingToBusMixin, TMSSEventMess
             self.tmss_client.set_subtask_status(subtask['id'], 'queueing')
 
             # gather all relevant and needed info...
-            task_blueprint = self.tmss_client.get_url_as_json_object(subtask['task_blueprint'])
+            task_blueprint = self.tmss_client.get_url_as_json_object(subtask['task_blueprints'][0])
             task_draft = self.tmss_client.get_url_as_json_object(task_blueprint['draft'])
             scheduling_unit_draft = self.tmss_client.get_url_as_json_object(task_draft['scheduling_unit_draft'])
             scheduling_set = self.tmss_client.get_url_as_json_object(scheduling_unit_draft['scheduling_set'])
             project = self.tmss_client.get_url_as_json_object(scheduling_set['project'])
 
             # create an ingest xml job for each input dataproduct
+            # store the jobs in a list, and submit them in one go to the queue
+            jobs = []
             for input_dp in input_dataproducts:
                 dp_global_identifier = self.tmss_client.get_url_as_json_object(input_dp['global_identifier'])
                 producer = self.tmss_client.get_url_as_json_object(input_dp['producer'])
@@ -169,7 +189,10 @@ class TMSSEventMessageHandlerForIngestTMSSAdapter(UsingToBusMixin, TMSSEventMess
                                    location=subtask['cluster_name']+':'+os.path.join(input_dp['directory'], input_dp['filename']),
                                    tmss_ingest_subtask_id=subtask['id'],
                                    tmss_input_dataproduct_id=input_dp['id'])
+                jobs.append(job)
 
+            # submit all jobs in one go to the ingest-incoming-job-queue
+            for job in jobs:
                 msg = CommandMessage(content=job, subject=DEFAULT_INGEST_INCOMING_JOB_SUBJECT)
                 logger.info('submitting job %s to exchange %s with subject %s at broker %s', parseJobXml(job)['JobId'], self._tobus.exchange, msg.subject, self._tobus.broker)
diff --git a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/sip.py b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/sip.py
index 7c42d02ca0d8e9ca5522d16c1afe055d40aca316..440792e4546b6e7703dea0046dd35eb0ca1ef2d6 100755
--- a/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/sip.py
+++ b/LTA/LTAIngest/LTAIngestServer/LTAIngestTransferServer/lib/sip.py
@@ -19,7 +19,7 @@ def validateSIPAgainstSchema(sip, log_prefix=''):
     start = time.time()
 
     lofarrootdir = os.environ.get('LOFARROOT', '/opt/lofar')
-    sip_xsd_path = os.path.join(lofarrootdir, 'etc', 'lta', 'LTA-SIP.xsd')
+    sip_xsd_path = os.path.join(lofarrootdir, 'share', 'lta', 'LTA-SIP.xsd')
 
     if not os.path.exists(sip_xsd_path):
         logger.error('Could not find LTA-SIP.xsd at %s', sip_xsd_path)
diff --git a/LTA/sip/bin/feedback2sip b/LTA/sip/bin/feedback2sip
old mode 100644
new mode 100755
diff --git a/LTA/sip/bin/validatesip b/LTA/sip/bin/validatesip
old mode 100644
new mode 100755
diff --git a/LTA/sip/bin/visualizesip b/LTA/sip/bin/visualizesip
old mode 100644
new mode 100755
diff --git a/LTA/sip/lib/siplib.py b/LTA/sip/lib/siplib.py
index e81b00ed5576eaf33f567f4a9394e609d9e284c5..71b7c184c5004408cb40ca7e3ec4b69b7e4da9c4 100644
--- a/LTA/sip/lib/siplib.py
+++ b/LTA/sip/lib/siplib.py
@@ -1488,11 +1488,16 @@ class Sip(object):
             raise Exception("This SIP does not describe a correlated dataproduct. No subarray pointing available.")
 
     # this will also validate the document so far
-    def get_prettyxml(self):
+    def get_prettyxml(self, schema_location:str = None):
         try:
             dom = self.__sip.toDOM()
             dom.documentElement.setAttribute("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance")
-            dom.documentElement.setAttribute('xsi:schemaLocation', "http://www.astron.nl/SIP-Lofar LTA-SIP-2.7.0.xsd")
+            if schema_location is None:
+                # this is/was the default schema location, even though we never hosted the xsd at the astron server.
+                # That makes xmllint fail to validate (because the schema obviously can't be found)
+                schema_location = "http://www.astron.nl/SIP-Lofar LTA-SIP-2.7.2.xsd"
+            dom.documentElement.setAttribute('xsi:schemaLocation', schema_location)
+            dom.documentElement.setAttribute('xmlns:sip', schema_location.split(' ')[0])
             return dom.toprettyxml()
         except pyxb.ValidationError as err:
             logger.error(err.details())
diff --git a/LTA/sip/lib/validator.py b/LTA/sip/lib/validator.py
index 508c2beee330b5c7a32226031eb9d95beef2c2d3..e0de12d44e2a71d200607a2d44264f082e619105 100644
--- a/LTA/sip/lib/validator.py
+++ b/LTA/sip/lib/validator.py
@@ -8,7 +8,7 @@ logger = logging.getLogger(__name__)
 
 d = os.path.dirname(os.path.realpath(__file__))
 XSDPATH = d+"/LTA-SIP.xsd"
-DEFAULT_SIP_XSD_PATH = os.path.join(os.environ.get('LOFARROOT', '/opt/lofar'), 'etc', 'lta', 'LTA-SIP.xsd')
+DEFAULT_SIP_XSD_PATH = os.path.join(os.environ.get('LOFARROOT', '/opt/lofar'), 'share', 'lta', 'LTA-SIP.xsd')
 
 def validate(xmlpath, xsdpath=DEFAULT_SIP_XSD_PATH):
     '''validates given xml file against given xsd file'''
diff --git a/MAC/APL/MainCU/src/MACScheduler/MACScheduler.cc b/MAC/APL/MainCU/src/MACScheduler/MACScheduler.cc
index 070672448124bd3697786a99704e1dd4ca1c3ec9..d1a1564f304e17f25a3d742aa955a1fdfdd9ad48 100644
--- a/MAC/APL/MainCU/src/MACScheduler/MACScheduler.cc
+++ b/MAC/APL/MainCU/src/MACScheduler/MACScheduler.cc
@@ -577,6 +577,9 @@ GCFEvent::TResult MACScheduler::active_state(GCFEvent& event, GCFPortInterface&
             tm.setTreeState(theObs->second, tsc.get("queued"));
 #endif
         } else {
+            // TODO: set to queueing state the moment MAC knows about this upcoming obs.
+            // I've tried that but it's really hard to keep the MAC internal bookkeeping and TMSS in sync.
+ itsTMSSconnection->setSubtaskState(theObs->second, "queueing"); itsTMSSconnection->setSubtaskState(theObs->second, "queued"); } break; @@ -598,6 +601,7 @@ GCFEvent::TResult MACScheduler::active_state(GCFEvent& event, GCFPortInterface& #endif } else { + itsTMSSconnection->setSubtaskState(theObs->second, "starting"); itsTMSSconnection->setSubtaskState(theObs->second, "started"); } break; @@ -922,8 +926,6 @@ void MACScheduler::_updatePlannedList() OLiter prepIter = itsPreparedObs.find(subtask_id); if ((prepIter == itsPreparedObs.end()) || (prepIter->second.prepReady == false) || (prepIter->second.modTime != modTime)) { - itsTMSSconnection->setSubtaskState(subtask_id, "queueing"); - // create a ParameterFile for this Observation string parsetText = itsTMSSconnection->getParsetAsText(subtask_id); if(prepIter == itsPreparedObs.end()) { diff --git a/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc b/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc index 5a9bc3f4bea1cadc352584deeb3ff09fba52e036..2fd54053b9b58da8c84a19bf981eb8c7b6e27d50 100644 --- a/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc +++ b/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc @@ -236,6 +236,7 @@ bool TMSSBridge::httpQuery(const string& target, string &result, const string& q curl_global_cleanup(); LOG_INFO_STR(string("[") << query_method << "] code=" << httpCode << " " << url); + if (httpCode == 200) { return true; } diff --git a/SAS/TMSS/backend/services/scheduling/lib/constraints/template_constraints_v1.py b/SAS/TMSS/backend/services/scheduling/lib/constraints/template_constraints_v1.py index 342b727554e0c3a5ca3212ab4008f8ecd116e752..d02f9f43afab5beeb76cb1a505c2410401d3c588 100644 --- a/SAS/TMSS/backend/services/scheduling/lib/constraints/template_constraints_v1.py +++ b/SAS/TMSS/backend/services/scheduling/lib/constraints/template_constraints_v1.py @@ -83,7 +83,7 @@ def can_run_within_timewindow_with_daily_constraints(scheduling_unit: models.Sch Checks whether it is possible to run the scheduling unit /somewhere/ in the given time window, considering the duration of the involved observation. :return: True if there is at least one possibility to place the scheduling unit in a way that all daily constraints are met over the runtime of the observation, else False. """ - main_observation_task_name = get_target_observation_task_name_from_requirements_doc(scheduling_unit) + main_observation_task_name = get_longest_observation_task_name_from_requirements_doc(scheduling_unit) duration = timedelta(seconds=scheduling_unit.requirements_doc['tasks'][main_observation_task_name]['specifications_doc']['duration']) window_lower_bound = lower_bound while window_lower_bound + duration < upper_bound: @@ -100,7 +100,7 @@ def can_run_anywhere_within_timewindow_with_daily_constraints(scheduling_unit: m Checks whether it is possible to place the scheduling unit arbitrarily in the given time window, i.e. the daily constraints must be met over the full time window. :return: True if all daily constraints are met over the entire time window, else False. 
""" - main_observation_task_name = get_target_observation_task_name_from_requirements_doc(scheduling_unit) + main_observation_task_name = get_longest_observation_task_name_from_requirements_doc(scheduling_unit) constraints = scheduling_unit.scheduling_constraints_doc if constraints['daily']['require_day'] or constraints['daily']['require_night'] or constraints['daily']['avoid_twilight']: @@ -157,7 +157,7 @@ def can_run_within_timewindow_with_time_constraints(scheduling_unit: models.Sche :return: True if there is at least one possibility to place the scheduling unit in a way that all time constraints are met over the runtime of the observation, else False. """ - main_observation_task_name = get_target_observation_task_name_from_requirements_doc(scheduling_unit) + main_observation_task_name = get_longest_observation_task_name_from_requirements_doc(scheduling_unit) constraints = scheduling_unit.scheduling_constraints_doc # Check the 'at' constraint and then only check can_run_anywhere for the single possible time window @@ -251,9 +251,6 @@ def can_run_anywhere_within_timewindow_with_sky_constraints(scheduling_unit: mod Checks whether it is possible to place the scheduling unit arbitrarily in the given time window, i.e. the sky constraints must be met over the full time window. :return: True if all sky constraints are met over the entire time window, else False. """ - # TODO: remove this shortcut after demo - return True - constraints = scheduling_unit.scheduling_constraints_doc if not "sky" in constraints: return True @@ -385,10 +382,26 @@ def get_target_observation_task_name_from_requirements_doc(scheduling_unit: mode raise TMSSException("Cannot find target observation in scheduling_unit requirements_doc") +def get_longest_observation_task_name_from_requirements_doc(scheduling_unit: models.SchedulingUnitBlueprint) -> str: + longest_observation_task_name = None + longest_observation_duration = 0 + for task_name, task in scheduling_unit.requirements_doc['tasks'].items(): + if 'observation' in task.get('specifications_template', ''): + if 'duration' in task.get('specifications_doc', {}): + duration = task['specifications_doc']['duration'] + if duration > longest_observation_duration: + longest_observation_duration = duration + longest_observation_task_name = task_name + if longest_observation_task_name is not None: + return longest_observation_task_name + raise TMSSException("Cannot find a longest observation in scheduling_unit requirements_doc") + + def get_earliest_possible_start_time(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime) -> datetime: constraints = scheduling_unit.scheduling_constraints_doc - main_observation_task_name = get_target_observation_task_name_from_requirements_doc(scheduling_unit) + # TODO: for estimating the earliest_possible_start_time, we need the full duration of the scheduling unit, not just the longest one. 
+ main_observation_task_name = get_longest_observation_task_name_from_requirements_doc(scheduling_unit) duration = timedelta(seconds=scheduling_unit.requirements_doc['tasks'][main_observation_task_name]['specifications_doc']['duration']) try: if 'at' in constraints['time']: diff --git a/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py b/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py index 59e644b4a882c4add00baa7b495fb95f41a524df..002d6ed94053dcc868c3c1f93bb508ca589e2d65 100755 --- a/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py +++ b/SAS/TMSS/backend/services/scheduling/test/t_dynamic_scheduling.py @@ -26,9 +26,6 @@ from astropy.coordinates import Angle import logging logger = logging.getLogger(__name__) -#TODO: remove after demo -exit(3) - from lofar.common.test_utils import skip_integration_tests if skip_integration_tests(): exit(3) @@ -113,8 +110,9 @@ class TestDynamicScheduling(TestCase): # Note: we use django.test.TestCase inst for input in subtask.inputs.all(): input.delete() subtask.delete() - task_blueprint.draft.delete() + draft = task_blueprint.draft task_blueprint.delete() + draft.delete() scheduling_unit_blueprint.delete() scheduling_unit_draft.delete() @@ -348,6 +346,34 @@ class TestDynamicScheduling(TestCase): # Note: we use django.test.TestCase inst self.assertGreaterEqual(scheduling_unit_blueprint_high.start_time - scheduling_unit_blueprint_manual.stop_time, DEFAULT_INTER_OBSERVATION_GAP) + def test_can_schedule_all_observing_strategy_templates_with_default_constraints(self): + '''''' + constraints_template = models.SchedulingConstraintsTemplate.objects.get(name="constraints") + constraints = get_default_json_object_for_schema(constraints_template.schema) + + for strategy_template in models.SchedulingUnitObservingStrategyTemplate.objects.all(): + scheduling_unit_spec = add_defaults_to_json_object_for_schema(strategy_template.template, + strategy_template.scheduling_unit_template.schema) + + draft = models.SchedulingUnitDraft.objects.create(name=strategy_template.name, + scheduling_set=self.scheduling_set_high, + requirements_template=strategy_template.scheduling_unit_template, + requirements_doc=scheduling_unit_spec, + observation_strategy_template=strategy_template, + scheduling_constraints_doc=constraints, + scheduling_constraints_template=constraints_template) + blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(draft) + + # call the method-under-test: do_dynamic_schedule + # this test only checks if each strategy_template *can* be scheduled by the dynamic scheduler without any exceptions/errors. The defaults should just work. 
+            scheduled_scheduling_unit = do_dynamic_schedule()
+            self.assertEqual(blueprint.id, scheduled_scheduling_unit.id)
+            self.assertEqual("scheduled", scheduled_scheduling_unit.status)
+
+            # wipe all entries in tmss-db/radb, and try next strategy_template
+            self.setUp()
+
+
 class TestDailyConstraints(TestCase):
     '''
     Tests for the constraint checkers used in dynamic scheduling
@@ -863,14 +889,14 @@ class TestSkyConstraints(unittest.TestCase):
 
         # case 1: transits at 14h, obs middle is at 13h, so we have an offset of -3600 seconds
         # big window
-        self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky'] = {'transit_offset': {'from': -43200, 'to': 43200}}  # todo: use blueprint contraints after TMSS-697 was merged
+        self.scheduling_unit_blueprint.scheduling_constraints_doc['sky'] = {'transit_offset': {'from': -43200, 'to': 43200}}
         self.scheduling_unit_blueprint.save()
         timestamp = datetime(2020, 1, 1, 12, 0, 0)
         returned_value = tc1.can_run_anywhere_within_timewindow_with_sky_constraints(self.scheduling_unit_blueprint, timestamp, timestamp + timedelta(seconds=self.obs_duration))
         self.assertTrue(returned_value)
 
         # narrow window
-        self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky'] = {'transit_offset': {'from': -3601, 'to': -3599}}  # todo: use blueprint contraints after TMSS-697 was merged
+        self.scheduling_unit_blueprint.scheduling_constraints_doc['sky'] = {'transit_offset': {'from': -3601, 'to': -3599}}
         self.scheduling_unit_blueprint.save()
         timestamp = datetime(2020, 1, 1, 12, 0, 0)
         returned_value = tc1.can_run_anywhere_within_timewindow_with_sky_constraints(self.scheduling_unit_blueprint, timestamp, timestamp + timedelta(seconds=self.obs_duration))
@@ -880,7 +906,7 @@ class TestSkyConstraints(unittest.TestCase):
 
         # window spans past 12h, so reference transit is not nearest transit to obs time
         self.target_transit_mock.return_value = self.target_transit_data_previous
-        self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky'] = {'transit_offset': {'from': -43300, 'to': -43100}}  # todo: use blueprint contraints after TMSS-697 was merged
+        self.scheduling_unit_blueprint.scheduling_constraints_doc['sky'] = {'transit_offset': {'from': -43300, 'to': -43100}}
         self.scheduling_unit_blueprint.save()
         timestamp = datetime(2020, 1, 1, 1, 0, 0)
         returned_value = tc1.can_run_anywhere_within_timewindow_with_sky_constraints(self.scheduling_unit_blueprint, timestamp, timestamp + timedelta(seconds=self.obs_duration))
@@ -891,14 +917,14 @@ class TestSkyConstraints(unittest.TestCase):
 
         # transits at 14h, obs middle is at 13h, so we have an offset of -3600 seconds
         # window after
-        self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky'] = {'transit_offset': {'from': -3599, 'to': 43200}}  # todo: use blueprint contraints after TMSS-697 was merged
+        self.scheduling_unit_blueprint.scheduling_constraints_doc['sky'] = {'transit_offset': {'from': -3599, 'to': 43200}}
         self.scheduling_unit_blueprint.save()
         timestamp = datetime(2020, 1, 1, 12, 0, 0)
         returned_value = tc1.can_run_anywhere_within_timewindow_with_sky_constraints(self.scheduling_unit_blueprint, timestamp, timestamp + timedelta(seconds=self.obs_duration))
         self.assertFalse(returned_value)
 
         # window before
-        self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky'] = {'transit_offset': {'from': -43200, 'to': -3601}}  # todo: use blueprint contraints after TMSS-697 was merged
+        self.scheduling_unit_blueprint.scheduling_constraints_doc['sky'] = {'transit_offset': {'from': -43200, 'to': -3601}}
         self.scheduling_unit_blueprint.save()
         timestamp = datetime(2020, 1, 1, 12, 0, 0)
         returned_value = tc1.can_run_anywhere_within_timewindow_with_sky_constraints(self.scheduling_unit_blueprint, timestamp, timestamp + timedelta(seconds=self.obs_duration))
@@ -910,7 +936,7 @@ class TestSkyConstraints(unittest.TestCase):
 
         # obs middle is 13h, so we have an offset of -7200 seconds
         self.target_transit_mock.side_effect = self.target_transit_data_saps
-        self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky'] = {'transit_offset': {'from': -7201, 'to': -7199}}  # todo: use blueprint contraints after TMSS-697 was merged
+        self.scheduling_unit_blueprint.scheduling_constraints_doc['sky'] = {'transit_offset': {'from': -7201, 'to': -7199}}
         self.scheduling_unit_blueprint.requirements_doc['tasks']['Observation']['specifications_doc']['antenna_set'] = 'LBA_INNER'
         self.scheduling_unit_blueprint.requirements_doc['tasks']['Observation']['specifications_doc']['SAPs'] = \
             [{'name': 'CygA', 'target': 'CygA', 'subbands': [0, 1], 'digital_pointing': {'angle1': 5.233660650313663, 'angle2': 0.7109404782526458, 'direction_type': 'J2000'}},
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/common.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/common.py
index 4eeeb68e1a42963aeabbd1111c7dcd509f0eb781..b13765d5a07282c1e9b24e71c1f3a1946cd7dd02 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/common.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/common.py
@@ -203,7 +203,7 @@ class TemplateSchemaMixin():
 
             # add defaults for missing properies, and validate on the fly
             # use the class's _schema_cache
-            document = add_defaults_to_json_object_for_schema(document, template.schema, self._schema_cache)
+            document = add_defaults_to_json_object_for_schema(document, template.schema, cache=self._schema_cache, max_cache_age=self._MAX_SCHEMA_CACHE_AGE)
 
             # update the model instance with the updated and validated document
             setattr(self, document_attr, document)
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
index ada071a865bdf4f336164fa504e62eb9f7083a87..25326994619527ce4e4278fa35ce9c04448fb1b2 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/models/specification.py
@@ -17,6 +17,7 @@ from django.core.exceptions import ValidationError
 import datetime
 from collections import Counter
 from django.utils.functional import cached_property
+from pprint import pformat
 from lofar.sas.tmss.tmss.exceptions import TMSSException
 
 #
@@ -219,7 +220,13 @@ class SchedulingUnitObservingStrategyTemplate(NamedCommon):
 
     def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
         if self.template and self.scheduling_unit_template_id and self.scheduling_unit_template.schema:
-            validate_json_against_schema(self.template, self.scheduling_unit_template.schema)
+            try:
+                validate_json_against_schema(self.template, self.scheduling_unit_template.schema)
+            except Exception as e:
+                # log the error for debugging and re-raise
+                logger.error("Error while validating SchedulingUnitObservingStrategyTemplate name='%s' id='%s' error: %s\ntemplate:\n%s",
+                             self.name, self.id, e, pformat(self.template))
+                raise
 
         super().save(force_insert, force_update, using, update_fields)
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/LoTSS-observation-scheduling-unit-observation-strategy.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/LoTSS-observation-scheduling-unit-observation-strategy.json
index ce930faebf11e9d798bfa64809f06f067e4aeefe..674c49680e4caa76246e00893a8ed0f946c729f9 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/LoTSS-observation-scheduling-unit-observation-strategy.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/LoTSS-observation-scheduling-unit-observation-strategy.json
@@ -331,8 +331,8 @@
                 447
               ],
               "digital_pointing":{
-                "angle1":0.24,
-                "angle2":0.25,
+                "angle1": 0.6624317181687094,
+                "angle2": 1.5579526427549426,
                 "direction_type":"J2000"
               }
             },
@@ -584,8 +584,8 @@
                 447
               ],
               "digital_pointing":{
-                "angle1":0.27,
-                "angle2":0.28,
+                "angle1": 0.6624317181687094,
+                "angle2": 1.5579526427549426,
                 "direction_type":"J2000"
               }
             }
@@ -593,8 +593,8 @@
           "filter":"HBA_110_190",
           "duration":28800,
           "tile_beam":{
-            "angle1":0.42,
-            "angle2":0.43,
+            "angle1": 0.6624317181687094,
+            "angle2": 1.5579526427549426,
             "direction_type":"J2000"
           },
           "correlator":{
@@ -737,8 +737,8 @@
           "name":"calibrator1",
           "duration":600,
           "pointing":{
-            "angle1":0,
-            "angle2":0,
+            "angle1": 0.6624317181687094,
+            "angle2": 1.5579526427549426,
             "direction_type":"J2000"
           },
           "autoselect":false
@@ -754,8 +754,8 @@
           "name":"calibrator2",
           "duration":600,
           "pointing":{
-            "angle1":0,
-            "angle2":0,
+            "angle1": 0.6624317181687094,
+            "angle2": 1.5579526427549426,
             "direction_type":"J2000"
           },
           "autoselect":false
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json
index 9a7a4fe7b836db4579a9111af512f2d31b6e4a9c..c8cf099bb1f48f17fef7067087e7a7de7cb27271 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/UC1-scheduling-unit-observation-strategy.json
@@ -8,8 +8,8 @@
         "autoselect": false,
         "pointing": {
           "direction_type": "J2000",
-          "angle1": 0,
-          "angle2": 0
+          "angle1": 0.6624317181687094,
+          "angle2": 1.5579526427549426
         },
         "name": "calibrator1"
       },
@@ -78,16 +78,16 @@
         ],
         "tile_beam": {
           "direction_type": "J2000",
-          "angle1": 0.42,
-          "angle2": 0.43
+          "angle1": 0.6624317181687094,
+          "angle2": 1.5579526427549426
         },
         "SAPs": [
           {
             "name": "target1",
             "digital_pointing": {
               "direction_type": "J2000",
-              "angle1": 0.24,
-              "angle2": 0.25
+              "angle1": 0.6624317181687094,
+              "angle2": 1.5579526427549426
             },
             "subbands": [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243]
           },
@@ -95,8 +95,8 @@
             "name": "target2",
             "digital_pointing": {
               "direction_type": "J2000",
-              "angle1": 0.27,
-              "angle2": 0.28
+              "angle1": 0.6624317181687094,
+              "angle2": 1.5579526427549426
             },
             "subbands": [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243]
           }
@@ -158,8 +158,8 @@
         "autoselect": false,
         "pointing": {
           "direction_type": "J2000",
-          "angle1": 0,
-          "angle2": 0
+          "angle1": 0.6624317181687094,
+          "angle2": 1.5579526427549426
         },
         "name": "calibrator2"
       },
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-pointing-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-pointing-1.json
index 75e850155bd192c799fc8e659516ac23c9ee2f2d..daaf144d92f22def7252cd2c259dcce965cebb26 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-pointing-1.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-pointing-1.json
@@ -35,13 +35,13 @@
       "type": "number",
       "title": "Angle 1",
       "description": "First angle (e.g. RA)",
-      "default": 0
+      "default": 0.6624317181687094
     },
     "angle2": {
       "type": "number",
       "title": "Angle 2",
       "description": "Second angle (e.g. DEC)",
-      "default": 0
+      "default": 1.5579526427549426
     }
   },
   "required": [
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json
index e3afa001749c54992e3de0cc6938a24ac4ed2867..2fb3614642699975018bf09db55d6c2ce5595dab 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/common_schema_template-stations-1.json
@@ -80,13 +80,17 @@
       "type":"integer",
       "title":"Maximum number of stations to omit",
       "description":"Maximum number of stations that can be omitted from a group (due to maintenance for example)",
-      "minimum":0
+      "minimum":0,
+      "default": 0
     },
     "station_group":{
      "type":"object",
      "title": "Station group",
      "description": "A set of predefined list of stations, and a constraint on how many stations are allowed to be missing (due to maintenance for example)",
-      "default":{},
+      "default":{
+        "stations": ["CS002", "CS003", "CS004", "CS005", "CS006", "CS007"],
+        "max_nr_missing": 1
+      },
      "anyOf": [
        {
          "title":"Superterp",
@@ -95,17 +99,18 @@
          "properties":{
            "stations":{
              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
-              "enum": [["CS002", "CS003", "CS004", "CS005", "CS006", "CS007"]],
-              "default": ["CS002", "CS003", "CS004", "CS005", "CS006", "CS007"],
-              "uniqueItems": false
+              "enum": [["CS002", "CS003", "CS004", "CS005", "CS006", "CS007"]]
            },
            "max_nr_missing":{
-              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
-              "default": 0
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations"
            }
          },
          "required": ["stations", "max_nr_missing"],
-          "additionalProperties": false
+          "additionalProperties": false,
+          "default":{
+            "stations": ["CS002", "CS003", "CS004", "CS005", "CS006", "CS007"],
+            "max_nr_missing": 0
+          }
        },
        {
          "title":"Core",
@@ -114,17 +119,18 @@
          "properties":{
            "stations":{
              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
-              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501"]],
-              "default": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501"],
-              "uniqueItems": false
+              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501"]]
            },
            "max_nr_missing":{
-              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
-              "default": 4
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations"
            }
          },
          "required": ["stations", "max_nr_missing"],
-          "additionalProperties": false
+          "additionalProperties": false,
+          "default":{
+            "stations": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501"],
+            "max_nr_missing": 4
+          }
        },
        {
          "title":"Remote",
@@ -133,17 +139,18 @@
          "properties":{
            "stations":{
              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
-              "enum": [["RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"]],
-              "default": ["RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"],
-              "uniqueItems": false
+              "enum": [["RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"]]
            },
            "max_nr_missing":{
-              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
-              "default": 4
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations"
            }
          },
          "required": ["stations", "max_nr_missing"],
-          "additionalProperties": false
+          "additionalProperties": false,
+          "default": {
+            "stations": ["RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"],
+            "max_nr_missing": 4
+          }
        },
        {
          "title":"Dutch",
@@ -152,17 +159,18 @@
          "properties":{
            "stations":{
              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
-              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"]],
-              "default": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"],
-              "uniqueItems": false
+              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"]]
            },
            "max_nr_missing":{
-              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
-              "default": 4
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations"
            }
          },
          "required": ["stations", "max_nr_missing"],
-          "additionalProperties": false
+          "additionalProperties": false,
+          "default": {
+            "stations": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS103", "CS201", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509"],
+            "max_nr_missing": 4
+          }
        },
        {
          "title":"International",
@@ -171,17 +179,18 @@
          "properties":{
            "stations":{
              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
-              "enum": [["DE601", "DE602", "DE603", "DE604", "DE605", "DE609", "FR606", "SE607", "UK608", "PL610", "PL611", "PL612", "IE613", "LV614"]],
-              "default": ["DE601", "DE602", "DE603", "DE604", "DE605", "DE609", "FR606", "SE607", "UK608", "PL610", "PL611", "PL612", "IE613", "LV614"],
-              "uniqueItems": false
+              "enum": [["DE601", "DE602", "DE603", "DE604", "DE605", "DE609", "FR606", "SE607", "UK608", "PL610", "PL611", "PL612", "IE613", "LV614"]]
            },
            "max_nr_missing":{
-              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
-              "default": 2
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations"
            }
          },
          "required": ["stations", "max_nr_missing"],
-          "additionalProperties": false
+          "additionalProperties": false,
+          "default": {
+            "stations": ["DE601", "DE602", "DE603", "DE604", "DE605", "DE609", "FR606", "SE607", "UK608", "PL610", "PL611", "PL612", "IE613", "LV614"],
+            "max_nr_missing": 2
+          }
        },
        {
          "title":"International required",
@@ -190,17 +199,18 @@
          "properties":{
            "stations":{
              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
-              "enum": [["DE601", "DE605"]],
-              "default": ["DE601", "DE605"],
-              "uniqueItems": false
+              "enum": [["DE601", "DE605"]]
            },
            "max_nr_missing":{
-              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
-              "default": 1
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations"
            }
          },
          "required": ["stations", "max_nr_missing"],
-          "additionalProperties": false
+          "additionalProperties": false,
+          "default": {
+            "stations": ["DE601", "DE605"],
+            "max_nr_missing": 1
+          }
        },
        {
          "title":"All",
@@ -209,17 +219,18 @@
          "properties":{
            "stations":{
              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
-              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509", "DE601", "DE602", "DE603", "DE604", "DE605", "DE609", "FR606", "SE607", "UK608", "PL610", "PL611", "PL612", "IE613", "LV614"]],
-              "default": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509", "DE601", "DE602", "DE603", "DE604", "DE605", "DE609", "FR606", "SE607", "UK608", "PL610", "PL611", "PL612", "IE613", "LV614"],
-              "uniqueItems": false
+              "enum": [["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509", "DE601", "DE602", "DE603", "DE604", "DE605", "DE609", "FR606", "SE607", "UK608", "PL610", "PL611", "PL612", "IE613", "LV614"]]
            },
            "max_nr_missing":{
-              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
-              "default": 6
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations"
            }
          },
          "required": ["stations", "max_nr_missing"],
-          "additionalProperties": false
+          "additionalProperties": false,
+          "default": {
+            "stations": ["CS001", "CS002", "CS003", "CS004", "CS005", "CS006", "CS007", "CS011", "CS013", "CS017", "CS021", "CS024", "CS026", "CS028", "CS030", "CS031", "CS032", "CS301", "CS302", "CS401", "CS501", "RS106", "RS205", "RS208", "RS210", "RS305", "RS306", "RS307", "RS310", "RS406", "RS407", "RS409", "RS503", "RS508", "RS509", "DE601", "DE602", "DE603", "DE604", "DE605", "DE609", "FR606", "SE607", "UK608", "PL610", "PL611", "PL612", "IE613", "LV614"],
+            "max_nr_missing": 6
+          }
        },
        {
          "title":"Custom",
@@ -227,20 +238,18 @@
          "type": "object",
          "properties":{
            "stations":{
-              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list",
-              "default": ["CS001"],
-              "minItems": 1,
-              "additionalItems": false,
-              "additionalProperties": false,
-              "uniqueItems": true
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/station_list"
            },
            "max_nr_missing":{
-              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations",
-              "default": 0
+              "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/stations/1#/definitions/max_number_of_missing_stations"
            }
          },
          "required": ["stations", "max_nr_missing"],
-          "additionalProperties": false
+          "additionalProperties": false,
+          "default": {
+            "stations": ["CS001"],
+            "max_nr_missing": 0
+          }
        }
      ]
    },
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-observation-pipeline-ingest-scheduling-unit-observation-strategy.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-observation-pipeline-ingest-scheduling-unit-observation-strategy.json
index 6ae834740335d9474e7351d58c3739b1bf154a2f..768804b59e502d0d94257c219b212e232cb7e6b4 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-observation-pipeline-ingest-scheduling-unit-observation-strategy.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/short-observation-pipeline-ingest-scheduling-unit-observation-strategy.json
@@ -25,12 +25,13 @@
       "antenna_set": "HBA_DUAL_INNER",
       "filter": "HBA_110_190",
       "station_groups": [
         {
-          "stations": ["CS002", "CS003", "CS004", "CS005", "CS006", "CS007"]
+          "stations": ["CS002", "CS003", "CS004", "CS005", "CS006", "CS007"],
+          "max_nr_missing": 1
         }],
       "tile_beam": {
         "direction_type": "J2000",
-        "angle1": 5.233660650313663,
-        "angle2": 0.7109404782526458
+        "angle1": 0.6624317181687094,
+        "angle2": 1.5579526427549426
       },
       "SAPs": [
         {
@@ -38,8 +39,8 @@
           "target": "CygA",
           "digital_pointing": {
             "direction_type": "J2000",
-            "angle1": 5.233660650313663,
-            "angle2": 0.7109404782526458
+            "angle1": 0.6624317181687094,
+            "angle2": 1.5579526427549426
           },
           "subbands": [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243]
         }
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-beamforming-observation-scheduling-unit-observation-strategy.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-beamforming-observation-scheduling-unit-observation-strategy.json
index 4d56ae8273810ae352ab54fbab2a37c2d2913399..bc6925c79cf44060f7962678a406b58c1609123a 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-beamforming-observation-scheduling-unit-observation-strategy.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-beamforming-observation-scheduling-unit-observation-strategy.json
@@ -13,8 +13,8 @@
           "target": "CygA",
           "digital_pointing": {
             "direction_type": "J2000",
-            "angle1": 5.233660650313663,
-            "angle2": 0.7109404782526458
+            "angle1": 0.6624317181687094,
+            "angle2": 1.5579526427549426
           },
           "subbands": [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243]
         }
@@ -26,8 +26,8 @@
       ],
       "tile_beam": {
         "direction_type": "J2000",
-        "angle1": 5.233660650313663,
-        "angle2": 0.7109404782526458
+        "angle1": 0.6624317181687094,
+        "angle2": 1.5579526427549426
       },
       "beamformers": [
         {
@@ -38,8 +38,8 @@
           "tabs": [{
             "pointing": {
               "direction_type": "J2000",
-              "angle1": 0,
-              "angle2": 0
+              "angle1": 0.6624317181687094,
+              "angle2": 1.5579526427549426
             },
             "relative": true
           }],
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-observation-scheduling-unit-observation-strategy.json b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-observation-scheduling-unit-observation-strategy.json
index 4ea17e719fad83f17b9746f474f1761f9682a48f..f598d9956f417f935d7af687ecd0d8ddd17d2a1b 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-observation-scheduling-unit-observation-strategy.json
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/schemas/simple-observation-scheduling-unit-observation-strategy.json
@@ -29,8 +29,8 @@
         }],
       "tile_beam": {
         "direction_type": "J2000",
-        "angle1": 5.233660650313663,
-        "angle2": 0.7109404782526458
+        "angle1": 0.6624317181687094,
+        "angle2": 1.5579526427549426
       },
       "SAPs": [
         {
@@ -38,8 +38,8 @@
           "target": "CygA",
           "digital_pointing": {
             "direction_type": "J2000",
-            "angle1": 5.233660650313663,
-            "angle2": 0.7109404782526458
+            "angle1": 0.6624317181687094,
+            "angle2": 1.5579526427549426
           },
           "subbands": [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243]
         }
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py
index e99dd864d74c15acb51854aa8f145c3a96bf9ea7..116fdbe86a22fa90e330fe30b53776c34ab343c2 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/subtasks.py
@@ -33,6 +33,9 @@ from lofar.sas.tmss.tmss.tmssapp.conversions import antennafields_for_antennaset
 from lofar.sas.tmss.tmss.exceptions import TMSSException
 from django.db import transaction
 
+# cache for json schemas
+_schema_cache = {}
+
 # ==== various create* methods to convert/create a TaskBlueprint into one or more Subtasks ====
 
 def check_prerequities_for_subtask_creation(task_blueprint: TaskBlueprint) -> bool:
@@ -155,7 +158,7 @@ def create_observation_subtask_specifications_from_observation_task_blueprint(ta
     # start with an observation subtask specification with all the defaults and the right structure according to the schema
     subtask_template = SubtaskTemplate.objects.get(name='observation control')
-    subtask_spec = get_default_json_object_for_schema(subtask_template.schema)
+    subtask_spec = get_default_json_object_for_schema(subtask_template.schema, cache=_schema_cache)
 
     # wipe the default pointings, these should come from the task_spec
     subtask_spec['stations'].pop('analog_pointing', None)
@@ -534,7 +537,7 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask)
     # step 1: create subtask in defining state, with filled-in subtask_template
     qafile_subtask_template = SubtaskTemplate.objects.get(name="QA file conversion")
-    qafile_subtask_spec = add_defaults_to_json_object_for_schema({}, qafile_subtask_template.schema)
+    qafile_subtask_spec = add_defaults_to_json_object_for_schema({}, qafile_subtask_template.schema, cache=_schema_cache)
     qafile_subtask_spec['nr_of_subbands'] = obs_task_qafile_spec.get("nr_of_subbands")
     qafile_subtask_spec['nr_of_timestamps'] = obs_task_qafile_spec.get("nr_of_timestamps")
     validate_json_against_schema(qafile_subtask_spec, qafile_subtask_template.schema)
@@ -551,7 +554,7 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask)
     # step 2: create and link subtask input/output
     selection_template = TaskRelationSelectionTemplate.objects.get(name="all")
-    selection_doc = get_default_json_object_for_schema(selection_template.schema)
+    selection_doc = get_default_json_object_for_schema(selection_template.schema, cache=_schema_cache)
 
     for obs_out in observation_subtask.outputs.all():
         qafile_subtask_input = SubtaskInput.objects.create(subtask=qafile_subtask,
@@ -615,7 +618,7 @@ def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subta
     # step 1: create subtask in defining state, with filled-in subtask_template
     qaplots_subtask_template = SubtaskTemplate.objects.get(name="QA plots")
-    qaplots_subtask_spec_doc = add_defaults_to_json_object_for_schema({}, qaplots_subtask_template.schema)
+    qaplots_subtask_spec_doc = add_defaults_to_json_object_for_schema({}, qaplots_subtask_template.schema, cache=_schema_cache)
     qaplots_subtask_spec_doc['autocorrelation'] = obs_task_qaplots_spec.get("autocorrelation")
     qaplots_subtask_spec_doc['crosscorrelation'] = obs_task_qaplots_spec.get("crosscorrelation")
     validate_json_against_schema(qaplots_subtask_spec_doc, qaplots_subtask_template.schema)
@@ -632,7 +635,7 @@ def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subta
     # step 2: create and link subtask input/output
     selection_template = TaskRelationSelectionTemplate.objects.get(name="all")
-    selection_doc = get_default_json_object_for_schema(selection_template.schema)
+    selection_doc = get_default_json_object_for_schema(selection_template.schema, cache=_schema_cache)
 
     qaplots_subtask_input = SubtaskInput.objects.create(subtask=qaplots_subtask,
                                                         producer=qafile_subtask.outputs.first(),
                                                         selection_doc=selection_doc,
@@ -667,8 +670,8 @@ def create_pipeline_subtask_from_task_blueprint(task_blueprint: TaskBlueprint, s
     # step 1: create subtask in defining state, with filled-in subtask_template
     subtask_template = SubtaskTemplate.objects.get(name=subtask_template_name)
-    default_subtask_specs = get_default_json_object_for_schema(subtask_template.schema)
-    task_specs_with_defaults = add_defaults_to_json_object_for_schema(task_blueprint.specifications_doc, task_blueprint.specifications_template.schema)
+    default_subtask_specs = get_default_json_object_for_schema(subtask_template.schema, cache=_schema_cache)
+    task_specs_with_defaults = add_defaults_to_json_object_for_schema(task_blueprint.specifications_doc, task_blueprint.specifications_template.schema, cache=_schema_cache)
     subtask_specs = generate_subtask_specs_from_task_spec_func(task_specs_with_defaults, default_subtask_specs)
 
     cluster_name = task_blueprint.specifications_doc.get("storage_cluster", "CEP4")
@@ -723,7 +726,7 @@ def create_ingest_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) ->
     # step 1: create subtask in defining state, with filled-in subtask_template
     subtask_template = SubtaskTemplate.objects.get(name='ingest control')
-    default_subtask_specs = get_default_json_object_for_schema(subtask_template.schema)
+    default_subtask_specs = get_default_json_object_for_schema(subtask_template.schema, cache=_schema_cache)
     subtask_specs = default_subtask_specs  # todo: translate specs from task to subtask once we have non-empty templates
     cluster_name = task_blueprint.specifications_doc.get("storage_cluster", "CEP4")
     subtask_data = {"start_time": None,
@@ -766,7 +769,7 @@ def create_cleanup_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) ->
     # step 1: create subtask in defining state, with filled-in subtask_template
     subtask_template = SubtaskTemplate.objects.get(name='cleanup')
-    subtask_specs = get_default_json_object_for_schema(subtask_template.schema)
+    subtask_specs = get_default_json_object_for_schema(subtask_template.schema, cache=_schema_cache)
     cluster_name = task_blueprint.specifications_doc.get("storage_cluster", "CEP4")
     subtask_data = {"start_time": None,
                     "stop_time": None,
@@ -1170,9 +1173,9 @@ def schedule_qafile_subtask(qafile_subtask: Subtask):
                                          dataformat=Dataformat.objects.get(value=Dataformat.Choices.QA_HDF5.value),
                                          datatype=Datatype.objects.get(value=Datatype.Choices.QUALITY.value),  # todo: is this correct?
                                          producer=qafile_subtask.outputs.first(),
-                                         specifications_doc=get_default_json_object_for_schema(DataproductSpecificationsTemplate.objects.get(name="empty").schema),
+                                         specifications_doc=get_default_json_object_for_schema(DataproductSpecificationsTemplate.objects.get(name="empty").schema, cache=_schema_cache),
                                          specifications_template=DataproductSpecificationsTemplate.objects.get(name="empty"),
-                                         feedback_doc=get_default_json_object_for_schema(DataproductFeedbackTemplate.objects.get(name="empty").schema),
+                                         feedback_doc=get_default_json_object_for_schema(DataproductFeedbackTemplate.objects.get(name="empty").schema, cache=_schema_cache),
                                          feedback_template=DataproductFeedbackTemplate.objects.get(name="empty"),
                                          sap=None  # todo: do we need to point to a SAP here? Of which dataproduct then?
                                          )
@@ -1223,9 +1226,9 @@ def schedule_qaplots_subtask(qaplots_subtask: Subtask):
                                          dataformat=Dataformat.objects.get(value=Dataformat.Choices.QA_PLOTS.value),
                                          datatype=Datatype.objects.get(value=Datatype.Choices.QUALITY.value),  # todo: is this correct?
                                                  producer=qaplots_subtask.outputs.first(),
-                                                 specifications_doc=get_default_json_object_for_schema(DataproductSpecificationsTemplate.objects.get(name="empty").schema),
+                                                 specifications_doc=get_default_json_object_for_schema(DataproductSpecificationsTemplate.objects.get(name="empty").schema, cache=_schema_cache),
                                                  specifications_template=DataproductSpecificationsTemplate.objects.get(name="empty"),
-                                                 feedback_doc=get_default_json_object_for_schema(DataproductFeedbackTemplate.objects.get(name="empty").schema),
+                                                 feedback_doc=get_default_json_object_for_schema(DataproductFeedbackTemplate.objects.get(name="empty").schema, cache=_schema_cache),
                                                  feedback_template=DataproductFeedbackTemplate.objects.get(name="empty"),
                                                  sap=None  # todo: do we need to point to a SAP here? Of which dataproduct then?
                                                  )
@@ -1336,7 +1339,7 @@ def schedule_observation_subtask(observation_subtask: Subtask):
     specifications_doc = observation_subtask.specifications_doc
     dataproduct_specifications_template = DataproductSpecificationsTemplate.objects.get(name="SAP")
     dataproduct_feedback_template = DataproductFeedbackTemplate.objects.get(name="empty")
-    dataproduct_feedback_doc = get_default_json_object_for_schema(dataproduct_feedback_template.schema)
+    dataproduct_feedback_doc = get_default_json_object_for_schema(dataproduct_feedback_template.schema, cache=_schema_cache)
 
     # select correct output for each pointing based on name
@@ -1433,7 +1436,7 @@ def schedule_observation_subtask(observation_subtask: Subtask):
                               producer=observation_subtask.outputs.first(),  # todo: select correct output. I tried "subtask_output_dict[sap['name']]" but tests fail because the sap's name is not in the task blueprint. Maybe it's just test setup and this should work?
                               specifications_doc={"sap": specifications_doc['stations']['digital_pointings'][sap_nr]["name"], "coherent": coherent, "identifiers": {"pipeline_index": pipeline_nr, "tab_index": tab_nr, "stokes_index": stokes_nr, "part_index": part_nr}},
                               specifications_template=dataproduct_specifications_template_timeseries,
-                              feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
+                              feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema, cache=_schema_cache),
                               feedback_template=dataproduct_feedback_template,
                               size=0,
                               expected_size=1024*1024*1024*tab_nr,
@@ -1503,7 +1506,7 @@ def _create_preprocessing_output_dataproducts_and_transforms(pipeline_subtask: S
                               producer=pipeline_subtask_output,
                               specifications_doc=input_dp.specifications_doc,
                               specifications_template=dataproduct_specifications_template,
-                              feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
+                              feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema, cache=_schema_cache),
                               feedback_template=dataproduct_feedback_template,
                               sap=input_dp.sap,
                               global_identifier=None) for input_dp in input_dataproducts]
@@ -1537,7 +1540,7 @@ def _create_pulsar_pipeline_output_dataproducts_and_transforms(pipeline_subtask:
                               producer=pipeline_subtask_output,
                               specifications_doc=input_dp.specifications_doc,
                               specifications_template=dataproduct_specifications_template,
-                              feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
+                              feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema, cache=_schema_cache),
                               feedback_template=dataproduct_feedback_template,
                               sap=input_dp.sap,
                               global_identifier=None) for input_dp in input_dataproducts]
@@ -1575,7 +1578,7 @@ def _create_pulsar_pipeline_output_dataproducts_and_transforms(pipeline_subtask:
                               producer=pipeline_subtask_output,
                               specifications_doc={ "coherent": is_coherent, "identifiers": { "obsid": obsid } },
                               specifications_template=dataproduct_specifications_template,
-                              feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
+                              feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema, cache=_schema_cache),
                               feedback_template=dataproduct_feedback_template,
                               sap=None,  # TODO: Can we say anything here, as summaries cover all SAPs
                               global_identifier=None) for (obsid, is_coherent) in summaries}
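The module-level _schema_cache introduced at the top of subtasks.py is handed to every get_default_json_object_for_schema / add_defaults_to_json_object_for_schema call above, so each referenced schema URL is resolved over HTTP at most once per cache window instead of once per call. A minimal sketch of the cache contract this relies on; the helper name and max age below are illustrative, not part of the patch:

    from datetime import datetime, timedelta

    _schema_cache = {}  # maps schema URL (without its '#/...' anchor) to (schema_dict, fetch_timestamp)

    def lookup_cached_schema(ref_url: str, max_cache_age: timedelta = timedelta(minutes=1)):
        # illustrative helper: return a cached schema while it is still fresh
        head = ref_url.partition('#')[0]
        if head in _schema_cache:
            schema, fetched_at = _schema_cache[head]
            if datetime.utcnow() - fetched_at <= max_cache_age:
                return schema  # fresh entry: no HTTP round-trip needed
        return None  # missing or stale: the caller fetches and stores (schema, datetime.utcnow())
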
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py b/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py
index a43d4d81c28c4cc5138f02645d1c9a0adbb066a2..b729f07ecdd7911ffe6f4094cbc2ff7d0a32bce9 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/tasks.py
@@ -15,6 +15,9 @@ from django.db import transaction
 
 logger = logging.getLogger(__name__)
 
+# cache for json schemas
+_schema_cache = {}
+
 def create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> models.SchedulingUnitBlueprint:
     """
     Create a SchedulingUnitBlueprint from the SchedulingUnitDraft
@@ -146,12 +149,12 @@ def create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft: models.
     if len(scheduling_unit_draft.requirements_doc.get("tasks", {})) == 0:
         raise BlueprintCreationException("create_task_drafts_from_scheduling_unit_draft: scheduling_unit_draft.id=%s has no tasks defined in its requirements_doc" % (scheduling_unit_draft.pk,))
 
     for task_name, task_definition in scheduling_unit_draft.requirements_doc["tasks"].items():
         task_template_name = task_definition["specifications_template"]
         task_template = models.TaskTemplate.objects.get(name=task_template_name)
         task_specifications_doc = task_definition["specifications_doc"]
-        task_specifications_doc = add_defaults_to_json_object_for_schema(task_specifications_doc, task_template.schema)
+        task_specifications_doc = add_defaults_to_json_object_for_schema(task_specifications_doc, task_template.schema, cache=_schema_cache)
 
         try:
             logger.debug("creating task draft... task_name='%s', task_template_name='%s'", task_template_name, task_template_name)
@@ -464,7 +467,7 @@ def create_cleanuptask_for_scheduling_unit_blueprint(scheduling_unit_blueprint:
     with transaction.atomic():
         # create a cleanup task draft and blueprint....
         cleanup_template = models.TaskTemplate.objects.get(name="cleanup")
-        cleanup_spec_doc = get_default_json_object_for_schema(cleanup_template.schema)
+        cleanup_spec_doc = get_default_json_object_for_schema(cleanup_template.schema, cache=_schema_cache)
 
         cleanup_task_draft = models.TaskDraft.objects.create(
             name="Cleanup",
@@ -487,7 +490,7 @@ def create_cleanuptask_for_scheduling_unit_blueprint(scheduling_unit_blueprint:
         # ... and connect the outputs of the producing tasks to the cleanup, so the cleanup task knows what to remove.
         selection_template = TaskRelationSelectionTemplate.objects.get(name="all")
-        selection_doc = get_default_json_object_for_schema(selection_template.schema)
+        selection_doc = get_default_json_object_for_schema(selection_template.schema, cache=_schema_cache)
 
         for producer_task_blueprint in scheduling_unit_blueprint.task_blueprints.exclude(specifications_template__type=TaskType.Choices.CLEANUP).exclude(specifications_template__type=TaskType.Choices.INGEST).all():
             for connector_type in producer_task_blueprint.specifications_template.output_connector_types.filter(iotype__value=IOType.Choices.OUTPUT.value).all():
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/views.py b/SAS/TMSS/backend/src/tmss/tmssapp/views.py
index c043399964b788b809194e49c1c0b6872e57fdfe..49d3489f2e2d71ea183a6a97490d795fcf34d08b 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/views.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/views.py
@@ -12,6 +12,7 @@ from rest_framework.authtoken.models import Token
 from rest_framework.permissions import AllowAny
 from rest_framework.decorators import authentication_classes, permission_classes
 from django.apps import apps
+import re
 from rest_framework.decorators import api_view
 from datetime import datetime
@@ -78,6 +79,32 @@ def get_template_json_schema(request, template:str, name:str, version:str):
 
     return response
 
+# Allow everybody to GET our publicly available LTA SIP XSD (XML Schema Definition for the LTA SIP)
+@permission_classes([AllowAny])
+@authentication_classes([AllowAny])
+@swagger_auto_schema(#method='GET',
+                     responses={200: 'Get the LTA SIP XSD',
+                                404: 'not available'},
+                     operation_description="Get the LTA SIP XSD.")
+#@api_view(['GET']) # todo: !! decorating this as api_view somehow breaks json ref resolution !! fix this and double url issue in urls.py, then use decorator here to include in Swagger
+def get_lta_sip_xsd(request):
+
+    lta_sip_xsd_path = os.path.join(os.environ["LOFARROOT"], "share", "lta", "LTA-SIP.xsd")
+    with open(lta_sip_xsd_path, 'rt') as file:
+        xsd = file.read()
+
+    # hacky way of setting the namespace to this url
+    # can/should be done with proper xml dom setAttribute on document node,
+    # but this string manipulation is faster, and works just as well.
+    # the namespace should point to the absolute url of this request, without the document name.
+    abs_uri = "%s://%s/%s" % (request.scheme, request.get_host().rstrip('/'), request.get_full_path().lstrip('/'))
+    abs_uri = abs_uri[:abs_uri.rfind('/')]
+    for attr in ('targetNamespace', 'xmlns'):
+        xsd = xsd.replace('''%s="http://www.astron.nl/SIP-Lofar"'''%attr, '''%s="%s"'''%(attr,abs_uri))
+
+    return HttpResponse(content=xsd, content_type='application/xml')
+
+
 # Allow everybody to GET our publicly available station group lookups
 @permission_classes([AllowAny])
 @authentication_classes([AllowAny])
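To illustrate the string-based namespace rewrite in get_lta_sip_xsd above, here is the same replace loop applied to a toy XSD fragment; the host below is an assumption for the example, not something the patch prescribes:

    xsd = '<xs:schema targetNamespace="http://www.astron.nl/SIP-Lofar" xmlns="http://www.astron.nl/SIP-Lofar">'
    abs_uri = "http://localhost:8000/xsd"  # assumed request URI with the document name already stripped
    for attr in ('targetNamespace', 'xmlns'):
        xsd = xsd.replace('%s="http://www.astron.nl/SIP-Lofar"' % attr, '%s="%s"' % (attr, abs_uri))
    print(xsd)  # both attributes now point at the serving host instead of astron.nl
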
diff --git a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py
index 28ad1dbc6a6174dcb31ee2db8d88e0f954228001..df92de0ce14cdcd67cc4913818ee26ea7a140127 100644
--- a/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py
+++ b/SAS/TMSS/backend/src/tmss/tmssapp/viewsets/scheduling.py
@@ -395,9 +395,24 @@ class DataproductViewSet(LOFARViewSet):
                          operation_description="Get the Submission Information Package (SIP) for this dataproduct")
     @action(methods=['get'], detail=True, url_name="sip")
     def sip(self, request, pk=None):
-        dataproduct = get_object_or_404(models.Dataproduct, pk=pk)
         from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduct
-        return HttpResponse(generate_sip_for_dataproduct(dataproduct).get_prettyxml(), content_type='application/xml')
+        from lofar.sas.tmss.tmss.tmssapp import views
+        from django.urls import reverse
+
+        # get the dataproduct...
+        dataproduct = get_object_or_404(models.Dataproduct, pk=pk)
+
+        # construct the schema location for the sip
+        lta_sip_xsd_path = reverse(views.get_lta_sip_xsd)
+        lta_sip_xsd_uri = "%s://%s/%s" % (request.scheme, request.get_host().rstrip('/'), lta_sip_xsd_path.lstrip('/'))
+        # the schema_location is a 2-part value: the path, a space, and the document name.
+        schema_location = lta_sip_xsd_uri[:lta_sip_xsd_uri.rfind('/')] + ' ' + lta_sip_xsd_uri[lta_sip_xsd_uri.rfind('/')+1:]
+
+        # generate the sip
+        sip = generate_sip_for_dataproduct(dataproduct).get_prettyxml(schema_location=schema_location)
+
+        # and return it
+        return HttpResponse(sip, content_type='application/xml')
 
     @swagger_auto_schema(responses={200: 'The SIP graph for this dataproduct',
                                     403: 'forbidden'},
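The schema_location built in sip() follows the usual two-part xsi:schemaLocation convention: namespace URI, a space, then the schema document name. A small worked example with an assumed host:

    lta_sip_xsd_uri = "http://localhost:8000/xsd/LTA-SIP.xsd"  # assumed absolute URI of the XSD view above
    split_at = lta_sip_xsd_uri.rfind('/')
    schema_location = lta_sip_xsd_uri[:split_at] + ' ' + lta_sip_xsd_uri[split_at + 1:]
    assert schema_location == "http://localhost:8000/xsd LTA-SIP.xsd"
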
diff --git a/SAS/TMSS/backend/src/tmss/urls.py b/SAS/TMSS/backend/src/tmss/urls.py
index c077e51431b29da1484c0653421d54c27a7a5f91..9d84e25fefe8577ec79a92894da50ab626b65e37 100644
--- a/SAS/TMSS/backend/src/tmss/urls.py
+++ b/SAS/TMSS/backend/src/tmss/urls.py
@@ -67,6 +67,7 @@ urlpatterns = [
     #re_path('schemas/<str:template>/<str:name>/<str:version>', views.get_template_json_schema, name='get_template_json_schema'), # !! use of regex here breaks reverse url lookup
     path('schemas/<str:template>/<str:name>/<str:version>', views.get_template_json_schema, name='get_template_json_schema'), # !! two urls for same view break Swagger, one url break json ref resolution !!
     path('schemas/<str:template>/<str:name>/<str:version>/', views.get_template_json_schema, name='get_template_json_schema'), # !! two urls for same view break Swagger, one url break json ref resolution !!
+    path('xsd/LTA-SIP.xsd', views.get_lta_sip_xsd, name='get_lta_sip_xsd'),
     #re_path('station_groups/<str:template_name>/<str:template_version>/<str:station_group>/?', views.get_stations_in_group, name='get_stations_in_group'), # !! use of regex here somehow breaks functionality (because parameters?) -> index page
     path('station_groups/<str:template_name>/<str:template_version>/<str:station_group>', views.get_stations_in_group, name='get_stations_in_group'),
     path('station_groups/<str:template_name>/<str:template_version>/<str:station_group>/', views.get_stations_in_group, name='get_stations_in_group'),
diff --git a/SAS/TMSS/backend/test/test_environment.py b/SAS/TMSS/backend/test/test_environment.py
index 2cf2d6c51f8f246101f405113a20d2437bbdc8f2..7a4dd9679ff8b5590b30091c683747becab3bac4 100644
--- a/SAS/TMSS/backend/test/test_environment.py
+++ b/SAS/TMSS/backend/test/test_environment.py
@@ -631,7 +631,7 @@ def main_test_environment():
 
 def create_scheduling_unit_blueprint_simulator(scheduling_unit_blueprint_id: int, stop_event: threading.Event,
                                                handle_observations: bool = True, handle_pipelines: bool = True,
-                                               handle_QA: bool = True, handle_ingest: bool = True,
+                                               handle_QA: bool = True, handle_ingest: bool = True, handle_cleanup: bool = True,
                                                auto_grant_ingest_permission: bool = True,
                                                delay: float=1, duration: float=5,
                                                create_output_dataproducts: bool=False,
@@ -653,7 +653,7 @@ def create_scheduling_unit_blueprint_simulator(scheduling_unit_blueprint_id: int
     class SimulationEventHandler(TMSSEventMessageHandler):
         def __init__(self, scheduling_unit_blueprint_id: int, stop_event: threading.Event,
                      handle_observations: bool = True, handle_pipelines: bool = True,
-                     handle_QA: bool = True, handle_ingest: bool = True,
+                     handle_QA: bool = True, handle_ingest: bool = True, handle_cleanup: bool = True,
                      delay: float = 1, duration: float = 10,
                      create_output_dataproducts: bool=False) -> None:
             super().__init__(log_event_messages=False)
@@ -663,13 +663,14 @@ def create_scheduling_unit_blueprint_simulator(scheduling_unit_blueprint_id: int
             self.handle_pipelines = handle_pipelines
             self.handle_QA = handle_QA
             self.handle_ingest = handle_ingest
+            self.handle_cleanup = handle_cleanup
             self.auto_grant_ingest_permission = auto_grant_ingest_permission
             self.delay = delay
             self.duration = duration
             self.create_output_dataproducts = create_output_dataproducts
 
         def need_to_handle(self, subtask: models.Subtask) -> bool:
-            if self.scheduling_unit_blueprint_id in [tb.scheduling_unit_blueprint.id for tb in subtask.task_blueprints.all()]:
+            if self.scheduling_unit_blueprint_id not in [tb.scheduling_unit_blueprint.id for tb in subtask.task_blueprints.all()]:
                 return False
 
             if subtask.specifications_template.type.value == models.SubtaskType.Choices.OBSERVATION.value and not self.handle_observations:
@@ -685,6 +686,9 @@ def create_scheduling_unit_blueprint_simulator(scheduling_unit_blueprint_id: int
             if subtask.specifications_template.type.value == models.SubtaskType.Choices.INGEST.value and not self.handle_ingest:
                 return False
 
+            if subtask.specifications_template.type.value == models.SubtaskType.Choices.CLEANUP.value and not self.handle_cleanup:
+                return False
+
             return True
 
         def start_handling(self):
@@ -694,21 +698,21 @@ def create_scheduling_unit_blueprint_simulator(scheduling_unit_blueprint_id: int
 
             logger.info("starting to simulate a run for scheduling_unit id=%s ...", self.scheduling_unit_blueprint_id)
 
-            super().start_handling()
-
             try:
                 # exit if already finished
                 scheduling_unit = models.SchedulingUnitBlueprint.objects.get(id=self.scheduling_unit_blueprint_id)
-                if scheduling_unit.status in ["finished", "error"]:
+                if scheduling_unit.status in ["finished", "error", "cancelled"]:
                     logger.info("scheduling_unit id=%s name='%s' has status=%s -> not simulating", scheduling_unit.id, scheduling_unit.name, scheduling_unit.status)
                     self.stop_event.set()
                     return
             except models.SchedulingUnitBlueprint.DoesNotExist:
                 pass
 
+            super().start_handling()
+
             # trick: trigger any already scheduled subtasks, cascading in events simulating the run
-            subtasks = models.Subtask.objects.filter(task_blueprints__scheduling_unit_blueprint_id=self.scheduling_unit_blueprint_id)
-            for subtask in subtasks.filter(state__value=models.SubtaskState.Choices.SCHEDULED.value):
+            scheduled_subtasks = models.Subtask.objects.filter(task_blueprints__scheduling_unit_blueprint_id=self.scheduling_unit_blueprint_id).filter(state__value=models.SubtaskState.Choices.SCHEDULED.value).all()
+            for subtask in scheduled_subtasks:
                 self.onSubTaskStatusChanged(subtask.id, "scheduled")
 
             # schedule the defined subtasks, cascading in events simulating the run
@@ -760,8 +764,7 @@ def create_scheduling_unit_blueprint_simulator(scheduling_unit_blueprint_id: int
             if not self.need_to_handle(subtask):
                 return
 
-            logger.info("subtask id=%s type='%s' now has status='%s'", id, subtask.specifications_template.type.value,
-                        status)
+            logger.info("subtask id=%s type='%s' has status='%s'", id, subtask.specifications_template.type.value, status)
 
             next_state = None
             if status == models.SubtaskState.Choices.SCHEDULED.value:
@@ -856,7 +859,7 @@ def create_scheduling_unit_blueprint_simulator(scheduling_unit_blueprint_id: int
     return BusListenerJanitor(TMSSBusListener(SimulationEventHandler, handler_kwargs={'scheduling_unit_blueprint_id': scheduling_unit_blueprint_id,
                                                                                       'stop_event': stop_event,
                                                                                       'handle_observations': handle_observations, 'handle_pipelines': handle_pipelines,
-                                                                                      'handle_QA': handle_QA, 'handle_ingest': handle_ingest,
+                                                                                      'handle_QA': handle_QA, 'handle_ingest': handle_ingest, 'handle_cleanup': handle_cleanup,
                                                                                       'create_output_dataproducts': create_output_dataproducts,
                                                                                       'delay': delay, 'duration': duration},
                                               exchange=exchange, broker=broker))
@@ -883,12 +886,14 @@ def main_scheduling_unit_blueprint_simulator():
     group.add_option('-p', '--pipeline', dest='pipeline', action='store_true', help='simulate events for pipeline subtasks')
     group.add_option('-Q', '--QA', dest='QA', action='store_true', help='simulate events for QA subtasks')
     group.add_option('-i', '--ingest', dest='ingest', action='store_true', help='simulate events for ingest subtasks')
+    group.add_option('-c', '--cleanup', dest='cleanup', action='store_true', help='simulate events for cleanup subtasks')
 
     group = OptionGroup(parser, 'Simulation parameters')
     parser.add_option_group(group)
     group.add_option('-e', '--event_delay', dest='event_delay', type='float', default=1.0, help='wait <event_delay> seconds between simulating events to mimic real-world behaviour, default: %default')
     group.add_option('-d', '--duration', dest='duration', type='float', default=60.0, help='wait <duration> seconds while "observing"/"processing" between started and finishing state to mimic real-world behaviour, default: %default')
     group.add_option('-g', '--grant_ingest_permission', dest='grant_ingest_permission', action='store_true', help='automatically grant ingest permission for ingest subtasks if needed')
+    group.add_option('-f', '--create_output_dataproducts', dest='create_output_dataproducts', action='store_true', help='create small fake output dataproduct files for the observation and pipeline subtask(s)')
 
     group = OptionGroup(parser, 'Messaging options')
     parser.add_option_group(group)
@@ -906,11 +911,12 @@ def main_scheduling_unit_blueprint_simulator():
 
     scheduling_unit_blueprint_id = int(args[0])
 
-    if not (options.observation or options.pipeline or options.QA or options.ingest):
+    if not (options.observation or options.pipeline or options.QA or options.ingest or options.cleanup):
         options.observation = True
         options.pipeline = True
         options.QA = True
         options.ingest = True
+        options.cleanup = True
 
     from lofar.sas.tmss.tmss import setup_and_check_tmss_django_database_connection_and_exit_on_error
     setup_and_check_tmss_django_database_connection_and_exit_on_error(options.dbcredentials)
@@ -919,8 +925,9 @@ def main_scheduling_unit_blueprint_simulator():
     with create_scheduling_unit_blueprint_simulator(scheduling_unit_blueprint_id, stop_event=stop_event,
                                                     delay=options.event_delay, duration=options.duration,
                                                     handle_observations=bool(options.observation), handle_pipelines=bool(options.pipeline),
-                                                    handle_QA=bool(options.QA), handle_ingest=bool(options.ingest),
+                                                    handle_QA=bool(options.QA), handle_ingest=bool(options.ingest), handle_cleanup=bool(options.cleanup),
                                                     auto_grant_ingest_permission=bool(options.grant_ingest_permission),
+                                                    create_output_dataproducts=bool(options.create_output_dataproducts),
                                                     exchange=options.exchange, broker=options.broker):
         print("Press Ctrl-C to exit")
         try:
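With these changes the simulator also covers cleanup subtasks (included by default when no type flags are given) and can fabricate small output dataproduct files. A hypothetical invocation; the console-script name is an assumption, the flags are the ones defined above:

    # simulate scheduling unit 42, including cleanup, writing fake dataproducts,
    # with 10s of simulated 'observing' and 0.5s between events
    tmss_simulate_scheduling_unit_run 42 --cleanup --create_output_dataproducts -d 10 -e 0.5
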
diff --git a/SAS/TMSS/client/lib/populate.py b/SAS/TMSS/client/lib/populate.py
index 952b0e27de20aa016f8d2de0c7622a59a964e657..db1898204cf2a70e0b1960207f90fbbe713acd06 100644
--- a/SAS/TMSS/client/lib/populate.py
+++ b/SAS/TMSS/client/lib/populate.py
@@ -100,8 +100,12 @@ def populate_schemas(schema_dir: str=None, templates_filename: str=None):
 
     # helper functions for uploading
     def upload_template(template: dict):
-        logger.info("Uploading template with name='%s' version='%s'", template['name'], template['version'])
-        client.post_template(template_path=template.pop('template'), **template)
+        try:
+            logger.info("Uploading template with name='%s' version='%s'", template['name'], template['version'])
+            client.post_template(template_path=template.pop('template'), **template)
+        except Exception as e:
+            logger.error("Error while uploading template with name='%s' version='%s': %s",
+                         template['name'], template['version'], e)
 
     # helper functions for uploading
     def upload_template_if_needed_with_dependents_first(id: str):
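The try/except in upload_template turns one failed upload into a logged error instead of aborting the whole populate run, so the remaining templates are still uploaded. A sketch of the effect, with made-up template entries:

    # illustrative only: a broken template no longer stops the ones after it
    for template in [{'name': 'good', 'version': '1', 'template': 'good.json'},
                     {'name': 'broken', 'version': '1', 'template': 'missing.json'},
                     {'name': 'also-good', 'version': '1', 'template': 'ok.json'}]:
        upload_template(template)  # 'broken' logs an error; 'also-good' is still uploaded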