diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ca2d341bd9bee6a1976a8026d522e19277228a9f..cbdac6666da10c7e0092d604e6eec625fccbd928 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -133,6 +133,12 @@ unit_test_TMSS: - SKIP_INTEGRATION_TESTS=true ctest dependencies: - build_TMSS + services: + - rabbitmq:latest + variables: + RABBITMQ_DEFAULT_USER: guest + RABBITMQ_DEFAULT_PASS: guest + LOFAR_DEFAULT_BROKER: 'rabbitmq' # override default 'localhost' which does not work for CI service rabbitmq. artifacts: name: unit-test-report when: always diff --git a/LCS/PyCommon/lcu_utils.py b/LCS/PyCommon/lcu_utils.py index 1377d1004ef4c716ab7ab2fd8b4ec062c0b021bc..c0a17b1e811a91f0a176eb2a21d8c804c6662564 100755 --- a/LCS/PyCommon/lcu_utils.py +++ b/LCS/PyCommon/lcu_utils.py @@ -126,6 +126,11 @@ def get_current_stations(station_group='today', as_host_names=True): :param as_host_names - bool: return the station names as ssh-able hostnames if True (like cs001c, cs002c). return the station names as parset-like VirtualInstrument.stationList names if False (like CS001, CS002). :return: the station names for the given station_group as ssh-able hostnames if as_host_names=True (like cs001c, cs002c) or as parset-like VirtualInstrument.stationList names if as_host_names=False (like CS001, CS002). ''' + # sanitize + station_group = station_group.lower().strip() + if station_group == 'all': + station_group = 'today' + cmd = ['cat', '/opt/operations/bin/stations.txt'] cmd = wrap_command_in_lcu_head_node_ssh_call(cmd) logger.debug('executing cmd: %s', ' '.join(cmd)) @@ -136,7 +141,7 @@ def get_current_stations(station_group='today', as_host_names=True): raise LCURuntimeError("Could not fetch stations.txt file. sdterr=%s" % (err, )) station_file_lines = out.splitlines(False) - station_group_filter = station_group.strip()+' ' + station_group_filter = station_group +' ' station_group_line = next(l for l in station_file_lines if l.startswith(station_group_filter)) station_aliases = station_group_line.split(' ')[-1].split(',') station_hostnames = [] diff --git a/SAS/TMSS/src/tmss/exceptions.py b/SAS/TMSS/src/tmss/exceptions.py index ccdf97362bd9d4e8f9c899f162458948c502ee22..a320dbd527a5a58a0d7274836beb66f9f5387c1c 100644 --- a/SAS/TMSS/src/tmss/exceptions.py +++ b/SAS/TMSS/src/tmss/exceptions.py @@ -8,6 +8,9 @@ class SchemaValidationException(TMSSException): class ConversionException(TMSSException): pass +class BlueprintCreationException(ConversionException): + pass + class SubtaskCreationException(ConversionException): pass diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/task-calibrator-addon.json b/SAS/TMSS/src/tmss/tmssapp/schemas/task-calibrator-addon.json index a3086e92547c7a26a676e0639adfdba66abcc610..a1642f634d20f905c7dbca91b0ad078c27c0479b 100644 --- a/SAS/TMSS/src/tmss/tmssapp/schemas/task-calibrator-addon.json +++ b/SAS/TMSS/src/tmss/tmssapp/schemas/task-calibrator-addon.json @@ -31,19 +31,19 @@ "angle1": { "type": "number", "title": "Angle 1", - "description": "First angle (f.e. RA)", + "description": "First angle [rad] (e.g. RA)", "default": 0 }, "angle2": { "type": "number", "title": "Angle 2", - "description": "Second angle (f.e. DEC)", + "description": "Second angle [rad] (e.g. DEC)", "default": 0 }, "angle3": { "type": "number", "title": "Angle 3", - "description": "Third angle (f.e. N in LMN)", + "description": "Third angle [rad] (e.g. 
N in LMN)", "default": 0 } }, diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/task-observation-with-stations.json b/SAS/TMSS/src/tmss/tmssapp/schemas/task-observation-with-stations.json index d1e1fd20c1eec0d688d6d54e81fcf42ca8406b02..278ccb2a816bc645290dcafbddcf5d9d83eece79 100644 --- a/SAS/TMSS/src/tmss/tmssapp/schemas/task-observation-with-stations.json +++ b/SAS/TMSS/src/tmss/tmssapp/schemas/task-observation-with-stations.json @@ -57,7 +57,7 @@ "properties": { "stations": { "title": "Station list", - "default": [{}], + "default": ["CS001"], "oneOf": [ { "type": "array", diff --git a/SAS/TMSS/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/src/tmss/tmssapp/subtasks.py index f7e5488817a435b7d4d4f3b76c266408730a526d..361de1d280d742046c27723712db7c38d9a2fb3d 100644 --- a/SAS/TMSS/src/tmss/tmssapp/subtasks.py +++ b/SAS/TMSS/src/tmss/tmssapp/subtasks.py @@ -7,6 +7,7 @@ from collections.abc import Iterable from lofar.common.datetimeutils import formatDatetime from lofar.common import isProductionEnvironment from lofar.common.json_utils import add_defaults_to_json_object_for_schema, get_default_json_object_for_schema +from lofar.common.lcu_utils import get_current_stations from lofar.sas.tmss.tmss.exceptions import SubtaskCreationException, SubtaskSchedulingException @@ -15,6 +16,7 @@ from lofar.common.datetimeutils import parseDatetime from lofar.common.json_utils import add_defaults_to_json_object_for_schema from lofar.sas.tmss.tmss.tmssapp.models import * from lofar.sas.resourceassignment.resourceassigner.rarpc import RARPC +from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset_dict # ==== various create* methods to convert/create a TaskBlueprint into one or more Subtasks ==== @@ -34,7 +36,6 @@ def create_subtasks_from_task_blueprint(task_blueprint: TaskBlueprint) -> [Subta create_qafile_subtask_from_task_blueprint, create_qaplots_subtask_from_task_blueprint], 'preprocessing schema': [create_preprocessing_subtask_from_task_blueprint]} - generators_mapping['calibrator schema'] = generators_mapping['observation schema'] template_name = task_blueprint.specifications_template.name @@ -53,6 +54,135 @@ def create_subtasks_from_task_blueprint(task_blueprint: TaskBlueprint) -> [Subta raise SubtaskCreationException('Cannot create subtasks for task id=%s because no generator exists for its schema name=%s' % (task_blueprint.pk, template_name)) +def create_observation_subtask_specifications_from_observation_task_blueprint(task_blueprint: TaskBlueprint) -> (dict, SubtaskTemplate): + """ + Create a valid observation subtask specification ('observationcontrol schema' SubtaskTemplate schema) based on the task_blueprint's settings + """ + + # check if task_blueprint has an observation-like specification + if task_blueprint.specifications_template.name.lower() not in ['observation schema', 'calibrator schema']: + raise SubtaskCreationException("Cannot create observation subtask specifications from task_blueprint id=%s with template name='%s'" % ( + task_blueprint.id, task_blueprint.specifications_template.name)) + + # start with an observation subtask specification with all the defaults and the right structure according to the schema + subtask_template = SubtaskTemplate.objects.get(name='observationcontrol schema') + subtask_spec = get_default_json_object_for_schema(subtask_template.schema) + + # wipe the default pointings, these should come from the task_spec + subtask_spec['stations']['analog_pointing'] = {} + 
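# (For orientation, a rough sketch of the 'stations' part that the code below fills in,
#  following the structure of the hard-coded example specification that this function
#  replaces further down in this file; all values are illustrative only:
#
#      subtask_spec['stations'] = {
#          'station_list': ['CS001', 'CS002'],
#          'antenna_set': 'HBA_DUAL_INNER',
#          'filter': 'HBA_110_190',
#          'analog_pointing': {'direction_type': 'J2000', 'angle1': 0.42, 'angle2': 0.58},
#          'digital_pointings': [{'name': '3C48',
#                                 'pointing': {'direction_type': 'J2000', 'angle1': 0.42, 'angle2': 0.58},
#                                 'subbands': [0, 1, 2]}]}
# )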
subtask_spec['stations']['digital_pointings'] = [] + + # now go over the settings in the task_spec and 'copy'/'convert' them to the subtask_spec + task_spec = task_blueprint.specifications_doc + + # The calibrator has a minimal calibration-specific specification subset. + # The rest of it's specs are 'shared' with the target observation. + # So... copy the calibrator specs first, then loop over the shared target/calibrator specs... + if 'calibrator' in task_blueprint.specifications_template.name.lower(): + subtask_spec['stations']['analog_pointing'] = {"direction_type": task_spec["pointing"]["direction_type"], + "angle1": task_spec["pointing"]["angle1"], + "angle2": task_spec["pointing"]["angle2"]} + # for the calibrator, the digital pointing is equal to the analog pointing + subtask_spec['stations']['digital_pointings'] = [ {'name': 'calibrator', # there is no name for the calibrator pointing in the task spec + 'subbands': list(range(0,488)), # there are no subbands for the calibrator pointing in the task spec + 'pointing': subtask_spec['stations']['analog_pointing'] } ] + + if task_spec.get('autoselect', False): + logger.info("auto-selecting calibrator target based on elevation of target observation...") + # what to do? overrive the pointing??? + #TODO: implement + + target_task_blueprint = get_related_target_observation_task_blueprint(task_blueprint) + if target_task_blueprint is None: + raise SubtaskCreationException("Cannot create calibrator observation subtask specifications from task_blueprint id=%s with template name='%s' because no related target observation task_blueprint is found" % ( + task_blueprint.id, task_blueprint.specifications_template.name)) + + task_spec = target_task_blueprint.specifications_doc + logger.info("Using station and correlator settings for calibrator observation task_blueprint id=%s from target observation task_blueprint id=%s", task_blueprint.id, target_task_blueprint.id) + + subtask_spec['stations']["antenna_set"] = task_spec["antenna_set"] + subtask_spec['stations']["filter"] = task_spec["filter"] + + if "stations" in task_spec: + if "group" in task_spec["stations"][0]: + try: + # retrieve stations in group from RADB virtual instrument + station_group_name = task_spec["stations"][0]["group"] + subtask_spec['stations']['station_list'] = get_stations_in_group(station_group_name) + except Exception as e: + raise SubtaskCreationException("Could not determine stations in group '%s' for task_blueprint id=%s. 
Error: %s" % ( + station_group_name, task_blueprint.id, e)) + else: + subtask_spec['stations']['station_list'] = task_spec["stations"] + + if 'calibrator' not in task_blueprint.specifications_template.name.lower(): + # copy/convert the analoge/digital_pointings only for non-calibrator observations (the calibrator has its own pointing) + for sap in task_spec.get("SAPs", []): + subtask_spec['stations']['digital_pointings'].append( + {"name": sap["name"], + "pointing": {"direction_type": sap["digital_pointing"]["direction_type"], + "angle1": sap["digital_pointing"]["angle1"], + "angle2": sap["digital_pointing"]["angle2"]}, + "subbands": sap["subbands"] + }) + + if "tile_beam" in task_spec: + subtask_spec['stations']['analog_pointing'] = { "direction_type": task_spec["tile_beam"]["direction_type"], + "angle1": task_spec["tile_beam"]["angle1"], + "angle2": task_spec["tile_beam"]["angle2"] } + + if "correlator" in task_spec: + subtask_spec["COBALT"]["correlator"]["channels_per_subband"] = task_spec["correlator"]["channels_per_subband"] + + # TODO: compute remaining subtask correlator settings from task_spec ? + # subtask_spec["COBALT"]["correlator"]["integration_time"] = task_spec["correlator"]["integration_time"] + # subtask_spec["COBALT"]["correlator"]["storage_cluster"] = task_spec["correlator"]["storage_cluster"] + + # make sure that the subtask_spec is valid conform the schema + validate_json_against_schema(subtask_spec, subtask_template.schema) + + return subtask_spec, subtask_template + + +def get_stations_in_group(station_group_name: str) -> []: + '''Get a list of station names in the given station_group. + A lookup is performed in the RADB, in the virtual instrument table''' + with RADBRPC.create() as radbrpc: + resource_group_memberships = radbrpc.getResourceGroupMemberships()['groups'] + station_resource_group = next(rg for rg in resource_group_memberships.values() + if (rg['resource_group_type'] == 'station_group' or rg['resource_group_type'] == 'virtual') and rg['resource_group_name'] == station_group_name) + station_names = set(resource_group_memberships[rg_id]['resource_group_name'] for rg_id in station_resource_group['child_ids'] + if resource_group_memberships[rg_id]['resource_group_type'] == 'station') + + # HACK, RS408 should be removed from the RADB + if 'RS408' in station_names: + station_names.remove('RS408') + + return sorted(list(station_names)) + + +def get_related_target_observation_task_blueprint(calibrator_task_blueprint: TaskBlueprint) -> TaskBlueprint: + """ + get the related target observation task_blueprint for the given calibrator task_blueprint + if nothing found return None + """ + if 'calibrator' not in calibrator_task_blueprint.specifications_template.name.lower(): + raise ValueError("Cannot get a related target observation task_blueprint for non-calibrator task_blueprint id=%s template_name='%s'", + calibrator_task_blueprint.id, calibrator_task_blueprint.specifications_template.name) + + try: + return next(relation.second for relation in TaskSchedulingRelationBlueprint.objects.filter(first=calibrator_task_blueprint).all() + if relation.second is not None and relation.second.specifications_template.name.lower() == 'observation schema') + except StopIteration: + try: + return next(relation.first for relation in TaskSchedulingRelationBlueprint.objects.filter(second=calibrator_task_blueprint).all() + if relation.first is not None and relation.first.specifications_template.name.lower() == 'observation schema') + except StopIteration: + logger.info("No related 
target observation task_blueprint found for calibrator observation task_blueprint id=%d", calibrator_task_blueprint.id) + + return None + + def create_observation_control_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> Subtask: """ Create an observation control subtask . @@ -63,27 +193,7 @@ def create_observation_control_subtask_from_task_blueprint(task_blueprint: TaskB check_prerequities_for_subtask_creation(task_blueprint) # step 1: create subtask in defining state - subtask_template = SubtaskTemplate.objects.get(name='observationcontrol schema') - # This is some 'extra' specification to add to subtask ... where should it comes from, - # currently not defined in task ? - extra_specifications_doc = { - "stations": {"station_list": ["RS106", "RS205"], - "antenna_set": "HBA_DUAL_INNER", - "filter": "HBA_110_190", - "analog_pointing": {"direction_type": "J2000", - "angle1": 0.4262457643630986, - "angle2": 0.5787463318245085}, - "digital_pointings": [{"name": "3C48", - "pointing": {"direction_type": "J2000", - "angle1": 0.4262457643630986, - "angle1": 0.4262457643630986, - "angle2": 0.5787463318245085}, - "subbands": list(range(0, 244)) - }] - } - } - - specifications_doc = add_defaults_to_json_object_for_schema(extra_specifications_doc, subtask_template.schema) + specifications_doc, subtask_template = create_observation_subtask_specifications_from_observation_task_blueprint(task_blueprint) cluster_name = task_blueprint.specifications_doc.get("storage_cluster", "CEP4") subtask_data = { "start_time": None, "stop_time": None, @@ -176,13 +286,14 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask) # done, now return the subtask, and allow the system to wait for the predecessors to be finished before we schedule this qa_file_subtask return qafile_subtask + def create_qaplots_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> Subtask: qafile_subtasks = [st for st in task_blueprint.subtasks.all() if st.specifications_template.type.value == SubtaskType.Choices.QA_FILES.value] if qafile_subtasks: qafile_subtask = qafile_subtasks[0] # TODO: decide what to do when there are multiple qafile subtasks? return create_qaplots_subtask_from_qafile_subtask(qafile_subtask) else: - raise SubtaskCreationException('Cannot create QA plotting subtask for task id=%s because QA file conversion subtask exists.' % (task_blueprint.pk, )) + raise SubtaskCreationException('Cannot create QA plotting subtask for task id=%s because no predecessor QA file conversion subtask exists.' 
% (task_blueprint.pk, )) def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subtask: @@ -256,6 +367,7 @@ def create_preprocessing_subtask_from_task_blueprint(task_blueprint: TaskBluepri subtask_template = SubtaskTemplate.objects.get(name='pipelinecontrol schema') default_subtask_specs = get_default_json_object_for_schema(subtask_template.schema) subtask_specs = _generate_subtask_specs_from_preprocessing_task_specs(task_blueprint.specifications_doc, default_subtask_specs) + cluster_name = task_blueprint.specifications_doc.get("storage_cluster", "CEP4") subtask_data = { "start_time": None, "stop_time": None, "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value), @@ -264,7 +376,7 @@ def create_preprocessing_subtask_from_task_blueprint(task_blueprint: TaskBluepri "specifications_doc": subtask_specs, "priority": 1, "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), - "cluster": Cluster.objects.get(name="CEP4") } + "cluster": Cluster.objects.get(name=cluster_name) } subtask = Subtask.objects.create(**subtask_data) # step 2: create and link subtask input/output @@ -466,6 +578,63 @@ def schedule_qaplots_subtask(qaplots_subtask: Subtask): return qaplots_subtask +def get_previous_related_task_blueprint_with_time_offset(task_blueprint): + """ + Retrieve the the previous related blueprint task object (if any) + if nothing found return None, 0. + :param task_blueprint: + :return: previous_related_task_blueprint, + time_offset (in seconds) + """ + logger.info("get_previous_related_task_blueprint_with_time_offset %s (id=%s)", task_blueprint.name, task_blueprint.pk) + previous_related_task_blueprint = None + time_offset = 0 + + scheduling_relations = list(task_blueprint.first_to_connect.all()) + list(task_blueprint.second_to_connect.all()) + for scheduling_relation in scheduling_relations: + if scheduling_relation.first.id == task_blueprint.id and scheduling_relation.placement.value == "after": + previous_related_task_blueprint = TaskBlueprint.objects.get(id=scheduling_relation.second.id) + time_offset = scheduling_relation.time_offset + + if scheduling_relation.second.id == task_blueprint.id and scheduling_relation.placement.value == "before": + previous_related_task_blueprint = TaskBlueprint.objects.get(id=scheduling_relation.first.id) + time_offset = scheduling_relation.time_offset + + return previous_related_task_blueprint, time_offset + + +def calculate_start_time(observation_subtask: Subtask): + """ + Calculate the start time of an observation subtask. It should calculate the starttime in case of 'C-T-C train' + The start time of an observation depends on the start_time+duration and offset time of the previous observation + and so its scheduling relations should be known. + If there is no previous observation the 'default' start time is in two minutes from now + For demo purposes, will be changed into dynamic scheduled in the future + Note that the method is not robust now when previous start time is unknown. 
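    Worked example of the normal case: a previous observation starting at 12:00:00 UTC with duration=600 [sec]
    and a scheduling relation time_offset=60 [sec] gives a calculated start time of 12:11:00 UTC;
    without a previous observation, now+2 minutes (rounded down to a whole minute) is used.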
Also parallel observations are + not supported yet + :param observation_subtask: + :return: start_time (utc time) + """ + previous_related_task_blueprint, time_offset = get_previous_related_task_blueprint_with_time_offset(observation_subtask.task_blueprint) + if previous_related_task_blueprint is None: + # This is the first observation so take start time 2 minutes from now + now = datetime.utcnow() + next_start_time = now + timedelta(minutes=+2, seconds=-now.second, microseconds=-now.microsecond) + else: + # Get the duration of last/previous observation + duration_in_sec = previous_related_task_blueprint.specifications_doc["duration"] + logger.info("Duration of previous observation '%s' (id=%s) is %d seconds", + previous_related_task_blueprint.pk, previous_related_task_blueprint.pk, duration_in_sec) + # Get the previous observation subtask, should actually be one + lst_previous_subtasks_obs = [st for st in previous_related_task_blueprint.subtasks.all() if st.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value] + previous_subtask_obs = lst_previous_subtasks_obs[0] + logger.info("The previous observation subtask is id=%s", previous_subtask_obs.pk) + if previous_subtask_obs.start_time is None: + logger.info("Oeps the previous start time is unknown so I can not calculate it") + next_start_time = previous_subtask_obs.start_time + timedelta(seconds=duration_in_sec+time_offset) + return next_start_time + + def schedule_observation_subtask(observation_subtask: Subtask): ''' Schedule the given observation_subtask For first observations in a 'train' of subtasks this method is typically called by hand, or by the short-term-scheduler. @@ -487,13 +656,14 @@ def schedule_observation_subtask(observation_subtask: Subtask): # step 1a: check start/stop times if observation_subtask.start_time is None: - now = datetime.utcnow() - next_start_time = now + timedelta(minutes=+2, seconds=-now.second, microseconds=-now.microsecond) + next_start_time = calculate_start_time(observation_subtask) logger.info("observation id=%s has no starttime. assigned default: %s", observation_subtask.pk, formatDatetime(next_start_time)) observation_subtask.start_time = next_start_time if observation_subtask.stop_time is None: - stop_time = observation_subtask.start_time + timedelta(minutes=+1) + duration_in_sec = observation_subtask.task_blueprint.specifications_doc["duration"] + logger.info("Duration of observation id=%s is %d seconds", observation_subtask.pk, duration_in_sec) + stop_time = observation_subtask.start_time + timedelta(seconds=duration_in_sec) logger.info("observation id=%s has no stop_time. 
assigned default: %s", observation_subtask.pk, formatDatetime(stop_time)) observation_subtask.stop_time = stop_time diff --git a/SAS/TMSS/src/tmss/tmssapp/tasks.py b/SAS/TMSS/src/tmss/tmssapp/tasks.py index 715c12afb1aebcada94609020cd96f5329d15c90..5153d5e1c30535b940e42f8a99866b4d1f0b6207 100644 --- a/SAS/TMSS/src/tmss/tmssapp/tasks.py +++ b/SAS/TMSS/src/tmss/tmssapp/tasks.py @@ -1,3 +1,4 @@ +from lofar.sas.tmss.tmss.exceptions import * from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.tmss.tmssapp.models.specification import TaskBlueprint, SchedulingUnitBlueprint, TaskDraft, SchedulingRelationPlacement from lofar.sas.tmss.tmss.tmssapp.subtasks import create_and_schedule_subtasks_from_task_blueprint @@ -15,7 +16,7 @@ def create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_ Create a SchedulingUnitBlueprint from the SchedulingUnitDraft :raises Exception if instantiate fails. """ - logger.debug("create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft.id=%s)", scheduling_unit_draft.pk) + logger.debug("create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft.id=%s name='%s')", scheduling_unit_draft.pk, scheduling_unit_draft.name) # TODO: copy/fill-in the properties from the draft to the blueprint scheduling_unit_blueprint = SchedulingUnitBlueprint.objects.create( @@ -26,76 +27,98 @@ def create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_ draft=scheduling_unit_draft, requirements_template=scheduling_unit_draft.requirements_template) - logger.info("create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft.id=%s) created scheduling_unit_blueprint id=%s", scheduling_unit_draft.pk, scheduling_unit_blueprint.pk) + logger.info("create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft.id=%s name='%s') created scheduling_unit_blueprint id=%s name='%s'", + scheduling_unit_draft.pk, scheduling_unit_draft.name, scheduling_unit_blueprint.pk, scheduling_unit_blueprint.name) return scheduling_unit_blueprint -def create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> [TaskDraft]: +def create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> models.SchedulingUnitDraft: """ - Generic create-method for tasks draft. Calls the appropriate create method based on the scheduling_unit_blueprint - specifications_template name. + Create all defined task_drafts in the scheduling_unit_draft's requirements_doc, connect them, and return the updated scheduling_unit_draft. 
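    Illustrative use (object names are assumed):

        draft = models.SchedulingUnitDraft.objects.get(name="UC1 draft")
        draft = create_task_drafts_from_scheduling_unit_draft(draft)
        # one TaskDraft per entry in requirements_doc["tasks"]; already existing drafts/relations are skipped
        assert draft.task_drafts.count() == len(draft.requirements_doc["tasks"])

    :raises BlueprintCreationException: if the requirements_doc defines no tasks.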
""" - list_created_task_object = [] - - try: - list_tasks = scheduling_unit_draft.requirements_doc["tasks"] - logger.info("create_task_drafts_from_scheduling_unit_draft with scheduling_unit_draft.id=%s, nbr_tasks=%d" % - (scheduling_unit_draft.pk, len(list_tasks))) - except KeyError: - logger.info("create_task_drafts_from_scheduling_unit_draft -> NO tasks to process from requirements_doc") - list_tasks = [] - - for task in list_tasks: - task_template_name = task["specifications_template"] - logger.info("task name is '%s', task_template_name '%s'" % (task["name"], task_template_name)) + logger.debug("create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft.id=%s, name='%s') ...", scheduling_unit_draft.pk, scheduling_unit_draft.name) + + if len(scheduling_unit_draft.requirements_doc.get("tasks",[])) == 0: + raise BlueprintCreationException("create_task_drafts_from_scheduling_unit_draft: scheduling_unit_draft.id=%s has no tasks defined in its requirements_doc" % (scheduling_unit_draft.pk,)) + + for task_definition in scheduling_unit_draft.requirements_doc["tasks"]: + task_template_name = task_definition["specifications_template"] task_template = models.TaskTemplate.objects.get(name=task_template_name) - task_draft = models.TaskDraft.objects.create( - name=task["name"], - description=task.get("description",""), - tags=task.get("tags",[]), - specifications_doc=task["specifications_doc"], - copy_reason=models.CopyReason.objects.get(value='template'), - copies=None, - scheduling_unit_draft=scheduling_unit_draft, - specifications_template=task_template) - logger.info("task draft with id %s created successfully" % task_draft.id) - list_created_task_object.append(task_draft) - - # Now create task relation - try: - list_task_relations = scheduling_unit_draft.requirements_doc["task_relations"] - logger.info("create_task_drafts_from_scheduling_unit_draft, nbr of task relations=%d" % len(list_task_relations)) - except KeyError: - logger.info("create_task_drafts_from_scheduling_unit_draft -> NO task relations to process from requirements_doc") - list_task_relations = [] - for task_relation in list_task_relations: - task_rel_obj = models.TaskRelationDraft.objects.create( - tags=task_relation.get("tags",[]), - selection_doc=task_relation["selection_doc"], - dataformat=models.Dataformat.objects.get(value=task_relation["dataformat"]), - producer=models.TaskDraft.objects.get(name=task_relation["producer"], scheduling_unit_draft_id=scheduling_unit_draft.id), - consumer=models.TaskDraft.objects.get(name=task_relation["consumer"], scheduling_unit_draft_id=scheduling_unit_draft.id), - input_role=models.TaskConnectorType.objects.get(role=task_relation["input"]["role"], datatype=task_relation["input"]["datatype"]), - output_role=models.TaskConnectorType.objects.get(role=task_relation["output"]["role"], datatype=task_relation["output"]["datatype"]), - selection_template=models.TaskRelationSelectionTemplate.objects.get(name=task_relation["selection_template"])) - logger.info("task relation draft object with id %s created successfully" % task_rel_obj.id) + + if scheduling_unit_draft.task_drafts.filter(name=task_definition["name"], specifications_template=task_template).count() > 0: + logger.debug("skipping creation of task draft because it is already in the scheduling_unit... task_name='%s', task_template_name='%s'", task_definition["name"], task_template_name) + continue + + logger.debug("creating task draft... 
task_name='%s', task_template_name='%s'", task_definition["name"], task_template_name) + + task_draft = models.TaskDraft.objects.create(name=task_definition["name"], + description=task_definition.get("description",""), + tags=task_definition.get("tags",[]), + specifications_doc=task_definition["specifications_doc"], + copy_reason=models.CopyReason.objects.get(value='template'), + copies=None, + scheduling_unit_draft=scheduling_unit_draft, + specifications_template=task_template) + + logger.info("created task draft id=%s task_name='%s', task_template_name='%s'", task_draft.pk, task_definition["name"], task_template_name) + + # Now create task relations + for task_relation_definition in scheduling_unit_draft.requirements_doc["task_relations"]: + producer_task_draft = scheduling_unit_draft.task_drafts.get(name=task_relation_definition["producer"]) + consumer_task_draft = scheduling_unit_draft.task_drafts.get(name=task_relation_definition["consumer"]) + dataformat = models.Dataformat.objects.get(value=task_relation_definition["dataformat"]) + input_role = models.TaskConnectorType.objects.get(role=task_relation_definition["input"]["role"], datatype=task_relation_definition["input"]["datatype"]) + output_role = models.TaskConnectorType.objects.get(role=task_relation_definition["output"]["role"], datatype=task_relation_definition["output"]["datatype"]) + selection_template = models.TaskRelationSelectionTemplate.objects.get(name=task_relation_definition["selection_template"]) + + if models.TaskRelationDraft.objects.filter(producer=producer_task_draft, + consumer=consumer_task_draft, + dataformat=dataformat, + input_role=input_role, + output_role=output_role, + selection_template=selection_template, + selection_doc=task_relation_definition["selection_doc"]).count() > 0: + logger.debug("skipping creation of task_relation between task draft '%s' and '%s' because it is already in the scheduling_unit...", task_relation_definition["producer"], task_relation_definition["consumer"]) + continue + + task_relation = models.TaskRelationDraft.objects.create(tags=task_relation_definition.get("tags",[]), + selection_doc=task_relation_definition["selection_doc"], + dataformat=dataformat, + producer=producer_task_draft, + consumer=consumer_task_draft, + input_role=input_role, + output_role=output_role, + selection_template=selection_template) + logger.info("created task_relation id=%s between task draft id=%s name='%s' and id=%s name='%s", + task_relation.pk, producer_task_draft.id, producer_task_draft.name, consumer_task_draft.id, consumer_task_draft.name) + # task_scheduling_relation - try: - list_task_scheduling_relations = scheduling_unit_draft.requirements_doc["task_scheduling_relations"] - logger.info("create_task_drafts_from_scheduling_unit_draft, nbr of task scheduling relations=%d" % len(list_task_scheduling_relations)) - except KeyError: - logger.info("create_task_drafts_from_scheduling_unit_draft -> NO tasks scheduling relations to process from requirements_doc") - list_task_scheduling_relations = [] - for task_scheduling_relation in list_task_scheduling_relations: - task_rel_sch_obj = models.TaskSchedulingRelationDraft.objects.create( - placement=models.SchedulingRelationPlacement.objects.get(value=task_scheduling_relation["placement"]), - time_offset=task_scheduling_relation["time_offset"], - first=models.TaskDraft.objects.get(name=task_scheduling_relation["first"], scheduling_unit_draft_id=scheduling_unit_draft.id), - second=models.TaskDraft.objects.get(name=task_scheduling_relation["second"], 
scheduling_unit_draft_id=scheduling_unit_draft.id)) - logger.info("task scheduling relation draft object with id %s created successfully" % task_rel_sch_obj.id) - - return list_created_task_object + for task_scheduling_relation_definition in scheduling_unit_draft.requirements_doc["task_scheduling_relations"]: + placement = models.SchedulingRelationPlacement.objects.get(value=task_scheduling_relation_definition["placement"]) + time_offset = task_scheduling_relation_definition["time_offset"] + first_task_draft = scheduling_unit_draft.task_drafts.get(name=task_scheduling_relation_definition["first"]) + second_task_draft = scheduling_unit_draft.task_drafts.get(name=task_scheduling_relation_definition["second"]) + + if models.TaskSchedulingRelationDraft.objects.filter(placement=placement, + time_offset=time_offset, + first=first_task_draft, + second=second_task_draft).count() > 0: + logger.debug("skipping creation of task_scheduling_relation between task draft '%s' and '%s' because it is already in the scheduling_unit...", + task_scheduling_relation_definition["first"], task_scheduling_relation_definition["second"]) + continue + + task_scheduling_relation = models.TaskSchedulingRelationDraft.objects.create(placement=placement, + time_offset=time_offset, + first=first_task_draft, + second=second_task_draft) + logger.info("created task_scheduling_relation id=%s between task draft id=%s name='%s' and id=%s name='%s", + task_scheduling_relation.pk, first_task_draft.id, first_task_draft.name, second_task_draft.id, second_task_draft.name) + + logger.info("create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft.id=%s, name='%s') ... done", scheduling_unit_draft.pk, scheduling_unit_draft.name) + + scheduling_unit_draft.refresh_from_db() + return scheduling_unit_draft def create_task_blueprint_from_task_draft(task_draft: models.TaskDraft) -> models.TaskBlueprint: @@ -103,31 +126,28 @@ def create_task_blueprint_from_task_draft(task_draft: models.TaskDraft) -> model Create a task_blueprint from the task_draft :raises Exception if instantiate fails. 
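    Illustrative use (names are assumed):

        task_draft = models.TaskDraft.objects.get(name="Target Observation")
        task_blueprint = create_task_blueprint_from_task_draft(task_draft)
        # the blueprint takes its name, description and specifications directly from the draft
        assert task_blueprint.draft == task_draft and task_blueprint.name == task_draft.name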
""" - logger.debug("Create Task Blueprint from Task Draft (id=%s)", task_draft.pk) + logger.debug("creating task_blueprint from task_draft id=%s", task_draft.pk) # get or create a scheduling_unit_blueprint from the scheduling_unit_draft scheduling_unit_blueprint = task_draft.scheduling_unit_draft.scheduling_unit_blueprints.last() if scheduling_unit_blueprint is None: scheduling_unit_blueprint = create_scheduling_unit_blueprint_from_scheduling_unit_draft(task_draft.scheduling_unit_draft) - description_str = "Task Blueprint " + task_draft.description - name_str = "Task Blueprint of " + task_draft.name task_blueprint = TaskBlueprint.objects.create( - description=description_str, - name=name_str, + description=task_draft.description, + name=task_draft.name, do_cancel=False, draft=task_draft, scheduling_unit_blueprint=scheduling_unit_blueprint, specifications_doc=task_draft.specifications_doc, - specifications_template=task_draft.specifications_template - ) + specifications_template=task_draft.specifications_template) - logger.info("Task Blueprint (id=%s) created from Task Draft (id=%s)", task_blueprint.pk, task_draft.pk) + logger.info("created task_blueprint id=%s from task_draft id=%s", task_blueprint.pk, task_draft.pk) # now that we have a task_blueprint, its time to refresh the task_draft so we get the non-cached fields task_draft.refresh_from_db() - # loop over consumers/producers, and 'copy'' the TaskRelationBlueprint from the TaskRelationDraft + # loop over consumers/producers, and 'copy' the TaskRelationBlueprint from the TaskRelationDraft # this is only possible if both 'ends' of the task_relation are converted to a TaskBlueprint # so, when converting two TaskDrafts (for example an observation and a pipeline), then for the conversion # of the first TaskDraft->TaskBlueprint no relation is setup, @@ -149,16 +169,36 @@ def create_task_blueprint_from_task_draft(task_draft: models.TaskDraft) -> model selection_doc=task_relation_draft.selection_doc, selection_template=task_relation_draft.selection_template, dataformat=task_relation_draft.dataformat) - logger.info("Task Blueprint (id=%s) connected to Task Blueprint (id=%s) via Task Relation Blueprint (id=%s)", - task_blueprint.pk, producing_task_blueprint.pk, task_relation_blueprint.pk) + logger.info("created task_relation_blueprint id=%s which connects task_blueprints producer_id=%s and consumer_id=%s", + task_relation_blueprint.pk, producing_task_blueprint.pk, consuming_task_blueprint.pk,) + + # Do the same 'trick' for Task Scheduling Relation Draft to Blueprint + task_draft_scheduling_relations = list(task_draft.first_to_connect.all()) + list(task_draft.second_to_connect.all()) + for task_scheduling_relation_draft in task_draft_scheduling_relations: + for first_task_blueprint in task_scheduling_relation_draft.first.task_blueprints.all(): + for second_task_blueprint in task_scheduling_relation_draft.second.task_blueprints.all(): + try: + # do nothing if task_scheduling_relation_blueprint already exists... + models.TaskSchedulingRelationBlueprint.objects.get(first_id=first_task_blueprint.id, + second_id=second_task_blueprint.id) + except models.TaskSchedulingRelationBlueprint.DoesNotExist: + # ...'else' create it. 
+ task_scheduling_relation_blueprint = models.TaskSchedulingRelationBlueprint.objects.create(first=first_task_blueprint, + second=second_task_blueprint, + time_offset=task_scheduling_relation_draft.time_offset, + placement=task_scheduling_relation_draft.placement) + logger.info("created task_scheduling_relation_blueprint id=%s which connects task_blueprints first_id=%s and second_id=%s, placement=%s time_offset=%s[sec]", + task_scheduling_relation_blueprint.pk, first_task_blueprint.pk, second_task_blueprint.pk, task_scheduling_relation_draft.placement, task_scheduling_relation_draft.time_offset) + return task_blueprint def create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> models.SchedulingUnitBlueprint: '''Convenience method: Create the scheduling_unit_blueprint, then create its child task_blueprint(s), then create the task_blueprint's subtasks''' - if scheduling_unit_draft.task_drafts.count() == 0: - # create task_drafts first, so the task_blueprints can be created from the scheduling_unit_blueprint in the next step - create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft) + + # make sure we call create task_drafts first, so the task_blueprints can be created from the scheduling_unit_blueprint in the next step + # already known task_drafts and/or relations are skipped automagically + create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft) scheduling_unit_blueprint = create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft) return create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint) @@ -185,13 +225,19 @@ def create_task_blueprint_and_subtasks_and_schedule_subtasks_from_task_draft(tas task_blueprint.refresh_from_db() return task_blueprint + def create_task_blueprints_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint: '''Convenience method: Create the scheduling_unit_blueprint's task_blueprint(s)''' + + # make sure we call create task_drafts first, so the task_blueprints can be created from the scheduling_unit_blueprint in the next step + # already known task_drafts and/or relations are skipped automagically + create_task_drafts_from_scheduling_unit_draft(scheduling_unit_blueprint.draft) + task_drafts = list(scheduling_unit_blueprint.draft.task_drafts.all()) # sort them in 'data-flow'-order, - # because successors can depend on predecessors, so the first tbp's need to be subtask'd first. - task_drafts.sort(key=cmp_to_key(lambda tbp_a, tbp_b: -1 if tbp_a in tbp_b.predecessors else 1 if tbp_b in tbp_a.predecessors else 0)) + # because successors can depend on predecessors, so the first taskdraft's need to be blueprinted first. 
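# (Note: sorting with this pairwise comparator is a heuristic; the comparator is not
#  transitive, so longer dependency chains (A -> B -> C) are not guaranteed to come out in
#  full data-flow order. A rough topological-sort sketch that would guarantee it, assuming
#  the predecessor relation is acyclic and 'predecessors' is iterable:
#
#      ordered, remaining = [], list(task_drafts)
#      while remaining:
#          ready = [td for td in remaining if not any(p in remaining for p in td.predecessors)]
#          if not ready:
#              break  # cycle guard
#          ordered.extend(ready)
#          remaining = [td for td in remaining if td not in ready]
#      task_drafts = ordered + remaining
# )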
+ task_drafts.sort(key=cmp_to_key(lambda taskdraft_a, taskdraft_b: -1 if taskdraft_a in taskdraft_b.predecessors else 1 if taskdraft_b in taskdraft_a.predecessors else 0)) # convert task_draft(s) to task_blueprint(s) for task_draft in task_drafts: @@ -201,6 +247,7 @@ def create_task_blueprints_from_scheduling_unit_blueprint(scheduling_unit_bluepr scheduling_unit_blueprint.refresh_from_db() return scheduling_unit_blueprint + def create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint: '''Convenience method: Create the scheduling_unit_blueprint's task_blueprint(s), then create each task_blueprint's subtasks''' scheduling_unit_blueprint = create_task_blueprints_from_scheduling_unit_blueprint(scheduling_unit_blueprint) @@ -218,6 +265,7 @@ def create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(schedulin scheduling_unit_blueprint.refresh_from_db() return scheduling_unit_blueprint + def create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint: '''Convenience method: Create the scheduling_unit_blueprint's task_blueprint(s), then create the task_blueprint's subtasks, and schedule the ones that are not dependend on predecessors''' scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint) diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py index 68b9df3a508493dd6c49069bb289c92288abeaef..93ab6971734d1c740e52c75b4c6e75407a5b7dd0 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py @@ -266,11 +266,11 @@ class SchedulingUnitDraftViewSet(LOFARViewSet): headers={'Location': scheduling_unit_blueprint_path}) - @swagger_auto_schema(responses={201: 'The Created Task Draft, see Location in Response header', + @swagger_auto_schema(responses={201: 'The updated scheduling_unit_draft with references to its created task_drafts', 403: 'forbidden'}, operation_description="Create Task Drafts from SchedulingUnitDraft.") @action(methods=['get'], detail=True, url_name="create_task_drafts", name="Create Task Drafts from Requirement doc") - def create_tasks_draft(self, request, pk=None): + def create_task_drafts(self, request, pk=None): scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, pk=pk) create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft) diff --git a/SAS/TMSS/test/t_scheduling.py b/SAS/TMSS/test/t_scheduling.py index 7194384d130232e86b2ebd2327e78bc122917288..446b75fb538ccc5fe7df866727e7fdef68514561 100755 --- a/SAS/TMSS/test/t_scheduling.py +++ b/SAS/TMSS/test/t_scheduling.py @@ -177,6 +177,7 @@ class SchedulingTest(unittest.TestCase): obs_task = get_default_json_object_for_schema(client.get_task_template(name="observation schema")['schema']) obs_task['QA']['plots']['enabled'] = False obs_task['QA']['file_conversion']['enabled'] = False + obs_task['SAPs'][0]['subbands'] = [0,1] scheduling_unit_doc['tasks'].append({"name": "Observation", "specifications_doc": obs_task, "specifications_template": "observation schema"}) diff --git a/SAS/TMSS/test/t_subtasks.py b/SAS/TMSS/test/t_subtasks.py index 1d21cf990efa1011bb2c04d5b296ede323581f82..32de3e5628b7aa8acd8fd07cddb758786f8a8676 100755 --- a/SAS/TMSS/test/t_subtasks.py +++ 
b/SAS/TMSS/test/t_subtasks.py @@ -41,23 +41,14 @@ from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.tmss.tmssapp.subtasks import * -# The following methods should be tested -# check_prerequities_for_subtask_creation -# create_subtasks_from_task_blueprint -# create_observation_control_subtask_from_task_blueprint -# create_qafile_subtask_from_task_blueprint -# create_qafile_subtask_from_observation_subtask -# create_qaplots_subtask_from_task_blueprint -# create_qaplots_subtask_from_qafile_subtask -# create_preprocessing_subtask_from_task_blueprint -# +# The following methods should be tested too + # schedule_subtask # check_prerequities_for_scheduling # schedule_qafile_subtask # schedule_qaplots_subtask # schedule_observation_subtask # schedule_pipeline_subtask -# # create_and_schedule_subtasks_from_task_blueprint @@ -137,23 +128,17 @@ class SubTasksCreationFromTaskBluePrint(unittest.TestCase): Helper function to create a task blueprint object for testing with given task template name value as string (no object) """ - task_blueprint_data = TaskBlueprint_test_data() + task_template = TaskTemplate.objects.get(name=task_template_name) + task_spec = get_default_json_object_for_schema(task_template.schema) + if 'QA' in task_spec: + task_spec["QA"]['plots']['enabled'] = QA_enabled + task_spec["QA"]['file_conversion']['enabled'] = QA_enabled + + task_draft_data = TaskDraft_test_data(specifications_template=task_template, specifications_doc=task_spec) + task_draft_obj = models.TaskDraft.objects.create(**task_draft_data) + + task_blueprint_data = TaskBlueprint_test_data(task_draft=task_draft_obj) task_blueprint_obj = models.TaskBlueprint.objects.create(**task_blueprint_data) - task_blueprint_obj.specifications_template.name = task_template_name - task_blueprint_obj.specifications_doc = { - "QA": { - "plots": { - "enabled": QA_enabled, - "autocorrelation": True, - "crosscorrelation": True - }, - "file_conversion": { - "enabled": QA_enabled, - "nr_of_subbands": -1, - "nr_of_timestamps": 256 - } - } - } return task_blueprint_obj def test_create_sequence_of_subtask_from_task_blueprint(self): @@ -209,16 +194,6 @@ class SubTasksCreationFromTaskBluePrint(unittest.TestCase): #self.assertEqual("defined", str(subtask.state)) - def test_create_subtasks_from_task_blueprint_failure_on_schema(self): - """ - Test creation failure due to unknown schema (no correlator or preprocessing schema) - Check exception - "SubtaskCreationException: Cannot create subtasks for task id=1 because no generator exists for its schema name=unknown schema" - """ - task_blueprint = self.create_task_blueprint_object("unknown schema") - with self.assertRaises(SubtaskCreationException): - create_subtasks_from_task_blueprint(task_blueprint) - def test_create_subtasks_from_task_blueprint_succeed(self): """ """ diff --git a/SAS/TMSS/test/t_tasks.py b/SAS/TMSS/test/t_tasks.py index b7beb7f2261f085f168737fa0e229165284b4f50..05cb7e958324c029e95a2655d73ec3006af0330b 100755 --- a/SAS/TMSS/test/t_tasks.py +++ b/SAS/TMSS/test/t_tasks.py @@ -27,16 +27,32 @@ import logging logger = logging.getLogger(__name__) logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) -# Do Mandatory setup step: -# use setup/teardown magic for tmss test database, ldap server and django server -# (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module) -from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import * +# before we import any django 
modules the DJANGO_SETTINGS_MODULE, TMSS_LDAPCREDENTIALS and TMSS_DBCREDENTIALS need to be known/set. +# import and start an isolated RATestEnvironment and TMSSTestEnvironment (with fresh database and attached django and ldap server on free ports) +# this automagically sets the required DJANGO_SETTINGS_MODULE, TMSS_LDAPCREDENTIALS and TMSS_DBCREDENTIALS envvars. +from lofar.sas.resourceassignment.resourceassigner.test.ra_test_environment import RATestEnvironment +from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment + +ra_test_env = RATestEnvironment() +tmss_test_env = TMSSTestEnvironment() +try: + ra_test_env.start() + tmss_test_env.start() +except: + ra_test_env.stop() + tmss_test_env.stop() + exit(1) + +# tell unittest to stop (and automagically cleanup) the test database once all testing is done. +def tearDownModule(): + tmss_test_env.stop() + ra_test_env.stop() from lofar.sas.tmss.test.tmss_test_data_django_models import * # import and setup rest test data creator from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator -rest_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH) +rest_data_creator = TMSSRESTTestDataCreator(tmss_test_env.django_server.url, (tmss_test_env.ldap_server.dbcreds.user, tmss_test_env.ldap_server.dbcreds.password)) from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.tmss.tmssapp.tasks import * @@ -54,7 +70,9 @@ class CreationFromSchedulingUnitDraft(unittest.TestCase): """ Helper function to create a scheduling unit object for testing """ - scheduling_unit_draft_data = SchedulingUnitDraft_test_data(name=scheduling_unit_draft_name) + scheduling_unit_draft_data = SchedulingUnitDraft_test_data(name=scheduling_unit_draft_name, + requirements_doc=requirements_doc, + template=models.SchedulingUnitTemplate.objects.get(name="scheduling unit schema")) draft_obj = models.SchedulingUnitDraft.objects.create(**scheduling_unit_draft_data) return draft_obj @@ -66,13 +84,9 @@ class CreationFromSchedulingUnitDraft(unittest.TestCase): """ scheduling_unit_draft = self.create_scheduling_unit_draft_object("Test Scheduling Unit 1") - res_scheduling_unit_draft = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_draft/' + str(scheduling_unit_draft.id), 200) scheduling_unit_blueprint = create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft) self.assertEqual(scheduling_unit_draft.name, scheduling_unit_blueprint.draft.name) - res_scheduling_unit_blueprint = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_blueprint/' + str(scheduling_unit_blueprint.id), 200) - self.assertEqual(res_scheduling_unit_blueprint['requirements_template'], res_scheduling_unit_draft['requirements_template']) - self.assertEqual(res_scheduling_unit_blueprint['requirements_doc'], res_scheduling_unit_draft['requirements_doc']) - self.assertEqual(0, len(res_scheduling_unit_blueprint['task_blueprints'])) + self.assertEqual(0, len(scheduling_unit_blueprint.task_blueprints.all())) def test_create_task_drafts_from_scheduling_unit_draft(self): """ @@ -80,12 +94,14 @@ class CreationFromSchedulingUnitDraft(unittest.TestCase): Check if NO tasks are created Check with REST-call if NO tasks are created """ - scheduling_unit_draft = self.create_scheduling_unit_draft_object("Test Scheduling Unit 2") + scheduling_unit_draft = self.create_scheduling_unit_draft_object("Test Scheduling Unit 2", requirements_doc={'tasks': []}) + with self.assertRaises(BlueprintCreationException): + 
create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft) - res_scheduling_unit_draft = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_draft/' + str(scheduling_unit_draft.id), 200) - list_tasks = create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft) - self.assertEqual(0, len(list_tasks)) - self.assertEqual(0, len(res_scheduling_unit_draft['task_drafts'])) + scheduling_unit_draft.refresh_from_db() + task_drafts = scheduling_unit_draft.task_drafts.all() + self.assertEqual(0, len(task_drafts)) + self.assertEqual(0, len(task_drafts)) def test_create_task_drafts_from_scheduling_unit_draft_with_UC1_requirements(self): """ @@ -106,10 +122,44 @@ class CreationFromSchedulingUnitDraft(unittest.TestCase): copies=None, scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data())) - res_scheduling_unit_draft = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_draft/' + str(scheduling_unit_draft.id), 200) - list_tasks = create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft) - self.assertEqual(7, len(list_tasks)) - # TODO: check why rest api is not updated? self.assertEqual(7, len(res_scheduling_unit_draft['task_drafts'])) + create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft) + + scheduling_unit_draft.refresh_from_db() + task_drafts = scheduling_unit_draft.task_drafts.all() + self.assertEqual(7, len(task_drafts)) + + + def test_create_task_blueprints_and_subtasks_subtasks_from_scheduling_unit_draft_with_UC1_requirements(self): + """ + Create Scheduling Unit Draft with requirements_doc (read from file) + Check if tasks (7) are created + Check with REST-call if tasks are created + """ + working_dir = os.path.dirname(os.path.abspath(__file__)) + with open(os.path.join(working_dir, "testdata/example_UC1_scheduling_unit.json")) as json_file: + json_requirements_doc = json.loads(json_file.read()) + + scheduling_unit_draft = models.SchedulingUnitDraft.objects.create( + name="Test Scheduling Unit UC1", + requirements_doc=json_requirements_doc, + requirements_template=models.SchedulingUnitTemplate.objects.get(name="scheduling unit schema"), + copy_reason=models.CopyReason.objects.get(value='template'), + generator_instance_doc="para", + copies=None, + scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data())) + + create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + + scheduling_unit_draft.refresh_from_db() + task_drafts = scheduling_unit_draft.task_drafts.all() + self.assertEqual(7, len(task_drafts)) + + scheduling_unit_blueprints = scheduling_unit_draft.scheduling_unit_blueprints.all() + self.assertEqual(1, len(scheduling_unit_blueprints)) + + scheduling_unit_blueprint = scheduling_unit_blueprints[0] + task_blueprints = scheduling_unit_blueprint.task_blueprints.all() + self.assertEqual(7, len(task_blueprints)) def test_create_task_blueprints_and_subtasks_subtasks_from_scheduling_unit_draft(self): """ @@ -117,15 +167,12 @@ class CreationFromSchedulingUnitDraft(unittest.TestCase): Check if the name draft (specified) is equal to name blueprint (created) Check with REST-call if NO tasks are created """ - scheduling_unit_draft = self.create_scheduling_unit_draft_object("Test Scheduling Unit 3") + scheduling_unit_draft = self.create_scheduling_unit_draft_object("Test Scheduling Unit 3", {'tasks': []}) - res_scheduling_unit_draft = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_draft/' + str(scheduling_unit_draft.id), 200) 
- scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) - self.assertEqual(scheduling_unit_draft.name, scheduling_unit_blueprint.draft.name) - res_scheduling_unit_blueprint = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_blueprint/' + str(scheduling_unit_blueprint.id), 200) - self.assertEqual(res_scheduling_unit_blueprint['requirements_template'], res_scheduling_unit_draft['requirements_template']) - self.assertEqual(res_scheduling_unit_blueprint['requirements_doc'], res_scheduling_unit_draft['requirements_doc']) - self.assertEqual(0, len(res_scheduling_unit_blueprint['task_blueprints'])) + with self.assertRaises(BlueprintCreationException): + create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + + self.assertEqual(0, len(scheduling_unit_draft.scheduling_unit_blueprints.all())) class CreationFromSchedulingUnitBluePrint(unittest.TestCase): @@ -142,9 +189,10 @@ class CreationFromSchedulingUnitBluePrint(unittest.TestCase): scheduling_unit_blueprint_data = SchedulingUnitBlueprint_test_data(name="Test Scheduling Unit BluePrint") scheduling_unit_blueprint = models.SchedulingUnitBlueprint.objects.create(**scheduling_unit_blueprint_data) - scheduling_unit_blueprint_after_creation = create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint) - res_scheduling_unit_blueprint = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_blueprint/' + str(scheduling_unit_blueprint.id), 200) - self.assertEqual(0, len(res_scheduling_unit_blueprint['task_blueprints'])) + with self.assertRaises(BlueprintCreationException): + scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint) + + self.assertEqual(0, scheduling_unit_blueprint.task_blueprints.count()) class CreationFromTaskDraft(unittest.TestCase): @@ -174,18 +222,12 @@ class CreationFromTaskDraft(unittest.TestCase): """ self.create_task_object("Test Target Observation 1") - task_draft = models.TaskDraft.objects.get(name="Test Target Observation 1") - rest_task_draft = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_draft/' + str(task_draft.id), 200) task_blueprint = create_task_blueprint_and_subtasks_from_task_draft(task_draft) self.assertEqual(task_draft.name, task_blueprint.draft.name) - rest_task_blueprint = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_blueprint/' + str(task_blueprint.id), 200) - self.assertEqual(3, len(rest_task_blueprint['subtasks'])) - self.assertEqual(rest_task_blueprint['specifications_template'], rest_task_draft['specifications_template']) - for subtask_url in rest_task_blueprint['subtasks']: - res_subtask = GET_and_assert_equal_expected_code(self, subtask_url, 200) - state_value = GET_and_assert_equal_expected_code(self, res_subtask['state'], 200)['value'] - # TODO not all scheduled??? 
self.assertEqual(state_value, "defined") + self.assertEqual(3, task_blueprint.subtasks.count()) + for subtask in task_blueprint.subtasks.all(): + subtask.state.value == 'defined' def test_create_task_blueprint(self): """ @@ -196,12 +238,9 @@ class CreationFromTaskDraft(unittest.TestCase): self.create_task_object("Test Target Observation 2") task_draft = models.TaskDraft.objects.get(name="Test Target Observation 2") - res_task_draft = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_draft/' + str(task_draft.id), 200) task_blueprint = create_task_blueprint_from_task_draft(task_draft) self.assertEqual(task_draft.name, task_blueprint.draft.name) - res_task_blueprint = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_blueprint/' + str(task_blueprint.id), 200) - self.assertEqual(0, len(res_task_blueprint['subtasks'])) - self.assertEqual(res_task_blueprint['specifications_template'], res_task_draft['specifications_template']) + self.assertEqual(0, task_blueprint.subtasks.count()) if __name__ == "__main__": diff --git a/SAS/TMSS/test/test_utils.py b/SAS/TMSS/test/test_utils.py index 88a250084b484b67931a20a822bac8b38655462b..841d508f08e4d62b41d4724b467fbd4e8754c4ad 100644 --- a/SAS/TMSS/test/test_utils.py +++ b/SAS/TMSS/test/test_utils.py @@ -346,7 +346,7 @@ def main_test_environment(): parser.add_option_group(group) (options, args) = parser.parse_args() - logging.basicConfig(format = '%(asctime)s %(levelname)s %(message)s', level = logging.INFO) +# logging.basicConfig(format = '%(asctime)s %(levelname)s %(message)s', level = logging.INFO) with TMSSTestEnvironment(host=options.host, preferred_django_port=options.port, exchange=options.exchange, broker=options.broker) as instance: # print some nice info for the user to use the test servers... diff --git a/SAS/TMSS/test/tmss_test_data_django_models.py b/SAS/TMSS/test/tmss_test_data_django_models.py index d37115c57557de550dd59c079d9db5f80f9ddaa3..2d8a16334f71af6d49759d28f67f6540991b053c 100644 --- a/SAS/TMSS/test/tmss_test_data_django_models.py +++ b/SAS/TMSS/test/tmss_test_data_django_models.py @@ -165,17 +165,20 @@ def SchedulingUnitDraft_test_data(name="my_scheduling_unit_draft", scheduling_se "scheduling_set": scheduling_set, "requirements_template": template } -def TaskDraft_test_data(name: str="my_task_draft", specifications_template: models.TaskTemplate=None, scheduling_unit_draft: models.SchedulingUnitDraft=None) -> dict: +def TaskDraft_test_data(name: str="my_task_draft", specifications_template: models.TaskTemplate=None, specifications_doc: dict=None, scheduling_unit_draft: models.SchedulingUnitDraft=None) -> dict: if specifications_template is None: specifications_template = models.TaskTemplate.objects.create(**TaskTemplate_test_data()) + if specifications_doc is None: + specifications_doc = get_default_json_object_for_schema(specifications_template.schema) + if scheduling_unit_draft is None: scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data()) return {"name": name, "description": "", "tags": [], - "specifications_doc": get_default_json_object_for_schema(specifications_template.schema), + "specifications_doc": specifications_doc, "copy_reason": models.CopyReason.objects.get(value='template'), "copies": None, "scheduling_unit_draft": scheduling_unit_draft,
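The extended TaskDraft_test_data() helper above lets a test pass a tweaked specifications document instead of always taking the template defaults. A rough usage sketch (the "observation schema" TaskTemplate is assumed to be loaded in the test database, as in t_subtasks.py):

    from lofar.common.json_utils import get_default_json_object_for_schema
    from lofar.sas.tmss.tmss.tmssapp import models
    from lofar.sas.tmss.test.tmss_test_data_django_models import TaskDraft_test_data

    task_template = models.TaskTemplate.objects.get(name="observation schema")
    task_spec = get_default_json_object_for_schema(task_template.schema)
    task_spec["QA"]["plots"]["enabled"] = False  # tweak the defaults before creating the draft

    task_draft = models.TaskDraft.objects.create(**TaskDraft_test_data(specifications_template=task_template,
                                                                       specifications_doc=task_spec))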