diff --git a/.gitignore b/.gitignore index b7aa0f000ec071aa3396d0f1b27c1be37bbf6600..fe3291cd80a1f65ccfec7caa6f9426e7d9974d99 100644 --- a/.gitignore +++ b/.gitignore @@ -20,3 +20,5 @@ SAS/OTB/jRSP/autoconf_share SAS/OTB/jRSP/bootstrap SAS/OTB/jRSP/configure.in **/.idea +SAS/TMSS/frontend/tmss_webapp/package-lock.json +SAS/TMSS/frontend/tmss_webapp/node_modules/ diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 7983041e601acd82a2a6d7d6aa1152b613930a07..d5b8c529d5f6860ff12071fed565bf30defd78c7 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -219,7 +219,7 @@ dockerize_TMSS: - docker push nexus.cep4.control.lofar:18080/tmss_django:$CI_COMMIT_SHORT_SHA - docker tag tmss_testprovider:$CI_COMMIT_SHORT_SHA nexus.cep4.control.lofar:18080/tmss_testprovider:$CI_COMMIT_SHORT_SHA - docker push nexus.cep4.control.lofar:18080/tmss_testprovider:$CI_COMMIT_SHORT_SHA - - docker logout $CI_NEXUS_REGISTRY + - docker logout $CI_NEXUS_REGISTRY dependencies: - build_TMSS - unit_test_TMSS @@ -311,7 +311,7 @@ deploy-tmss-test: - chmod 700 ~/.ssh - ssh-keyscan scu199.control.lofar >> ~/.ssh/known_hosts - chmod 644 ~/.ssh/known_hosts - script: + script: - cd SAS/TMSS - ssh lofarsys@scu199.control.lofar "docker-compose -f docker-compose-scu199.yml down" - scp docker-compose-scu199.yml lofarsys@scu199.control.lofar:~/ diff --git a/MAC/Services/CMakeLists.txt b/MAC/Services/CMakeLists.txt index 5a28a358a0b37cc84a27a1bdaa1a8b4c12eeda3d..9382db18d5d8a7af897dfc20e61de2ffccb8ab61 100644 --- a/MAC/Services/CMakeLists.txt +++ b/MAC/Services/CMakeLists.txt @@ -1,6 +1,11 @@ # $Id$ -lofar_package(MAC_Services 1.0 DEPENDS PyMessaging OTDB_Services pyparameterset Docker ResourceAssignmentService TBBService TMSSClient) + +IF(BUILD_TESTING) + lofar_package(MAC_Services 1.0 DEPENDS PyMessaging OTDB_Services pyparameterset Docker ResourceAssignmentService TBBService TMSSClient TMSS) +ELSE() + lofar_package(MAC_Services 1.0 DEPENDS PyMessaging OTDB_Services pyparameterset Docker ResourceAssignmentService 
TBBService TMSSClient) +ENDIF(BUILD_TESTING) add_subdirectory(src) add_subdirectory(test) diff --git a/MAC/Services/src/PipelineControl.py b/MAC/Services/src/PipelineControl.py index ca2c24b907388d6e610c193ad777667f66a06fd0..eaf0693b8ae2e092b6c94a5d926b5a673a258417 100755 --- a/MAC/Services/src/PipelineControl.py +++ b/MAC/Services/src/PipelineControl.py @@ -154,27 +154,31 @@ class Parset(dict): return result def processingNumberOfCoresPerTask(self): - result = int(self[PARSET_PREFIX + "Observation.Cluster.ProcessingCluster.numberOfCoresPerTask"]) or None - if not result: - logger.warning('Invalid Observation.Cluster.ProcessingCluster.numberOfCoresPerTask: %s, defaulting to %i', - result, DEFAULT_NUMBER_OF_CORES_PER_TASK) - result = DEFAULT_NUMBER_OF_CORES_PER_TASK - return result + try: + result = int(self[PARSET_PREFIX + "Observation.Cluster.ProcessingCluster.numberOfCoresPerTask"]) or None + if not result: + logger.warning('Invalid Observation.Cluster.ProcessingCluster.numberOfCoresPerTask: %s, defaulting to %i', + result, DEFAULT_NUMBER_OF_CORES_PER_TASK) + result = DEFAULT_NUMBER_OF_CORES_PER_TASK + return result + except: + return DEFAULT_NUMBER_OF_CORES_PER_TASK def processingNumberOfTasks(self): """ Parse the number of nodes to allocate from "Observation.Cluster.ProcessingCluster.numberOfTasks" """ - result = int(self[PARSET_PREFIX + - "Observation.Cluster.ProcessingCluster.numberOfTasks"].strip()) or None - - # apply bound - if not result or result <= 0 or result > NUMBER_OF_NODES * NUMBER_OF_CORES_PER_NODE: - logger.warning('Invalid Observation.Cluster.ProcessingCluster.numberOfTasks: %s, defaulting to %s', - result, DEFAULT_NUMBER_OF_TASKS) - result = DEFAULT_NUMBER_OF_TASKS + try: + result = int(self[PARSET_PREFIX + "Observation.Cluster.ProcessingCluster.numberOfTasks"].strip()) or None - return result + # apply bound + if not result or result <= 0 or result > NUMBER_OF_NODES * NUMBER_OF_CORES_PER_NODE: + logger.warning('Invalid 
Observation.Cluster.ProcessingCluster.numberOfTasks: %s, defaulting to %s', + result, DEFAULT_NUMBER_OF_TASKS) + result = DEFAULT_NUMBER_OF_TASKS + return result + except: + return DEFAULT_NUMBER_OF_TASKS @staticmethod def dockerRepository(): @@ -365,8 +369,7 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler): scheduled_pipeline_subtask_ids = [] for subtask in scheduled_subtasks: try: - bits = subtask['url'].split('/') - subtask_id = int(bits[bits.index("subtask") + 1]) + subtask_id = subtask['id'] scheduled_pipeline_subtask_ids.append(subtask_id) except Exception as e: logger.error(e) @@ -378,11 +381,11 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler): try: subtask = self.tmss_client.get_subtask(subtask_id) subtask_template = self.tmss_client.get_url_as_json_object(subtask['specifications_template']) - if 'pipeline' not in subtask_template['type']: - logger.info("skipping scheduled subtask id=%s of non-pipeline type '%s'", subtask_id, subtask_template['type']) + if 'pipeline' not in subtask_template['type_value']: + logger.info("skipping scheduled subtask id=%s of non-pipeline type '%s'", subtask_id, subtask_template['type_value']) continue - parset = Parset(parameterset.fromString(self.tmss_client.get_subtask_parset(subtask_id))) + parset = Parset(parameterset.fromString(self.tmss_client.get_subtask_parset(subtask_id)).dict()) if not parset or not self._shouldHandle(parset): continue self._startPipeline(subtask_id, parset) @@ -395,13 +398,14 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler): try: subtask = self.tmss_client.get_subtask(subtask_id) subtask_template = self.tmss_client.get_url_as_json_object(subtask['specifications_template']) - if 'pipeline' not in subtask_template['type']: - logger.info("skipping scheduled subtask id=%s of non-pipeline type '%s'", subtask_id, subtask_template['type']) + if 'pipeline' not in subtask_template['type_value']: + logger.info("skipping scheduled subtask id=%s of 
non-pipeline type '%s'", subtask_id, subtask_template['type_value']) return + logger.info("getting parset for scheduled subtask id=%s of type '%s'", subtask_id, subtask_template['type_value']) parset = self.tmss_client.get_subtask_parset(subtask_id) parset = parameterset.fromString(parset) - parset = Parset(parset) + parset = Parset(parset.dict()) if parset and self._shouldHandle(parset): self._startPipeline(subtask_id, parset) except Exception as e: @@ -436,9 +440,11 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler): # Avoid race conditions by checking whether we haven't already sent the job # to SLURM. Our QUEUED status update may still be being processed. - if self.slurm.isQueuedOrRunning(subtask_id): - logger.info("Pipeline %s is already queued or running in SLURM.", subtask_id) - return + # if self.slurm.isQueuedOrRunning(subtask_id): + # logger.info("Pipeline %s is already queued or running in SLURM.", subtask_id) + # return + + self.tmss_client.set_subtask_status(subtask_id, "queueing") logger.info("***** START Subtask ID %s *****", subtask_id) @@ -513,8 +519,8 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler): # print some info echo Running on $SLURM_NODELIST - # notify TMSS that we're running - runcmd {setStatus_started} + # notify TMSS that we're starting + runcmd {setStatus_starting} # notify ganglia wget -O - -q "http://ganglia.control.lofar/ganglia/api/events.php?action=add&start_time=now&summary=Pipeline {obsid} ACTIVE&host_regex=" @@ -522,6 +528,9 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler): # fetch parset runcmd {getParset} > {parset_file} + # notify TMSS that we're (almost) running (should be called from within the pipeline...) 
+ runcmd {setStatus_started} + # run the pipeline runcmd docker-run-slurm.sh --rm --net=host \ -e LOFARENV={lofarenv} \ @@ -536,22 +545,13 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler): runcmd {setStatus_finishing} if [ $RESULT -eq 0 ]; then - # !!! TODO: Review behavior for TMSS - # wait for MoM to pick up feedback before we set finished status - # AS: I increased this to 300 sec to be in line with the wait time after observation finished - # and because we still note quite a lot of feedback issues in MoM - runcmd sleep 300 - # if we reached this point, the pipeline ran succesfully runcmd {setStatus_finished} # notify ganglia wget -O - -q "http://ganglia.control.lofar/ganglia/api/events.php?action=add&start_time=now&summary=Pipeline {obsid} FINISHED&host_regex=" else - # !!! TODO: Review behavior for TMSS - # If we are killed by the pipeline being set to finished, we just went from finished->finishing - # but our abort_trigger may already have been cancelled. Set the status here too to avoid lingering - # in finishing + # !!! TODO: How to set an "unsuccesfull" finished state in TMSS? 
runcmd {setStatus_finished} fi @@ -568,6 +568,7 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler): cluster=parset.processingCluster(), getParset=getParset_cmdline(), + setStatus_starting=setStatus_cmdline("starting"), setStatus_started=setStatus_cmdline("started"), setStatus_finishing=setStatus_cmdline("finishing"), setStatus_finished=setStatus_cmdline("finished"), diff --git a/MAC/Services/test/tPipelineControl.py b/MAC/Services/test/tPipelineControl.py index d5e510f681a85f402962234c13ab4706f3ab32db..c4603858fbb871d7fdbe2f877d52f4aa37b942d6 100644 --- a/MAC/Services/test/tPipelineControl.py +++ b/MAC/Services/test/tPipelineControl.py @@ -419,52 +419,11 @@ class TestPipelineControlTMSSClassMethods(unittest.TestCase): logger.warning('END TEST_SHOULDHANDLE') -@unit_test +@integration_test class TestPipelineControlTMSS(unittest.TestCase): + # TODO: write similar test as t_qaservice, which integrates tmss_test_env and (in this case) pipelinecontrol + pass - def test_check_scheduled_pipelines(self): - """ Test whether we start pipelines that have status scheduled in TMSS. 
""" - - logger.warning('TEST_CHECKSCHEDULED') - - with unittest.mock.patch('lofar.mac.PipelineControl.TMSSsession.get_subtask_parset') as mock_get_subtask_parset: - mock_get_subtask_parset.side_effect = lambda id: "ObsSW.Observation.processType=Pipeline\n" \ - "ObsSW.Observation.Cluster.ProcessingCluster.clusterName=CEP4\n" - - with unittest.mock.patch('lofar.mac.PipelineControl.TMSSsession.get_subtasks') as mock_get_subtasks: - mock_get_subtasks.side_effect = lambda state: json.loads('[{"url":"http://localhost:8008/api/subtask/2000001/?format=json","tags":["TMSS","TESTING"],"created_at":"2020-05-11T06:39:01.907446","updated_at":"2020-05-11T17:49:47.455010","start_time":null,"stop_time":null,"specifications_doc":{"demixer":{"baselines":"CS*,RS*&","time_steps":1,"demix_always":[],"ignore_target":false,"demix_if_needed":[],"frequency_steps":4,"demix_time_steps":1,"demix_frequency_steps":4},"aoflagger":{"strategy":"HBAdefault"},"preflagger0":{"channels":"0..nchan/32-1,31*nchan/32..nchan-1"},"preflagger1":{"corrtype":"auto"},"storagemanager":"dysco"},"do_cancel":null,"priority":1,"scheduler_input_doc":{},"state":"http://localhost:8008/api/subtask_state/scheduled/?format=json","task_blueprint":"http://localhost:8008/api/task_blueprint/20/?format=json","specifications_template":"http://localhost:8008/api/subtask_template/2/?format=json","schedule_method":"http://localhost:8008/api/schedule_method/manual/?format=json","cluster":"http://localhost:8008/api/cluster/1/?format=json"},' \ - 
'{"url":"http://localhost:8008/api/subtask/2000042/?format=json","tags":["TMSS","TESTING"],"created_at":"2020-05-11T06:39:01.907446","updated_at":"2020-05-11T17:49:47.455010","start_time":null,"stop_time":null,"specifications_doc":{"demixer":{"baselines":"CS*,RS*&","time_steps":1,"demix_always":[],"ignore_target":false,"demix_if_needed":[],"frequency_steps":4,"demix_time_steps":1,"demix_frequency_steps":4},"aoflagger":{"strategy":"HBAdefault"},"preflagger0":{"channels":"0..nchan/32-1,31*nchan/32..nchan-1"},"preflagger1":{"corrtype":"auto"},"storagemanager":"dysco"},"do_cancel":null,"priority":1,"scheduler_input_doc":{},"state":"http://localhost:8008/api/subtask_state/scheduled/?format=json","task_blueprint":"http://localhost:8008/api/task_blueprint/20/?format=json","specifications_template":"http://localhost:8008/api/subtask_template/2/?format=json","schedule_method":"http://localhost:8008/api/schedule_method/manual/?format=json","cluster":"http://localhost:8008/api/cluster/1/?format=json"}]') - - with unittest.mock.patch('lofar.mac.PipelineControl.PipelineControlTMSSHandler._startPipeline') as mock_startPipeline: - handler = PipelineControlTMSSHandler() - handler.check_scheduled_pipelines() - - mock_get_subtasks.assert_called_with(state="scheduled") - - self.assertTrue(call(2000001) in mock_get_subtask_parset.call_args_list) - self.assertTrue(call(2000042) in mock_get_subtask_parset.call_args_list) - - mock_startPipeline.assert_called() - - logger.warning('END TEST_CHECKSCHEDULED') - - - def test_onSubTaskScheduled(self): - """ Test whether pipelines are started with correct Parset and ID TMSS. 
""" - logger.warning('TEST_ONSCHEDULED') - - with unittest.mock.patch('lofar.mac.PipelineControl.TMSSsession.get_subtask_parset') as mock_get_subtask_parset: - mock_get_subtask_parset.side_effect = lambda id: "ObsSW.Observation.processType=Pipeline\n" \ - "ObsSW.Observation.Cluster.ProcessingCluster.clusterName=CEP4\n" - - with unittest.mock.patch('lofar.mac.PipelineControl.PipelineControlTMSSHandler._startPipeline') as mock_startPipeline: - handler = PipelineControlTMSSHandler() - handler.onSubTaskScheduled(1234, "scheduling", "scheduled") - - mock_get_subtask_parset.assert_called_with(1234) - mock_startPipeline.assert_called() - - logger.warning('END TEST_ONSCHEDULED') if __name__ == "__main__": diff --git a/QA/QA_Common/test/CMakeLists.txt b/QA/QA_Common/test/CMakeLists.txt index e7de2d7e597a0f6ad83400dd57787ffdaf1fee05..f311211225cde88f947d693a927e9e0f23a687d3 100644 --- a/QA/QA_Common/test/CMakeLists.txt +++ b/QA/QA_Common/test/CMakeLists.txt @@ -18,7 +18,9 @@ # $Id$ include(LofarCTest) -lofar_add_test(t_hdf5_io) -set_tests_properties(t_hdf5_io PROPERTIES TIMEOUT 300) +IF(BUILD_TESTING) + lofar_add_test(t_hdf5_io) + set_tests_properties(t_hdf5_io PROPERTIES TIMEOUT 300) +ENDIF(BUILD_TESTING) diff --git a/QA/QA_Service/CMakeLists.txt b/QA/QA_Service/CMakeLists.txt index 8b296ec57e289bdf2b86b01f0d18ed435dda4a16..8dd303184dc2009fc7d97e900aac89d9ef0749de 100644 --- a/QA/QA_Service/CMakeLists.txt +++ b/QA/QA_Service/CMakeLists.txt @@ -17,7 +17,11 @@ # $Id$ -lofar_package(QA_Service 1.0 DEPENDS QA_Common PyMessaging OTDB_Services pyparameterset TMSSClient) +IF(BUILD_TESTING) + lofar_package(QA_Service 1.0 DEPENDS QA_Common PyMessaging OTDB_Services pyparameterset TMSSClient TMSS) +ELSE() + lofar_package(QA_Service 1.0 DEPENDS QA_Common PyMessaging OTDB_Services pyparameterset TMSSClient) +ENDIF(BUILD_TESTING) add_subdirectory(lib) add_subdirectory(bin) diff --git a/QA/QA_Service/lib/qa_service.py b/QA/QA_Service/lib/qa_service.py index 
e8283195becfe50615448a3d5ed1d72f8716a2ad..d189be1560b98aca99ce59488de94f107fc1f7b3 100644 --- a/QA/QA_Service/lib/qa_service.py +++ b/QA/QA_Service/lib/qa_service.py @@ -87,9 +87,9 @@ class QAFilteringTMSSSubTaskBusListener(TMSSSubTaskBusListener): with TMSSsession.create_from_dbcreds_for_ldap() as tmsssession: subtask = tmsssession.get_subtask(subtask_id) spec = tmsssession.get_url_as_json_object(subtask['specifications_template']) - if '/qa_files/' in spec['type']: + if 'qa_files' == spec['type_value']: self._send_qa_command_message(subtask_id, DEFAULT_DO_QAFILE_CONVERSION_SUBJECT) - elif '/qa_plots/' in spec['type']: + elif 'qa_plots' == spec['type_value']: self._send_qa_command_message(subtask_id, DEFAULT_DO_QAPLOTS_SUBJECT) def __init__(self, exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER): @@ -234,7 +234,15 @@ class QAService: self.tmsssession.set_subtask_status(subtask_id, 'starting') self.tmsssession.set_subtask_status(subtask_id, 'started') - hdf5_path = self.h5_lustre_filepath(otdb_id or subtask_id) + input_dataproducts = self.tmsssession.get_subtask_input_dataproducts(subtask_id=subtask_id) + + if len(input_dataproducts) != 1: + raise ValueError("QA subtask %s should have exactly 1 input dataproduct" % subtask_id) + + hdf5_path = os.path.join(input_dataproducts[0]['directory'], input_dataproducts[0]['filename']) + else: + hdf5_path = self.h5_lustre_filepath(otdb_id or subtask_id) + plot_dir_path = self._create_plots_for_h5_file(hdf5_path, otdb_id, subtask_id) if plot_dir_path: @@ -304,22 +312,42 @@ class QAService: try: obs_id = otdb_id or subtask_id - # define default h5 filename use default cep4 qa output dir - hdf5_path = self.h5_lustre_filepath(obs_id) - h5_dir_path, h5_filename = os.path.split(hdf5_path) - - nr_of_timestamps = -1 - nr_of_subbands = -1 + logger.info('trying to convert MS uv dataset with otdb_id=%s subtask_id=%s if any', otdb_id, subtask_id) + cmd = ['ms2hdf5', '--cep4', '-p', '-22'] if subtask_id: subtask = 
self.tmsssession.get_subtask(subtask_id=subtask_id) nr_of_timestamps = subtask['specifications_doc'].get('nr_of_timestamps', -1) nr_of_subbands = subtask['specifications_doc'].get('nr_of_subbands', -1) + cmd += ['-t', str(nr_of_timestamps), '-s', str(nr_of_subbands)] - logger.info('trying to convert MS uv dataset with otdb_id=%s subtask_id=%s if any', otdb_id, subtask_id) - cmd = ['ms2hdf5', '-o', str(obs_id), '--cep4', '-p', '-22', '-t', str(nr_of_timestamps), '-s', str(nr_of_subbands), ] - cmd += ['--output_dir', h5_dir_path] - cmd += ['--output_filename', h5_filename] + input_dataproducts = self.tmsssession.get_subtask_input_dataproducts(subtask_id=subtask_id) + + if not input_dataproducts: + raise ValueError("QA subtask %s has no input dataproducts" % subtask_id) + + directory = input_dataproducts[0]['directory'] + cmd += ['-m', directory] + + output_dataproducts = self.tmsssession.get_subtask_output_dataproducts(subtask_id=subtask_id) + + if len(output_dataproducts) != 1: + raise ValueError("QA subtask %s should have exactly 1 dataproduct, but it has %s. 
urls: %s" % (subtask_id, + len(output_dataproducts), + ', '.join(dp['url'] for dp in output_dataproducts))) + + hdf5_path = os.path.join(output_dataproducts[0]['directory'], output_dataproducts[0]['filename']) + cmd += ['--output_dir', output_dataproducts[0]['directory'], + '--output_filename', output_dataproducts[0]['filename']] + elif otdb_id: + # define default h5 filename use default cep4 qa output dir + hdf5_path = self.h5_lustre_filepath(obs_id) + h5_dir_path, h5_filename = os.path.split(hdf5_path) + + cmd += ['--output_dir', h5_dir_path, '--output_filename', h5_filename] + cmd += ['-o', str(obs_id)] + else: + raise ValueError("either otdb_id or subtask_id needs to be set") # wrap the command in a cep4 docker ssh call cmd = wrap_command_for_docker(cmd, 'adder', 'latest') @@ -358,11 +386,28 @@ class QAService: subtask = self.tmsssession.get_subtask(subtask_id=subtask_id) #TODO: use settings from subtask to tweak plot_hdf5_dynamic_spectra options + input_dataproducts = self.tmsssession.get_subtask_input_dataproducts(subtask_id=subtask_id) + if len(input_dataproducts) != 1: + raise ValueError("QA subtask %s should have exactly 1 input dataproduct, but it has %s. urls: %s" % (subtask_id, + len(input_dataproducts), + ', '.join(dp['url'] for dp in input_dataproducts))) + + hdf5_path = os.path.join(input_dataproducts[0]['directory'], input_dataproducts[0]['filename']) + + output_dataproducts = self.tmsssession.get_subtask_output_dataproducts(subtask_id=subtask_id) + if len(output_dataproducts) != 1: + raise ValueError("QA subtask %s should have exactly 1 output dataproduct, but it has %s. 
urls: %s" % (subtask_id, + len(output_dataproducts), + ', '.join(dp['url'] for dp in output_dataproducts))) + + task_plot_dir_path = output_dataproducts[0]['directory'] + base_plot_dir_path = os.path.dirname(task_plot_dir_path) + for plot_options in [['-1', '-acb'], # 'hot' autocor/crosscor, per baseline scaling with distinct polarization scales, in dB ['-1', '-acg'], # 'complex' autocor/crosscor, all baseline scaling with same polarization scales, in dB ['-1', '-acn', '--raw'], # normalized 'hot' autocor/crosscor, raw ['-4']]: # delay-rate - cmd = ['plot_hdf5_dynamic_spectra', '-o %s' % (base_plot_dir_path,), '--force', '--cep4'] + plot_options + [hdf5_path] + cmd = ['plot_hdf5_dynamic_spectra', '-o', base_plot_dir_path, '--force', '--cep4'] + plot_options + [hdf5_path] # wrap the command in a cep4 ssh call to docker container cmd = wrap_command_for_docker(cmd, 'adder', 'latest') @@ -461,6 +506,9 @@ class QAService: def _move_plots_to_nfs_dir(self, plot_dir_path): try: + if not plot_dir_path.endswith('/'): + plot_dir_path += '/' + plot_dir_name = os.path.basename(plot_dir_path) plot_nfs_base_path = os.path.join(QAService.QA_NFS_BASE_DIR, 'plots') diff --git a/QA/QA_Service/test/t_qa_service.py b/QA/QA_Service/test/t_qa_service.py index 3318d0cf092869223bd4382b34009c27a5082bee..2191bea8f073d28533de7233f011fb76e2718824 100755 --- a/QA/QA_Service/test/t_qa_service.py +++ b/QA/QA_Service/test/t_qa_service.py @@ -507,7 +507,6 @@ class TestQAService(unittest.TestCase): logger.info(' -- test_05_qa_service_for_expected_behaviour_on_tmss_events -- ') from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator - from lofar.sas.tmss.tmss.tmssapp.subtasks import create_observation_to_qafile_subtask, create_qafile_to_qaplots_subtask tmss_client = self.tmss_test_env.create_tmss_client() with tmss_client: @@ -538,8 +537,9 @@ class TestQAService(unittest.TestCase): #TODO: merge adder branch into trunk so we can use plot_hdf5_dynamic_spectra on the test-h5 file to 
create plots if 'plot_hdf5_dynamic_spectra' in cmd: # replace the plot_hdf5_dynamic_spectra command which runs normally in the docker container - # by a call to bash true, so the 'plot_hdf5_dynamic_spectra' call returns 0 exit code - mocked_cmd = ['true'] + # by a call to bash mkdir, so the 'plot_hdf5_dynamic_spectra' call returns 0 exit code, and we have an output directory to play with. + plot_path = os.path.join(cmd[cmd.index('-o')+1], os.path.basename(cmd[-1]).replace('_QA.h5','')) + mocked_cmd = ['mkdir', '-p', plot_path, ';', 'touch', os.path.join(plot_path, 'foo.bar')] logger.info('''mocked_wrap_command_for_docker returning mocked command: '%s', instead of original command: '%s' ''', ' '.join(mocked_cmd), ' '.join(cmd)) return mocked_cmd @@ -559,24 +559,37 @@ class TestQAService(unittest.TestCase): auth=(self.tmss_test_env.client_credentials.dbcreds.user, self.tmss_test_env.client_credentials.dbcreds.password)) + # create a observation output dataproduct, which automatically creates the needed observation subtask and it's outputs etc. 
+ uv_dataproduct = tdc.post_data_and_get_response_as_json_object(tdc.Dataproduct(filename="my_uv_dataprodcut.MS", directory=self.TEST_DIR), '/dataproduct/') + uvdp_producer = tdc.get_response_as_json_object(uv_dataproduct['producer']) + obs_subtask = tdc.get_response_as_json_object(uvdp_producer['subtask']) + tmss_client.set_subtask_status(obs_subtask['id'], 'finished') + + qafile_subtask_template = tmss_client.get_subtask_template(name="QA file conversion") qafile_subtask_spec_doc = add_defaults_to_json_object_for_schema({}, qafile_subtask_template['schema']) - subtask_url = tdc.post_data_and_get_url(tdc.Subtask(specifications_template_url=qafile_subtask_template['url'], - specifications_doc=qafile_subtask_spec_doc), - '/subtask/') - subtask_id = subtask_url.split('/')[-2] + subtask = tdc.post_data_and_get_response_as_json_object(tdc.Subtask(specifications_template_url=qafile_subtask_template['url'], + specifications_doc=qafile_subtask_spec_doc), '/subtask/') + subtask_id = subtask['id'] - qaplots_subtask_template = tmss_client.get_subtask_template(name="QA plots") - qaplots_subtask_spec_doc = add_defaults_to_json_object_for_schema({}, qaplots_subtask_template['schema']) - - subtask_url2 = tdc.post_data_and_get_url(tdc.Subtask(specifications_template_url=qaplots_subtask_template['url'], - specifications_doc=qaplots_subtask_spec_doc), '/subtask/') - subtask_id2 = subtask_url2.split('/')[-2] + subtask_input = tdc.post_data_and_get_response_as_json_object(tdc.SubtaskInput(subtask['url'], dataproduct_urls=[uv_dataproduct['url']], subtask_output_url=uvdp_producer['url']), '/subtask_input/') + subtask_output = tdc.post_data_and_get_response_as_json_object(tdc.SubtaskOutput(subtask['url']), '/subtask_output/') + tmss_client.set_subtask_status(subtask_id, 'defined') # trigger a qa process by setting the tmss subtask to scheduled # this will result in the QAService actually doing its magic - tmss_client.set_subtask_status(subtask_id, 'scheduled') + # but first, we need to 
override where the output dataproduct is written, + # so, stop listening, schedule (getting the default output dataproduct), and then override the directory, and start listening again (picking up the scheduled-event). + qaservice.filtering_tmssbuslistener.stop_listening() + tmss_client.schedule_subtask(subtask_id) + dataproduct = tmss_client.get_subtask_output_dataproducts(subtask_id)[0] + tmss_client.session.patch(dataproduct['url'], json={'directory': os.path.join(self.TEST_DIR, 'h5')}, params={'format':'json'}) + dataproduct = tmss_client.get_subtask_output_dataproducts(subtask_id)[0] + qaservice.filtering_tmssbuslistener.start_listening() + + qaplots_subtask_template = tmss_client.get_subtask_template(name="QA plots") + qaplots_subtask_spec_doc = add_defaults_to_json_object_for_schema({}, qaplots_subtask_template['schema']) # start waiting until ConvertedMS2Hdf5 event message received (or timeout) qa_listener.converted_event.wait(30) @@ -600,7 +613,24 @@ class TestQAService(unittest.TestCase): self.assertTrue('subtask_id' in qa_listener.clustered_msg_content) self.assertTrue('hdf5_file_path' in qa_listener.clustered_msg_content) - tmss_client.set_subtask_status(subtask_id2, 'scheduled') + subtask2 = tdc.post_data_and_get_response_as_json_object(tdc.Subtask(specifications_template_url=qaplots_subtask_template['url'], + specifications_doc=qaplots_subtask_spec_doc), '/subtask/') + tdc.post_data_and_get_response_as_json_object(tdc.SubtaskInput(subtask2['url'], dataproduct_urls=[dataproduct['url']], subtask_output_url=subtask_output['url']), '/subtask_input/') + subtask2_output = tdc.post_data_and_get_response_as_json_object(tdc.SubtaskOutput(subtask2['url']), '/subtask_output/') + subtask2_id = subtask2['id'] + tmss_client.set_subtask_status(subtask2_id, 'defined') + + # normally the tmss_subtask_scheduling service schedules the next subtask automagically + # But in this test we do that by hand, since the tmss_subtask_scheduling service is not part of this test. 
+ # Again, we need to override where the output dataproduct is written, + # so, stop listening, schedule (getting the default output dataproduct), and then override the directory, and start listening again (picking up the scheduled-event). + qaservice.filtering_tmssbuslistener.stop_listening() + tmss_client.schedule_subtask(subtask2_id) + dataproduct = tmss_client.get_subtask_output_dataproducts(subtask2_id)[0] + tmss_client.session.patch(dataproduct['url'], json={'directory': os.path.join(self.TEST_DIR, 'plots', 'L%s' % (obs_subtask['id'],))}, params={'format':'json'}) + dataproduct = tmss_client.get_subtask_output_dataproducts(subtask2_id)[0] + qaservice.filtering_tmssbuslistener.start_listening() + # start waiting until CreatedInspectionPlots event message received (or timeout) qa_listener.plotted_event.wait(30) diff --git a/SAS/TMSS/client/lib/mains.py b/SAS/TMSS/client/lib/mains.py index ca614b9f218c475ec0c4bdca66dba0d259f3838b..dd7829a020f9e9dd8c3f1d5e0019048445f84819 100644 --- a/SAS/TMSS/client/lib/mains.py +++ b/SAS/TMSS/client/lib/mains.py @@ -92,7 +92,7 @@ def main_set_subtask_state(): try: with TMSSsession.create_from_dbcreds_for_ldap() as session: changed_subtask = session.set_subtask_status(args.subtask_id, args.state) - print("%s now has state %s" % (changed_subtask['url'], changed_subtask['state'])) + print("%s now has state %s, see: %s" % (changed_subtask['id'], changed_subtask['state_value'], changed_subtask['url'])) except Exception as e: print(e) exit(1) diff --git a/SAS/TMSS/client/lib/tmss_http_rest_client.py b/SAS/TMSS/client/lib/tmss_http_rest_client.py index 1f426c0b52a58d0c33c54cf10583ba0c6e5d2408..1752c53477991e7b30244d6b346564e610624939 100644 --- a/SAS/TMSS/client/lib/tmss_http_rest_client.py +++ b/SAS/TMSS/client/lib/tmss_http_rest_client.py @@ -166,7 +166,7 @@ class TMSSsession(object): params['format'] ='json' response = self.session.get(url=full_url, params=params) - logger.info("[%s] %s %s on %s", response.request.method.upper(), 
response.status_code, responses.get(response.status_code), response.request.url) + logger.info("%s %s %s on %s", response.request.method.upper(), response.status_code, responses.get(response.status_code), response.request.url) if response.status_code >= 200 and response.status_code < 300: result = json.loads(response.content.decode('utf-8')) @@ -204,6 +204,14 @@ class TMSSsession(object): return None return result + def get_subtask_output_dataproducts(self, subtask_id: int) -> []: + '''get the output dataproducts of the subtask with the given subtask_id''' + return self.get_path_as_json_object('subtask/%s/output_dataproducts' % subtask_id) + + def get_subtask_input_dataproducts(self, subtask_id: int) -> []: + '''get the input dataproducts of the subtask with the given subtask_id''' + return self.get_path_as_json_object('subtask/%s/input_dataproducts' % subtask_id) + def specify_observation_task(self, task_id: int) -> requests.Response: """specify observation for the given draft task by just doing a REST API call """ result = self.session.get(url='%s/api/task/%s/specify_observation' % (self.base_url, task_id)) diff --git a/SAS/TMSS/frontend/tmss_webapp/README.md b/SAS/TMSS/frontend/tmss_webapp/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9c40dcdc3d8c852db2b42a2da2d8f867029378b4 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/README.md @@ -0,0 +1,68 @@ +This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app). + +## Available Scripts + +In the project directory, you can run: + +### `yarn start` + +Runs the app in the development mode.<br /> +Open [http://localhost:3000](http://localhost:3000) to view it in the browser. + +The page will reload if you make edits.<br /> +You will also see any lint errors in the console. 
+ +### `yarn test` + +Launches the test runner in the interactive watch mode.<br /> +See the section about [running tests](https://facebook.github.io/create-react-app/docs/running-tests) for more information. + +### `yarn build` + +Builds the app for production to the `build` folder.<br /> +It correctly bundles React in production mode and optimizes the build for the best performance. + +The build is minified and the filenames include the hashes.<br /> +Your app is ready to be deployed! + +See the section about [deployment](https://facebook.github.io/create-react-app/docs/deployment) for more information. + +### `yarn eject` + +**Note: this is a one-way operation. Once you `eject`, you can’t go back!** + +If you aren’t satisfied with the build tool and configuration choices, you can `eject` at any time. This command will remove the single build dependency from your project. + +Instead, it will copy all the configuration files and the transitive dependencies (webpack, Babel, ESLint, etc) right into your project so you have full control over them. All of the commands except `eject` will still work, but they will point to the copied scripts so you can tweak them. At this point you’re on your own. + +You don’t have to ever use `eject`. The curated feature set is suitable for small and middle deployments, and you shouldn’t feel obligated to use this feature. However we understand that this tool wouldn’t be useful if you couldn’t customize it when you are ready for it. + +## Learn More + +You can learn more in the [Create React App documentation](https://facebook.github.io/create-react-app/docs/getting-started). + +To learn React, check out the [React documentation](https://reactjs.org/). 
+ +### Code Splitting + +This section has moved here: https://facebook.github.io/create-react-app/docs/code-splitting + +### Analyzing the Bundle Size + +This section has moved here: https://facebook.github.io/create-react-app/docs/analyzing-the-bundle-size + +### Making a Progressive Web App + +This section has moved here: https://facebook.github.io/create-react-app/docs/making-a-progressive-web-app + +### Advanced Configuration + +This section has moved here: https://facebook.github.io/create-react-app/docs/advanced-configuration + +### Deployment + +This section has moved here: https://facebook.github.io/create-react-app/docs/deployment + +### `yarn build` fails to minify + +This section has moved here: https://facebook.github.io/create-react-app/docs/troubleshooting#npm-run-build-fails-to-minify diff --git a/SAS/TMSS/frontend/tmss_webapp/package.json b/SAS/TMSS/frontend/tmss_webapp/package.json new file mode 100644 index 0000000000000000000000000000000000000000..b5328e779ebfa77c91b24d16cf90c040bc06e3a4 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/package.json @@ -0,0 +1,51 @@ +{ + "name": "tmss_gui", + "version": "0.1.0", + "private": true, + "dependencies": { + "@json-editor/json-editor": "^2.3.0", + "@testing-library/jest-dom": "^4.2.4", + "@testing-library/react": "^9.3.2", + "@testing-library/user-event": "^7.1.2", + "axios": "^0.19.2", + "font-awesome": "^4.7.0", + "history": "^5.0.0", + "node-sass": "^4.12.0", + "primeflex": "^1.3.0", + "primeicons": "^4.0.0", + "primereact": "^4.2.2", + "react": "^16.13.1", + "react-app-polyfill": "^1.0.6", + "react-bootstrap": "^1.0.1", + "react-bootstrap-datetimepicker": "0.0.22", + "react-dom": "^16.13.1", + "react-frame-component": "^4.1.2", + "react-json-view": "^1.19.1", + "react-router-dom": "^5.2.0", + "react-scripts": "3.4.1", + "react-transition-group": "^1.2.1", + "typescript": "^3.9.5", + "yup": "^0.29.1" + }, + "scripts": { + "start": "react-scripts start", + "build": "react-scripts build", + "test": 
"react-scripts test", + "eject": "react-scripts eject" + }, + "eslintConfig": { + "extends": "react-app" + }, + "browserslist": { + "production": [ + ">0.2%", + "not dead", + "not op_mini all" + ], + "development": [ + "last 1 chrome version", + "last 1 firefox version", + "last 1 safari version" + ] + } +} diff --git a/SAS/TMSS/frontend/tmss_webapp/public/favicon.ico b/SAS/TMSS/frontend/tmss_webapp/public/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..eb16acbe5ecd12aa72a0fdf8c46031ba410c4f7e Binary files /dev/null and b/SAS/TMSS/frontend/tmss_webapp/public/favicon.ico differ diff --git a/SAS/TMSS/frontend/tmss_webapp/public/index.html b/SAS/TMSS/frontend/tmss_webapp/public/index.html new file mode 100644 index 0000000000000000000000000000000000000000..4d5295648ddd04e625e8c19fb890c91f89629b5f --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/public/index.html @@ -0,0 +1,43 @@ +<!DOCTYPE html> +<html lang="en"> + <head> + <meta charset="utf-8" /> + <link rel="icon" href="%PUBLIC_URL%/favicon.ico" /> + <meta name="viewport" content="width=device-width, initial-scale=1" /> + <meta name="theme-color" content="#000000" /> + <meta + name="description" + content="Telescope Manager Specification System" + /> + <link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" /> + <!-- + manifest.json provides metadata used when your web app is installed on a + user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/ + --> + <link rel="manifest" href="%PUBLIC_URL%/manifest.json" /> + <!-- + Notice the use of %PUBLIC_URL% in the tags above. + It will be replaced with the URL of the `public` folder during the build. + Only files inside the `public` folder can be referenced from the HTML. + + Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will + work correctly both with client-side routing and a non-root public URL. 
+ Learn how to configure a non-root public URL by running `npm run build`. + --> + <title>TMSS</title> + </head> + <body> + <noscript>You need to enable JavaScript to run this app.</noscript> + <div id="root"></div> + <!-- + This HTML file is a template. + If you open it directly in the browser, you will see an empty page. + + You can add webfonts, meta tags, or analytics to this file. + The build step will place the bundled scripts into the <body> tag. + + To begin the development, run `npm start` or `yarn start`. + To create a production bundle, use `npm run build` or `yarn build`. + --> + </body> +</html> diff --git a/SAS/TMSS/frontend/tmss_webapp/public/manifest.json b/SAS/TMSS/frontend/tmss_webapp/public/manifest.json new file mode 100644 index 0000000000000000000000000000000000000000..080d6c77ac21bb2ef88a6992b2b73ad93daaca92 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/public/manifest.json @@ -0,0 +1,25 @@ +{ + "short_name": "React App", + "name": "Create React App Sample", + "icons": [ + { + "src": "favicon.ico", + "sizes": "64x64 32x32 24x24 16x16", + "type": "image/x-icon" + }, + { + "src": "logo192.png", + "type": "image/png", + "sizes": "192x192" + }, + { + "src": "logo512.png", + "type": "image/png", + "sizes": "512x512" + } + ], + "start_url": ".", + "display": "standalone", + "theme_color": "#000000", + "background_color": "#ffffff" +} diff --git a/SAS/TMSS/frontend/tmss_webapp/public/robots.txt b/SAS/TMSS/frontend/tmss_webapp/public/robots.txt new file mode 100644 index 0000000000000000000000000000000000000000..e9e57dc4d41b9b46e05112e9f45b7ea6ac0ba15e --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/public/robots.txt @@ -0,0 +1,3 @@ +# https://www.robotstxt.org/robotstxt.html +User-agent: * +Disallow: diff --git a/SAS/TMSS/frontend/tmss_webapp/src/App.css b/SAS/TMSS/frontend/tmss_webapp/src/App.css new file mode 100644 index 0000000000000000000000000000000000000000..18ccb9475ca09cfadf5cd73e300c156090c9812f --- /dev/null +++ 
b/SAS/TMSS/frontend/tmss_webapp/src/App.css @@ -0,0 +1,39 @@ +.App { + /* text-align: center; */ +} + +.App-logo { + height: 40vmin; + pointer-events: none; +} + +@media (prefers-reduced-motion: no-preference) { + .App-logo { + animation: App-logo-spin infinite 20s linear; + } +} + +.App-header { + background-color: #303d59; + height: 50px; + display: flex; + flex-direction: column; + /* align-items: center; */ + justify-content: center; + font-size: calc(10px + 2vmin); + color: white; + padding-left: 10px; +} + +.App-link { + color: #61dafb; +} + +@keyframes App-logo-spin { + from { + transform: rotate(0deg); + } + to { + transform: rotate(360deg); + } +} diff --git a/SAS/TMSS/frontend/tmss_webapp/src/App.js b/SAS/TMSS/frontend/tmss_webapp/src/App.js new file mode 100644 index 0000000000000000000000000000000000000000..0d607d738c330273f30c7f9dc5a6421c4a0069c7 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/App.js @@ -0,0 +1,62 @@ +import React, {Component} from 'react'; +import { BrowserRouter as Router } from 'react-router-dom'; + +import {AppTopbar} from './layout/components/AppTopbar'; +import {AppMenu} from './layout/components/AppMenu'; +import { AppFooter } from './layout/components/AppFooter'; +import {RoutedContent} from './routes'; + +// import {Dashboard} from './routes/dashboard/dashboard'; + +import './layout/layout.scss'; +// import './App.css'; + + +class App extends Component { + + constructor() { + super(); + this.state = { + currentMenu: '', + currentPath: '/' + } + this.onMenuItemClick = this.onMenuItemClick.bind(this) + this.menu = [ + {label: 'Dashboard', icon: 'pi pi-fw pi-home', to:'/dashboard'}, + {label: 'Scheduling Units', icon: 'pi pi-fw pi-calendar', to:'/scheduling'} + ]; + + // this.menuComponent = {'Dashboard': Dashboard} + } + + onMenuItemClick(event) { + console.log(event); + this.setState({currentMenu:event.item.label, currentPath: event.item.path}); + } + + render() { + return ( + <React.Fragment> + <div className="App"> + + 
<AppTopbar></AppTopbar> + <Router basename={ this.state.currentPath }> + <AppMenu model={this.menu} onMenuItemClick={this.onMenuItemClick} /> + <div className="layout-wrapper layout-static layout-static-sidebar-active"> + <div className="layout-main"> + + <RoutedContent /> + + </div> + </div> + </Router> + <AppFooter></AppFooter> + + + </div> + </React.Fragment> + ); + } +} + +export default App; diff --git a/SAS/TMSS/frontend/tmss_webapp/src/App.test.js b/SAS/TMSS/frontend/tmss_webapp/src/App.test.js new file mode 100644 index 0000000000000000000000000000000000000000..6b91e5f1e9ffaab1e20239b210818532ce90a08b --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/App.test.js @@ -0,0 +1,9 @@ +import React from 'react'; +import { render } from '@testing-library/react'; +import App from './App'; + +test('renders ASTRON in footer', () => { + const { getByText } = render(<App />); + const linkElement = getByText("ASTRON"); + expect(linkElement).toBeInTheDocument(); +}); diff --git a/SAS/TMSS/frontend/tmss_webapp/src/index.css b/SAS/TMSS/frontend/tmss_webapp/src/index.css new file mode 100644 index 0000000000000000000000000000000000000000..ec2585e8c0bb8188184ed1e0703c4c8f2a8419b0 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/index.css @@ -0,0 +1,13 @@ +body { + margin: 0; + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen', + 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue', + sans-serif; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +code { + font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New', + monospace; +} diff --git a/SAS/TMSS/frontend/tmss_webapp/src/index.js b/SAS/TMSS/frontend/tmss_webapp/src/index.js new file mode 100644 index 0000000000000000000000000000000000000000..f5185c1ec7a5dccf30b55a8e3f89afc3eca764a1 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/index.js @@ -0,0 +1,17 @@ +import React from 'react'; +import ReactDOM from 'react-dom'; +import 
'./index.css'; +import App from './App'; +import * as serviceWorker from './serviceWorker'; + +ReactDOM.render( + <React.StrictMode> + <App /> + </React.StrictMode>, + document.getElementById('root') +); + +// If you want your app to work offline and load faster, you can change +// unregister() to register() below. Note this comes with some pitfalls. +// Learn more about service workers: https://bit.ly/CRA-PWA +serviceWorker.unregister(); diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/_overrides.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/_overrides.scss new file mode 100644 index 0000000000000000000000000000000000000000..850b92b170e5957de7638ac69233197a155174af --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/_overrides.scss @@ -0,0 +1 @@ +//Suggested location to add your overrides so that migration would be easy by just updating the SASS folder in the future \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/_variables.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/_variables.scss new file mode 100644 index 0000000000000000000000000000000000000000..cfb153f3cea23fd4b18dcc389dc39af723fe7a52 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/_variables.scss @@ -0,0 +1,46 @@ +/* General */ +$fontSize:14px; +$bodyBgColor:#edf0f5; +$textColor:#333333; +$textSecondaryColor:#707070; +$borderRadius:3px; +$dividerColor:#e3e3e3; +$transitionDuration:.2s; +$maskBgColor:#424242; + +/* Menu Common */ +$menuitemBadgeBgColor:#007be5; +$menuitemBadgeColor:#ffffff; +$submenuFontSize:13px; +$menuitemActiveRouteColor:#1fa1fc; + +/* Menu Light */ +$menuBgColorFirst:#f3f4f9; +$menuBgColorLast:#d7dbe8; +$menuitemColor:#232428; +$menuitemHoverColor:#0388e5; +$menuitemActiveColor:#0388e5; +$menuitemActiveBgColor:#ffffff; +$menuitemBorderColor:rgba(207, 211, 224, 0.6); + +/* Menu Dark */ +$menuDarkBgColorFirst:#ffffff; +$menuDarkBgColorLast:#ffffff; +$menuitemDarkColor:#0388e5; +$menuitemDarkHoverColor:#0388e5; 
+$menuitemDarkActiveColor:#0388e5; +$menuitemDarkActiveBgColor:#ffffff; +$menuitemDarkBorderColor:rgba(246, 248, 252, 0.918); + +/* Topbar */ +$topbarLeftBgColor:#3d4977; +$topbarRightBgColor:#3d4977; +$topbarItemBadgeBgColor:#ef6262; +$topbarItemBadgeColor:#ffffff; +$topbarItemColor:#ffffff; +$topbarItemHoverColor:#c3e8fb; +$topbarSearchInputBorderBottomColor:#ffffff; +$topbarSearchInputColor:#ffffff; + +/* Footer */ +$footerBgColor:#ffffff; \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/components/AppFooter.js b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/AppFooter.js new file mode 100644 index 0000000000000000000000000000000000000000..27edea6d5758942eaf4582237ed240d18b554082 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/AppFooter.js @@ -0,0 +1,14 @@ +import React, { Component } from 'react'; + +export class AppFooter extends Component { + + render() { + return ( + <div className="layout-footer"> + <span className="footer-text" style={{'marginRight': '5px'}}><strong>TMSS</strong> by <strong>ASTRON</strong></span> + </div> + ); + } +} + +export default AppFooter; \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/components/AppFooter.test.js b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/AppFooter.test.js new file mode 100644 index 0000000000000000000000000000000000000000..fed16b1eb682162af3c51f16a5cf79527fb3ab19 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/AppFooter.test.js @@ -0,0 +1,9 @@ +import React from 'react'; +import { render } from '@testing-library/react'; +import AppFooter from './AppFooter'; + +test('renders ASTRON in footer', () => { + const { getByText } = render(<AppFooter />); + const linkElement = getByText("ASTRON"); + expect(linkElement).toBeInTheDocument(); +}); diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/components/AppMenu.js b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/AppMenu.js new file 
mode 100644 index 0000000000000000000000000000000000000000..7f0e4e18c1ee09448b7ed7f2330702f7b3d875dc --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/AppMenu.js @@ -0,0 +1,137 @@ +import React, { Component } from 'react'; +import {NavLink} from 'react-router-dom' +import PropTypes from 'prop-types'; +import classNames from 'classnames'; + +class AppSubmenu extends Component { + + static defaultProps = { + className: null, + items: null, + onMenuItemClick: null, + root: false, + permissions: null + + } + + static propTypes = { + className: PropTypes.string, + items: PropTypes.array, + onMenuItemClick: PropTypes.func, + root: PropTypes.bool, + permissions: PropTypes.array + } + + constructor(props) { + super(props); + this.state = {activeIndex: null}; + } + + onMenuItemClick(event, item, index) { + //avoid processing disabled items + if(item.disabled) { + event.preventDefault(); + return true; + } + + //execute command + if(item.command) { + item.command({originalEvent: event, item: item}); + } + + if(index === this.state.activeIndex) + this.setState({activeIndex: null}); + else + this.setState({activeIndex: index}); + + if(this.props.onMenuItemClick) { + this.props.onMenuItemClick({ + originalEvent: event, + item: item + }); + } + } + + renderLinkContent(item) { + let submenuIcon = item.items && <i className="pi pi-fw pi-angle-down menuitem-toggle-icon"></i>; + let badge = item.badge && <span className="menuitem-badge">{item.badge}</span>; + + return ( + <React.Fragment> + <i className={item.icon}></i> + <span>{item.label}</span> + {submenuIcon} + {badge} + </React.Fragment> + ); + } + + renderLink(item, i) { + let content = this.renderLinkContent(item); + + if (item.to) { + return ( + <NavLink activeClassName="active-route" to={item.to} onClick={(e) => this.onMenuItemClick(e, item, i)} exact target={item.target}> + {content} + </NavLink> + ) + } + else { + return ( + <a href={item.url} onClick={(e) => this.onMenuItemClick(e, item, i)} 
target={item.target}> + {content} + </a> + ); + + } + } + + render() { + + let items = this.props.items && this.props.items.map((item, i) => { + let active = this.state.activeIndex === i; + // let styleClass = classNames(item.badgeStyleClass, {'active-menuitem': active && !item.to}); + let styleClass = classNames(item.badgeStyleClass, {'active-menuitem': active && item.to}); + console.log(item.badgeStyleClass); + console.log(i); + console.log(this.state.activeIndex); + console.log(active); + console.log(styleClass); + return ( + <li className={styleClass} key={i}> + {item.items && this.props.root===true && <div className='arrow'></div>} + {this.renderLink(item, i)} + <AppSubmenu items={item.items} onMenuItemClick={this.props.onMenuItemClick}/> + </li> + ); + + }); + + return items ? <ul className={this.props.className}>{items}</ul> : null; + } +} + +export class AppMenu extends Component { + + static defaultProps = { + model: null, + onMenuItemClick: null + } + + static propTypes = { + model: PropTypes.array, + onMenuItemClick: PropTypes.func + } + + render() { + // console.log(authenticationService.currentUserValue); + return ( + <div className={'layout-sidebar layout-sidebar-light'} > + <div className="layout-menu-container"> + {/* <AppSubmenu items={this.props.model} permissions={authenticationService.currentUserValue.permissions} className="layout-menu" onMenuItemClick={this.props.onMenuItemClick} root={true}/> */} + <AppSubmenu items={this.props.model} className="layout-menu" onMenuItemClick={this.props.onMenuItemClick} root={true}/> + </div> + </div> + ); + } +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/components/AppTopbar.js b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/AppTopbar.js new file mode 100644 index 0000000000000000000000000000000000000000..ccd16881c0a2f108516bd1fc3a40e723cb6da71f --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/components/AppTopbar.js @@ -0,0 +1,39 @@ + +import React, 
{Component} from 'react'; +import 'primeicons/primeicons.css'; +import 'primereact/resources/themes/nova-light/theme.css'; +import 'primereact/resources/primereact.css'; +import 'primeflex/primeflex.css'; + +// import { PropTypes } from 'prop-types'; + + + export class AppTopbar extends Component { + + // constructor(props) { + // super(props); + // } + + + // static defaultProps = { + // onToggleMenu: null + // } + + + // static propTypes = { + // onToggleMenu: PropTypes.func + // } + + render() { + return ( + <React.Fragment> + <div className="layout-wrapper layout-static layout-static-sidebar-inactive"> + <div className="layout-topbar clearfix"> + <button className="p-link layout-menu-button"><i className="pi pi-bars"></i></button> + <span className="header-title">TMSS</span> + </div> + </div> + </React.Fragment> + ) + } +} diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/layout.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/layout.scss new file mode 100644 index 0000000000000000000000000000000000000000..5980a6378545ea73371eef791355b27aba58dd6d --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/layout.scss @@ -0,0 +1,3 @@ +@import "./_variables"; +@import "./sass/_layout"; +@import "./_overrides"; \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_content.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_content.scss new file mode 100644 index 0000000000000000000000000000000000000000..693a30ddbfb3214511538caff487fcd8bb9c1a8f --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_content.scss @@ -0,0 +1,6 @@ +.layout-main { + @include transition(margin-left $transitionDuration); + padding: 60px 16px 16px 16px; + min-height: 95vh; + background-color: white; +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_dashboard.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_dashboard.scss new file mode 100644 index 
0000000000000000000000000000000000000000..8fd362957dac1f26dd3c9b6089352a2e51f6baf8 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_dashboard.scss @@ -0,0 +1,234 @@ +/* Dashboard */ +.dashboard { + .summary-box.summary-box-projects .card { + border-top: 4px solid #0097a7; + background-color: hsl(187, 100%, 42%); + } + .summary-box.summary-box-activeschedules .card { + border-top: 4px solid #ad1457; + background-color: #e91e63; + } + .summary-box.summary-box-taskcompleted .card { + border-top: 4px solid #f47b08; + background-color: #ffb300; + } + .summary-box.summary-box-activetask .card { + border-top: 4px solid #455a64; + background-color: #607d8b; + } + .summary-box { + position: relative; + } + .summary-box .card { + min-height: 100px; + } + .summary-box-icon{ + position: absolute; + right: 24px; + top: 32px; + } + .card { + height: 100%; + margin-bottom: 0; + } + .summary { + position: relative; + + .title { + font-size: 20px; + } + + .detail { + color: $textSecondaryColor; + display: block; + margin-top: 10px; + } + + .count { + color: #ffffff; + position: absolute; + top: 10px; + right: 10px; + font-size: 10px; + padding: 7px 14px; + @include border-radius($borderRadius); + + &.visitors { + background-color: #20d077; + } + + &.purchases { + background-color: #f9c851; + } + + &.revenue { + background-color: #007be5; + } + } + } + + .highlight-box { + height: 100px; + @include clearfix(); + + .initials { + height: 100%; + float: left; + width: 50%; + text-align: center; + padding: 1em; + + > span { + font-size: 48px; + } + } + + .highlight-details { + height: 100%; + background-color: #ffffff; + float: left; + width: 50%; + padding: 1em; + + i { + font-size: 24px; + vertical-align: middle; + margin-right: .25em; + } + + .count { + color: $textSecondaryColor; + font-size: 20px; + display: block; + } + } + } + + .task-list { + list-style-type: none; + margin: 0; + padding: 0; + + li { + padding: .5em .25em; + border-bottom: 1px solid 
$dividerColor; + @include clearfix(); + } + + .p-checkbox { + vertical-align: middle; + margin-right: .5em; + } + + .task-name { + vertical-align: middle; + } + + i { + float: right; + font-size: 24px; + color: $textSecondaryColor; + } + + .p-panel-content { + min-height: 256px; + } + } + + .contact-form { + .p-panel-content { + min-height: 256px; + } + } + + .contacts { + + ul { + list-style-type: none; + padding: 0; + margin: 0; + + li { + border-bottom: 1px solid $dividerColor; + + button { + padding: 9px; + width: 100%; + box-sizing: border-box; + text-decoration: none; + position: relative; + display: block; + @include border-radius(2px); + @include transition(background-color .2s); + + .name { + position: absolute; + right: 10px; + top: 10px; + font-size: 18px; + } + + .email { + position: absolute; + right: 10px; + top: 30px; + font-size: 14px; + color: $textSecondaryColor; + } + + &:hover { + cursor: pointer; + background-color: #eeeeee; + } + } + + &:last-child { + border: 0; + } + } + } + + .p-panel-content { + min-height: 256px; + } + } + + .activity-list { + list-style-type: none; + padding: 0; + margin: 0; + + li { + border-bottom: 1px solid $dividerColor; + padding: 16px 8px; + + .count { + font-size: 24px; + color: #ffffff; + background-color: #007be5; + font-weight: 700; + display: inline-block; + padding: .25em .5em; + @include border-radius($borderRadius); + } + + &:first-child { + border-top: 1px solid $dividerColor; + } + + &:last-child { + border: 0; + } + + .p-g-6:first-child { + font-size: 18px; + padding-left: 0; + } + + .p-g-6:last-child { + text-align: right; + color: $textSecondaryColor; + } + } + } +} diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_editor.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_editor.scss new file mode 100644 index 0000000000000000000000000000000000000000..c1ab91ab97c61072018d69751b4be94eba6d6e1f --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_editor.scss @@ -0,0 +1,797 @@ 
+/*! + * Quill Editor v1.0.0-beta.3 + * https://quilljs.com/ + * Copyright (c) 2014, Jason Chen + * Copyright (c) 2013, salesforce.com + */ +.ql-container { + box-sizing: border-box; + font-family: Helvetica, Arial, sans-serif; + font-size: 13px; + height: 100%; + margin: 0px; + position: relative; +} +.ql-clipboard { + left: -100000px; + position: absolute; + top: 50%; +} +.ql-clipboard p { + margin: 0; + padding: 0; +} +.ql-editor { + box-sizing: border-box; + cursor: text; + line-height: 1.42; + height: 100%; + outline: none; + overflow-y: auto; + padding: 12px 15px; + tab-size: 4; + text-align: left; + white-space: pre-wrap; +} +.ql-editor p, +.ql-editor ol, +.ql-editor ul, +.ql-editor pre, +.ql-editor blockquote, +.ql-editor h1, +.ql-editor h2, +.ql-editor h3, +.ql-editor h4, +.ql-editor h5, +.ql-editor h6 { + margin: 0; + padding: 0; + counter-reset: list-1 list-2 list-3 list-4 list-5 list-6 list-7 list-8; +} +.ql-editor ol { + padding-left: 20px; +} +.ql-editor ul { + padding-left: 8px; + list-style: disc inside; +} +.ql-editor ol > li { + list-style-type: none; +} +.ql-editor ol { + list-style-type: none; + position: relative; +} +.ql-editor ol li { + counter-reset: list-1 list-2 list-3 list-4 list-5 list-6 list-7 list-8; + counter-increment: list-num; +} +.ql-editor ol li:before { + content: counter(list-num, decimal) '. '; + margin-right: -16px; + position: absolute; + right: 100%; + text-align: right; +} +.ql-editor ol li.ql-indent-1 { + counter-increment: list-1; +} +.ql-editor ol li.ql-indent-1:before { + content: counter(list-1, lower-alpha) '. '; + margin-right: -56px; +} +.ql-editor ol li.ql-indent-1 { + counter-reset: list-2 list-3 list-4 list-5 list-6 list-7 list-8; +} +.ql-editor ol li.ql-indent-2 { + counter-increment: list-2; +} +.ql-editor ol li.ql-indent-2:before { + content: counter(list-2, lower-roman) '. 
'; + margin-right: -96px; +} +.ql-editor ol li.ql-indent-2 { + counter-reset: list-3 list-4 list-5 list-6 list-7 list-8; +} +.ql-editor ol li.ql-indent-3 { + counter-increment: list-3; +} +.ql-editor ol li.ql-indent-3:before { + content: counter(list-3, decimal) '. '; + margin-right: -136px; +} +.ql-editor ol li.ql-indent-3 { + counter-reset: list-4 list-5 list-6 list-7 list-8; +} +.ql-editor ol li.ql-indent-4 { + counter-increment: list-4; +} +.ql-editor ol li.ql-indent-4:before { + content: counter(list-4, lower-alpha) '. '; + margin-right: -176px; +} +.ql-editor ol li.ql-indent-4 { + counter-reset: list-5 list-6 list-7 list-8; +} +.ql-editor ol li.ql-indent-5 { + counter-increment: list-5; +} +.ql-editor ol li.ql-indent-5:before { + content: counter(list-5, lower-roman) '. '; + margin-right: -216px; +} +.ql-editor ol li.ql-indent-5 { + counter-reset: list-6 list-7 list-8; +} +.ql-editor ol li.ql-indent-6 { + counter-increment: list-6; +} +.ql-editor ol li.ql-indent-6:before { + content: counter(list-6, decimal) '. '; + margin-right: -256px; +} +.ql-editor ol li.ql-indent-6 { + counter-reset: list-7 list-8; +} +.ql-editor ol li.ql-indent-7 { + counter-increment: list-7; +} +.ql-editor ol li.ql-indent-7:before { + content: counter(list-7, lower-alpha) '. '; + margin-right: -296px; +} +.ql-editor ol li.ql-indent-7 { + counter-reset: list-8; +} +.ql-editor ol li.ql-indent-8 { + counter-increment: list-8; +} +.ql-editor ol li.ql-indent-8:before { + content: counter(list-8, lower-roman) '. 
'; + margin-right: -336px; +} +.ql-editor .ql-indent-1 { + padding-left: 40px; +} +.ql-editor .ql-indent-2 { + padding-left: 80px; +} +.ql-editor .ql-indent-3 { + padding-left: 120px; +} +.ql-editor .ql-indent-4 { + padding-left: 160px; +} +.ql-editor .ql-indent-5 { + padding-left: 200px; +} +.ql-editor .ql-indent-6 { + padding-left: 240px; +} +.ql-editor .ql-indent-7 { + padding-left: 280px; +} +.ql-editor .ql-indent-8 { + padding-left: 320px; +} +.ql-editor .ql-video { + display: block; +} +.ql-editor .ql-video.ql-align-center { + margin: 0 auto; +} +.ql-editor .ql-video.ql-align-right { + margin: 0 0 0 auto; +} +.ql-editor .ql-bg-black { + background-color: #000; +} +.ql-editor .ql-bg-red { + background-color: #e60000; +} +.ql-editor .ql-bg-orange { + background-color: #f90; +} +.ql-editor .ql-bg-yellow { + background-color: #ff0; +} +.ql-editor .ql-bg-green { + background-color: #008a00; +} +.ql-editor .ql-bg-blue { + background-color: #06c; +} +.ql-editor .ql-bg-purple { + background-color: #93f; +} +.ql-editor .ql-color-white { + color: #fff; +} +.ql-editor .ql-color-red { + color: #e60000; +} +.ql-editor .ql-color-orange { + color: #f90; +} +.ql-editor .ql-color-yellow { + color: #ff0; +} +.ql-editor .ql-color-green { + color: #008a00; +} +.ql-editor .ql-color-blue { + color: #06c; +} +.ql-editor .ql-color-purple { + color: #93f; +} +.ql-editor .ql-font-serif { + font-family: Georgia, Times New Roman, serif; +} +.ql-editor .ql-font-monospace { + font-family: Monaco, Courier New, monospace; +} +.ql-editor .ql-size-small { + font-size: 10px; +} +.ql-editor .ql-size-large { + font-size: 18px; +} +.ql-editor .ql-size-huge { + font-size: 32px; +} +.ql-editor .ql-direction-rtl { + direction: rtl; + text-align: inherit; +} +.ql-editor .ql-align-center { + text-align: center; +} +.ql-editor .ql-align-justify { + text-align: justify; +} +.ql-editor .ql-align-right { + text-align: right; +} +.ql-editor.ql-blank::before { + color: rgba(0,0,0,0.6); + content: 
attr(data-placeholder); + font-style: italic; + pointer-events: none; + position: absolute; +} +.ql-hidden { + display: none; +} +.ql-tooltip { + position: absolute; +} +.ql-tooltip a { + cursor: pointer; + text-decoration: none; +} +.ql-toolbar:before, +.ql-toolbar:after { + content: ' '; + display: table; +} +.ql-toolbar:after { + clear: both; +} +.ql-toolbar button { + background: none; + border: none; + cursor: pointer; + display: inline-block; + float: left; + height: 24px; + outline: none; + padding: 3px 5px; + width: 24px; +} +.ql-toolbar button svg { + height: 100%; +} +.ql-toolbar .ql-stroke { + fill: none; + stroke-linecap: round; + stroke-linejoin: round; + stroke-width: 2; +} +.ql-toolbar .ql-stroke-mitter { + fill: none; + stroke-mitterlimit: 10; + stroke-width: 2; +} +.ql-toolbar .ql-empty { + fill: none; +} +.ql-toolbar .ql-even { + fill-rule: evenodd; +} +.ql-toolbar .ql-thin, +.ql-toolbar .ql-stroke.ql-thin { + stroke-width: 1; +} +.ql-toolbar .ql-transparent { + opacity: 0.4; +} +.ql-editor h1 { + font-size: 2em; +} +.ql-editor h2 { + font-size: 1.5em; +} +.ql-editor h3 { + font-size: 1.17em; +} +.ql-editor h4 { + font-size: 1em; +} +.ql-editor h5 { + font-size: 0.83em; +} +.ql-editor h6 { + font-size: 0.67em; +} +.ql-editor pre { + white-space: pre-wrap; +} +.ql-editor a { + text-decoration: underline; +} +.ql-editor blockquote { + border-left: 4px solid #ccc; + margin-bottom: 5px; + margin-top: 5px; + padding-left: 16px; +} +.ql-editor pre.hljs { + overflow: visible; +} +.ql-editor code, +.ql-editor pre:not(.hljs) { + background-color: #f0f0f0; +} +.ql-editor code, +.ql-editor pre { + border-radius: 3px; +} +.ql-editor code { + font-size: 85%; + padding-bottom: 2px; + padding-top: 2px; +} +.ql-editor code:before, +.ql-editor code:after { + content: "\A0"; + letter-spacing: -2px; +} +.ql-editor *:not(pre) + pre, +.ql-editor pre:first-of-type { + margin-top: 5px; + padding-top: 5px; +} +.ql-editor pre { + margin-bottom: 5px; + padding: 0px 10px 
5px; +} +.ql-editor pre + pre { + margin-top: -10px; +} +.ql-editor img { + max-width: 100%; +} +.ql-snow .ql-image-tooltip { + padding: 10px; + text-align: center; + width: 300px; +} +.ql-snow .ql-image-tooltip input[type=text] { + height: 30px; + width: 100%; +} +.ql-snow .ql-image-tooltip a.ql-cancel, +.ql-snow .ql-image-tooltip a.ql-action { + border: 1px solid #06c; + display: inline-block; + float: left; + line-height: 18px; + padding: 5px; + width: 50%; +} +.ql-snow .ql-image-tooltip a.ql-cancel::before { + color: #06c; + content: 'Cancel'; +} +.ql-snow .ql-image-tooltip a.ql-action { + background-color: #06c; +} +.ql-snow .ql-image-tooltip a.ql-action::before { + color: #fff; + content: 'Insert'; +} +.ql-snow .ql-image-tooltip .ql-preview { + height: 200px; + margin: 10px 0px; + position: relative; +} +.ql-snow .ql-image-tooltip .ql-preview img { + bottom: 0; + left: 0; + margin: auto; + max-height: 100%; + max-width: 100%; + position: absolute; + right: 0; + top: 0; +} +.ql-snow .ql-image-tooltip .ql-empty.ql-preview::before { + border: 1px dashed #ccc; + color: #ccc; + content: 'Preview'; + display: block; + line-height: 200px; +} +.ql-snow .ql-link-tooltip::before { + content: "Visit URL:"; + line-height: 26px; + margin-right: 8px; +} +.ql-snow .ql-link-tooltip { + margin-top: 10px; + padding: 5px 12px; + white-space: nowrap; +} +.ql-snow .ql-link-tooltip input[type=text] { + display: none; + height: 26px; + width: 170px; +} +.ql-snow .ql-link-tooltip a.ql-preview { + display: inline-block; + max-width: 200px; + overflow-x: hidden; + text-overflow: ellipsis; + vertical-align: top; +} +.ql-snow .ql-link-tooltip a.ql-action::before { + color: #444; + content: '-'; + line-height: 26px; + margin-left: 8px; +} +.ql-snow .ql-link-tooltip a.ql-action::after { + border-right: 1px solid #ccc; + content: 'Edit'; + margin-left: 8px; + padding-right: 8px; +} +.ql-snow .ql-link-tooltip a.ql-remove::before { + content: 'Remove'; + margin-left: 8px; +} +.ql-snow 
.ql-link-tooltip a { + line-height: 26px; +} +.ql-snow .ql-link-tooltip.ql-editing a.ql-preview, +.ql-snow .ql-link-tooltip.ql-editing a.ql-remove { + display: none; +} +.ql-snow .ql-link-tooltip.ql-editing input[type=text] { + display: inline-block; +} +.ql-snow .ql-link-tooltip.ql-editing a.ql-action::after { + border-right: 0px; + content: 'Save'; + padding-right: 0px; +} +.ql-toolbar.ql-snow { + border: 1px solid #ccc; + box-sizing: border-box; + font-family: 'Helvetica Neue', 'Helvetica', 'Arial', sans-serif; + padding: 8px; +} +.ql-toolbar.ql-snow * { + box-sizing: border-box; +} +.ql-toolbar.ql-snow .ql-formats { + display: inline-block; + margin-right: 15px; + vertical-align: middle; +} +.ql-toolbar.ql-snow button:hover, +.ql-toolbar.ql-snow button.ql-active, +.ql-toolbar.ql-snow .ql-picker-label:hover, +.ql-toolbar.ql-snow .ql-picker-label.ql-active, +.ql-toolbar.ql-snow .ql-picker-item:hover, +.ql-toolbar.ql-snow .ql-picker-item.ql-selected { + color: #06c; +} +.ql-toolbar.ql-snow button:hover .ql-fill, +.ql-toolbar.ql-snow button.ql-active .ql-fill, +.ql-toolbar.ql-snow .ql-picker-label:hover .ql-fill, +.ql-toolbar.ql-snow .ql-picker-label.ql-active .ql-fill, +.ql-toolbar.ql-snow .ql-picker-item:hover .ql-fill, +.ql-toolbar.ql-snow .ql-picker-item.ql-selected .ql-fill, +.ql-toolbar.ql-snow button:hover .ql-stroke.ql-fill, +.ql-toolbar.ql-snow button.ql-active .ql-stroke.ql-fill, +.ql-toolbar.ql-snow .ql-picker-label:hover .ql-stroke.ql-fill, +.ql-toolbar.ql-snow .ql-picker-label.ql-active .ql-stroke.ql-fill, +.ql-toolbar.ql-snow .ql-picker-item:hover .ql-stroke.ql-fill, +.ql-toolbar.ql-snow .ql-picker-item.ql-selected .ql-stroke.ql-fill { + fill: #06c; +} +.ql-toolbar.ql-snow button:hover .ql-stroke, +.ql-toolbar.ql-snow button.ql-active .ql-stroke, +.ql-toolbar.ql-snow .ql-picker-label:hover .ql-stroke, +.ql-toolbar.ql-snow .ql-picker-label.ql-active .ql-stroke, +.ql-toolbar.ql-snow .ql-picker-item:hover .ql-stroke, +.ql-toolbar.ql-snow 
.ql-picker-item.ql-selected .ql-stroke, +.ql-toolbar.ql-snow button:hover .ql-stroke-mitter, +.ql-toolbar.ql-snow button.ql-active .ql-stroke-mitter, +.ql-toolbar.ql-snow .ql-picker-label:hover .ql-stroke-mitter, +.ql-toolbar.ql-snow .ql-picker-label.ql-active .ql-stroke-mitter, +.ql-toolbar.ql-snow .ql-picker-item:hover .ql-stroke-mitter, +.ql-toolbar.ql-snow .ql-picker-item.ql-selected .ql-stroke-mitter { + stroke: #06c; +} +.ql-toolbar.ql-snow .ql-stroke { + stroke: #444; +} +.ql-toolbar.ql-snow .ql-stroke-mitter { + stroke: #444; +} +.ql-toolbar.ql-snow .ql-fill, +.ql-toolbar.ql-snow .ql-stroke.ql-fill { + fill: #444; +} +.ql-toolbar.ql-snow .ql-direction svg:last-child { + display: none; +} +.ql-toolbar.ql-snow .ql-direction.ql-active svg:last-child { + display: inline; +} +.ql-toolbar.ql-snow .ql-direction.ql-active svg:first-child { + display: none; +} +.ql-toolbar.ql-snow .ql-picker { + color: #444; + display: inline-block; + float: left; + font-size: 14px; + font-weight: 500; + height: 24px; + position: relative; + vertical-align: middle; +} +.ql-toolbar.ql-snow .ql-picker-label { + border: 1px solid transparent; + cursor: pointer; + display: inline-block; + height: 100%; + padding-left: 8px; + padding-right: 2px; + position: relative; + width: 100%; +} +.ql-toolbar.ql-snow .ql-picker-label svg { + float: right; + height: 18px; + margin-top: 2px; + width: 18px; +} +.ql-toolbar.ql-snow .ql-picker-label::before { + vertical-align: middle; +} +.ql-toolbar.ql-snow .ql-picker-options { + background-color: #fff; + border: 1px solid transparent; + display: none; + min-width: 100%; + padding: 4px 8px; + position: absolute; + white-space: nowrap; +} +.ql-toolbar.ql-snow .ql-picker-options .ql-picker-item { + cursor: pointer; + display: block; + padding-bottom: 5px; + padding-top: 5px; +} +.ql-toolbar.ql-snow .ql-picker.ql-expanded .ql-picker-label { + border-color: #ccc; + color: #ccc; + z-index: 2; +} +.ql-toolbar.ql-snow .ql-picker.ql-expanded .ql-picker-label 
.ql-fill { + fill: #ccc; +} +.ql-toolbar.ql-snow .ql-picker.ql-expanded .ql-picker-label .ql-stroke { + stroke: #ccc; +} +.ql-toolbar.ql-snow .ql-picker.ql-expanded .ql-picker-options { + border-color: #ccc; + box-shadow: rgba(0,0,0,0.2) 0 2px 8px; + display: block; + margin-top: -1px; + z-index: 1; +} +.ql-toolbar.ql-snow .ql-color-picker, +.ql-toolbar.ql-snow .ql-icon-picker { + width: 28px; +} +.ql-toolbar.ql-snow .ql-color-picker .ql-picker-label, +.ql-toolbar.ql-snow .ql-icon-picker .ql-picker-label { + padding: 2px 4px; +} +.ql-toolbar.ql-snow .ql-color-picker .ql-picker-label svg, +.ql-toolbar.ql-snow .ql-icon-picker .ql-picker-label svg { + margin-top: 0px; +} +.ql-toolbar.ql-snow .ql-icon-picker .ql-picker-options { + padding: 4px 0px; +} +.ql-toolbar.ql-snow .ql-icon-picker .ql-picker-item { + height: 24px; + padding: 2px 4px; +} +.ql-toolbar.ql-snow .ql-color-picker.ql-expanded .ql-picker-options, +.ql-toolbar.ql-snow .ql-icon-picker.ql-expanded .ql-picker-options { + margin-top: -6px; +} +.ql-toolbar.ql-snow .ql-color-picker .ql-picker-options { + padding: 5px; + width: 152px; +} +.ql-toolbar.ql-snow .ql-color-picker .ql-picker-item { + border: 1px solid transparent; + float: left; + height: 16px; + margin: 2px; + padding: 0px; + width: 16px; +} +.ql-toolbar.ql-snow .ql-color-picker .ql-picker-item.ql-primary-color { + margin-bottom: toolbarPadding; +} +.ql-toolbar.ql-snow .ql-color-picker .ql-picker-item.ql-selected, +.ql-toolbar.ql-snow .ql-color-picker .ql-picker-item:hover { + border-color: #000; +} +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-label[data-label]:not([data-label=''])::before, +.ql-toolbar.ql-snow .ql-picker.ql-font .ql-picker-label[data-label]:not([data-label=''])::before, +.ql-toolbar.ql-snow .ql-picker.ql-size .ql-picker-label[data-label]:not([data-label=''])::before, +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-item[data-label]:not([data-label=''])::before, +.ql-toolbar.ql-snow .ql-picker.ql-font 
.ql-picker-item[data-label]:not([data-label=''])::before, +.ql-toolbar.ql-snow .ql-picker.ql-size .ql-picker-item[data-label]:not([data-label=''])::before { + content: attr(data-label); +} +.ql-toolbar.ql-snow .ql-picker.ql-header { + width: 98px; +} +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-label::before, +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-item::before { + content: 'Normal'; +} +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-label[data-value="1"]::before, +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="1"]::before { + content: 'Heading 1'; +} +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-label[data-value="2"]::before, +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="2"]::before { + content: 'Heading 2'; +} +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-label[data-value="3"]::before, +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="3"]::before { + content: 'Heading 3'; +} +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-label[data-value="4"]::before, +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="4"]::before { + content: 'Heading 4'; +} +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-label[data-value="5"]::before, +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="5"]::before { + content: 'Heading 5'; +} +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-label[data-value="6"]::before, +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="6"]::before { + content: 'Heading 6'; +} +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="1"]::before { + font-size: 2em; +} +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="2"]::before { + font-size: 1.5em; +} +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="3"]::before { + font-size: 1.17em; +} +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="4"]::before { + font-size: 
1em; +} +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="5"]::before { + font-size: 0.83em; +} +.ql-toolbar.ql-snow .ql-picker.ql-header .ql-picker-item[data-value="6"]::before { + font-size: 0.67em; +} +.ql-toolbar.ql-snow .ql-picker.ql-font { + width: 108px; +} +.ql-toolbar.ql-snow .ql-picker.ql-font .ql-picker-label::before, +.ql-toolbar.ql-snow .ql-picker.ql-font .ql-picker-item::before { + content: 'Sans Serif'; +} +.ql-toolbar.ql-snow .ql-picker.ql-font .ql-picker-label[data-value=serif]::before, +.ql-toolbar.ql-snow .ql-picker.ql-font .ql-picker-item[data-value=serif]::before { + content: 'Serif'; +} +.ql-toolbar.ql-snow .ql-picker.ql-font .ql-picker-label[data-value=monospace]::before, +.ql-toolbar.ql-snow .ql-picker.ql-font .ql-picker-item[data-value=monospace]::before { + content: 'Monospace'; +} +.ql-toolbar.ql-snow .ql-picker.ql-font .ql-picker-item[data-value=serif]::before { + font-family: Georgia, Times New Roman, serif; +} +.ql-toolbar.ql-snow .ql-picker.ql-font .ql-picker-item[data-value=monospace]::before { + font-family: Monaco, Courier New, monospace; +} +.ql-toolbar.ql-snow .ql-picker.ql-size { + width: 98px; +} +.ql-toolbar.ql-snow .ql-picker.ql-size .ql-picker-label::before, +.ql-toolbar.ql-snow .ql-picker.ql-size .ql-picker-item::before { + content: 'Normal'; +} +.ql-toolbar.ql-snow .ql-picker.ql-size .ql-picker-label[data-value=small]::before, +.ql-toolbar.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=small]::before { + content: 'Small'; +} +.ql-toolbar.ql-snow .ql-picker.ql-size .ql-picker-label[data-value=large]::before, +.ql-toolbar.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=large]::before { + content: 'Large'; +} +.ql-toolbar.ql-snow .ql-picker.ql-size .ql-picker-label[data-value=huge]::before, +.ql-toolbar.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=huge]::before { + content: 'Huge'; +} +.ql-toolbar.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=small]::before { + font-size: 
10px; +} +.ql-toolbar.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=large]::before { + font-size: 18px; +} +.ql-toolbar.ql-snow .ql-picker.ql-size .ql-picker-item[data-value=huge]::before { + font-size: 32px; +} +.ql-toolbar.ql-snow .ql-color-picker.ql-background .ql-picker-item { + background-color: #fff; +} +.ql-toolbar.ql-snow .ql-color-picker.ql-color .ql-picker-item { + background-color: #000; +} +.ql-toolbar.ql-snow + .ql-container.ql-snow { + border-top: 0px; +} +.ql-snow .ql-tooltip { + background-color: #fff; + border: 1px solid #ccc; + box-shadow: 0px 0px 5px #ddd; + color: #444; +} +.ql-snow .ql-tooltip input[type=text] { + border: 1px solid #ccc; + font-size: 13px; + margin: 0px; + padding: 3px 5px; +} +.ql-container.ql-snow { + border: 1px solid #ccc; +} +.ql-container.ql-snow a { + color: #06c; +} +.ql-container.ql-snow * { + box-sizing: border-box; +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_footer.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_footer.scss new file mode 100644 index 0000000000000000000000000000000000000000..35d3116a9c189ae82be06ffb8d7103d4f9dcfcf9 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_footer.scss @@ -0,0 +1,14 @@ +.layout-footer { + @include transition(margin-left $transitionDuration); + background-color: $footerBgColor; + padding: 1em 2em; + + img { + vertical-align: middle; + } + + .footer-text { + vertical-align: middle; + float: right; + } +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_layout.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_layout.scss new file mode 100644 index 0000000000000000000000000000000000000000..b5182b1c58420ad500fc0cb48e1b85d4ef2ae393 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_layout.scss @@ -0,0 +1,12 @@ +@import "./_mixins"; +@import "./_splash"; +@import "./_main"; +@import "./_topbar"; +@import "./_sidebar"; +@import "./_profile"; +@import 
"./_menu"; +@import "./_content"; +@import "./_footer"; +@import "./_responsive"; +@import "./_utils"; +@import "./_dashboard"; \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_main.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_main.scss new file mode 100644 index 0000000000000000000000000000000000000000..9c4b17db84c02f84691163de1075398385ef0230 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_main.scss @@ -0,0 +1,28 @@ +* { + box-sizing: border-box; +} + +html { + height: 100%; +} + +body { + font-family: "Open Sans", "Helvetica Neue", sans-serif; + font-size: $fontSize; + color: $textColor; + background-color: $bodyBgColor; + margin: 0; + padding: 0; + min-height: 100%; + -webkit-font-smoothing: antialiased; +} + +a { + text-decoration: none; +} + +.layout-wrapper { + padding: 0; + // min-height: 100vh; +} + diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_menu.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_menu.scss new file mode 100644 index 0000000000000000000000000000000000000000..9fea8e94f390367c23a1874b1d0e19060a43e5b5 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_menu.scss @@ -0,0 +1,184 @@ +.layout-menu-container { + padding-bottom: 120px; +} + +.layout-menu { + list-style-type: none; + margin: 0; + padding: 0; + + li { + a { + cursor: pointer; + position: relative; + text-decoration: none; + display: block; + @include transition(color $transitionDuration); + + i { + font-size: 18px; + vertical-align: middle; + } + + span { + margin-left: .25em; + vertical-align: middle; + line-height: 18px; + display: inline-block; + } + + .menuitem-toggle-icon { + position: absolute; + top: 50%; + right: 1em; + margin-top: -9px; + } + + &.router-link-active { + font-weight: 700; + } + } + + &.active-menuitem { + > a { + .menuitem-toggle-icon { + @include icon-override("\e933"); + } + } + + > ul { + max-height: 500px; + } + } + } + + > li { + > a { + padding: 1em; + + 
span { + font-size: $fontSize; + } + } + + &:last-child { + > a { + border-bottom: 1px solid $menuitemBorderColor; + } + } + + ul { + list-style-type: none; + margin: 0; + padding: 0; + padding-left: 1.5em; + max-height: 0; + overflow: hidden; + @include transition-property(max-height); + @include transition-duration(0.4s); + @include transition-timing-function(cubic-bezier(0.86, 0, 0.07, 1)); + + li { + a { + cursor: pointer; + padding: .75em 1em; + border-top: 0 none; + + span { + font-size: $submenuFontSize; + } + } + + &:last-child { + padding-bottom: 1em; + } + } + } + } +} + +.layout-sidebar-light { + @include linear-gradient($menuBgColorFirst, $menuBgColorLast); + + .layout-menu { + > li { + > a { + border-top: 1px solid $menuitemBorderColor; + } + + &:last-child { + > a { + border-bottom: 1px solid $menuitemBorderColor; + } + } + + ul { + background-color: $menuitemActiveBgColor; + } + } + + li { + a { + color: $menuitemColor; + + &.router-link-active { + color: $menuitemActiveRouteColor; + } + + &:hover { + color: $menuitemHoverColor; + } + } + + &.active-menuitem { + > a { + background-color: $menuitemActiveBgColor; + color: $menuitemActiveColor; + } + } + } + } +} + +.layout-sidebar-dark { + @include linear-gradient($menuDarkBgColorFirst, $menuDarkBgColorLast); + + .layout-menu { + > li { + > a { + border-top: 1px solid $menuitemDarkBorderColor; + } + + &:last-child { + > a { + border-bottom: 1px solid $menuitemDarkBorderColor; + } + } + + ul { + background-color: $menuitemDarkActiveBgColor; + } + } + + li { + a { + color: $menuitemDarkColor; + + &.router-link-active { + color: $menuitemActiveRouteColor; + } + + &:hover { + color: $menuitemDarkHoverColor; + } + } + + &.active-menuitem { + > a { + background-color: $menuitemDarkActiveBgColor; + color: $menuitemDarkActiveColor; + } + } + } + } +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_mixins.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_mixins.scss 
new file mode 100644 index 0000000000000000000000000000000000000000..e31c9cf0394ee94a67565d2588318b944fbf4a31 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_mixins.scss @@ -0,0 +1,120 @@ +// Icon Override Mixin +@mixin icon-override($icon) { + &:before { + content: $icon; + } +} + +// === MIXINS === // +@mixin border-radius($val) { + -moz-border-radius: $val; + -webkit-border-radius: $val; + border-radius: $val; +} + +@mixin border-radius-right($val) { + -moz-border-radius-topright: $val; + -webkit-border-top-right-radius: $val; + border-top-right-radius: $val; + -moz-border-radius-bottomright: $val; + -webkit-border-bottom-right-radius: $val; + border-bottom-right-radius: $val; +} + +@mixin border-radius-left($val) { + -moz-border-radius-topleft: $val; + -webkit-border-top-left-radius: $val; + border-top-left-radius: $val; + -moz-border-radius-bottomleft: $val; + -webkit-border-bottom-left-radius: $val; + border-bottom-left-radius: $val; +} + +@mixin border-radius-top($val) { + -moz-border-radius-topleft: $val; + -webkit-border-top-left-radius: $val; + border-top-left-radius: $val; + -moz-border-radius-topright: $val; + -webkit-border-top-right-radius: $val; + border-top-right-radius: $val; +} + +@mixin border-radius-bottom($val) { + -moz-border-radius-bottomleft: $val; + -webkit-border-bottom-left-radius: $val; + border-bottom-left-radius: $val; + -moz-border-radius-bottomright: $val; + -webkit-border-bottom-right-radius: $val; + border-bottom-right-radius: $val; +} + +@mixin linear-gradient($top, $bottom){ + background: $top; /* Old browsers */ + background: -moz-linear-gradient(top, $top 0%, $bottom 100%); /* FF3.6+ */ + background: -webkit-gradient(linear, left top, left bottom, color-stop(0%,$top), color-stop(100%,$bottom)); /* Chrome,Safari4+ */ + background: -webkit-linear-gradient(top, $top 0%,$bottom 100%); /* Chrome10+,Safari5.1+ */ + background: -o-linear-gradient(top, $top 0%,$bottom 100%); /* Opera 11.10+ */ + background: 
-ms-linear-gradient(top, $top 0%,$bottom 100%); /* IE10+ */ + background: linear-gradient(to bottom, $top 0%,$bottom 100%); /* W3C */ + filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#ffffff', endColorstr='#000000',GradientType=0 ); /* IE6-9 */ +} + +@mixin linear-gradient-left($left, $right){ + background: $left; /* Old browsers */ + background: -moz-linear-gradient(left, $left 0%, $right 100%); /* FF3.6+ */ + background: -webkit-gradient(linear, left top, right top, color-stop(0%,$left), color-stop(100%,$right)); /* Chrome,Safari4+ */ + background: -webkit-linear-gradient(left, $left 0%,$right 100%); /* Chrome10+,Safari5.1+ */ + background: -o-linear-gradient(left, $left 0%,$right 100%); /* Opera 11.10+ */ + background: -ms-linear-gradient(left, $left 0%,$right 100%); /* IE10+ */ + background: linear-gradient(to right, $left 0%,$right 100%); /* W3C */ + filter: progid:DXImageTransform.Microsoft.gradient( startColorstr=$left, endColorstr=$right,GradientType=1 ); /* IE6-9 */ +} + +@mixin transition($transition...) 
{ + -moz-transition: $transition; + -o-transition: $transition; + -webkit-transition: $transition; + transition: $transition; +} + +@mixin transition-property($property) { + -webkit-transition-property: $property; + -moz-transition-property: $property; + -ms-transition-property: $property; + -o-transition-property: $property; + transition-property: $property; +} + +@mixin transition-duration($duration) { + -moz-transition-duration: $duration; + -webkit-transition-duration: $duration; + -o-transition-duration: $duration; + transition-duration: $duration; +} + +@mixin transition-timing-function($function) { + -webkit-transition-timing-function: $function; + -moz-transition-timing-function: $function; + -o-transition-timing-function: $function; + transition-timing-function: $function; +} + +@mixin opacity($opacity) { + opacity: $opacity; + $opacity-ie: $opacity * 100; + filter: alpha(opacity=$opacity-ie); +} + +@mixin shadow($value) { + -webkit-box-shadow: $value; + -moz-box-shadow: $value; + box-shadow: $value; +} + +@mixin clearfix { + &:after { + content: ""; + display: table; + clear: both; + } +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_profile.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_profile.scss new file mode 100644 index 0000000000000000000000000000000000000000..da266609bc672200fefa0f7de350354caaac5f25 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_profile.scss @@ -0,0 +1,113 @@ +.layout-profile { + text-align: center; + padding: 20px 0; + + img { + width: 56px; + margin: 10px; + } + + .layout-profile-link { + cursor: pointer; + display: inline-block; + margin-bottom: .75em; + @include transition(color $transitionDuration); + + i { + display: inline-block; + font-size: 16px; + vertical-align: middle; + } + } + + ul { + list-style-type: none; + padding: 0; + margin: 0; + max-height: 0; + overflow: hidden; + @include transition-property(max-height); + @include 
transition-duration(0.4s); + @include transition-timing-function(cubic-bezier(0.86, 0, 0.07, 1)); + + &.layout-profile-expanded { + max-height: 500px; + } + + li { + button { + width: 100%; + padding: 1em; + border: 0 none; + border-radius: 0; + cursor: pointer; + @include transition(color $transitionDuration); + + &:hover { + color: $menuitemHoverColor; + } + + span { + margin-left: .25em; + vertical-align: middle; + } + + i { + vertical-align: middle; + } + } + } + } +} + +.layout-sidebar-light { + .layout-profile { + .layout-profile-link { + color: $menuitemColor; + + &:hover { + color: $menuitemHoverColor; + } + } + + ul { + background-color: $menuitemActiveBgColor; + + li { + button { + color: $menuitemColor; + + &:hover { + color: $menuitemHoverColor; + } + } + } + } + } +} + +.layout-sidebar-dark { + .layout-profile { + .layout-profile-link { + color: $menuitemDarkColor; + + &:hover { + color: $menuitemDarkHoverColor; + } + } + + ul { + background-color: $menuitemDarkActiveBgColor; + + li { + button { + color: $menuitemDarkColor; + + &:hover { + color: $menuitemDarkHoverColor; + } + } + } + } + } +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_responsive.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_responsive.scss new file mode 100644 index 0000000000000000000000000000000000000000..97ab84ce038e66fe3bfdf4a5a2de512d491709ec --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_responsive.scss @@ -0,0 +1,98 @@ +@media (min-width: 1025px) { + .layout-wrapper { + &.layout-overlay { + .layout-sidebar { + left: -250px; + } + + .layout-topbar { + left: 0; + } + + .layout-main, .layout-footer { + margin-left: 0; + } + + &.layout-overlay-sidebar-active { + .layout-sidebar { + left: 0; + } + + .layout-topbar { + left: 250px; + } + } + } + + &.layout-static { + .layout-sidebar { + left: 0; + } + + .layout-topbar { + left: 250px; + } + + .layout-main, .layout-footer { + margin-left: 250px; + } + + 
&.layout-static-sidebar-inactive { + .layout-sidebar { + left: -250px; + } + + .layout-topbar { + left: 0; + } + + .layout-main, .layout-footer { + margin-left: 0; + } + } + } + } +} + +@media (max-width: 1024px) { + .layout-wrapper { + .layout-topbar { + left: 0; + } + + .layout-main, .layout-footer { + margin-left: 0; + } + + .layout-sidebar { + left: -250px; + margin-top: 50px; + } + + .layout-mask { + display: none; + position: fixed; + width: 100%; + height: 100%; + top: 50px; + left: 0; + z-index: 998; + background-color: $maskBgColor; + @include opacity(0.7); + } + + &.layout-mobile-sidebar-active { + .layout-sidebar { + left: -0; + } + + .layout-mask { + display: block; + } + } + } + + .body-overflow-hidden { + overflow: hidden; + } +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_sidebar.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_sidebar.scss new file mode 100644 index 0000000000000000000000000000000000000000..cca2a830c7f57a18ff9687d8d98de016709c7aa5 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_sidebar.scss @@ -0,0 +1,31 @@ +.layout-sidebar { + padding-top: 60px; + position: fixed; + width: 250px; + height: 100%; + // z-index: 999; + overflow-y: auto; + user-select: none; + -moz-user-select: none; + -webkit-user-select: none; + @include transition(left $transitionDuration); + @include shadow(0 0 6px 0 rgba(0, 0, 0, 0.16)); + + .layout-logo { + text-align: center; + margin-top: 24px; + } + + .menuitem-badge { + display: inline-block; + margin-left: 4px; + font-size: 10px; + width: 16px; + height: 16px; + line-height: 16px; + text-align: center; + color: $menuitemBadgeColor; + background-color: $menuitemBadgeBgColor; + @include border-radius(50%); + } +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_splash.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_splash.scss new file mode 100644 index 
0000000000000000000000000000000000000000..1af75ac61e4cf9c635c4ed791901cc5fa571b3fb --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_splash.scss @@ -0,0 +1,52 @@ +.splash-screen { + width: 100%; + height: 100%; + position: fixed; + @include linear-gradient-left($topbarLeftBgColor, $topbarRightBgColor); + + .splash-container { + width: 40px; + height: 40px; + margin: 0px auto; + position: absolute; + left: 50%; + top: 50%; + margin-left: -20px; + margin-top: -20px; + + } + + .splash-double-bounce1, .splash-double-bounce2 { + width: 100%; + height: 100%; + border-radius: 50%; + background-color: $topbarItemColor; + opacity: 0.6; + position: absolute; + top: 0; + left: 0; + + -webkit-animation: splash-bounce 2.0s infinite ease-in-out; + animation: splash-bounce 2.0s infinite ease-in-out; + } + + .splash-double-bounce2 { + -webkit-animation-delay: -1.0s; + animation-delay: -1.0s; + } + + @-webkit-keyframes splash-bounce { + 0%, 100% { -webkit-transform: scale(0.0) } + 50% { -webkit-transform: scale(1.0) } + } + + @keyframes splash-bounce { + 0%, 100% { + transform: scale(0.0); + -webkit-transform: scale(0.0); + } 50% { + transform: scale(1.0); + -webkit-transform: scale(1.0); + } + } +} diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_topbar.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_topbar.scss new file mode 100644 index 0000000000000000000000000000000000000000..310133c2db197d0c33a8db3e045e256ffe19c868 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_topbar.scss @@ -0,0 +1,133 @@ +.layout-topbar { + position: fixed; + height: 50px; + padding: .7em 1.5em 0em 1.5em; + color: #ffffff; + z-index: 999; + right: 0; + @include clearfix(); + @include linear-gradient-left($topbarLeftBgColor,$topbarRightBgColor); + @include transition(left $transitionDuration); + + .header-title { + font-size: 22px; + margin-left: 10px; + } + + .layout-topbar-icons { + float: right; + display: block; + -moz-animation-duration: .5s; + 
-webkit-animation-duration: .5s; + animation-duration: .5s; + + + button { + position: relative; + color: $topbarItemColor; + margin-left: 20px; + display: inline-block; + text-decoration: none; + @include transition(color $transitionDuration); + overflow: visible; + + &:hover { + color: $topbarItemHoverColor; + } + + span { + &.layout-topbar-icon { + font-size: 2em; + } + + &.layout-topbar-item-text { + font-size: 20px; + display: none; + } + + &.layout-topbar-badge { + position: absolute; + font-size: 10px; + right: -5px; + top: -5px; + width: 16px; + height: 16px; + text-align: center; + line-height: 16px; + color: $topbarItemBadgeColor; + background-color: $topbarItemBadgeBgColor; + @include border-radius(50%); + } + } + } + + .layout-topbar-search { + padding: 0; + position: relative; + display: inline-block; + top: -4px; + + input { + display: inline-block; + border: 0 none; + font-size: $fontSize; + background: transparent; + border-bottom: 2px solid $topbarSearchInputBorderBottomColor; + outline: 0 none; + -webkit-box-shadow: none; + box-shadow: none; + color: $topbarSearchInputColor; + width: 100px; + padding: 1px 20px 1px 1px; + margin: 0px; + @include border-radius(2px); + + &::-webkit-input-placeholder { color:$topbarSearchInputColor; opacity: .7; @include transition(opacity $transitionDuration);} + &:-moz-placeholder { color:$topbarSearchInputColor; opacity: .7; @include transition(opacity $transitionDuration);} + &::-moz-placeholder { color:$topbarSearchInputColor; opacity: .7; @include transition(opacity $transitionDuration);} + &:-ms-input-placeholder { color:$topbarSearchInputColor; opacity: .7; @include transition(opacity $transitionDuration);} + } + + .layout-topbar-search-icon { + font-size: 18px; + position: absolute; + top: -1px; + right: 0px; + } + + &:hover { + input { + border-bottom-color: $topbarItemHoverColor; + &::-webkit-input-placeholder { opacity: 1 } + &:-moz-placeholder {opacity: 1} + &::-moz-placeholder {opacity: 1} + 
&:-ms-input-placeholder {opacity: 1} + } + + .layout-topbar-search-icon { + color: $topbarItemHoverColor; + } + } + } + } + + .layout-menu-button { + cursor: pointer; + display: inline-block; + text-decoration: none; + color: $topbarItemColor; + @include transition(color $transitionDuration); + + span { + font-size: 2em; + } + + &:hover { + color: $topbarItemHoverColor; + } + } + + button { + cursor: pointer; + } +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_utils.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_utils.scss new file mode 100644 index 0000000000000000000000000000000000000000..2a37da7437e2d182dea31fb4cc0da9ea3cb67c29 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_utils.scss @@ -0,0 +1,38 @@ +/* Typography */ +h1 { + font-weight: normal; + margin: 0; + font-size: 24px; +} + +h2 { + font-size: 20px; + font-weight: normal; + margin: 0; +} + +.card { + background-color: #ffffff; + padding: 1em; + margin-bottom: 16px; + @include border-radius($borderRadius); + + &.card-w-title { + padding-bottom: 2em; + } + + h1 { + margin: 1em 0 .5em 0; + border-bottom: 1px solid #d5d5d5; + padding: .1em; + font-size: 24px; + + &:first-child { + margin: 0 0 .5em 0; + } + } +} + +.p-g { + -ms-flex-wrap: wrap; +} diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Dashboard/index.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Dashboard/index.js new file mode 100644 index 0000000000000000000000000000000000000000..dd52fdf11774d21f1e002716cf9804f1ac581d8d --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Dashboard/index.js @@ -0,0 +1,12 @@ +import React, {Component} from 'react'; + +export class Dashboard extends Component { + + render() { + return ( + <h1>Dashboard</h1> + ); + } +} + +export default Dashboard; \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/index.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/index.js new file mode 100644 
index 0000000000000000000000000000000000000000..5ce2273ff25cb9371132f73e8c685415cb3ab5de --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/index.js @@ -0,0 +1,12 @@ +import React, {Component} from 'react'; + +export class Scheduling extends Component { + + render() { + return ( + <h1>Scheduling Units</h1> + ); + } +} + +export default Scheduling; \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/index.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/index.js new file mode 100644 index 0000000000000000000000000000000000000000..323019846cfc267c7874b64bee9ed4bd8a795f10 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/index.js @@ -0,0 +1,19 @@ +import React from 'react'; +import { + Route, + Switch, + Redirect +} from 'react-router-dom'; + +import {Dashboard} from './Dashboard'; +import {Scheduling} from './Scheduling'; + +export const RoutedContent = () => { + return ( + <Switch> + <Redirect from="/" to="/" exact /> + <Route path="/dashboard" exact component={Dashboard} /> + <Route path="/scheduling" exact component={Scheduling} /> + </Switch> + ); +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/serviceWorker.js b/SAS/TMSS/frontend/tmss_webapp/src/serviceWorker.js new file mode 100644 index 0000000000000000000000000000000000000000..b04b771a82613a80b0532d7082508763620074bf --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/serviceWorker.js @@ -0,0 +1,141 @@ +// This optional code is used to register a service worker. +// register() is not called by default. + +// This lets the app load faster on subsequent visits in production, and gives +// it offline capabilities. However, it also means that developers (and users) +// will only see deployed updates on subsequent visits to a page, after all the +// existing tabs open on the page have been closed, since previously cached +// resources are updated in the background. 
+ +// To learn more about the benefits of this model and instructions on how to +// opt-in, read https://bit.ly/CRA-PWA + +const isLocalhost = Boolean( + window.location.hostname === 'localhost' || + // [::1] is the IPv6 localhost address. + window.location.hostname === '[::1]' || + // 127.0.0.0/8 are considered localhost for IPv4. + window.location.hostname.match( + /^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/ + ) +); + +export function register(config) { + if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) { + // The URL constructor is available in all browsers that support SW. + const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href); + if (publicUrl.origin !== window.location.origin) { + // Our service worker won't work if PUBLIC_URL is on a different origin + // from what our page is served on. This might happen if a CDN is used to + // serve assets; see https://github.com/facebook/create-react-app/issues/2374 + return; + } + + window.addEventListener('load', () => { + const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`; + + if (isLocalhost) { + // This is running on localhost. Let's check if a service worker still exists or not. + checkValidServiceWorker(swUrl, config); + + // Add some additional logging to localhost, pointing developers to the + // service worker/PWA documentation. + navigator.serviceWorker.ready.then(() => { + console.log( + 'This web app is being served cache-first by a service ' + + 'worker. To learn more, visit https://bit.ly/CRA-PWA' + ); + }); + } else { + // Is not localhost. 
Just register service worker + registerValidSW(swUrl, config); + } + }); + } +} + +function registerValidSW(swUrl, config) { + navigator.serviceWorker + .register(swUrl) + .then(registration => { + registration.onupdatefound = () => { + const installingWorker = registration.installing; + if (installingWorker == null) { + return; + } + installingWorker.onstatechange = () => { + if (installingWorker.state === 'installed') { + if (navigator.serviceWorker.controller) { + // At this point, the updated precached content has been fetched, + // but the previous service worker will still serve the older + // content until all client tabs are closed. + console.log( + 'New content is available and will be used when all ' + + 'tabs for this page are closed. See https://bit.ly/CRA-PWA.' + ); + + // Execute callback + if (config && config.onUpdate) { + config.onUpdate(registration); + } + } else { + // At this point, everything has been precached. + // It's the perfect time to display a + // "Content is cached for offline use." message. + console.log('Content is cached for offline use.'); + + // Execute callback + if (config && config.onSuccess) { + config.onSuccess(registration); + } + } + } + }; + }; + }) + .catch(error => { + console.error('Error during service worker registration:', error); + }); +} + +function checkValidServiceWorker(swUrl, config) { + // Check if the service worker can be found. If it can't reload the page. + fetch(swUrl, { + headers: { 'Service-Worker': 'script' }, + }) + .then(response => { + // Ensure service worker exists, and that we really are getting a JS file. + const contentType = response.headers.get('content-type'); + if ( + response.status === 404 || + (contentType != null && contentType.indexOf('javascript') === -1) + ) { + // No service worker found. Probably a different app. Reload the page. 
+ navigator.serviceWorker.ready.then(registration => { + registration.unregister().then(() => { + window.location.reload(); + }); + }); + } else { + // Service worker found. Proceed as normal. + registerValidSW(swUrl, config); + } + }) + .catch(() => { + console.log( + 'No internet connection found. App is running in offline mode.' + ); + }); +} + +export function unregister() { + if ('serviceWorker' in navigator) { + navigator.serviceWorker.ready + .then(registration => { + registration.unregister(); + }) + .catch(error => { + console.error(error.message); + }); + } +} diff --git a/SAS/TMSS/frontend/tmss_webapp/src/setupTests.js b/SAS/TMSS/frontend/tmss_webapp/src/setupTests.js new file mode 100644 index 0000000000000000000000000000000000000000..74b1a275a0ea7df518f17bcea5375abf003abe55 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/setupTests.js @@ -0,0 +1,5 @@ +// jest-dom adds custom jest matchers for asserting on DOM nodes. +// allows you to do things like: +// expect(element).toHaveTextContent(/react/i) +// learn more: https://github.com/testing-library/jest-dom +import '@testing-library/jest-dom/extend-expect'; diff --git a/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py b/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py index bb8f136e467e6353f9b5ea96dab25e2214094cf0..09fb330ef2e33507df236374fea799e342ee72c2 100644 --- a/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py +++ b/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py @@ -60,20 +60,22 @@ class TMSSSubTaskSchedulingEventMessageHandler(TMSSSubTaskEventMessageHandler): logger.info("subtask %s finished. Trying to schedule defined successor subtasks...", subtask_id) successors = self.tmss_client.get_subtask_successors(subtask_id, state="defined") + successor_ids = [s['id'] for s in successors] - if not successors: - logger.info("subtask %s finished. No (defined) successor subtasks to schedule...", subtask_id) + logger.info("subtask %s finished. 
trying to schedule defined successors: %s", + subtask_id, + ', '.join(str(id) for id in successor_ids) or 'None') for successor in successors: try: - suc_subtask_id = successor['url'].split('/')[successor['url'].split('/').index('subtask')+1] #ugly -> check how I can get the ¨id¨ - suc_subtask_state = successor['state_str'] + suc_subtask_id = successor['id'] + suc_subtask_state = successor['state_value'] if suc_subtask_state == "defined": logger.info("trying to schedule successor subtask %s for finished subtask %s", suc_subtask_id, subtask_id) scheduled_successor = self.tmss_client.schedule_subtask(suc_subtask_id) - suc_subtask_state = scheduled_successor['state_str'] - logger.info("successor subtask %s for finished subtask %s is now has state '%s'", suc_subtask_id, subtask_id, suc_subtask_state) + suc_subtask_state = scheduled_successor['state_value'] + logger.info("successor subtask %s for finished subtask %s now has state '%s', see %s", suc_subtask_id, subtask_id, suc_subtask_state, scheduled_successor['url']) else: logger.warning("skipping scheduling of successor subtask %s for finished subtask %s because its state is '%s'", suc_subtask_id, subtask_id, suc_subtask_state) diff --git a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py b/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py index 5f95ef42935b5b2d8ce0a96b522160099412ea26..1027b12e91d8d22686a32da76d8c126136d6c8a3 100755 --- a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py +++ b/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py @@ -92,25 +92,22 @@ class TestSubtaskSchedulingService(unittest.TestCase): subtask1 = tmss_client.get_subtask(subtask1_id) subtask2 = tmss_client.get_subtask(subtask2_id) - subtask1_status = subtask1['state_str'] - subtask2_status = subtask2['state_str'] - self.assertEqual(subtask1_status, 'defined') - self.assertEqual(subtask2_status, 'defined') + 
self.assertEqual(subtask1['state_value'], 'defined') + self.assertEqual(subtask2['state_value'], 'defined') # the first subtask ran, and is now finished... set it's status. This should trigger the scheduling service to schedule the second subtask. tmss_client.set_subtask_status(subtask1_id, 'finished') # allow some time for the scheduling service to do its thing... start = datetime.utcnow() - while subtask2_status != 'scheduled': + while subtask2['state_value'] != 'scheduled': subtask2 = tmss_client.get_subtask(subtask2_id) - subtask2_status = subtask2['state_str'] sleep(0.5) if datetime.utcnow() - start > timedelta(seconds=2): raise TimeoutError() # subtask2 should now be scheduled - self.assertEqual(subtask2_status, 'scheduled') + self.assertEqual(subtask2['state_value'], 'scheduled') logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) diff --git a/SAS/TMSS/src/tmss/settings.py b/SAS/TMSS/src/tmss/settings.py index 908a5814f3b2e00ce6827b5e963930096cc18655..9596a203b8ae2b802f444de31994db4501dcfbb4 100644 --- a/SAS/TMSS/src/tmss/settings.py +++ b/SAS/TMSS/src/tmss/settings.py @@ -12,7 +12,7 @@ https://docs.djangoproject.com/en/2.0/ref/settings/ import os import logging -from lofar.common import dbcredentials, isDevelopmentEnvironment +from lofar.common import dbcredentials, isDevelopmentEnvironment, isTestEnvironment logger = logging.getLogger(__name__) @@ -224,12 +224,12 @@ if "TMSS_LDAPCREDENTIALS" in os.environ.keys(): # LDAP ldap_creds_name = os.environ.get('TMSS_LDAPCREDENTIALS', 'tmss_ldap') django_ldap_credentials = dbcredentials.DBCredentials().get(ldap_creds_name) - logger.debug("TMSS Django settings: Using dbcreds '%s' for ldap authentication: %s", + logger.info("TMSS Django settings: Using dbcreds '%s' for ldap authentication: %s", ldap_creds_name, django_ldap_credentials.stringWithHiddenPassword()) AUTH_LDAP_GLOBAL_OPTIONS = {ldap.OPT_X_TLS_REQUIRE_CERT: ldap.OPT_X_TLS_NEVER} # cert still expired? 
AUTH_LDAP_CONNECTION_OPTIONS = {ldap.OPT_X_TLS_REQUIRE_CERT: ldap.OPT_X_TLS_NEVER} # cert still expired? - protocol = 'ldap://' if isDevelopmentEnvironment() else 'ldaps://' + protocol = 'ldap://' if isDevelopmentEnvironment() or isTestEnvironment() else 'ldaps://' AUTH_LDAP_SERVER_URI = "%s%s:%s" % (protocol, django_ldap_credentials.host, django_ldap_credentials.port) AUTH_LDAP_USER_DN_TEMPLATE = "cn=%(user)s,ou=Users,o=lofar,c=eu" diff --git a/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py b/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py index 86200b5ed56655c90b52f9807093e045578c73f1..5ed5940cd2b6bca37ce160c2613ce634632a87ab 100644 --- a/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py +++ b/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py @@ -45,9 +45,9 @@ def _convert_to_parset_for_observationcontrol_schema(subtask: models.Subtask) -> parset["Observation.otdbID"] = 0 # Needed by MACScheduler; should/can this be the same as subtask.pk? parset["Observation.processType"] = subtask.specifications_template.type.value.capitalize() parset["Observation.processSubtype"] = "Beam Observation" # TODO: where to derive the processSubtype from? - parset["Observation.Campaign.name"] = "TMSS_test" #toDo: replace by project name - parset["Observation.startTime"] = formatDatetime(subtask.start_time) if isinstance(subtask.start_time, datetime) else datetime, subtask.start_time - parset["Observation.stopTime"] = formatDatetime(subtask.stop_time) if isinstance(subtask.stop_time, datetime) else datetime, subtask.stop_time + parset["Observation.Campaign.name"] = subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project.name + parset["Observation.startTime"] = formatDatetime(subtask.start_time) if isinstance(subtask.start_time, datetime) else subtask.start_time + parset["Observation.stopTime"] = formatDatetime(subtask.stop_time) if isinstance(subtask.stop_time, datetime) else subtask.stop_time parset["Observation.VirtualInstrument.minimalNrStations"] = 1 # maybe not mandatory? 
parset["Observation.VirtualInstrument.stationSet"] = "Custom" # maybe not mandatory? parset["Observation.VirtualInstrument.stationList"] = "[%s]" % ','.join(s for s in spec["stations"]["station_list"]) @@ -108,7 +108,7 @@ def _convert_to_parset_for_observationcontrol_schema(subtask: models.Subtask) -> for subtask_output in subtask_outputs: dataproducts = list(models.Dataproduct.objects.filter(producer_id=subtask_output.id)) parset["Observation.DataProducts.Output_Correlated.filenames"] = "[%s]" % ','.join(dp.filename for dp in dataproducts) - parset["Observation.DataProducts.Output_Correlated.locations"] = "[%s]" % ','.join(dp.directory for dp in dataproducts) + parset["Observation.DataProducts.Output_Correlated.locations"] = "[%s]" % ','.join("%s:%s" % (subtask.cluster.name, dp.directory) for dp in dataproducts) # various additional 'Control' settings which seem to be needed for MAC parset["prefix"] = "LOFAR." @@ -158,14 +158,17 @@ def _convert_to_parset_for_pipelinecontrol_schema(subtask: models.Subtask) -> pa parset = dict() # parameterset has no proper assignment operators, so take detour via dict... # General + parset["prefix"] = "LOFAR." parset["ObsSW.Observation.processType"] = "Pipeline" parset["ObsSW.Observation.ObservationControl.PythonControl.pythonProgram"] = "preprocessing_pipeline.py" parset["ObsSW.Observation.ObservationControl.PythonControl.softwareVersion"] = "" - parset["ObsSW.Observation.Campaign.name"] = "<project_name>" # todo, but how? 
+ parset["ObsSW.Observation.Campaign.name"] = subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project.name parset["ObsSW.Observation.Scheduler.taskName"] = subtask.task_blueprint.name parset["ObsSW.Observation.Scheduler.predecessors"] = [] parset["ObsSW.Observation.Cluster.ProcessingCluster.clusterName"] = subtask.cluster.name parset["ObsSW.Observation.Cluster.ProcessingCluster.clusterPartition"] = 'cpu' + parset["ObsSW.Observation.Cluster.ProcessingCluster.numberOfTasks"] = 110 # TODO: compute numberOfTasks + parset["ObsSW.Observation.Cluster.ProcessingCluster.numberOfCoresPerTask"] = 2 # TODO: compute numberOfCoresPerTask # DPPP steps dppp_steps = [] @@ -254,7 +257,8 @@ def _convert_to_parset_for_pipelinecontrol_schema(subtask: models.Subtask) -> pa for subtask_input in subtask.inputs.all(): in_dataproducts = subtask_input.dataproducts.all() parset["ObsSW.Observation.DataProducts.Input_Correlated.filenames"] = "[%s]" % ",".join([dp.filename for dp in in_dataproducts]) - parset["ObsSW.Observation.DataProducts.Input_Correlated.locations"] = "[%s]" % ",".join([dp.directory for dp in in_dataproducts]) + parset["ObsSW.Observation.DataProducts.Input_Correlated.locations"] = "[%s]" % ",".join(["%s:%s" % (subtask.cluster.name, dp.directory) for dp in in_dataproducts]) + parset["ObsSW.Observation.DataProducts.Input_Correlated.skip"] = "[%s]" % ",".join(['0']*len(in_dataproducts)) # TODO: do not use SubtaskOutput.objects.filter but make subtask.subtask_outputs work @@ -264,7 +268,7 @@ def _convert_to_parset_for_pipelinecontrol_schema(subtask: models.Subtask) -> pa out_dataproducts = list(models.Dataproduct.objects.filter(producer_id=subtask_output.id)) parset["ObsSW.Observation.DataProducts.Output_Correlated.enabled"] = "true" parset["ObsSW.Observation.DataProducts.Output_Correlated.filenames"] = "[%s]" % ",".join([dp.filename for dp in out_dataproducts]) - parset["ObsSW.Observation.DataProducts.Output_Correlated.locations"] = "[%s]" % 
",".join([dp.directory for dp in out_dataproducts]) + parset["ObsSW.Observation.DataProducts.Output_Correlated.locations"] = "[%s]" % ",".join(["%s:%s" % (subtask.cluster.name, dp.directory) for dp in out_dataproducts]) parset["ObsSW.Observation.DataProducts.Output_Correlated.skip"] = "[%s]" % ",".join(['0']*len(out_dataproducts)) # Other diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py index a20fb8d083719afb7dd0dcd78c279a503bcbda8b..3ba1b9d1e868d4e00030209cf3fa65f54e18c690 100644 --- a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py +++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 3.0.6 on 2020-06-17 15:34 +# Generated by Django 2.2.12 on 2020-06-26 14:21 from django.conf import settings import django.contrib.postgres.fields @@ -558,7 +558,7 @@ class Migration(migrations.Migration): }, ), migrations.CreateModel( - name='TaskConnector', + name='TaskConnectorType', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), @@ -661,7 +661,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='taskrelationdraft', name='consumer', - field=models.ForeignKey(help_text='Task Draft that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskDraft'), + field=models.ForeignKey(help_text='Task Draft that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskDraft'), ), migrations.AddField( model_name='taskrelationdraft', @@ -670,18 +670,18 @@ class Migration(migrations.Migration): ), migrations.AddField( model_name='taskrelationdraft', - name='input', - 
field=models.ForeignKey(help_text='Input connector of consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs_task_relation_draft', to='tmssapp.TaskConnector'), + name='input_role', + field=models.ForeignKey(help_text='Input connector type (what kind of data can be taken as input).', on_delete=django.db.models.deletion.CASCADE, related_name='taskrelationdraft_input_roles', to='tmssapp.TaskConnectorType'), ), migrations.AddField( model_name='taskrelationdraft', - name='output', - field=models.ForeignKey(help_text='Output connector of producer.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs_task_relation_draft', to='tmssapp.TaskConnector'), + name='output_role', + field=models.ForeignKey(help_text='Output connector type (what kind of data can be created as output).', on_delete=django.db.models.deletion.CASCADE, related_name='taskrelationdraft_output_roles', to='tmssapp.TaskConnectorType'), ), migrations.AddField( model_name='taskrelationdraft', name='producer', - field=models.ForeignKey(help_text='Task Draft that has the output connector. NOTE: The producer does typically, but not necessarily, belong to the same Scheduling Unit (or even the same Project) as the consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskDraft'), + field=models.ForeignKey(help_text='Task Draft that has the output connector. 
NOTE: The producer does typically, but not necessarily, belong to the same Scheduling Unit (or even the same Project) as the consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskDraft'), ), migrations.AddField( model_name='taskrelationdraft', @@ -691,7 +691,7 @@ class Migration(migrations.Migration): migrations.AddField( model_name='taskrelationblueprint', name='consumer', - field=models.ForeignKey(help_text='Task Blueprint that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskBlueprint'), + field=models.ForeignKey(help_text='Task Blueprint that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskBlueprint'), ), migrations.AddField( model_name='taskrelationblueprint', @@ -705,18 +705,18 @@ class Migration(migrations.Migration): ), migrations.AddField( model_name='taskrelationblueprint', - name='input', - field=models.ForeignKey(help_text='Input connector of consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs_task_relation_blueprint', to='tmssapp.TaskConnector'), + name='input_role', + field=models.ForeignKey(help_text='Input connector type (what kind of data can be taken as input).', on_delete=django.db.models.deletion.CASCADE, related_name='taskrelationblueprint_input_roles', to='tmssapp.TaskConnectorType'), ), migrations.AddField( model_name='taskrelationblueprint', - name='output', - field=models.ForeignKey(help_text='Output connector of producer.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs_task_relation_blueprint', to='tmssapp.TaskConnector'), + name='output_role', + field=models.ForeignKey(help_text='Output connector type (what kind of data can be created as output).', on_delete=django.db.models.deletion.CASCADE, related_name='taskrelationblueprint_output_roles', to='tmssapp.TaskConnectorType'), ), migrations.AddField( 
model_name='taskrelationblueprint', name='producer', - field=models.ForeignKey(help_text='Task Blueprint that has the output connector.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskBlueprint'), + field=models.ForeignKey(help_text='Task Blueprint that has the output connector.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskBlueprint'), ), migrations.AddField( model_name='taskrelationblueprint', @@ -744,27 +744,27 @@ class Migration(migrations.Migration): field=models.ForeignKey(help_text='Schema used for requirements_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.TaskTemplate'), ), migrations.AddField( - model_name='taskconnector', + model_name='taskconnectortype', name='dataformats', field=models.ManyToManyField(blank=True, to='tmssapp.Dataformat'), ), migrations.AddField( - model_name='taskconnector', + model_name='taskconnectortype', name='datatype', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Datatype'), ), migrations.AddField( - model_name='taskconnector', + model_name='taskconnectortype', name='input_of', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='inputs', to='tmssapp.TaskTemplate'), + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='inpput_connector_types', to='tmssapp.TaskTemplate'), ), migrations.AddField( - model_name='taskconnector', + model_name='taskconnectortype', name='output_of', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='outputs', to='tmssapp.TaskTemplate'), + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='output_connector_types', to='tmssapp.TaskTemplate'), ), migrations.AddField( - model_name='taskconnector', + model_name='taskconnectortype', name='role', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Role'), ), @@ 
-1043,8 +1043,8 @@ class Migration(migrations.Migration): index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_256437_gin'), ), migrations.AddIndex( - model_name='taskconnector', - index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_a12728_gin'), + model_name='taskconnectortype', + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_19ff09_gin'), ), migrations.AddConstraint( model_name='subtasktemplate', diff --git a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py index e36eef6bcb5e48b64143d287278fb0fd3488b66b..e869c80cc92bf69d09bc0ac0b90442c2714070aa 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py @@ -24,6 +24,7 @@ from lofar.common.datetimeutils import formatDatetime from lofar.messaging.messagebus import ToBus, DEFAULT_BROKER, DEFAULT_BUSNAME from lofar.messaging.messages import EventMessage from lofar.sas.tmss.client.tmssbuslistener import DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX +from lofar.common.util import single_line_with_single_spaces # # I/O @@ -167,6 +168,9 @@ class Subtask(BasicCommon): broker=os.environ.get("TMSS_BROKER", DEFAULT_BROKER)) as tobus: #TODO: do we want to connect to the bus for each new message, or have some global tobus? 
msg = EventMessage(subject="%s.%s" % (DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX, new_state.capitalize()), content={'subtask_id': subtask_id, 'old_state': old_state, 'new_state': new_state}) + address = tobus.remote_address + logger.info("Sending message with subject '%s' to exchange='%s' on broker=%s:%s content: %s", + msg.subject, tobus.exchange, address[0], address[1], single_line_with_single_spaces(msg.content)) tobus.send(msg) @property @@ -199,7 +203,7 @@ class Subtask(BasicCommon): if self.state.value == SubtaskState.Choices.SCHEDULED.value and self.__original_state.value == SubtaskState.Choices.SCHEDULING.value: if self.start_time is None: - if self.predecessors.all().count() is 0: + if self.predecessors.all().count() == 0: raise SubtaskSchedulingException("Cannot schedule subtask id=%s when start time is 'None'." % (self.pk, )) else: self.start_time = datetime.utcnow() diff --git a/SAS/TMSS/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/src/tmss/tmssapp/models/specification.py index 2d93d37047d325562c538c78e1a6b43fda372824..c5534610cf7a90fddab6afec2e847dac14bee6da 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/specification.py @@ -90,7 +90,7 @@ class AbstractChoice(Model): class Role(AbstractChoice): - """Defines the model and predefined list of possible Role's for TaskConnector. + """Defines the model and predefined list of possible Role's for TaskConnectorType. The items in the Choises class below are automagically populated into the database via a data migration.""" class Choices(Enum): CORRELATOR = "correlator" @@ -103,7 +103,7 @@ class Role(AbstractChoice): class Datatype(AbstractChoice): - """Defines the model and predefined list of possible Datatype's for TaskConnector. + """Defines the model and predefined list of possible Datatype's for TaskConnectorType. 
The items in the Choises class below are automagically populated into the database via a data migration.""" class Choices(Enum): VISIBILITIES = "visibilities" @@ -145,12 +145,12 @@ class Setting(BasicCommon): value = BooleanField(null=False) -class TaskConnector(BasicCommon): +class TaskConnectorType(BasicCommon): role = ForeignKey('Role', null=False, on_delete=PROTECT) datatype = ForeignKey('Datatype', null=False, on_delete=PROTECT) dataformats = ManyToManyField('Dataformat', blank=True) - output_of = ForeignKey("TaskTemplate", related_name='outputs', on_delete=CASCADE) - input_of = ForeignKey("TaskTemplate", related_name='inputs', on_delete=CASCADE) + output_of = ForeignKey("TaskTemplate", related_name='output_connector_types', on_delete=CASCADE) + input_of = ForeignKey("TaskTemplate", related_name='inpput_connector_types', on_delete=CASCADE) # @@ -364,12 +364,16 @@ class TaskBlueprint(NamedCommon): class TaskRelationDraft(BasicCommon): selection_doc = JSONField(help_text='Filter for selecting dataproducts from the output role.') - dataformat = ForeignKey('Dataformat', null=False, on_delete=PROTECT, help_text='Selected data format to use. One of (MS, HDF5).') - producer = ForeignKey('TaskDraft', related_name='produced_by', on_delete=CASCADE, help_text='Task Draft that has the output connector. 
NOTE: The producer does typically, but not necessarily, belong to the same Scheduling Unit (or even the same Project) as the consumer.') - consumer = ForeignKey('TaskDraft', related_name='consumed_by', on_delete=CASCADE, help_text='Task Draft that has the input connector.') - input = ForeignKey('TaskConnector', related_name='inputs_task_relation_draft', on_delete=CASCADE, help_text='Input connector of consumer.') - output = ForeignKey('TaskConnector', related_name='outputs_task_relation_draft', on_delete=CASCADE, help_text='Output connector of producer.') selection_template = ForeignKey('TaskRelationSelectionTemplate', on_delete=CASCADE, help_text='Schema used for selection_doc.') # todo: 'schema'? + dataformat = ForeignKey('Dataformat', null=False, on_delete=PROTECT, help_text='Selected data format to use. One of (MS, HDF5).') + + # caveat: it might look like producer has an incorrect related_name='consumed_by'. But it really is correct, denends on the way you look at it + producer = ForeignKey('TaskDraft', related_name='consumed_by', on_delete=CASCADE, help_text='Task Draft that has the output connector. NOTE: The producer does typically, but not necessarily, belong to the same Scheduling Unit (or even the same Project) as the consumer.') + # caveat: it might look like consumer has an incorrect related_name='produced_by'. 
But it really is correct, denends on the way you look at it + consumer = ForeignKey('TaskDraft', related_name='produced_by', on_delete=CASCADE, help_text='Task Draft that has the input connector.') + + input_role = ForeignKey('TaskConnectorType', related_name='taskrelationdraft_input_roles', on_delete=CASCADE, help_text='Input connector type (what kind of data can be taken as input).') + output_role = ForeignKey('TaskConnectorType', related_name='taskrelationdraft_output_roles', on_delete=CASCADE, help_text='Output connector type (what kind of data can be created as output).') def save(self, force_insert=False, force_update=False, using=None, update_fields=None): if self.selection_doc and self.selection_template_id and self.selection_template.schema: @@ -381,10 +385,14 @@ class TaskRelationDraft(BasicCommon): class TaskRelationBlueprint(BasicCommon): selection_doc = JSONField(help_text='Filter for selecting dataproducts from the output role.') dataformat = ForeignKey('Dataformat', null=False, on_delete=PROTECT, help_text='Selected data format to use.') - producer = ForeignKey('TaskBlueprint', related_name='produced_by', on_delete=CASCADE, help_text='Task Blueprint that has the output connector.') - consumer = ForeignKey('TaskBlueprint', related_name='consumed_by', on_delete=CASCADE, help_text='Task Blueprint that has the input connector.') - input = ForeignKey('TaskConnector', related_name='inputs_task_relation_blueprint', on_delete=CASCADE, help_text='Input connector of consumer.') - output = ForeignKey('TaskConnector', related_name='outputs_task_relation_blueprint', on_delete=CASCADE, help_text='Output connector of producer.') + + # caveat: it might look like producer has an incorrect related_name='consumed_by'. 
But it really is correct, denends on the way you look at it + producer = ForeignKey('TaskBlueprint', related_name='consumed_by', on_delete=CASCADE, help_text='Task Blueprint that has the output connector.') + # caveat: it might look like consumer has an incorrect related_name='produced_by'. But it really is correct, denends on the way you look at it + consumer = ForeignKey('TaskBlueprint', related_name='produced_by', on_delete=CASCADE, help_text='Task Blueprint that has the input connector.') + + input_role = ForeignKey('TaskConnectorType', related_name='taskrelationblueprint_input_roles', on_delete=CASCADE, help_text='Input connector type (what kind of data can be taken as input).') + output_role = ForeignKey('TaskConnectorType', related_name='taskrelationblueprint_output_roles', on_delete=CASCADE, help_text='Output connector type (what kind of data can be created as output).') draft = ForeignKey('TaskRelationDraft', on_delete=CASCADE, related_name='related_task_relation_blueprint', help_text='Task Relation Draft which this work request instantiates.') selection_template = ForeignKey('TaskRelationSelectionTemplate', on_delete=CASCADE, help_text='Schema used for selection_doc.') # todo: 'schema'? 
@@ -393,3 +401,4 @@ class TaskRelationBlueprint(BasicCommon): validate_json_against_schema(self.selection_doc, self.selection_template.schema) super().save(force_insert, force_update, using, update_fields) + diff --git a/SAS/TMSS/src/tmss/tmssapp/populate.py b/SAS/TMSS/src/tmss/tmssapp/populate.py index 8d0daea2ee277a864da2f27f58422d691b243119..e32a715291e958072857aa6e054988aea7f74014 100644 --- a/SAS/TMSS/src/tmss/tmssapp/populate.py +++ b/SAS/TMSS/src/tmss/tmssapp/populate.py @@ -62,13 +62,47 @@ def _populate_task_draft_example(): :return: """ try: - from datetime import datetime + from datetime import datetime, timezone from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.test.tmss_test_data_django_models import TaskDraft_test_data if isTestEnvironment() or isDevelopmentEnvironment(): + for nr in range(0, 18): + models.Cycle.objects.create(name="Cycle %s" % nr, + description="Lofar Cycle %s" % nr, + start=datetime(2013+nr//2, 6 if nr%2==0 else 11, 1, 0, 0, 0, 0, tzinfo=timezone.utc), + stop=datetime(2013+(nr+1)//2, 6 if nr%2==1 else 11, 1, 0, 0, 0, 0, tzinfo=timezone.utc), + number=nr, + standard_hours=0, # TODO: fill in cycle hours + expert_hours=0, + filler_hours=0) + + tmss_project = models.Project.objects.create(cycle=models.Cycle.objects.get(number=14), + name="TMSS-Commissioning", + description="Project for all TMSS tests and commissioning", + priority=1, + can_trigger=False, + private_data=True, + expert=True, + filler=False) + + scheduling_set = models.SchedulingSet.objects.create(name="UC1 test set", + description="UC1 test set", + project=tmss_project) + + requirements_template = models.SchedulingUnitTemplate.objects.create(name="UC1 test scheduling unit template", + description="UC1 test scheduling unit template", + version="0.1", + schema={}) + + scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(name="UC1 test scheduling unit", + description="UC1 test scheduling unit", + scheduling_set=scheduling_set, + 
requirements_template=requirements_template, + requirements_doc=get_default_json_object_for_schema(requirements_template.schema)) + obs_task_template = models.TaskTemplate.objects.get(name='correlator schema') - task_draft_data = TaskDraft_test_data(name="Test Target Observation", specifications_template=obs_task_template) + task_draft_data = TaskDraft_test_data(name="Test Target Observation", specifications_template=obs_task_template, scheduling_unit_draft=scheduling_unit_draft) obs_task_draft = models.TaskDraft.objects.create(**task_draft_data) pipeline_task_template = models.TaskTemplate.objects.get(name='preprocessing schema') @@ -78,15 +112,19 @@ def _populate_task_draft_example(): pipeline_task_draft = models.TaskDraft.objects.create(**task_draft_data) # connect them - connector = models.TaskConnector.objects.first() # TODO: get the correct connector instead of the first + connector_type = models.TaskConnectorType.objects.first() # TODO: get the correct connectortype instead of the first + selection_template = models.TaskRelationSelectionTemplate.objects.get(name="SAP") + selection_doc = get_default_json_object_for_schema(selection_template.schema) + selection_doc['sap'] = [0] + task_relation_data = {"tags": [], - "selection_doc": {'sap': [0]}, "dataformat": models.Dataformat.objects.get(value='MeasurementSet'), - "producer": pipeline_task_draft, - "consumer": obs_task_draft, - "input": connector, - "output": connector, - "selection_template": models.TaskRelationSelectionTemplate.objects.get(name="SAP")} + "producer": obs_task_draft, + "consumer": pipeline_task_draft, + "input_role": connector_type, + "output_role": connector_type, + "selection_doc": selection_doc, + "selection_template": selection_template } models.TaskRelationDraft.objects.create(**task_relation_data) except ImportError: @@ -886,29 +924,29 @@ def _populate_taskrelation_selection_templates(): TaskRelationSelectionTemplate.objects.create(**data) # SAP - data = {"name": "SAP", - "description": 
'Select by SAP.', - "version": '1', - "schema": json.loads('''{ - "$id": "http://example.com/example.json", - "$schema": "http://json-schema.org/draft-06/schema#", - "additionalProperties": false, - "definitions": {}, - "properties": { - "sap": { - "type": "array", - "title": "sap list", - "additionalItems": false, - "default": [], - "items": { - "type": "integer", - "title": "sap", - "minimum": 0, - "maximum": 1 - }}}, - "type": "object" - }'''), - "tags": []} + data = { "name": "SAP", + "description": 'Select by SAP.', + "version": '1', + "schema": json.loads('''{ +"$id": "http://example.com/example.json", +"$schema": "http://json-schema.org/draft-06/schema#", +"additionalProperties": false, +"definitions": {}, +"properties": { + "sap": { + "type": "array", + "title": "sap list", + "additionalItems": false, + "default": [], + "items": { + "type": "integer", + "title": "sap", + "minimum": 0, + "maximum": 1 + }}}, +"type": "object" +}'''), + "tags": []} TaskRelationSelectionTemplate.objects.create(**data) @@ -1394,14 +1432,14 @@ def _populate_pipelinecontrol_schema(): def _populate_connectors(): - # the TaskConnector's define how the Task[Draft/Blueprint] *can* be connected. - TaskConnector.objects.create(role=Role.objects.get(value=Role.Choices.CALIBRATOR.value), - datatype=Datatype.objects.get(value=Datatype.Choices.VISIBILITIES.value), - output_of=TaskTemplate.objects.get(name='correlator schema'), - input_of=TaskTemplate.objects.get(name='preprocessing schema')) - - TaskConnector.objects.create(role=Role.objects.get(value=Role.Choices.TARGET.value), - datatype=Datatype.objects.get(value=Datatype.Choices.VISIBILITIES.value), - output_of=TaskTemplate.objects.get(name='correlator schema'), - input_of=TaskTemplate.objects.get(name='preprocessing schema')) + # the TaskConnectorType's define how the Task[Draft/Blueprint] *can* be connected. 
+ TaskConnectorType.objects.create(role=Role.objects.get(value=Role.Choices.CALIBRATOR.value), + datatype=Datatype.objects.get(value=Datatype.Choices.VISIBILITIES.value), + output_of=TaskTemplate.objects.get(name='correlator schema'), + input_of=TaskTemplate.objects.get(name='preprocessing schema')) + + TaskConnectorType.objects.create(role=Role.objects.get(value=Role.Choices.TARGET.value), + datatype=Datatype.objects.get(value=Datatype.Choices.VISIBILITIES.value), + output_of=TaskTemplate.objects.get(name='correlator schema'), + input_of=TaskTemplate.objects.get(name='preprocessing schema')) diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py index 2494cb6ecf15d8fa3033b17d77624b90e8d0dbb5..fb9acf45af5d7e3a7e7c64f8600a263be8793451 100644 --- a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py @@ -16,7 +16,7 @@ class SubtaskStateSerializer(serializers.ModelSerializer): fields = '__all__' -class SubtaskStateLogSerializer(serializers.HyperlinkedModelSerializer): +class SubtaskStateLogSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.SubtaskStateLog fields = '__all__' @@ -46,31 +46,32 @@ class ScheduleMethodSerializer(serializers.ModelSerializer): fields = '__all__' -class SubtaskTemplateSerializer(serializers.HyperlinkedModelSerializer): +class SubtaskTemplateSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.SubtaskTemplate fields = '__all__' -class DefaultSubtaskTemplateSerializer(serializers.HyperlinkedModelSerializer): +class DefaultSubtaskTemplateSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.DefaultSubtaskTemplate fields = '__all__' -class DataproductSpecificationsTemplateSerializer(serializers.HyperlinkedModelSerializer): +class DataproductSpecificationsTemplateSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = 
models.DataproductSpecificationsTemplate fields = '__all__' -class DefaultDataproductSpecificationsTemplateSerializer(serializers.HyperlinkedModelSerializer): +class DefaultDataproductSpecificationsTemplateSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.DefaultDataproductSpecificationsTemplate fields = '__all__' -class DataproductFeedbackTemplateSerializer(serializers.HyperlinkedModelSerializer): + +class DataproductFeedbackTemplateSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.DataproductFeedbackTemplate fields = '__all__' @@ -78,19 +79,25 @@ class DataproductFeedbackTemplateSerializer(serializers.HyperlinkedModelSerializ class SubtaskSerializer(RelationalHyperlinkedModelSerializer): # If this is OK then we can extend API with NO url ('flat' values) on more places if required - state_str = serializers.StringRelatedField(source='state', read_only=True) - cluster_as_string = serializers.StringRelatedField(source='cluster', read_only=True) - schedule_method_as_string = serializers.StringRelatedField(source='schedule_method', read_only=True) - task_blueprint_id = serializers.StringRelatedField(source='task_blueprint', read_only=True) - specifications_template_id = serializers.StringRelatedField(source='specifications_template', read_only=True) + cluster_value = serializers.StringRelatedField(source='cluster', label='cluster_value', read_only=True) + class Meta: model = models.Subtask fields = '__all__' - extra_fields = ['state_str', 'cluster_as_string', 'schedule_method_as_string', 'task_blueprint_id', - 'specifications_template_id'] + extra_fields = ['cluster_value'] + + # Create a JSON editor form to replace the simple text field based on the schema in the template that this + # draft refers to. If that fails, the JSONField remains a standard text input. 
+ def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + try: + self.fields['specifications_doc'] = JSONEditorField(self.instance.specifications_template.schema) + except Exception as e: + print('Could not initialize JSONEditorField (%s), hence no fancy JSON form. This is expected for e.g. the list view.' % e) + # todo: Shall we use the schema for one of the default templates in this case instead? -class SubtaskInputSerializer(serializers.HyperlinkedModelSerializer): +class SubtaskInputSerializer(RelationalHyperlinkedModelSerializer): # Create a JSON editor form to replace the simple text field based on the schema in the template that this # draft refers to. If that fails, the JSONField remains a standard text input. @@ -114,7 +121,7 @@ class SubtaskOutputSerializer(RelationalHyperlinkedModelSerializer): #extra_fields = ['dataproducts', 'consumers'] #TODO: how can we make the inputs and outputs visible in the rest view without making them required for POSTs? -class DataproductSerializer(serializers.HyperlinkedModelSerializer): +class DataproductSerializer(RelationalHyperlinkedModelSerializer): # Create a JSON editor form to replace the simple text field based on the schema in the template that this # draft refers to. If that fails, the JSONField remains a standard text input. 
@@ -132,64 +139,39 @@ class DataproductSerializer(serializers.HyperlinkedModelSerializer): fields = '__all__' -class AntennaSetSerializer(serializers.HyperlinkedModelSerializer): +class AntennaSetSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.AntennaSet fields = '__all__' -class DataproductTransformSerializer(serializers.HyperlinkedModelSerializer): +class DataproductTransformSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.DataproductTransform fields = '__all__' -class FilesystemSerializer(serializers.HyperlinkedModelSerializer): +class FilesystemSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.Filesystem fields = '__all__' -class ClusterSerializer(serializers.HyperlinkedModelSerializer): +class ClusterSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.Cluster fields = '__all__' -class DataproductArchiveInfoSerializer(serializers.HyperlinkedModelSerializer): +class DataproductArchiveInfoSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.DataproductArchiveInfo fields = '__all__' -class DataproductHashSerializer(serializers.HyperlinkedModelSerializer): +class DataproductHashSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.DataproductHash fields = '__all__' -class SubtaskSerializerJSONeditorOnline(RelationalHyperlinkedModelSerializer): - - # Create a JSON editor form to replace the simple text field based on the schema in the template that this - # draft refers to. If that fails, the JSONField remains a standard text input. - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - try: - self.fields['specifications_doc'] = JSONEditorField(self.instance.specifications_template.schema) - except Exception as e: - print('Could not initialize JSONEditorField (%s), hence no fancy JSON form. This is expected for e.g. the list view.' 
% e) - # todo: Shall we use the schema for one of the default templates in this case instead? - - def create(self, validated_data): - validated_data['created_or_updated_by_user'] = self.context.get('request').user - return models.Subtask.objects.create(**validated_data) - - def update(self, instance, validated_data): - validated_data['created_or_updated_by_user'] = self.context.get('request').user - return super().update(instance, validated_data) - - class Meta: - model = models.Subtask - #fields = '__all__' - # extra_fields = ['inputs', 'outputs'] #TODO: how can we make the inputs and outputs visible in the rest view without making them required for POSTs? - exclude = ('created_or_updated_by_user',) diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py index afbc1b2ed5e7c32e5f2f222b313da3e87acac3a3..5ee99b0d0dd7be6ec2cf34ae02e5e66a091b97ba 100644 --- a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py @@ -6,19 +6,66 @@ from rest_framework import serializers from .. 
import models from .widgets import JSONEditorField from django.contrib.auth.models import User +from django.core.exceptions import ImproperlyConfigured from rest_framework import decorators import json class RelationalHyperlinkedModelSerializer(serializers.HyperlinkedModelSerializer): + _accepted_pk_names = ('id', 'name') def get_field_names(self, declared_fields, info): - expanded_fields = super(RelationalHyperlinkedModelSerializer, self).get_field_names(declared_fields, info) + field_names = super().get_field_names(declared_fields, info) + + # always add the primary key as well, cause it makes parsing in the client so much easier (no 'id' extraction from urls) + field_names.append(info.pk.name) if getattr(self.Meta, 'extra_fields', None): - return expanded_fields + self.Meta.extra_fields - else: - return expanded_fields + field_names += self.Meta.extra_fields + + # add 'plain'-values of the fieldnames which relate to AbstractChoice-'lookup-tables' + choice_field_names = [name+'_value' for name, rel in info.forward_relations.items() + if issubclass(rel.related_model, models.AbstractChoice)] + + # add 'plain-id(s)'-values of the fieldnames which relate to forward_relations + forward_related_field_names = [name+'_ids' if rel.to_many else name+'_id' + for name, rel in info.forward_relations.items() + if rel.related_model._meta.pk.name in self._accepted_pk_names + and name in field_names] + + # always add 'plain-id'-values of the fieldnames which relate to reverse_relations + reverse_related_field_names = [name+'_ids' for name, rel in info.reverse_relations.items() + if rel.related_model._meta.pk.name in self._accepted_pk_names + and name in field_names] + + # return them sorted alphabetically + return sorted(field_names + choice_field_names + forward_related_field_names + reverse_related_field_names) + + def build_field(self, field_name, info, model_class, nested_depth): + '''override of super.build_field to handle 'choice' fields''' + try: + return 
super().build_field(field_name, info, model_class, nested_depth) + except ImproperlyConfigured: + if field_name.endswith('_ids'): + return self.build_reverse_relations_ids_field(field_name, info, model_class, nested_depth) + if field_name.endswith('_value'): + return self.build_choice_field(field_name, info) + raise + + def build_reverse_relations_ids_field(self, field_name, info, model_class, nested_depth): + '''builds a PrimaryKeyRelatedField serializer for the 'reverse_relations_ids' fields''' + return serializers.PrimaryKeyRelatedField, {'label':field_name, + 'source':field_name[:-4], # cut '_ids' from end + 'many':True, + 'read_only':True} + + def build_choice_field(self, field_name, info): + '''builds a StringRelatedField serializer for the 'choice' fields''' + original_field_name = field_name[:-6] # cut '_value' from end + if original_field_name in info.forward_relations.keys(): + return serializers.StringRelatedField, {'label':field_name, + 'source': original_field_name, + 'read_only':True} # This is required for keeping a user reference as ForeignKey in other models @@ -29,55 +76,55 @@ class UserSerializer(serializers.Serializer): fields = '__all__' -class TagsSerializer(serializers.HyperlinkedModelSerializer): +class TagsSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.Tags fields = '__all__' -class GeneratorTemplateSerializer(serializers.HyperlinkedModelSerializer): +class GeneratorTemplateSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.GeneratorTemplate fields = '__all__' -class DefaultGeneratorTemplateSerializer(serializers.HyperlinkedModelSerializer): +class DefaultGeneratorTemplateSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.DefaultGeneratorTemplate fields = '__all__' -class SchedulingUnitTemplateSerializer(serializers.HyperlinkedModelSerializer): +class SchedulingUnitTemplateSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = 
models.SchedulingUnitTemplate fields = '__all__' -class DefaultSchedulingUnitTemplateSerializer(serializers.HyperlinkedModelSerializer): +class DefaultSchedulingUnitTemplateSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.DefaultSchedulingUnitTemplate fields = '__all__' -class TaskTemplateSerializer(serializers.HyperlinkedModelSerializer): +class TaskTemplateSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.TaskTemplate fields = '__all__' -class DefaultTaskTemplateSerializer(serializers.HyperlinkedModelSerializer): +class DefaultTaskTemplateSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.DefaultTaskTemplate fields = '__all__' -class TaskRelationSelectionTemplateSerializer(serializers.HyperlinkedModelSerializer): +class TaskRelationSelectionTemplateSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.TaskRelationSelectionTemplate fields = '__all__' -class DefaultTaskRelationSelectionTemplateSerializer(serializers.HyperlinkedModelSerializer): +class DefaultTaskRelationSelectionTemplateSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.DefaultTaskRelationSelectionTemplate fields = '__all__' @@ -107,9 +154,9 @@ class CopyReasonSerializer(serializers.ModelSerializer): fields = '__all__' -class TaskConnectorSerializer(serializers.HyperlinkedModelSerializer): +class TaskConnectorTypeSerializer(RelationalHyperlinkedModelSerializer): class Meta: - model = models.TaskConnector + model = models.TaskConnectorType fields = '__all__' @@ -121,10 +168,12 @@ class CycleSerializer(RelationalHyperlinkedModelSerializer): class ProjectSerializer(RelationalHyperlinkedModelSerializer): +# scheduling_sets = serializers.PrimaryKeyRelatedField(source='scheduling_sets', read_only=True, many=True) + class Meta: model = models.Project fields = '__all__' - extra_fields = ['name','project_quota'] + extra_fields = ['name','project_quota'] #, 'scheduling_sets'] 
class ProjectQuotaSerializer(RelationalHyperlinkedModelSerializer): @@ -262,7 +311,7 @@ class TaskRelationDraftSerializer(RelationalHyperlinkedModelSerializer): extra_fields = ['related_task_relation_blueprint'] -class TaskRelationBlueprintSerializer(serializers.HyperlinkedModelSerializer): +class TaskRelationBlueprintSerializer(RelationalHyperlinkedModelSerializer): # Create a JSON editor form to replace the simple text field based on the schema in the template that this # draft refers to. If that fails, the JSONField remains a standard text input. diff --git a/SAS/TMSS/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/src/tmss/tmssapp/subtasks.py index 80f76dd09e646e544159f99c6b2c29f1c6a9cfa2..335226d5cb031db2535730088a5856726bc5a79a 100644 --- a/SAS/TMSS/src/tmss/tmssapp/subtasks.py +++ b/SAS/TMSS/src/tmss/tmssapp/subtasks.py @@ -5,6 +5,7 @@ from functools import cmp_to_key from collections.abc import Iterable from lofar.common.datetimeutils import formatDatetime +from lofar.common import isProductionEnvironment from lofar.common.json_utils import add_defaults_to_json_object_for_schema, get_default_json_object_for_schema from lofar.sas.tmss.tmss.exceptions import SubtaskCreationException, SubtaskSchedulingException @@ -64,12 +65,14 @@ def create_observation_control_subtask_from_task_blueprint(task_blueprint: TaskB "angle2": 0.5787463318245085}, "digital_pointings": [{"name": "3C48", "pointing": {"direction_type": "J2000", + "angle1": 0.4262457643630986, "angle1": 0.4262457643630986, "angle2": 0.5787463318245085}, - "subbands": list(range(0, 8)) + "subbands": list(range(0, 244)) }] } } + specifications_doc = add_defaults_to_json_object_for_schema(extra_specifications_doc, subtask_template.schema) cluster_name = task_blueprint.specifications_doc.get("storage_cluster", "CEP4") subtask_data = { "start_time": None, @@ -96,12 +99,12 @@ def create_observation_control_subtask_from_task_blueprint(task_blueprint: TaskB def create_qafile_subtask_from_task_blueprint(task_blueprint: 
TaskBlueprint) -> Subtask: - observation_subtasks = [st for st in task_blueprint.subtasks.all() if st.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value] + observation_subtasks = [st for st in task_blueprint.subtasks.order_by('id').all() if st.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value] if not observation_subtasks: raise SubtaskCreationException("Cannot create %s subtask for task_blueprint id=%d because it has no observation subtask(s)" % ( SubtaskType.Choices.QA_FILES.value, task_blueprint.pk)) - observation_subtask = observation_subtasks[0] # TODO: decide what to do when there are multiple observation subtasks? + observation_subtask = observation_subtasks[-1] # TODO: decide what to do when there are multiple observation subtasks? return create_qafile_subtask_from_observation_subtask(observation_subtask) @@ -148,10 +151,12 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask) qafile_subtask = Subtask.objects.create(**qafile_subtask_data) # step 2: create and link subtask input/output + selection_template = TaskRelationSelectionTemplate.objects.get(name="All") + selection_doc = get_default_json_object_for_schema(selection_template.schema) qafile_subtask_input = SubtaskInput.objects.create(subtask=qafile_subtask, producer=observation_subtask.outputs.first(), # TODO: determine proper producer based on spec in task_relation_blueprint - selection_doc="{}", - selection_template=TaskRelationSelectionTemplate.objects.get(name="All")) + selection_doc=selection_doc, + selection_template=selection_template) qafile_subtask_output = SubtaskOutput.objects.create(subtask=qafile_subtask) # step 3: set state to DEFINED @@ -163,12 +168,11 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask) def create_qaplots_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> Subtask: qafile_subtasks = [st for st in task_blueprint.subtasks.all() if 
st.specifications_template.type.value == SubtaskType.Choices.QA_FILES.value] - if not qafile_subtasks: - raise ValueError("Cannot create %s subtask for task_blueprint id=%d because it has no qafile subtask(s)" % ( - SubtaskType.Choices.QA_FILES.value, task_blueprint.pk)) - - qafile_subtask = qafile_subtasks[0] # TODO: decide what to do when there are multiple qafile subtasks? - return create_qaplots_subtask_from_qafile_subtask(qafile_subtask) + if qafile_subtasks: + qafile_subtask = qafile_subtasks[0] # TODO: decide what to do when there are multiple qafile subtasks? + return create_qaplots_subtask_from_qafile_subtask(qafile_subtask) + else: + raise SubtaskCreationException('Cannot create QA plotting subtask for task id=%s because QA file conversion subtask exists.' % (task_blueprint.pk, )) def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subtask: @@ -210,10 +214,12 @@ def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subta qaplots_subtask = Subtask.objects.create(**qaplots_subtask_data) # step 2: create and link subtask input/output + selection_template = TaskRelationSelectionTemplate.objects.get(name="All") + selection_doc = get_default_json_object_for_schema(selection_template.schema) qaplots_subtask_input = SubtaskInput.objects.create(subtask=qaplots_subtask, producer=qafile_subtask.outputs.first(), - selection_doc="{}", - selection_template=TaskRelationSelectionTemplate.objects.get(name="All")) + selection_doc=selection_doc, + selection_template=selection_template) qaplots_subtask_output = SubtaskOutput.objects.create(subtask=qaplots_subtask) # step 3: set state to DEFINED @@ -252,9 +258,13 @@ def create_preprocessing_subtask_from_task_blueprint(task_blueprint: TaskBluepri subtask = Subtask.objects.create(**subtask_data) # step 2: create and link subtask input/output - for task_relation_blueprint in task_blueprint.consumed_by.all(): # todo: rename related name 'consumed_by' to 
'consuming_task_relation_blueprints' or sth. - for predecessor_subtask in task_relation_blueprint.producer.subtasks.all(): - for predecessor_subtask_output in predecessor_subtask.outputs.all(): + for task_relation_blueprint in task_blueprint.produced_by.all(): + producing_task_blueprint = task_relation_blueprint.producer + + # TODO: apply some better filtering. Now we're just connecting it to all predecessor observation subtasks + predecessor_observation_subtasks = [st for st in producing_task_blueprint.subtasks.order_by('id').all() if st.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value] + for predecessor_obs_subtask in predecessor_observation_subtasks: + for predecessor_subtask_output in predecessor_obs_subtask.outputs.all(): subtask_input = SubtaskInput.objects.create(subtask=subtask, producer=predecessor_subtask_output, selection_doc=task_relation_blueprint.selection_doc, @@ -309,6 +319,12 @@ def check_prerequities_for_scheduling(subtask: Subtask) -> bool: if predecessor.state.value != SubtaskState.Choices.FINISHED.value: raise SubtaskSchedulingException("Cannot schedule subtask id=%d because its predecessor id=%s in not FINISHED but state=%s" % (subtask.pk, predecessor.pk, predecessor.state.value)) + # check if settings allow scheduling observations + setting = Setting.objects.get(name='allow_scheduling_observations') + if not setting.value: + raise SubtaskSchedulingException("Cannot schedule subtask id=%d because setting %s=%s does not allow that." 
% + (subtask.pk, setting.name, setting.value)) + return True def schedule_qafile_subtask(qafile_subtask: Subtask): @@ -325,14 +341,16 @@ def schedule_qafile_subtask(qafile_subtask: Subtask): raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s but type should be %s" % (qafile_subtask.pk, qafile_subtask.specifications_template.type, SubtaskType.Choices.QA_FILES.value)) + if len(qafile_subtask.inputs.all()) != 1: + raise SubtaskSchedulingException("QA subtask id=%s should have 1 input, but it has %s" % (qafile_subtask.id, len(qafile_subtask.inputs))) # step 1: set state to SCHEDULING qafile_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULING.value) qafile_subtask.save() # step 2: link input dataproducts - for input in qafile_subtask.inputs.all(): - input.dataproducts.set(input.producer.dataproducts.all()) + qa_input = qafile_subtask.inputs.first() + qa_input.dataproducts.set(qa_input.producer.dataproducts.all()) # step 3: resource assigner # is a no-op for QA @@ -340,7 +358,7 @@ def schedule_qafile_subtask(qafile_subtask: Subtask): # step 4: create output dataproducts, and link these to the output # TODO: Should the output and/or dataproduct be determined by the specification in task_relation_blueprint? 
if qafile_subtask.outputs.first(): - qafile_subtask_dataproduct = Dataproduct.objects.create(filename="L%d_QA.h5" % (qafile_subtask.id,), + qafile_subtask_dataproduct = Dataproduct.objects.create(filename="L%s_QA.h5" % (qa_input.producer.subtask_id, ), directory="/data/qa/qa_files", dataformat=Dataformat.objects.get(value=Dataformat.Choices.QA_HDF5.value), producer=qafile_subtask.outputs.first(), @@ -372,21 +390,26 @@ def schedule_qaplots_subtask(qaplots_subtask: Subtask): qaplots_subtask.specifications_template.type, SubtaskType.Choices.QA_PLOTS.value)) + if len(qaplots_subtask.inputs.all()) != 1: + raise SubtaskSchedulingException("QA subtask id=%s should have 1 input, but it has %s" % (qaplots_subtask.id, len(qaplots_subtask.inputs))) + # step 1: set state to SCHEDULING qaplots_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULING.value) qaplots_subtask.save() # step 2: link input dataproducts # this should typically be a single input with a single dataproduct (the qa h5 file) - for input in qaplots_subtask.inputs.all(): - input.dataproducts.set(input.producer.dataproducts.all()) + qa_input = qaplots_subtask.inputs.first() + qa_input.dataproducts.set(qa_input.producer.dataproducts.all()) # step 3: resource assigner # is a no-op for QA # step 4: create output dataproducts, and link these to the output # TODO: Should the output and/or dataproduct be determined by the specification in task_relation_blueprint? 
- qaplots_subtask_dataproduct = Dataproduct.objects.create(directory="/data/qa/plots/L%d" % (qaplots_subtask.id,), + qafile_subtask = qaplots_subtask.predecessors.first() + obs_subtask = qafile_subtask.predecessors.first() + qaplots_subtask_dataproduct = Dataproduct.objects.create(directory="/data/qa/plots/L%s" % (obs_subtask.id, ), dataformat=Dataformat.objects.get(value=Dataformat.Choices.QA_PLOTS.value), producer=qaplots_subtask.outputs.first(), specifications_doc="", @@ -417,13 +440,6 @@ def schedule_observation_subtask(observation_subtask: Subtask): observation_subtask.specifications_template.type, SubtaskType.Choices.OBSERVATION.value)) - # check if settings allow scheduling observations - # (not sure if this should be in check_prerequities_for_scheduling() instead....?) - setting = Setting.objects.get(name='allow_scheduling_observations') - if not setting.value: - raise SubtaskSchedulingException("Cannot schedule subtask id=%d because setting %s=%s does not allow that." % - (observation_subtask.pk, setting.name, setting.value)) - # step 1: set state to SCHEDULING observation_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULING.value) observation_subtask.save() @@ -436,7 +452,7 @@ def schedule_observation_subtask(observation_subtask: Subtask): observation_subtask.start_time = next_start_time if observation_subtask.stop_time is None: - stop_time = observation_subtask.start_time + timedelta(minutes=+2) + stop_time = observation_subtask.start_time + timedelta(minutes=+1) logger.info("observation id=%s has no stop_time. assigned default: %s", observation_subtask.pk, formatDatetime(stop_time)) observation_subtask.stop_time = stop_time @@ -451,9 +467,12 @@ def schedule_observation_subtask(observation_subtask: Subtask): dataproduct_specifications_template = DataproductSpecificationsTemplate.objects.get(name="SAP") # todo: should this be derived from the task relation specification template? 
dataproduct_feedback_template = DataproductFeedbackTemplate.objects.get(name="Empty") subtask_output = observation_subtask.outputs.first() # TODO: make proper selection, not default first() + directory = "/data/%s/%s/L%s/uv" % ("projects" if isProductionEnvironment() else "test-projects", + observation_subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project.name, + observation_subtask.id) for sb_nr in specifications_doc['stations']['digital_pointings'][0]['subbands']: Dataproduct.objects.create(filename="L%d_SB%03d_uv.MS" % (observation_subtask.id, sb_nr), - directory="CEP4:/data/test-projects/TMSS_test/L%d/uv/" % (observation_subtask.id,), # todo: set correct path + directory=directory, dataformat=Dataformat.objects.get(value="MeasurementSet"), producer=subtask_output, specifications_doc={"sap": [0]}, # todo: set correct value. This will be provided by the RA somehow @@ -516,7 +535,7 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask): filename = "L%s_%s" % (pipeline_subtask.pk, input_dp.filename) output_dp = Dataproduct.objects.create(filename=filename, - directory=input_dp.directory.replace(str(pipeline_subtask_input.subtask.pk), str(pipeline_subtask.pk)), + directory=input_dp.directory.replace(str(pipeline_subtask_input.producer.subtask.pk), str(pipeline_subtask.pk)), dataformat=Dataformat.objects.get(value="MeasurementSet"), producer=pipeline_subtask_output, specifications_doc={}, @@ -525,6 +544,7 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask): feedback_template=dataproduct_feedback_template) DataproductTransform.objects.create(input=input_dp, output=output_dp, identity=False) output_dps.append(output_dp) + pipeline_subtask_output.dataproducts.set(output_dps) # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it) @@ -537,14 +557,19 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask): def create_and_schedule_subtasks_from_task_blueprint(task_blueprint: TaskBlueprint) -> 
[Subtask]: '''Convenience method: Create the subtasks form the task_blueprint, and schedule the ones that are not dependend on predecessors''' - subtasks = create_subtasks_from_task_blueprint(task_blueprint) + create_subtasks_from_task_blueprint(task_blueprint) + return schedule_independent_subtasks_in_task_blueprint(task_blueprint) + +def schedule_independent_subtasks_in_task_blueprint(task_blueprint: TaskBlueprint) -> [Subtask]: + '''Convenience method: Schedule the subtasks in the task_blueprint that are not dependent on predecessors''' + subtasks = list(task_blueprint.subtasks.all()) # sort them in 'data-flow'-order, # because successors can depend on predecessors, so the first tbp's need to be subtask'd first. subtasks.sort(key=cmp_to_key(lambda st_a, st_b: -1 if st_a in st_b.predecessors else 1 if st_b in st_a.predecessors else 0)) for subtask in subtasks: - if len(subtask.predecessors.all()) == 0: + if len(subtask.predecessors.all()) == len(subtask.predecessors.filter(state__value='finished').all()): schedule_subtask(subtask) return subtasks diff --git a/SAS/TMSS/src/tmss/tmssapp/tasks.py b/SAS/TMSS/src/tmss/tmssapp/tasks.py index b97afa0eb13b3714d55586c27c03b265ab7524d6..a8bc9937a69f80006a252e69b4ad3d1386496fbd 100644 --- a/SAS/TMSS/src/tmss/tmssapp/tasks.py +++ b/SAS/TMSS/src/tmss/tmssapp/tasks.py @@ -1,6 +1,6 @@ from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.tmss.tmssapp.models.specification import TaskBlueprint, SchedulingUnitBlueprint -from lofar.sas.tmss.tmss.tmssapp.subtasks import create_and_schedule_subtasks_from_task_blueprint +from lofar.sas.tmss.tmss.tmssapp.subtasks import create_and_schedule_subtasks_from_task_blueprint, create_subtasks_from_task_blueprint, schedule_independent_subtasks_in_task_blueprint from functools import cmp_to_key import logging @@ -61,42 +61,49 @@ def create_task_blueprint_from_task_draft(task_draft: models.TaskDraft) -> model # so, when converting two TaskDrafts (for example an observation and a
pipeline), then for the conversion # of the first TaskDraft->TaskBlueprint no relation is setup, # and for the second TaskDraft->TaskBlueprint conversion we have both endpoints available, so we can connect them. - for consumer in task_draft.consumed_by.all(): - producing_task_blueprint = consumer.producer.task_blueprints.first() - if producing_task_blueprint is not None: - relation = models.TaskRelationBlueprint.objects.create(draft=consumer, - input=consumer.input, - output=consumer.output, - producer=task_blueprint, - consumer=producing_task_blueprint, - selection_doc=consumer.selection_doc, - selection_template=consumer.selection_template, - dataformat=consumer.dataformat) - logger.info("create_task_blueprint_from_task_draft(task_draft.id=%s) connected task_blueprint id=%s to task_blueprint id=%s via task_relation_blueprint id=%s", - task_draft.pk, task_blueprint.pk, producing_task_blueprint.pk, relation.pk) - for producer in task_draft.produced_by.all(): - consuming_task_blueprint = producer.consumer.task_blueprints.first() - if consuming_task_blueprint is not None: - relation = models.TaskRelationBlueprint.objects.create(draft=producer, - input=producer.input, - output=producer.output, - producer=consuming_task_blueprint, - consumer=task_blueprint, - selection_doc=producer.selection_doc, - selection_template=producer.selection_template, - dataformat=producer.dataformat) - logger.info("create_task_blueprint_from_task_draft(task_draft.id=%s) connected task_blueprint id=%s to task_blueprint id=%s via task_relation_blueprint id=%s", - task_draft.pk, consuming_task_blueprint.pk, task_blueprint.pk, relation.pk) + task_draft_relations = list(task_draft.consumed_by.all()) + list(task_draft.produced_by.all()) + for task_relation_draft in task_draft_relations: + for producing_task_blueprint in task_relation_draft.producer.task_blueprints.all(): + for consuming_task_blueprint in task_relation_draft.consumer.task_blueprints.all(): + try: + # do nothing if 
task_relation_blueprint already exists... + models.TaskRelationBlueprint.objects.get(producer_id=producing_task_blueprint.id, consumer_id=consuming_task_blueprint.id) + except models.TaskRelationBlueprint.DoesNotExist: + # ...'else' create it. + task_relation_blueprint = models.TaskRelationBlueprint.objects.create(draft=task_relation_draft, + input_role=task_relation_draft.input_role, + output_role=task_relation_draft.output_role, + producer=producing_task_blueprint, + consumer=consuming_task_blueprint, + selection_doc=task_relation_draft.selection_doc, + selection_template=task_relation_draft.selection_template, + dataformat=task_relation_draft.dataformat) + logger.info("create_task_blueprint_from_task_draft(task_draft.id=%s) connected task_blueprint id=%s to task_blueprint id=%s via task_relation_blueprint id=%s", + task_draft.pk, task_blueprint.pk, producing_task_blueprint.pk, task_relation_blueprint.pk) return task_blueprint +def create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> models.SchedulingUnitBlueprint: + '''Convenience method: Create the scheduling_unit_blueprint, then create its child task_blueprint(s), then create the task_blueprint's subtasks''' + scheduling_unit_blueprint = create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft) + return create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint) + + def create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> models.SchedulingUnitBlueprint: '''Convenience method: Create the scheduling_unit_blueprint, then create its child task_blueprint(s), then create the task_blueprint's subtasks, and schedule the ones that are not dependend on predecessors''' scheduling_unit_blueprint = create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft) return 
create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint) +def create_task_blueprint_and_subtasks_from_task_draft(task_draft: models.TaskDraft) -> models.TaskBlueprint: + '''Convenience method: Create the task_blueprint, then create the task_blueprint's subtasks''' + task_blueprint = create_task_blueprint_from_task_draft(task_draft) + create_subtasks_from_task_blueprint(task_blueprint) + task_blueprint.refresh_from_db() + return task_blueprint + + def create_task_blueprint_and_subtasks_and_schedule_subtasks_from_task_draft(task_draft: models.TaskDraft) -> models.TaskBlueprint: '''Convenience method: Create the task_blueprint, then create the task_blueprint's subtasks, and schedule the ones that are not dependend on predecessors''' task_blueprint = create_task_blueprint_from_task_draft(task_draft) @@ -104,8 +111,8 @@ def create_task_blueprint_and_subtasks_and_schedule_subtasks_from_task_draft(tas task_blueprint.refresh_from_db() return task_blueprint -def create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint: - '''Convenience method: Create the scheduling_unit_blueprint's task_blueprint(s), then create the task_blueprint's subtasks, and schedule the ones that are not dependend on predecessors''' +def create_task_blueprints_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint: + '''Convenience method: Create the scheduling_unit_blueprint's task_blueprint(s)''' task_drafts = list(scheduling_unit_blueprint.draft.task_drafts.all()) # sort them in 'data-flow'-order,
1 if tbp_b in tbp_a.predecessors else 0)) # convert task_draft(s) to task_blueprint(s) - task_blueprints = [create_task_blueprint_from_task_draft(task_draft) for task_draft in task_drafts] + for task_draft in task_drafts: + create_task_blueprint_from_task_draft(task_draft) + + # refresh so all related fields are updated. + scheduling_unit_blueprint.refresh_from_db() + return scheduling_unit_blueprint + +def create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint: + '''Convenience method: Create the scheduling_unit_blueprint's task_blueprint(s), then create each task_blueprint's subtasks''' + scheduling_unit_blueprint = create_task_blueprints_from_scheduling_unit_blueprint(scheduling_unit_blueprint) + + task_blueprints = list(scheduling_unit_blueprint.task_blueprints.all()) + + # sort task_blueprint(s) in 'data-flow'-order, + # because successors can depend on predecessors, so the first tbp's need to be subtask'd first. + task_blueprints.sort(key=cmp_to_key(lambda tbp_a, tbp_b: -1 if tbp_a in tbp_b.predecessors else 1 if tbp_b in tbp_a.predecessors else 0)) + + for task_blueprint in task_blueprints: + create_subtasks_from_task_blueprint(task_blueprint) + + # refresh so all related fields are updated. 
+ scheduling_unit_blueprint.refresh_from_db() + return scheduling_unit_blueprint + +def create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint: + '''Convenience method: Create the scheduling_unit_blueprint's task_blueprint(s), then create the task_blueprint's subtasks, and schedule the ones that are not dependent on predecessors''' + scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint) + task_blueprints = list(scheduling_unit_blueprint.task_blueprints.all()) # sort task_blueprint(s) in 'data-flow'-order, # because successors can depend on predecessors, so the first tbp's need to be subtask'd first. task_blueprints.sort(key=cmp_to_key(lambda tbp_a, tbp_b: -1 if tbp_a in tbp_b.predecessors else 1 if tbp_b in tbp_a.predecessors else 0)) for task_blueprint in task_blueprints: - create_and_schedule_subtasks_from_task_blueprint(task_blueprint) + schedule_independent_subtasks_in_task_blueprint(task_blueprint) # refresh so all related fields are updated.
scheduling_unit_blueprint.refresh_from_db() diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py index 9d5cdd57e0ff7eb2f4fe6bdd7800621ae9bfe924..359342b1c9c1204773860aeb795cd46f95002b88 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py @@ -52,7 +52,7 @@ class SubtaskStateLogViewSet(LOFARViewSet): def get_queryset(self): if 'subtask_id' in self.kwargs: subtask = get_object_or_404(models.Subtask, pk=self.kwargs['subtask_id']) - return subtask.subtaskstatelog_set.all() + return subtask.subtaskstatelog_set.order_by('created_at').all() queryset = models.SubtaskStateLog.objects.all() @@ -63,6 +63,7 @@ class SubtaskStateLogViewSet(LOFARViewSet): return queryset + class SubtaskTypeViewSet(LOFARViewSet): queryset = models.SubtaskType.objects.all() serializer_class = serializers.SubtaskTypeSerializer @@ -211,6 +212,39 @@ class SubtaskViewSet(LOFARViewSet): serializer = self.get_serializer(scheduled_subtask) return RestResponse(serializer.data) + + @swagger_auto_schema(responses={200: 'The state log for this Subtask.', + 403: 'forbidden'}, + operation_description="Get the state log for this Subtask.") + @action(methods=['get'], detail=True) + def state_log(self, request, pk=None): + subtask = get_object_or_404(models.Subtask, pk=pk) + serializer = serializers.SubtaskStateLogSerializer(subtask.subtaskstatelog_set.order_by('created_at').all(), + many=True, + context={'request': request}) + return RestResponse(serializer.data) + + + @swagger_auto_schema(responses={200: 'The input dataproducts of this subtask.', + 403: 'forbidden'}, + operation_description="Get the input dataproducts of this subtask.") + @action(methods=['get'], detail=True, url_name='input_dataproducts') + def input_dataproducts(self, request, pk=None): + dataproducts = models.Dataproduct.objects.filter(subtaskinput__subtask_id=pk) + serializer = 
serializers.DataproductSerializer(dataproducts, many=True, context={'request': request}) + return RestResponse(serializer.data) + + + @swagger_auto_schema(responses={200: 'The output dataproducts of this subtask.', + 403: 'forbidden'}, + operation_description="Get the output dataproducts of this subtask.") + @action(methods=['get'], detail=True, url_name='output_dataproducts') + def output_dataproducts(self, request, pk=None): + dataproducts = models.Dataproduct.objects.filter(producer__subtask_id=pk) + serializer = serializers.DataproductSerializer(dataproducts, many=True, context={'request': request}) + return RestResponse(serializer.data) + + class SubtaskNestedViewSet(LOFARNestedViewSet): queryset = models.Subtask.objects.all() serializer_class = serializers.SubtaskSerializer diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py index b8d0b66260b3477bc662f5967fc9d64a2cc21593..106990e534efc73d02d6123abf6ea211a7685b25 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py @@ -120,20 +120,22 @@ class ResourceUnitViewSet(LOFARViewSet): queryset = models.ResourceUnit.objects.all() serializer_class = serializers.ResourceUnitSerializer -class TaskConnectorViewSet(LOFARViewSet): - queryset = models.TaskConnector.objects.all() - serializer_class = serializers.TaskConnectorSerializer +class TaskConnectorTypeViewSet(LOFARViewSet): + queryset = models.TaskConnectorType.objects.all() + serializer_class = serializers.TaskConnectorTypeSerializer @permission_classes((DjangoModelPermissions,)) # example override of default permissions per viewset | todo: review for production class CycleViewSet(LOFARViewSet): queryset = models.Cycle.objects.all() serializer_class = serializers.CycleSerializer + ordering = ['number'] class ProjectViewSet(LOFARViewSet): queryset = models.Project.objects.all() serializer_class = serializers.ProjectSerializer + 
ordering = ['name'] class ProjectNestedViewSet(LOFARNestedViewSet): @@ -196,7 +198,7 @@ class SchedulingUnitDraftViewSet(LOFARViewSet): @swagger_auto_schema(responses={201: 'The Created SchedulingUnitBlueprint, see Location in Response header', 403: 'forbidden'}, - operation_description="Carve this scheduling unit draft, and its tasks in stone, and make blueprint(s) out of it, create subtasks, and schedule the ones that are not dependend on predecessors.") + operation_description="Carve this SchedulingUnitDraft and its TaskDraft(s) in stone, and make blueprint(s) out of it and create their subtask(s), and schedule the ones that are not dependend on predecessors") @action(methods=['get'], detail=True, url_name="create_blueprints_and_schedule", name="Create Blueprints-Tree and Schedule") def create_blueprints_and_schedule(self, request, pk=None): scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, pk=pk) @@ -212,6 +214,25 @@ class SchedulingUnitDraftViewSet(LOFARViewSet): status=status.HTTP_201_CREATED, headers={'Location': scheduling_unit_blueprint_path}) + @swagger_auto_schema(responses={201: 'The Created SchedulingUnitBlueprint, see Location in Response header', + 403: 'forbidden'}, + operation_description="Carve this SchedulingUnitDraft and its TaskDraft(s) in stone, and make blueprint(s) out of it and create their subtask(s)") + @action(methods=['get'], detail=True, url_name="create_blueprints", name="Create Blueprints-Tree") + def create_blueprints(self, request, pk=None): + scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, pk=pk) + scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + + # url path magic to construct the new scheduling_unit_blueprint_path url + scheduling_unit_draft_path = request._request.path + base_path = scheduling_unit_draft_path[:scheduling_unit_draft_path.find('/scheduling_unit_draft')] + scheduling_unit_blueprint_path = 
'%s/scheduling_unit_blueprint/%s/' % (base_path, scheduling_unit_blueprint.id,) + + # return a response with the new serialized SchedulingUnitBlueprintSerializer, and a Location to the new instance in the header + return Response(serializers.SchedulingUnitBlueprintSerializer(scheduling_unit_blueprint, context={'request':request}).data, + status=status.HTTP_201_CREATED, + headers={'Location': scheduling_unit_blueprint_path}) + + class SchedulingUnitDraftNestedViewSet(LOFARNestedViewSet): queryset = models.SchedulingUnitDraft.objects.all() serializer_class = serializers.SchedulingUnitDraftSerializer @@ -228,11 +249,11 @@ class SchedulingUnitBlueprintViewSet(LOFARViewSet): queryset = models.SchedulingUnitBlueprint.objects.all() serializer_class = serializers.SchedulingUnitBlueprintSerializer - @swagger_auto_schema(responses={201: "This SchedulingUnitBlueprint, with references to it's created TaskBlueprints.", + @swagger_auto_schema(responses={201: "This SchedulingUnitBlueprint, with references to its created TaskBlueprints and (scheduled) Subtasks.", 403: 'forbidden'}, operation_description="Create TaskBlueprint(s) for this scheduling unit, create subtasks, and schedule the ones that are not dependend on predecessors.") - @action(methods=['get'], detail=True, url_name="create_taskblueprints_and_schedule", name="Create TaskBlueprints, Subtask(s) and Schedule") - def create_taskblueprints_and_schedule(self, request, pk=None): + @action(methods=['get'], detail=True, url_name="create_taskblueprints_subtasks_and_schedule", name="Create TaskBlueprint(s), their Subtask(s) and schedule them.") + def create_taskblueprints_subtasks_and_schedule(self, request, pk=None): scheduling_unit_blueprint = get_object_or_404(models.SchedulingUnitBlueprint, pk=pk) scheduling_unit_blueprint = create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint) @@ -240,6 +261,30 @@ class SchedulingUnitBlueprintViewSet(LOFARViewSet): return 
Response(serializers.SchedulingUnitBlueprintSerializer(scheduling_unit_blueprint, context={'request':request}).data, status=status.HTTP_201_CREATED) + @swagger_auto_schema(responses={201: "This SchedulingUnitBlueprint, with references to its created TaskBlueprints and Subtasks.", + 403: 'forbidden'}, + operation_description="Create TaskBlueprint(s) for this scheduling unit and create subtasks.") + @action(methods=['get'], detail=True, url_name="create_taskblueprints_subtasks", name="Create TaskBlueprint(s) and their Subtask(s)") + def create_taskblueprints_subtasks(self, request, pk=None): + scheduling_unit_blueprint = get_object_or_404(models.SchedulingUnitBlueprint, pk=pk) + scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint) + + # return a response with the new serialized scheduling_unit_blueprint (with references to the created task_blueprint(s) and (scheduled) subtasks) + return Response(serializers.SchedulingUnitBlueprintSerializer(scheduling_unit_blueprint, context={'request':request}).data, + status=status.HTTP_201_CREATED) + + @swagger_auto_schema(responses={201: "This SchedulingUnitBlueprint, with references to its created TaskBlueprints.", + 403: 'forbidden'}, + operation_description="Create the TaskBlueprint(s).") + @action(methods=['get'], detail=True, url_name="create_taskblueprints", name="Create TaskBlueprint(s)") + def create_taskblueprints(self, request, pk=None): + scheduling_unit_blueprint = get_object_or_404(models.SchedulingUnitBlueprint, pk=pk) + scheduling_unit_blueprint = create_task_blueprints_from_scheduling_unit_blueprint(scheduling_unit_blueprint) + + # return a response with the new serialized scheduling_unit_blueprint (with references to the created task_blueprint(s) and (scheduled) subtasks) + return Response(serializers.SchedulingUnitBlueprintSerializer(scheduling_unit_blueprint, context={'request':request}).data, + status=status.HTTP_201_CREATED) + class 
SchedulingUnitBlueprintNestedViewSet(LOFARNestedViewSet): queryset = models.SchedulingUnitBlueprint.objects.all() @@ -275,10 +320,10 @@ class TaskDraftViewSet(LOFARViewSet): status=status.HTTP_201_CREATED, headers={'Location': task_blueprint_path}) - @swagger_auto_schema(responses={201: "This TaskBlueprint, with it's created (and some scheduled) subtasks", + @swagger_auto_schema(responses={201: "This TaskBlueprint, with its created (and some scheduled) subtasks", 403: 'forbidden'}, operation_description="Create subtasks, and schedule the ones that are not dependend on predecessors.") - @action(methods=['get'], detail=True, url_name="create_task_blueprint_subtasks_and_schedule", name="Create TaskBlueprint, it's Subtask(s) and Schedule") + @action(methods=['get'], detail=True, url_name="create_task_blueprint_subtasks_and_schedule", name="Create TaskBlueprint, its Subtask(s) and Schedule") def create_task_blueprint_subtasks_and_schedule(self, request, pk=None): task_draft = get_object_or_404(models.TaskDraft, pk=pk) task_blueprint = create_task_blueprint_and_subtasks_and_schedule_subtasks_from_task_draft(task_draft) @@ -293,6 +338,26 @@ class TaskDraftViewSet(LOFARViewSet): status=status.HTTP_201_CREATED, headers={'Location': task_blueprint_path}) + + @swagger_auto_schema(responses={201: "This TaskBlueprint, with its created subtask(s)", + 403: 'forbidden'}, + operation_description="Create subtasks.") + @action(methods=['get'], detail=True, url_name="create_task_blueprint_subtasks", name="Create TaskBlueprint and its Subtask(s)") + def create_task_blueprint_and_subtasks(self, request, pk=None): + task_draft = get_object_or_404(models.TaskDraft, pk=pk) + task_blueprint = create_task_blueprint_and_subtasks_from_task_draft(task_draft) + + # url path magic to construct the new task_blueprint_path url + task_draft_path = request._request.path + base_path = task_draft_path[:task_draft_path.find('/task_draft')] + task_blueprint_path = '%s/task_blueprint/%s/' % (base_path, 
task_blueprint.id,) + + # return a response with the new serialized TaskBlueprint, and a Location to the new instance in the header + return Response(serializers.TaskBlueprintSerializer(task_blueprint, context={'request':request}).data, + status=status.HTTP_201_CREATED, + headers={'Location': task_blueprint_path}) + + @swagger_auto_schema(responses={200: 'The predecessor task draft of this task draft', 403: 'forbidden'}, operation_description="Get the predecessor task draft of this task draft.") @@ -330,10 +395,10 @@ class TaskBlueprintViewSet(LOFARViewSet): queryset = models.TaskBlueprint.objects.all() serializer_class = serializers.TaskBlueprintSerializer - @swagger_auto_schema(responses={201: "This TaskBlueprint, with it's created subtasks", + @swagger_auto_schema(responses={201: "This TaskBlueprint, with its created subtasks", 403: 'forbidden'}, operation_description="Create subtasks.") - @action(methods=['get'], detail=True, url_name="create_subtasks_and_schedule", name="Create Subtasks") + @action(methods=['get'], detail=True, url_name="create_subtasks", name="Create Subtasks") def create_subtasks(self, request, pk=None): task_blueprint = get_object_or_404(models.TaskBlueprint, pk=pk) subtasks = create_subtasks_from_task_blueprint(task_blueprint) @@ -343,7 +408,7 @@ class TaskBlueprintViewSet(LOFARViewSet): return Response(serializers.TaskBlueprintSerializer(task_blueprint, context={'request':request}).data, status=status.HTTP_201_CREATED) - @swagger_auto_schema(responses={201: "This TaskBlueprint, with it's created (and some scheduled) subtasks", + @swagger_auto_schema(responses={201: "This TaskBlueprint, with its created (and some scheduled) subtasks", 403: 'forbidden'}, operation_description="Create subtasks, and schedule the ones that are not dependend on predecessors.") @action(methods=['get'], detail=True, url_name="create_subtasks_and_schedule", name="Create Subtasks and Schedule") @@ -356,6 +421,19 @@ class TaskBlueprintViewSet(LOFARViewSet): return 
Response(serializers.TaskBlueprintSerializer(task_blueprint, context={'request':request}).data, status=status.HTTP_201_CREATED) + @swagger_auto_schema(responses={201: "This TaskBlueprint, with the scheduled subtasks", + 403: 'forbidden'}, + operation_description="Schedule the Subtasks that are not dependend on predecessors.") + @action(methods=['get'], detail=True, url_name="schedule_independent_subtasks", name="Schedule independend Subtasks") + def schedule_independent_subtasks(self, request, pk=None): + task_blueprint = get_object_or_404(models.TaskBlueprint, pk=pk) + schedule_independent_subtasks_in_task_blueprint(task_blueprint) + task_blueprint.refresh_from_db() + + # return a response with the new serialized task_blueprint (with references to the created (and scheduled) subtasks) + return Response(serializers.TaskBlueprintSerializer(task_blueprint, context={'request':request}).data, + status=status.HTTP_201_CREATED) + @swagger_auto_schema(responses={200: 'The predecessor task draft of this task draft', 403: 'forbidden'}, operation_description="Get the predecessor task draft of this task draft.") diff --git a/SAS/TMSS/src/tmss/urls.py b/SAS/TMSS/src/tmss/urls.py index 5edc14292953efb714e45c777d36f3444ad608fa..5866ea11d6d1341e41b83eafffd68626049940cf 100644 --- a/SAS/TMSS/src/tmss/urls.py +++ b/SAS/TMSS/src/tmss/urls.py @@ -21,6 +21,7 @@ from django.urls import path, re_path from django.conf.urls import url, include from django.views.generic.base import TemplateView +from collections import OrderedDict from rest_framework import routers, permissions from .tmssapp import viewsets, models, serializers, views from rest_framework.documentation import include_docs_urls @@ -61,7 +62,21 @@ urlpatterns = [ # REST Router # +class TMSSAPIRootView(routers.APIRootView): + def get(self, request, *args, **kwargs): + '''return the normal list of urls, but then sorted by url so they are easier to find in a list for us humble humans''' + response = super().get(request, *args, 
**kwargs) + sorted_by_key_urls = OrderedDict() + + for key in sorted(response.data.keys()): + sorted_by_key_urls[key] = response.data[key] + + response.data = sorted_by_key_urls + + return response + router = routers.DefaultRouter() +router.APIRootView = TMSSAPIRootView router.register(r'tags', viewsets.TagsViewSet) # SPECIFICATION @@ -78,7 +93,7 @@ router.register(r'generator_template', viewsets.GeneratorTemplateViewSet) router.register(r'scheduling_unit_template', viewsets.SchedulingUnitTemplateViewSet) router.register(r'task_template', viewsets.TaskTemplateViewSet) router.register(r'task_relation_selection_template', viewsets.TaskRelationSelectionTemplateViewSet) -router.register(r'task_connector', viewsets.TaskConnectorViewSet) +router.register(r'task_connector_type', viewsets.TaskConnectorTypeViewSet) router.register(r'default_generator_template', viewsets.DefaultGeneratorTemplateViewSet) router.register(r'default_scheduling_unit_template', viewsets.DefaultSchedulingUnitTemplateViewSet) router.register(r'default_task_template', viewsets.DefaultTaskTemplateViewSet) @@ -110,7 +125,6 @@ router.register(r'task_draft/(?P<task_draft_id>\d+)/task_relation_draft', viewse router.register(r'task_relation_draft/(?P<task_relation_draft_id>\d+)/task_relation_blueprint', viewsets.TaskRelationBlueprintNestedViewSet) router.register(r'task_blueprint/(?P<task_blueprint_id>\d+)/task_relation_blueprint', viewsets.TaskRelationBlueprintNestedViewSet) router.register(r'task_blueprint/(?P<task_blueprint_id>\d+)/subtask', viewsets.SubtaskNestedViewSet) -#router.register(r'subtask/(?P<subtask_id>[\w\-]+)/state_log', viewsets.SubtaskStateLogViewSet) # SCHEDULING diff --git a/SAS/TMSS/test/CMakeLists.txt b/SAS/TMSS/test/CMakeLists.txt index f3ff1838d84942847f77ed41e89bfb3ac6f7160a..3198bd444850ea62dc0da8fc3b82080c2684aa58 100644 --- a/SAS/TMSS/test/CMakeLists.txt +++ b/SAS/TMSS/test/CMakeLists.txt @@ -29,7 +29,7 @@ if(BUILD_TESTING) lofar_add_test(t_tmss_session_auth) 
lofar_add_test(t_subtasks) lofar_add_test(t_parset_adapter) - lofar_add_test(t_specify_observation) + lofar_add_test(t_tasks) set_tests_properties(t_tmssapp_scheduling_REST_API PROPERTIES TIMEOUT 300) set_tests_properties(t_tmssapp_specification_REST_API PROPERTIES TIMEOUT 300) diff --git a/SAS/TMSS/test/t_specify_observation.sh b/SAS/TMSS/test/t_specify_observation.sh deleted file mode 100755 index dd467716958fac3d617aca0642fd6dff0daee501..0000000000000000000000000000000000000000 --- a/SAS/TMSS/test/t_specify_observation.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/sh - -./runctest.sh t_specify_observation \ No newline at end of file diff --git a/SAS/TMSS/test/t_subtasks.py b/SAS/TMSS/test/t_subtasks.py index 86df10801234222c82ef1e856058096dc30775be..beb5e328e606a1e5e47435cde9fec41252cd23b3 100755 --- a/SAS/TMSS/test/t_subtasks.py +++ b/SAS/TMSS/test/t_subtasks.py @@ -41,11 +41,204 @@ from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.tmss.tmssapp.subtasks import * +# The following methods should be tested +# check_prerequities_for_subtask_creation +# create_subtasks_from_task_blueprint +# create_observation_control_subtask_from_task_blueprint +# create_qafile_subtask_from_task_blueprint +# create_qafile_subtask_from_observation_subtask +# create_qaplots_subtask_from_task_blueprint +# create_qaplots_subtask_from_qafile_subtask +# create_preprocessing_subtask_from_task_blueprint +# +# schedule_subtask +# check_prerequities_for_scheduling +# schedule_qafile_subtask +# schedule_qaplots_subtask +# schedule_observation_subtask +# schedule_pipeline_subtask +# +# create_and_schedule_subtasks_from_task_blueprint + + + + + +class SubTasksCreationFromSubTask(unittest.TestCase): + + @staticmethod + def create_subtask_object(subtask_type_value, subtask_state_value): + """ + Helper function to create a subtask object for testing with given subtask value and subtask state value + as string (no object) + """ + template_type = 
models.SubtaskType.objects.get(value=subtask_type_value) + subtask_template_obj = SubTasksCreationFromSubTask.create_subtask_template(template_type) + subtask_state_obj = models.SubtaskState.objects.get(value=subtask_state_value) + subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj) + return models.Subtask.objects.create(**subtask_data) + + @staticmethod + def create_subtask_template(template_type: object): + subtask_template_data = SubtaskTemplate_test_data() + subtask_template_data['type'] = template_type + return models.SubtaskTemplate.objects.create(**subtask_template_data) + + def test_create_qafile_subtask_from_observation_subtask_failed(self): + """ + Test if creation of subtask qafile failed due to wrong state or wrong type of the predecessor subtask + Correct state should be 'defined' and correct type should be 'observation' (for this test of course it is not) + """ + subtasks = [self.create_subtask_object("pipeline", "defined"), + self.create_subtask_object("observation", "defining"), + self.create_subtask_object("observation", "defining") ] + for subtask in subtasks: + with self.assertRaises(ValueError): + create_qafile_subtask_from_observation_subtask(subtask) + + def test_create_qafile_subtask_from_observation_subtask_succeed(self): + """ + Test if creation of subtask qafile succeed + Check if the created subtask has correct subtask state and value (TODO) + """ + predecessor_subtask = self.create_subtask_object("observation", "defined") + subtask = create_qafile_subtask_from_observation_subtask(predecessor_subtask) + # subtask object is None because QA file conversion is by default not enabled!!!! 
+ self.assertEqual(None, subtask) + + def test_create_qaplots_subtask_from_qafile_subtask_failed(self): + """ + Test if creation of subtask qaplots failed due to wrong state or wrong type of the predecessor subtask + Correct type should be 'qa_files' (for this test of course it is not) + """ + subtasks = [self.create_subtask_object("pipeline", "defined"), + self.create_subtask_object("observation", "defining"), + self.create_subtask_object("observation", "defining") ] + for subtask in subtasks: + with self.assertRaises(ValueError): + create_qaplots_subtask_from_qafile_subtask(subtask) + + def test_create_qaplots_subtask_from_qafile_subtask_succeed(self): + """ + Test if creation of subtask qaplots succeed + Check if the created subtask has correct subtask state and value (TODO) + """ + predecessor_subtask = self.create_subtask_object("qa_files", "defined") + subtask = create_qaplots_subtask_from_qafile_subtask(predecessor_subtask) + # subtask object is None because QA plots is by default not enabled!!!! 
+ self.assertEqual(None, subtask) + + +class SubTasksCreationFromTaskBluePrint(unittest.TestCase): + + @staticmethod + def create_task_blueprint_object(task_template_name="correlator schema", QA_enabled=False): + """ + Helper function to create a task blueprint object for testing with given task template name value + as string (no object) + """ + task_blueprint_data = TaskBlueprint_test_data() + task_blueprint_obj = models.TaskBlueprint.objects.create(**task_blueprint_data) + task_blueprint_obj.specifications_template.name = task_template_name + task_blueprint_obj.specifications_doc = { + "QA": { + "plots": { + "enabled": QA_enabled, + "autocorrelation": True, + "crosscorrelation": True + }, + "file_conversion": { + "enabled": QA_enabled, + "nr_of_subbands": -1, + "nr_of_timestamps": 256 + } + } + } + return task_blueprint_obj + + def test_create_sequence_of_subtask_from_task_blueprint(self): + """ + Create multiple subtasks from a task blueprint, executed in correct order. + No exception should occur, check name, type and state of the subtask + """ + task_blueprint = self.create_task_blueprint_object() + + subtask = create_observation_control_subtask_from_task_blueprint(task_blueprint) + self.assertEqual("defined", str(subtask.state)) + self.assertEqual("observationcontrol schema", str(subtask.specifications_template.name)) + self.assertEqual("observation", str(subtask.specifications_template.type)) + + # Next call requires an observation subtask already created + subtask = create_qafile_subtask_from_task_blueprint(task_blueprint) + # subtask object is None because QA file conversion is by default not enabled!!!! 
+ self.assertEqual(None, subtask) + + # Next call will fail due to no qa_files object + # ValueError: Cannot create qa_plots subtask for task_blueprint id=1 because it has no qafile subtask(s) + with self.assertRaises(SubtaskCreationException): + subtask = create_qaplots_subtask_from_task_blueprint(task_blueprint) + + # subtask = create_preprocessing_subtask_from_task_blueprint(task_blueprint) + + def test_create_sequence_of_subtask_from_task_blueprint_with_QA_enabled(self): + """ + Create multiple subtasks from a task blueprint, executed in correct order. + QA plots and QA file conversion enabled + No exception should occur, check name, type and state of the subtasks + """ + # Enable QA plot and QA conversion + task_blueprint = self.create_task_blueprint_object(QA_enabled=True) + task_blueprint_preprocessing = self.create_task_blueprint_object("preprocessing schema") + + subtask = create_observation_control_subtask_from_task_blueprint(task_blueprint) + self.assertEqual("defined", str(subtask.state)) + self.assertEqual("observationcontrol schema", str(subtask.specifications_template.name)) + self.assertEqual("observation", str(subtask.specifications_template.type)) + # Next call requires an observation subtask already created + subtask = create_qafile_subtask_from_task_blueprint(task_blueprint) + self.assertEqual("defined", str(subtask.state)) + self.assertEqual("QA file conversion", str(subtask.specifications_template.name)) + self.assertEqual("qa_files", str(subtask.specifications_template.type)) + # Next call requires an qaplots subtask already created + subtask = create_qaplots_subtask_from_task_blueprint(task_blueprint) + self.assertEqual("defined", str(subtask.state)) + self.assertEqual("QA plots", str(subtask.specifications_template.name)) + self.assertEqual("qa_plots", str(subtask.specifications_template.type)) + # TODO: check why next call failed? 
+ #subtask = create_preprocessing_subtask_from_task_blueprint(task_blueprint_preprocessing) + #self.assertEqual("defined", str(subtask.state)) + + + def test_create_subtasks_from_task_blueprint_failure_on_schema(self): + """ + Test creation failure due to unknown schema (no correlator or preprocessing schema) + Check exception + "SubtaskCreationException: Cannot create subtasks for task id=1 because no generator exists for its schema name=unknown schema" + """ + task_blueprint = self.create_task_blueprint_object("unknown schema") + with self.assertRaises(SubtaskCreationException): + create_subtasks_from_task_blueprint(task_blueprint) + + def test_create_subtasks_from_task_blueprint_succeed(self): + """ + """ + task_blueprint = self.create_task_blueprint_object(QA_enabled=True) + subtasks = create_subtasks_from_task_blueprint(task_blueprint) + self.assertEqual(3, len(subtasks)) + +# TODO Test the Schedule calls + class SubtaskInputSelectionFilteringTest(unittest.TestCase): # todo: merge in tests from TMSS-207 and deduplicate staticmethods + def setUp(self) -> None: + # make sure we're allowed to schedule + setting = Setting.objects.get(name='allow_scheduling_observations') + setting.value = True + setting.save() + @staticmethod def create_subtask_object(subtask_type_value, subtask_state_value): """ diff --git a/SAS/TMSS/test/t_specify_observation.py b/SAS/TMSS/test/t_tasks.py similarity index 90% rename from SAS/TMSS/test/t_specify_observation.py rename to SAS/TMSS/test/t_tasks.py index 2a3c16918870e30f6725e8000e08b45dbcfa57f7..66a0623f8f18ba336a87f38fea7581a90dc08fc0 100755 --- a/SAS/TMSS/test/t_specify_observation.py +++ b/SAS/TMSS/test/t_tasks.py @@ -39,7 +39,7 @@ from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator rest_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH) from lofar.sas.tmss.tmss.tmssapp import models -from lofar.sas.tmss.tmss.tmssapp.tasks import 
create_task_blueprint_from_task_draft_and_instantiate_subtasks_from_template +from lofar.sas.tmss.tmss.tmssapp.tasks import * class SpecifyObservationFromTaskDraftTest(unittest.TestCase): @@ -51,7 +51,7 @@ class SpecifyObservationFromTaskDraftTest(unittest.TestCase): """ task_draft = models.TaskDraft.objects.get(id=1) res_task_draft = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_draft/1/', 200) - task_blueprint = create_task_blueprint_from_task_draft_and_instantiate_subtasks_from_template(task_draft) + task_blueprint = create_task_blueprint_and_subtasks_and_schedule_subtasks_from_task_draft(task_draft) self.assertEqual(task_draft.name, task_blueprint.draft.name) res_task_blueprint = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_blueprint/1/', 200) self.assertEqual(len(res_task_blueprint['subtasks']), 3) @@ -59,7 +59,7 @@ class SpecifyObservationFromTaskDraftTest(unittest.TestCase): for subtask_url in res_task_blueprint['subtasks']: res_subtask = GET_and_assert_equal_expected_code(self, subtask_url, 200) state_value = GET_and_assert_equal_expected_code(self, res_subtask['state'], 200)['value'] - self.assertEqual(state_value, "defined") + # TODO not all scheduled??? 
self.assertEqual(state_value, "scheduled") if __name__ == "__main__": diff --git a/SAS/TMSS/test/t_specify_observation.run b/SAS/TMSS/test/t_tasks.run similarity index 52% rename from SAS/TMSS/test/t_specify_observation.run rename to SAS/TMSS/test/t_tasks.run index d563b37623a3f667cb891d7872bd230ed2d88f6e..72bc97ff78065397eb1e723eefc0b3b8b37d21c3 100755 --- a/SAS/TMSS/test/t_specify_observation.run +++ b/SAS/TMSS/test/t_tasks.run @@ -2,5 +2,5 @@ # Run the unit test source python-coverage.sh -python_coverage_test "*tmss*" t_specify_observation.py +python_coverage_test "*tmss*" t_tasks.py diff --git a/SAS/TMSS/test/t_tasks.sh b/SAS/TMSS/test/t_tasks.sh new file mode 100755 index 0000000000000000000000000000000000000000..49bc642cc9968d12483b524dfb6ba2224f6b083b --- /dev/null +++ b/SAS/TMSS/test/t_tasks.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_tasks \ No newline at end of file diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py b/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py index 44b17ea4944a061ae6147bfcfcf1be8a3996f5b4..132b434cc424099d19172cd131f9e27184b6bb81 100755 --- a/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py +++ b/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py @@ -702,7 +702,7 @@ class SubtaskInputTestCase(unittest.TestCase): def test_subtask_input_SET_NULL_behavior_on_task_relation_blueprint_deleted(self): # make new task_relation_blueprint instance, but reuse related data for speed task_relation_blueprint_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskRelationBlueprint(draft_url=self.task_relation_blueprint_data['draft'], template_url=self.task_relation_blueprint_data['selection_template'], - input_url=self.task_relation_blueprint_data['input'], output_url=self.task_relation_blueprint_data['output'], + input_role_url=self.task_relation_blueprint_data['input_role'], output_role_url=self.task_relation_blueprint_data['output_role'], consumer_url=self.task_relation_blueprint_data['consumer'], 
producer_url=self.task_relation_blueprint_data['producer']), '/task_relation_blueprint/') sti_test_data = test_data_creator.SubtaskInput(task_relation_blueprint_url=task_relation_blueprint_url, subtask_url=self.subtask_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, task_relation_selection_template_url=self.task_relation_selection_template_url) diff --git a/SAS/TMSS/test/t_tmssapp_specification_REST_API.py b/SAS/TMSS/test/t_tmssapp_specification_REST_API.py index d5d4d76c13cf080a7f52cb06309e347339c89052..0e76ecaf27259a099b5a0eede8431b9ae312b4ad 100755 --- a/SAS/TMSS/test/t_tmssapp_specification_REST_API.py +++ b/SAS/TMSS/test/t_tmssapp_specification_REST_API.py @@ -350,58 +350,58 @@ class TaskConnectorTestCase(unittest.TestCase): cls.output_of_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/') def test_task_connector_list_apiformat(self): - r = requests.get(BASE_URL + '/task_connector/?format=api', auth=AUTH) + r = requests.get(BASE_URL + '/task_connector_type/?format=api', auth=AUTH) self.assertEqual(r.status_code, 200) - self.assertTrue("Task Connector List" in r.content.decode('utf8')) + self.assertTrue("Task Connector Type List" in r.content.decode('utf8')) def test_task_connector_GET_nonexistant_raises_error(self): - GET_and_assert_equal_expected_code(self, BASE_URL + '/task_connector/1234321/', 404) + GET_and_assert_equal_expected_code(self, BASE_URL + '/task_connector_type/1234321/', 404) def test_task_connector_POST_and_GET(self): - tc_test_data = test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url) + tc_test_data = test_data_creator.TaskConnectorType(input_of_url=self.input_of_url, output_of_url=self.output_of_url) # POST and GET a new item and assert correctness - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', tc_test_data, 201, tc_test_data) + r_dict = 
POST_and_assert_expected_response(self, BASE_URL + '/task_connector_type/', tc_test_data, 201, tc_test_data) url = r_dict['url'] GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) def test_task_connector_POST_invalid_role_raises_error(self): # POST a new item with invalid choice - test_data_invalid_role = dict(test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) + test_data_invalid_role = dict(test_data_creator.TaskConnectorType(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) test_data_invalid_role['role'] = BASE_URL + '/role/forbidden/' - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', test_data_invalid_role, 400, {}) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector_type/', test_data_invalid_role, 400, {}) self.assertTrue('Invalid hyperlink' in str(r_dict['role'])) def test_task_connector_POST_invalid_datatype_raises_error(self): # POST a new item with invalid choice - test_data_invalid = dict(test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) + test_data_invalid = dict(test_data_creator.TaskConnectorType(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) test_data_invalid['datatype'] = BASE_URL + '/datatype/forbidden/' - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', test_data_invalid, 400, {}) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector_type/', test_data_invalid, 400, {}) self.assertTrue('Invalid hyperlink' in str(r_dict['datatype'])) def test_task_connector_POST_invalid_dataformats_raises_error(self): # POST a new item with invalid choice - test_data_invalid = dict(test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) + test_data_invalid = dict(test_data_creator.TaskConnectorType(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) 
test_data_invalid['dataformats'] = [BASE_URL + '/dataformat/forbidden/'] - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', test_data_invalid, 400, {}) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector_type/', test_data_invalid, 400, {}) self.assertTrue('Invalid hyperlink' in str(r_dict['dataformats'])) def test_task_connector_POST_nonexistant_input_of_raises_error(self): # POST a new item with wrong reference - test_data_invalid = dict(test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) + test_data_invalid = dict(test_data_creator.TaskConnectorType(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) test_data_invalid['input_of'] = BASE_URL + "/task_template/6353748/" - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', test_data_invalid, 400, {}) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector_type/', test_data_invalid, 400, {}) self.assertTrue('Invalid hyperlink' in str(r_dict['input_of'])) def test_task_connector_POST_nonexistant_output_of_raises_error(self): # POST a new item with wrong reference - test_data_invalid = dict(test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) + test_data_invalid = dict(test_data_creator.TaskConnectorType(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) test_data_invalid['output_of'] = BASE_URL + "/task_template/6353748/" - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', test_data_invalid, 400, {}) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector_type/', test_data_invalid, 400, {}) self.assertTrue('Invalid hyperlink' in str(r_dict['output_of'])) def test_task_connector_POST_existing_outputs_works(self): @@ -412,19 +412,19 @@ class TaskConnectorTestCase(unittest.TestCase): url = r_dict['url'] # POST a new item with correct 
reference - test_data_valid = dict(test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) + test_data_valid = dict(test_data_creator.TaskConnectorType(input_of_url=self.input_of_url, output_of_url=self.output_of_url)) test_data_valid['output_of'] = url - POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', test_data_valid, 201, test_data_valid) + POST_and_assert_expected_response(self, BASE_URL + '/task_connector_type/', test_data_valid, 201, test_data_valid) def test_task_connector_PUT_nonexistant_raises_error(self): - PUT_and_assert_expected_response(self, BASE_URL + '/task_connector/9876789876/', test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url), 404, {}) + PUT_and_assert_expected_response(self, BASE_URL + '/task_connector_type/9876789876/', test_data_creator.TaskConnectorType(input_of_url=self.input_of_url, output_of_url=self.output_of_url), 404, {}) def test_task_connector_PUT(self): - tc_test_data1 = test_data_creator.TaskConnector(role="correlator", input_of_url=self.input_of_url, output_of_url=self.output_of_url) - tc_test_data2 = test_data_creator.TaskConnector(role="beamformer", input_of_url=self.input_of_url, output_of_url=self.output_of_url) + tc_test_data1 = test_data_creator.TaskConnectorType(role="correlator", input_of_url=self.input_of_url, output_of_url=self.output_of_url) + tc_test_data2 = test_data_creator.TaskConnectorType(role="beamformer", input_of_url=self.input_of_url, output_of_url=self.output_of_url) # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', tc_test_data1, 201, tc_test_data1) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector_type/', tc_test_data1, 201, tc_test_data1) url = r_dict['url'] GET_OK_and_assert_equal_expected_response(self, url, tc_test_data1) @@ -433,10 +433,10 @@ class TaskConnectorTestCase(unittest.TestCase): 
GET_OK_and_assert_equal_expected_response(self, url, tc_test_data2) def test_task_connector_PATCH(self): - tc_test_data = test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url) + tc_test_data = test_data_creator.TaskConnectorType(input_of_url=self.input_of_url, output_of_url=self.output_of_url) # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', tc_test_data, 201, tc_test_data) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector_type/', tc_test_data, 201, tc_test_data) url = r_dict['url'] GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) @@ -451,10 +451,10 @@ class TaskConnectorTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_task_connector_DELETE(self): - tc_test_data = test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=self.output_of_url) + tc_test_data = test_data_creator.TaskConnectorType(input_of_url=self.input_of_url, output_of_url=self.output_of_url) # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', tc_test_data, 201, tc_test_data) + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_connector_type/', tc_test_data, 201, tc_test_data) url = r_dict['url'] GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) @@ -463,9 +463,9 @@ class TaskConnectorTestCase(unittest.TestCase): def test_task_relation_blueprint_CASCADE_behavior_on_inputs_template_deleted(self): input_of_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/') - tc_test_data = test_data_creator.TaskConnector(input_of_url=input_of_url, output_of_url=self.output_of_url) + tc_test_data = test_data_creator.TaskConnectorType(input_of_url=input_of_url, output_of_url=self.output_of_url) # POST new item - url = POST_and_assert_expected_response(self, BASE_URL + 
'/task_connector/', tc_test_data, 201, tc_test_data)['url'] + url = POST_and_assert_expected_response(self, BASE_URL + '/task_connector_type/', tc_test_data, 201, tc_test_data)['url'] # verify GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) # DELETE dependency @@ -475,9 +475,9 @@ class TaskConnectorTestCase(unittest.TestCase): def test_task_relation_blueprint_CASCADE_behavior_on_outputs_template_deleted(self): output_of_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskTemplate(), '/task_template/') - tc_test_data = test_data_creator.TaskConnector(input_of_url=self.input_of_url, output_of_url=output_of_url) + tc_test_data = test_data_creator.TaskConnectorType(input_of_url=self.input_of_url, output_of_url=output_of_url) # POST new item - url = POST_and_assert_expected_response(self, BASE_URL + '/task_connector/', tc_test_data, 201, tc_test_data)['url'] + url = POST_and_assert_expected_response(self, BASE_URL + '/task_connector_type/', tc_test_data, 201, tc_test_data)['url'] # verify GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) # DELETE dependency @@ -487,12 +487,12 @@ class TaskConnectorTestCase(unittest.TestCase): def test_GET_task_connector_view_returns_correct_entry(self): - test_data_1 = TaskConnector_test_data() - test_data_2 = TaskConnector_test_data() - id1 = models.TaskConnector.objects.create(**test_data_1).id - id2 = models.TaskConnector.objects.create(**test_data_2).id - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_connector/' + str(id1), test_data_1) - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_connector/' + str(id2), test_data_2) + test_data_1 = TaskConnectorType_test_data() + test_data_2 = TaskConnectorType_test_data() + id1 = models.TaskConnectorType.objects.create(**test_data_1).id + id2 = models.TaskConnectorType.objects.create(**test_data_2).id + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_connector_type/' + str(id1), 
test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_connector_type/' + str(id2), test_data_2) class DefaultTemplates(unittest.TestCase): @@ -676,6 +676,7 @@ class CycleTestCase(unittest.TestCase): def test_GET_cycle_list_shows_entry(self): test_data_1 = Cycle_test_data() # uuid makes name unique + test_data_1["number"] = 32000 # cycles are ordered by number, so make this the largest number, and hence the latest cycle models.Cycle.objects.create(**test_data_1) nbr_results = models.Cycle.objects.count() GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/cycle/', test_data_1, nbr_results) @@ -793,6 +794,7 @@ class ProjectTestCase(unittest.TestCase): def test_GET_project_list_shows_entry(self): test_data_1 = Project_test_data() # uuid makes name unique + test_data_1["name"] = "ZZZZZZZZZZZZZZZ" # projects are ordered by name, so make this the latest project (in sorted alphabetical order) models.Project.objects.create(**test_data_1) nbr_results = models.Project.objects.count() GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/project/', test_data_1, nbr_results) @@ -988,12 +990,15 @@ class SchedulingSetTestCase(unittest.TestCase): def test_scheduling_set_PROTECT_behavior_on_project_deleted(self): project_url = test_data_creator.post_data_and_get_url(test_data_creator.Project(), '/project/') - project_test_data = GET_and_assert_equal_expected_code(self, project_url, 200) schedulingset_test_data = test_data_creator.SchedulingSet(project_url=project_url) # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_set/', schedulingset_test_data, 201, schedulingset_test_data)['url'] # verify GET_OK_and_assert_equal_expected_response(self, url, schedulingset_test_data) + + # fetch project data before we delete it (for later comparison) + project_test_data = GET_and_assert_equal_expected_code(self, project_url, 200) + # Try to DELETE dependency, verify that was not successful # Unfortunately
we don't get a nice error in json, but a Django debug page on error 500... response = requests.delete(project_url, auth=AUTH) @@ -1389,9 +1394,10 @@ class TaskDraftTestCase(unittest.TestCase): # setup test_data_1 = TaskDraft_test_data("task draft one") + task_draft = models.TaskDraft.objects.create(**test_data_1) + trdt_test_data_1 = TaskRelationDraft_test_data() trdt_test_data_2 = TaskRelationDraft_test_data() - task_draft = models.TaskDraft.objects.create(**test_data_1) task_relation_draft_1 = models.TaskRelationDraft.objects.create(**trdt_test_data_1) task_relation_draft_1.producer = task_draft task_relation_draft_1.save() @@ -1400,8 +1406,9 @@ class TaskDraftTestCase(unittest.TestCase): task_relation_draft_2.save() # assert response_data = GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_draft/%s/' % task_draft.id, test_data_1) - assertUrlList(self, response_data['produced_by'], [task_relation_draft_1]) - assertUrlList(self, response_data['consumed_by'], [task_relation_draft_2]) + # consumed_by and produced_by might appear to be swapped, but they are actually correct. Depends on the angle you're looking at it. 
+ assertUrlList(self, response_data['consumed_by'], [task_relation_draft_1]) + assertUrlList(self, response_data['produced_by'], [task_relation_draft_2]) class TaskRelationDraftTestCase(unittest.TestCase): @@ -1410,8 +1417,8 @@ class TaskRelationDraftTestCase(unittest.TestCase): cls.producer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/') cls.consumer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/') cls.template_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskRelationSelectionTemplate(), '/task_relation_selection_template/') - cls.input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') - cls.output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') + cls.input_role_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectorType(), '/task_connector_type/') + cls.output_role_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectorType(), '/task_connector_type/') def test_task_relation_draft_list_apiformat(self): r = requests.get(BASE_URL + '/task_relation_draft/?format=api', auth=AUTH) @@ -1422,7 +1429,7 @@ class TaskRelationDraftTestCase(unittest.TestCase): GET_and_assert_equal_expected_code(self, BASE_URL + '/task_relation_draft/1234321/', 404) def test_task_relation_draft_POST_and_GET(self): - trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url) + trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url) # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + 
'/task_relation_draft/', trd_test_data, 201, trd_test_data) @@ -1430,12 +1437,12 @@ class TaskRelationDraftTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, trd_test_data) def test_task_relation_draft_PUT_invalid_raises_error(self): - trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url) + trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url) PUT_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/9876789876/', trd_test_data, 404, {}) def test_task_relation_draft_PUT(self): - trd_test_data1 = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url) - trd_test_data2 = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url) + trd_test_data1 = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url) + trd_test_data2 = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data1, 201, trd_test_data1) @@ -1447,7 +1454,7 @@ class TaskRelationDraftTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, trd_test_data2) def 
test_task_relation_draft_PATCH(self): - trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url) + trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data, 201, trd_test_data) @@ -1463,7 +1470,7 @@ class TaskRelationDraftTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_task_relation_draft_DELETE(self): - trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url) + trd_test_data = test_data_creator.TaskRelationDraft(producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data, 201, trd_test_data) @@ -1475,7 +1482,7 @@ class TaskRelationDraftTestCase(unittest.TestCase): def test_task_relation_draft_CASCADE_behavior_on_task_relation_selection_template_deleted(self): template_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskRelationSelectionTemplate(), '/task_relation_selection_template/') - trd_test_data = test_data_creator.TaskRelationDraft(template_url=template_url, producer_url=self.producer_url, consumer_url=self.consumer_url, input_url=self.input_url, output_url=self.output_url) + trd_test_data = test_data_creator.TaskRelationDraft(template_url=template_url, producer_url=self.producer_url, 
consumer_url=self.consumer_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url) # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', trd_test_data, 201, trd_test_data)['url'] @@ -1491,7 +1498,7 @@ class TaskRelationDraftTestCase(unittest.TestCase): def test_task_relation_draft_CASCADE_behavior_on_producer_deleted(self): producer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/') - trd_test_data = test_data_creator.TaskRelationDraft(producer_url=producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url) + trd_test_data = test_data_creator.TaskRelationDraft(producer_url=producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url) # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', @@ -1508,7 +1515,7 @@ class TaskRelationDraftTestCase(unittest.TestCase): def test_task_relation_draft_CASCADE_behavior_on_consumer_deleted(self): consumer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/') - trd_test_data = test_data_creator.TaskRelationDraft(consumer_url=consumer_url, producer_url=self.producer_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url) + trd_test_data = test_data_creator.TaskRelationDraft(consumer_url=consumer_url, producer_url=self.producer_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url) # POST new item with dependency url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', @@ -1524,8 +1531,8 @@ class TaskRelationDraftTestCase(unittest.TestCase): GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_draft_CASCADE_behavior_on_input_deleted(self): - input_url 
= test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') - trd_test_data = test_data_creator.TaskRelationDraft(input_url=input_url, producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, output_url=self.output_url) + input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectorType(), '/task_connector_type/') + trd_test_data = test_data_creator.TaskRelationDraft(input_role_url=input_url, producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, output_role_url=self.output_role_url) # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', @@ -1541,8 +1548,8 @@ class TaskRelationDraftTestCase(unittest.TestCase): GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_draft_CASCADE_behavior_on_output_deleted(self): - output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') - trd_test_data = test_data_creator.TaskRelationDraft(output_url=output_url, producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_url=self.input_url) + output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectorType(), '/task_connector_type/') + trd_test_data = test_data_creator.TaskRelationDraft(output_role_url=output_url, producer_url=self.producer_url, consumer_url=self.consumer_url, template_url=self.template_url, input_role_url=self.input_role_url) # POST new item with dependency url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_draft/', @@ -1945,8 +1952,8 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): cls.producer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/') cls.consumer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/') 
cls.template_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskRelationSelectionTemplate(), '/task_relation_selection_template/') - cls.input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') - cls.output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') + cls.input_role_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectorType(), '/task_connector_type/') + cls.output_role_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectorType(), '/task_connector_type/') def test_task_relation_blueprint_list_apiformat(self): r = requests.get(BASE_URL + '/task_relation_blueprint/?format=api', auth=AUTH) @@ -1957,7 +1964,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): GET_and_assert_equal_expected_code(self, BASE_URL + '/task_relation_blueprint/1234321/', 404) def test_task_relation_blueprint_POST_and_GET(self): - trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) + trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url, consumer_url=self.consumer_url, producer_url=self.producer_url) # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data, 201, trb_test_data) @@ -1965,12 +1972,12 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, trb_test_data) def test_task_relation_blueprint_PUT_invalid_raises_error(self): - trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, 
input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) + trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url, consumer_url=self.consumer_url, producer_url=self.producer_url) PUT_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/9876789876/', trb_test_data, 404, {}) def test_task_relation_blueprint_PUT(self): - trb_test_data1 = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) - trb_test_data2 = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) + trb_test_data1 = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url, consumer_url=self.consumer_url, producer_url=self.producer_url) + trb_test_data2 = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url, consumer_url=self.consumer_url, producer_url=self.producer_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data1, 201, trb_test_data1) @@ -1982,7 +1989,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, trb_test_data2) def test_task_relation_blueprint_PATCH(self): - trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, 
output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) + trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url, consumer_url=self.consumer_url, producer_url=self.producer_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data, 201, trb_test_data) @@ -1998,7 +2005,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_task_relation_blueprint_DELETE(self): - trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) + trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url, consumer_url=self.consumer_url, producer_url=self.producer_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data, 201, trb_test_data) @@ -2009,7 +2016,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): DELETE_and_assert_gone(self, url) def test_task_relation_blueprint_prevents_missing_selection_template(self): - trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) + trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url, consumer_url=self.consumer_url, producer_url=self.producer_url) 
# test data test_data = dict(trb_test_data) @@ -2020,7 +2027,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): self.assertTrue('This field may not be null' in str(r_dict['selection_template'])) def test_task_relation_blueprint_prevents_missing_draft(self): - trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) + trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url, consumer_url=self.consumer_url, producer_url=self.producer_url) # test data test_data = dict(trb_test_data) @@ -2031,7 +2038,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): self.assertTrue('This field may not be null' in str(r_dict['draft'])) def test_task_relation_blueprint_prevents_missing_producer(self): - trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) + trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url, consumer_url=self.consumer_url, producer_url=self.producer_url) # test data test_data = dict(trb_test_data) @@ -2042,7 +2049,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): self.assertTrue('This field may not be null' in str(r_dict['producer'])) def test_task_relation_blueprint_prevents_missing_consumer(self): - trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) + 
trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url, consumer_url=self.consumer_url, producer_url=self.producer_url) # test data test_data = dict(trb_test_data) @@ -2053,30 +2060,30 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): self.assertTrue('This field may not be null' in str(r_dict['consumer'])) def test_task_relation_blueprint_prevents_missing_input(self): - trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) + trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url, consumer_url=self.consumer_url, producer_url=self.producer_url) # test data test_data = dict(trb_test_data) - test_data['input'] = None + test_data['input_role'] = None # POST invalid data and assert response r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', test_data, 400, {}) - self.assertTrue('This field may not be null' in str(r_dict['input'])) + self.assertTrue('This field may not be null' in str(r_dict['input_role'])) def test_task_relation_blueprint_prevents_missing_output(self): - trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) + trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url, consumer_url=self.consumer_url, producer_url=self.producer_url) # test data test_data = dict(trb_test_data) 
- test_data['output'] = None + test_data['output_role'] = None # POST invalid data and assert response r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', test_data, 400, {}) - self.assertTrue('This field may not be null' in str(r_dict['output'])) + self.assertTrue('This field may not be null' in str(r_dict['output_role'])) def test_task_relation_blueprint_CASCADE_behavior_on_task_relation_selection_template_deleted(self): template_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskRelationSelectionTemplate(), '/task_relation_selection_template/') - trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) + trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url, consumer_url=self.consumer_url, producer_url=self.producer_url) # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', trb_test_data, 201, trb_test_data)['url'] @@ -2092,7 +2099,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): def test_task_relation_blueprint_CASCADE_behavior_on_producer_deleted(self): producer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/') - trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=producer_url) + trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url, consumer_url=self.consumer_url, producer_url=producer_url) # POST new item url = 
POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', @@ -2109,7 +2116,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): def test_task_relation_blueprint_CASCADE_behavior_on_consumer_deleted(self): consumer_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/') - trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=self.output_url, consumer_url=consumer_url, producer_url=self.producer_url) + trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=self.output_role_url, consumer_url=consumer_url, producer_url=self.producer_url) # POST new item with dependency url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', @@ -2125,8 +2132,8 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_blueprint_CASCADE_behavior_on_input_deleted(self): - input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') - trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=input_url, output_url=self.output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) + input_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectorType(), '/task_connector_type/') + trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=input_url, output_role_url=self.output_role_url, consumer_url=self.consumer_url, producer_url=self.producer_url) # POST new item url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', @@ -2142,8 +2149,8 @@ class 
TaskRelationBlueprintTestCase(unittest.TestCase): GET_and_assert_equal_expected_code(self, url, 404) def test_task_relation_blueprint_CASCADE_behavior_on_output_deleted(self): - output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnector(), '/task_connector/') - trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_url=self.input_url, output_url=output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) + output_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskConnectorType(), '/task_connector_type/') + trb_test_data = test_data_creator.TaskRelationBlueprint(draft_url=self.draft_url, template_url=self.template_url, input_role_url=self.input_role_url, output_role_url=output_url, consumer_url=self.consumer_url, producer_url=self.producer_url) # POST new item with dependency url = POST_and_assert_expected_response(self, BASE_URL + '/task_relation_blueprint/', diff --git a/SAS/TMSS/test/t_tmssapp_specification_django_API.py b/SAS/TMSS/test/t_tmssapp_specification_django_API.py index 8922cb8c4ca8cb7dc95dcca262b879c1dfe6b3ea..e994df895e9f5167535d8981ce9ab552bc3cd69b 100755 --- a/SAS/TMSS/test/t_tmssapp_specification_django_API.py +++ b/SAS/TMSS/test/t_tmssapp_specification_django_API.py @@ -178,22 +178,22 @@ class TaskConnectorTest(unittest.TestCase): def test_POST_TaskConnector_prevents_missing_input_of(self): # setup - test_data_1 = dict(TaskConnector_test_data()) + test_data_1 = dict(TaskConnectorType_test_data()) test_data_1['input_of'] = None # assert with self.assertRaises(IntegrityError): - models.TaskConnector.objects.create(**test_data_1) + models.TaskConnectorType.objects.create(**test_data_1) def test_POST_TaskConnector_prevents_missing_output_of(self): # setup - test_data_1 = dict(TaskConnector_test_data()) + test_data_1 = dict(TaskConnectorType_test_data()) test_data_1['output_of'] = None # assert with 
self.assertRaises(IntegrityError): - models.TaskConnector.objects.create(**test_data_1) + models.TaskConnectorType.objects.create(**test_data_1) class CycleTest(unittest.TestCase): @@ -723,7 +723,7 @@ class TaskRelationBlueprintTest(unittest.TestCase): def test_TaskRelationBlueprint_prevents_missing_input(self): # setup test_data = dict(TaskRelationBlueprint_test_data()) - test_data['input'] = None + test_data['input_role'] = None # assert with self.assertRaises(IntegrityError): @@ -732,7 +732,7 @@ class TaskRelationBlueprintTest(unittest.TestCase): def test_TaskRelationBlueprint_prevents_missing_output(self): # setup test_data = dict(TaskRelationBlueprint_test_data()) - test_data['output'] = None + test_data['output_role'] = None # assert with self.assertRaises(IntegrityError): diff --git a/SAS/TMSS/test/tmss_test_data_django_models.py b/SAS/TMSS/test/tmss_test_data_django_models.py index b71275873b5d4ab01f336e7077b77b299f6cacca..2a4e2a0b8644cc7c0b53f90d7d46b19818ce2270 100644 --- a/SAS/TMSS/test/tmss_test_data_django_models.py +++ b/SAS/TMSS/test/tmss_test_data_django_models.py @@ -81,7 +81,7 @@ def TaskRelationSelectionTemplate_test_data(name="my_TaskRelationSelectionTempla "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} -def TaskConnector_test_data() -> dict: +def TaskConnectorType_test_data() -> dict: return {"role": models.Role.objects.get(value='calibrator'), "datatype": models.Datatype.objects.get(value='instrument model'), "output_of": models.TaskTemplate.objects.create(**TaskTemplate_test_data()), @@ -132,16 +132,22 @@ def ProjectQuota_test_data() -> dict: "resource_type": models.ResourceType.objects.create(**ResourceType_test_data()) } -def SchedulingSet_test_data(name="my_scheduling_set") -> dict: +def SchedulingSet_test_data(name="my_scheduling_set", project: models.Project=None) -> dict: + if project is None: + project = models.Project.objects.create(**Project_test_data()) + return {"name": name, "description": "", "tags": [], 
"generator_doc": {}, - "project": models.Project.objects.create(**Project_test_data()), + "project": project, "generator_template": models.GeneratorTemplate.objects.create(**GeneratorTemplate_test_data()), "generator_source": None} -def SchedulingUnitDraft_test_data(name="my_scheduling_unit_draft") -> dict: +def SchedulingUnitDraft_test_data(name="my_scheduling_unit_draft", scheduling_set: models.SchedulingSet=None) -> dict: + if scheduling_set is None: + scheduling_set = models.SchedulingSet.objects.create(**SchedulingSet_test_data()) + return {"name": name, "description": "", "tags": [], @@ -149,7 +155,7 @@ def SchedulingUnitDraft_test_data(name="my_scheduling_unit_draft") -> dict: "copy_reason": models.CopyReason.objects.get(value='template'), "generator_instance_doc": "para", "copies": None, - "scheduling_set": models.SchedulingSet.objects.create(**SchedulingSet_test_data()), + "scheduling_set": scheduling_set, "requirements_template": models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data())} def TaskDraft_test_data(name: str="my_task_draft", specifications_template: models.TaskTemplate=None, scheduling_unit_draft: models.SchedulingUnitDraft=None) -> dict: @@ -180,8 +186,8 @@ def TaskRelationDraft_test_data(producer: models.TaskDraft = None, consumer: mod "dataformat": models.Dataformat.objects.get(value='Beamformed'), "producer": producer, "consumer": consumer, - "input": models.TaskConnector.objects.create(**TaskConnector_test_data()), - "output": models.TaskConnector.objects.create(**TaskConnector_test_data()), + "input_role": models.TaskConnectorType.objects.create(**TaskConnectorType_test_data()), + "output_role": models.TaskConnectorType.objects.create(**TaskConnectorType_test_data()), "selection_template": models.TaskRelationSelectionTemplate.objects.create(**TaskRelationSelectionTemplate_test_data())} def SchedulingUnitBlueprint_test_data(name='my_scheduling_unit_blueprint') -> dict: @@ -216,8 +222,8 @@ def 
TaskRelationBlueprint_test_data(producer: models.TaskBlueprint = None, consu return {"tags": [], "selection_doc": {}, "dataformat": models.Dataformat.objects.get(value='Beamformed'), - "input": models.TaskConnector.objects.create(**TaskConnector_test_data()), - "output": models.TaskConnector.objects.create(**TaskConnector_test_data()), + "input_role": models.TaskConnectorType.objects.create(**TaskConnectorType_test_data()), + "output_role": models.TaskConnectorType.objects.create(**TaskConnectorType_test_data()), "draft": models.TaskRelationDraft.objects.create(**TaskRelationDraft_test_data()), "selection_template": models.TaskRelationSelectionTemplate.objects.create(**TaskRelationSelectionTemplate_test_data()), "producer": producer, @@ -322,7 +328,7 @@ def Subtask_test_data(task_blueprint: models.TaskBlueprint=None, subtask_templat def Dataproduct_test_data(producer: models.SubtaskOutput=None, filename: str="my_file.ext", - directory: str="CEP4:/data/test-projects", + directory: str="/data/test-projects", dataformat: models.Dataformat=None, specifications_doc: object=None) -> dict: diff --git a/SAS/TMSS/test/tmss_test_data_rest.py b/SAS/TMSS/test/tmss_test_data_rest.py index 3bc7f4471c649b768efcb15157abaf177a6c8597..79a864a2de4bbefb2cdb5836149a122065b89139 100644 --- a/SAS/TMSS/test/tmss_test_data_rest.py +++ b/SAS/TMSS/test/tmss_test_data_rest.py @@ -31,13 +31,21 @@ class TMSSRESTTestDataCreator(): self.django_api_url = django_api_url self.auth = auth + def get_response_as_json_object(self, url): + """GET the given data the self.django_api_url+url_postfix, and return the response""" + return json.loads(requests.get(url, auth=self.auth).content.decode('utf-8')) + def post_data_and_get_response(self, data, url_postfix): """POST the given data the self.django_api_url+url_postfix, and return the response""" return requests.post(self.django_api_url + url_postfix, json=data, auth=self.auth) + def post_data_and_get_response_as_json_object(self, data, url_postfix): + 
"""POST the given data the self.django_api_url+url_postfix, and return the response""" + return json.loads(self.post_data_and_get_response(data, url_postfix).content.decode('utf-8')) + def post_data_and_get_url(self, data, url_postfix): """POST the given data the self.django_api_url+url_postfix, and return the response's url""" - return json.loads(self.post_data_and_get_response(data, url_postfix).content.decode('utf-8'))['url'] + return self.post_data_and_get_response_as_json_object(data, url_postfix)['url'] ####################################################### # the methods below can be used to create test data @@ -87,7 +95,7 @@ class TMSSRESTTestDataCreator(): "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} - def TaskConnector(self, role="correlator", input_of_url=None, output_of_url=None): + def TaskConnectorType(self, role="correlator", input_of_url=None, output_of_url=None): if input_of_url is None: input_of_url = self.post_data_and_get_url(self.TaskTemplate(), '/task_template/') @@ -213,7 +221,7 @@ class TMSSRESTTestDataCreator(): 'consumed_by': []} - def TaskRelationDraft(self, producer_url=None, consumer_url=None, template_url=None, input_url=None, output_url=None): + def TaskRelationDraft(self, producer_url=None, consumer_url=None, template_url=None, input_role_url=None, output_role_url=None): if producer_url is None: producer_url = self.post_data_and_get_url(self.TaskDraft(), '/task_draft/') @@ -223,19 +231,19 @@ class TMSSRESTTestDataCreator(): if template_url is None: template_url = self.post_data_and_get_url(self.TaskRelationSelectionTemplate(), '/task_relation_selection_template/') - if input_url is None: - input_url = self.post_data_and_get_url(self.TaskConnector(), '/task_connector/') + if input_role_url is None: + input_role_url = self.post_data_and_get_url(self.TaskConnectorType(), '/task_connector_type/') - if output_url is None: - output_url = self.post_data_and_get_url(self.TaskConnector(), '/task_connector/') + if 
output_role_url is None: + output_role_url = self.post_data_and_get_url(self.TaskConnectorType(), '/task_connector_type/') return {"tags": [], "selection_doc": "{}", "dataformat": self.django_api_url + "/dataformat/Beamformed/", "producer": producer_url, "consumer": consumer_url, - "input": input_url, - "output": output_url, + "input_role": input_role_url, + "output_role": output_role_url, "selection_template": template_url, 'related_task_relation_blueprint': []} @@ -253,7 +261,7 @@ class TMSSRESTTestDataCreator(): "do_cancel": False, "draft": scheduling_unit_draft_url, "requirements_template": template_url, - "task_blueprints":[]} + "task_blueprints": []} def TaskBlueprint(self, name="my_TaskBlueprint", draft_url=None, template_url=None, scheduling_unit_blueprint_url=None): if draft_url is None: @@ -277,7 +285,7 @@ class TMSSRESTTestDataCreator(): "produced_by": [], "consumed_by": []} - def TaskRelationBlueprint(self, draft_url=None, template_url=None, input_url=None, output_url=None, consumer_url=None, producer_url=None): + def TaskRelationBlueprint(self, draft_url=None, template_url=None, input_role_url=None, output_role_url=None, consumer_url=None, producer_url=None): if draft_url is None: draft_url = self.post_data_and_get_url(self.TaskRelationDraft(), '/task_relation_draft/') @@ -290,18 +298,18 @@ class TMSSRESTTestDataCreator(): if template_url is None: template_url = self.post_data_and_get_url(self.TaskRelationSelectionTemplate(), '/task_relation_selection_template/') - if input_url is None: - input_url = self.post_data_and_get_url(self.TaskConnector(), '/task_connector/') + if input_role_url is None: + input_role_url = self.post_data_and_get_url(self.TaskConnectorType(), '/task_connector_type/') - if output_url is None: - output_url = self.post_data_and_get_url(self.TaskConnector(), '/task_connector/') + if output_role_url is None: + output_role_url = self.post_data_and_get_url(self.TaskConnectorType(), '/task_connector_type/') # test data return {"tags": 
[], "selection_doc": "{}", "dataformat": self.django_api_url + '/dataformat/MeasurementSet/', - "input": input_url, - "output": output_url, + "input_role": input_role_url, + "output_role": output_role_url, "draft": draft_url, "selection_template": template_url, "producer": producer_url, @@ -393,7 +401,7 @@ class TMSSRESTTestDataCreator(): return {"subtask": subtask_url, "tags": []} - def Dataproduct(self, filename="my_filename", specifications_template_url=None, subtask_output_url=None, dataproduct_feedback_template_url=None): + def Dataproduct(self, filename="my_filename", directory="/tmp/", specifications_template_url=None, subtask_output_url=None, dataproduct_feedback_template_url=None, dataformat="MeasurementSet"): if specifications_template_url is None: specifications_template_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/dataproduct_specifications_template/') @@ -403,16 +411,16 @@ class TMSSRESTTestDataCreator(): if dataproduct_feedback_template_url is None: dataproduct_feedback_template_url = self.post_data_and_get_url(self.DataproductFeedbackTemplate(), '/dataproduct_feedback_template/') - return {"filename": "my.file", - "directory": "/home/boskabouter/", - "dataformat": self.django_api_url + '/dataformat/Beamformed/', + return {"filename": filename, + "directory": directory, + "dataformat": "%s/dataformat/%s/" % (self.django_api_url, dataformat), "deleted_since": None, "pinned_since": None, "specifications_doc": "{}", "specifications_template": specifications_template_url, "tags": ["TMSS", "TESTING"], "producer": subtask_output_url, - "do_cancel": datetime.utcnow().isoformat(), + "do_cancel": None, "expected_size": 1234, "size": 123, "feedback_doc": "{}", @@ -472,7 +480,6 @@ class TMSSRESTTestDataCreator(): if dataproduct_urls is None: dataproduct_urls = [self.post_data_and_get_url(self.Dataproduct(), '/dataproduct/'), self.post_data_and_get_url(self.Dataproduct(), '/dataproduct/')] - if subtask_output_url is None: subtask_output_url = 
self.post_data_and_get_url(self.SubtaskOutput(), '/subtask_output/') diff --git a/SAS/TMSS/test/tmss_test_environment_unittest_setup.py b/SAS/TMSS/test/tmss_test_environment_unittest_setup.py index 269771a1b2e3df2111cb007d4a72b9708d2d254a..98375bb80e3b66b19320ef3c129d4757f1bbc7b6 100644 --- a/SAS/TMSS/test/tmss_test_environment_unittest_setup.py +++ b/SAS/TMSS/test/tmss_test_environment_unittest_setup.py @@ -90,7 +90,7 @@ def _call_API_and_assert_expected_response(test_instance, url, call, data, expec value = value.replace(' ', '%20') test_instance.assertTrue(str(value) in r_dict[key]) elif type(value) is list: - test_instance.assertEqual(sorted(value), sorted(r_dict[key])) # compare lists independent of ordering + test_instance.assertEqual(sorted(value), sorted(r_dict[key]), msg="lists differ for key=%s"%key) # compare lists independent of ordering else: test_instance.assertEqual(value, r_dict[key]) return r_dict