diff --git a/CMake/FindBoost.cmake b/CMake/FindBoost.cmake index 7591d7fd009eda4096be60058155b5ea7baeeee3..ea39e0aecac0fb98b90b8722641dd7c680b0f914 100644 --- a/CMake/FindBoost.cmake +++ b/CMake/FindBoost.cmake @@ -79,6 +79,21 @@ if("${Boost_FIND_COMPONENTS}" MATCHES "python" AND NOT "${Boost_FIND_COMPONENTS} else(PYTHON_FOUND) message(SEND_ERROR "boost-python was requested but python was not found.") endif(PYTHON_FOUND) +else("${Boost_FIND_COMPONENTS}" MATCHES "python" AND NOT "${Boost_FIND_COMPONENTS}" MATCHES "python3") + if("${Boost_FIND_COMPONENTS}" MATCHES "python") + find_package(Python) + if(PYTHON_FOUND) + if(PYTHON_VERSION_MINOR EQUAL 8) + if(EXISTS "/etc/debian_version") + # ubuntu 20.04 comes with python3.8 and boost1.71 + string(REPLACE "python3" "python" Boost_FIND_COMPONENTS "${Boost_FIND_COMPONENTS}") + set(Boost_FIND_REQUIRED_python TRUE) + endif(EXISTS "/etc/debian_version") + endif(PYTHON_VERSION_MINOR EQUAL 8) + else(PYTHON_FOUND) + message(SEND_ERROR "boost-python was requested but python was not found.") + endif(PYTHON_FOUND) + endif("${Boost_FIND_COMPONENTS}" MATCHES "python") endif("${Boost_FIND_COMPONENTS}" MATCHES "python" AND NOT "${Boost_FIND_COMPONENTS}" MATCHES "python3") # Call the "real" FindBoost module. diff --git a/LCS/PyCommon/CMakeLists.txt b/LCS/PyCommon/CMakeLists.txt index 4f82276090fd9bbcdc898ee21dc203390a238baa..2ab093021552dc5a10dcd660e127dd67f0be96b4 100644 --- a/LCS/PyCommon/CMakeLists.txt +++ b/LCS/PyCommon/CMakeLists.txt @@ -29,6 +29,7 @@ set(_py_files subprocess_utils.py xmlparse.py json_utils.py + locking.py test_utils.py) python_install(${_py_files} DESTINATION lofar/common) diff --git a/LCS/PyCommon/cep4_utils.py b/LCS/PyCommon/cep4_utils.py index 8489ec3e52b410b030ccf8e1d4ed66122a9ab109..5326fd90ce8c351f8858cfd4e71a9843aa33e996 100755 --- a/LCS/PyCommon/cep4_utils.py +++ b/LCS/PyCommon/cep4_utils.py @@ -317,6 +317,13 @@ def get_cep4_node_with_lowest_load(max_normalized_load=0.33, partition: str=SLUR if not nodes: # if not, then just query all up and running nodes nodes = get_cep4_up_and_running_nodes(partition=partition) + if not nodes: # still no nodes? return None + return None + + if len(nodes) == 1: + # no need to sort + return nodes[0] + node_nrs = get_cep4_available_nodes_sorted_ascending_by_load(max_normalized_load=max_normalized_load, min_nr_of_nodes=1, node_nrs=nodes, diff --git a/LCS/PyCommon/locking.py b/LCS/PyCommon/locking.py new file mode 100644 index 0000000000000000000000000000000000000000..de3e0f2810b603e05acc80aa5aed32638e4de248 --- /dev/null +++ b/LCS/PyCommon/locking.py @@ -0,0 +1,247 @@ +''' + Copyright (c) 2016, 2017 Timothy Savannah All Rights Reserved under terms of LGPLv3. + You should have received a copy of this as LICENSE with the source distribution, or it is always available at + http://www.gnu.org/licenses/lgpl-3.0.en.html + + See https://github.com/kata198/NamedAtomicLock for latest version + + NamedAtomicLock - A Named atomic lock local to the machine + +''' +# vim: set ts=4 sw=4 expandtab : + + +import os +import tempfile +import time + + +__all__ = ('NamedAtomicLock',) + +__version__ = '1.1.3' + +__version_tuple__ = (1, 1, 3) + +DEFAULT_POLL_TIME = .1 + +try: + FileNotFoundError +except: + FileNotFoundError = OSError + +class NamedAtomicLock(object): + + def __init__(self, name, lockDir=None, maxLockAge=None): + ''' + NamedAtomicLock - Create a NamedAtomicLock. + This uses a named directory, which is defined by POSIX as an atomic operation. 
+ + @param name <str> - The lock name, Cannot contain directory seperator (like '/') + + @param lockDir <None/str> - Directory in which to store locks. Defaults to tempdir + + @param maxLockAge <None/float> - Maximum number of seconds lock can be held before it is considered "too old" and fair game to be taken. + You should likely define this as a reasonable number, maybe 4x as long as you think the operation will take, so that the lock doesn't get + held by a dead process. + + ''' + self.name = name + self.maxLockAge = maxLockAge + + if os.sep in name: + raise ValueError('Name cannot contain "%s"' %(os.sep,)) + + if lockDir: + if lockDir[-1] == os.sep: + lockDir = lockDir[:-1] + if not lockDir: + raise ValueError('lockDir cannot be ' + os.sep) + else: + lockDir = tempfile.gettempdir() + + self.lockDir = lockDir + + if not os.path.isdir(lockDir): + raise ValueError('lockDir %s either does not exist or is not a directory.' %(lockDir,)) + + if not os.access(lockDir, os.W_OK): + raise ValueError('Cannot write to lock directory: %s' %(lockDir,)) + self.lockPath = lockDir + os.sep + name + + self.held = False + self.acquiredAt = None + + def __enter__(self): + '''acquire the lock''' + try: + self.acquire() + except Exception as e: + self.release() + raise + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + '''release the lock''' + self.release() + + def acquire(self, timeout=None): + ''' + acquire - Acquire given lock. Can be blocking or nonblocking by providing a timeout. + Returns "True" if you got the lock, otherwise "False" + + @param timeout <None/float> - Max number of seconds to wait, or None to block until we can acquire it. + + @return <bool> - True if you got the lock, otherwise False. + ''' + if self.held is True: + # NOTE: Without some type of in-directory marker (like a uuid) we cannot + # refresh an expired lock accurately + if os.path.exists(self.lockPath): + return True + # Someone removed our lock + self.held = False + + # If we aren't going to poll at least 5 times, give us a smaller interval + if timeout: + if timeout / 5.0 < DEFAULT_POLL_TIME: + pollTime = timeout / 10.0 + else: + pollTime = DEFAULT_POLL_TIME + + endTime = time.time() + timeout + keepGoing = lambda : bool(time.time() < endTime) + else: + pollTime = DEFAULT_POLL_TIME + keepGoing = lambda : True + + + + success = False + while keepGoing(): + try: + os.mkdir(self.lockPath) + success = True + break + except: + time.sleep(pollTime) + if self.maxLockAge: + if os.path.exists(self.lockPath) and os.stat(self.lockPath).st_mtime < time.time() - self.maxLockAge: + try: + os.rmdir(self.lockPath) + except: + # If we did not remove the lock, someone else is at the same point and contending. Let them win. + time.sleep(pollTime) + + if success is True: + self.acquiredAt = time.time() + + self.held = success + return success + + def release(self, forceRelease=False): + ''' + release - Release the lock. + + @param forceRelease <bool> default False - If True, will release the lock even if we don't hold it. 
+ + @return - True if lock is released, otherwise False + ''' + if not self.held: + if forceRelease is False: + return False # We were not holding the lock + else: + self.held = True # If we have force release set, pretend like we held its + + if not os.path.exists(self.lockPath): + self.held = False + self.acquiredAt = None + return True + + if forceRelease is False: + # We waited too long and lost the lock + if self.maxLockAge and time.time() > self.acquiredAt + self.maxLockAge: + self.held = False + self.acquiredAt = None + return False + + self.acquiredAt = None + + try: + os.rmdir(self.lockPath) + self.held = False + return True + except: + self.held = False + return False + + + def __checkExpiration(self, mtime=None): + ''' + __checkExpiration - Check if we have expired + + @param mtime <int> - Optional mtime if known, otherwise will be gathered + + @return <bool> - True if we did expire, otherwise False + ''' + if not self.maxLockAge: + return False + + if mtime is None: + try: + mtime = os.stat(self.lockPath).st_mtime + except FileNotFoundError as e: + return False + + if mtime < time.time() - self.maxLockAge: + return True + + return False + + @property + def isHeld(self): + ''' + isHeld - True if anyone holds the lock, otherwise False. + + @return bool - If lock is held by anyone + ''' + if not os.path.exists(self.lockPath): + return False + + try: + mtime = os.stat(self.lockPath).st_mtime + except FileNotFoundError as e: + return False + + if self.__checkExpiration(mtime): + return False + + return True + + @property + def hasLock(self): + ''' + hasLock - Property, returns True if we have the lock, or False if we do not. + + @return <bool> - True/False if we have the lock or not. + ''' + # If we don't hold it currently, return False + if self.held is False: + return False + + # Otherwise if we think we hold it, but it is not held, we have lost it. + if not self.isHeld: + self.acquiredAt = None + self.held = False + return False + + # Check if we expired + if self.__checkExpiration(self.acquiredAt): + self.acquiredAt = None + self.held = False + return False + + + return True + + +# vim: set ts=4 sw=4 expandtab : diff --git a/LCS/PyCommon/test/postgres.py b/LCS/PyCommon/test/postgres.py index 6ff9dc07e9f2bfdd4a811547e956cac24de87baf..f98092c3d7fd2a42f745c20ff59b2703bdb6cb43 100755 --- a/LCS/PyCommon/test/postgres.py +++ b/LCS/PyCommon/test/postgres.py @@ -30,12 +30,16 @@ from lofar.common.dbcredentials import Credentials from lofar.common.postgres import PostgresDatabaseConnection from lofar.common.testing.dbcredentials import TemporaryCredentials from lofar.common.util import find_free_port +from datetime import datetime, timedelta + +from lofar.common.locking import NamedAtomicLock class PostgresTestDatabaseInstance(): ''' A helper class which instantiates a running postgres server (not interfering with any other test/production postgres servers) Best used in a 'with'-context so the server is destroyed automagically. Derive your own sub-class and implement apply_database_schema with your own sql schema to setup your type of database. 
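For orientation, a minimal usage sketch of the NamedAtomicLock added above in LCS/PyCommon/locking.py; the lock name and timeout are illustrative, and only the acquire/release/with API shown in that module is assumed. This is the pattern the _named_lock class attribute below relies on to serialize concurrent test-database start-ups on one machine.

    from lofar.common.locking import NamedAtomicLock

    lock = NamedAtomicLock('my_test_lock')      # hypothetical lock name
    with lock:                                  # blocks until the named lock directory can be created atomically
        pass                                    # work that must not run concurrently on this machine

    if lock.acquire(timeout=10):                # non-blocking variant: give up after ~10 seconds
        try:
            pass                                # critical section
        finally:
            lock.release()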
''' + _named_lock = NamedAtomicLock('PostgresTestDatabaseInstance') def __init__(self, user: str = 'test_user', preferred_port: int=5444) -> None: self._postgresql = None @@ -68,22 +72,30 @@ class PostgresTestDatabaseInstance(): '''instantiate the isolated postgres server''' logger.info('creating test-database instance...') - factory = testing.postgresql.PostgresqlFactory(cache_initialized_db=True) - factory.settings['port'] = find_free_port(self.tmp_creds.dbcreds.port) - self._postgresql = factory() - - # fill credentials with the dynamically created postgress instance (e.g. port changes for each time) - dsn = self._postgresql.dsn() - self.tmp_creds.dbcreds.host = dsn['host'] - self.tmp_creds.dbcreds.database = dsn['database'] - self.tmp_creds.dbcreds.port = dsn['port'] - self.tmp_creds.create() - - # make the user known in the new test database - self._create_superuser(dsn) - - logger.info('Applying test-database schema...') - self.apply_database_schema() + with self._named_lock: + start_time = datetime.utcnow() + while datetime.utcnow()-start_time < timedelta(minutes=1): + try: + factory = testing.postgresql.PostgresqlFactory(cache_initialized_db=True) + factory.settings['port'] = find_free_port(self.tmp_creds.dbcreds.port) + self._postgresql = factory() + + # fill credentials with the dynamically created postgres instance (e.g. the port changes each time) + dsn = self._postgresql.dsn() + self.tmp_creds.dbcreds.host = dsn['host'] + self.tmp_creds.dbcreds.database = dsn['database'] + self.tmp_creds.dbcreds.port = dsn['port'] + self.tmp_creds.create() + + # make the user known in the new test database + self._create_superuser(dsn) + + logger.info('Applying test-database schema...') + self.apply_database_schema() + return + except Exception as e: + logger.warning("%s could not be started, retrying with next free port. Error: %s %s", self.__class__.__name__, e.__class__.__name__, e) + raise TimeoutError("%s could not be started within 60 seconds. bailing out..."
% self.__class__.__name__) def _create_superuser(self, dsn): try: diff --git a/MAC/Services/CMakeLists.txt b/MAC/Services/CMakeLists.txt index fdfc8c9ab299ee4bb3cc29cfc444a9bcc342315d..5a28a358a0b37cc84a27a1bdaa1a8b4c12eeda3d 100644 --- a/MAC/Services/CMakeLists.txt +++ b/MAC/Services/CMakeLists.txt @@ -1,6 +1,6 @@ # $Id$ -lofar_package(MAC_Services 1.0 DEPENDS PyMessaging OTDB_Services pyparameterset Docker ResourceAssignmentService TBBService) +lofar_package(MAC_Services 1.0 DEPENDS PyMessaging OTDB_Services pyparameterset Docker ResourceAssignmentService TBBService TMSSClient) add_subdirectory(src) add_subdirectory(test) diff --git a/MAC/Services/src/PipelineControl.py b/MAC/Services/src/PipelineControl.py index eca0cc35376b26c3d248716aefd8ed1fc1b63600..9c2f7b047bc3c565ef2e2dbe871f0e1ca10d5d8c 100755 --- a/MAC/Services/src/PipelineControl.py +++ b/MAC/Services/src/PipelineControl.py @@ -74,7 +74,7 @@ from lofar.common import isProductionEnvironment from lofar.common.subprocess_utils import communicate_returning_strings from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC from lofar.sas.otdb.config import DEFAULT_OTDB_NOTIFICATION_SUBJECT -from lofar.sas.tmss.util import TMSSsession +from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession from lofar.sas.tmss.client.tmssbuslistener import TMSSSubTaskEventMessageHandler, TMSSSubTaskBusListener import subprocess @@ -83,7 +83,6 @@ import os import re from socket import getfqdn import logging -import requests logger = logging.getLogger(__name__) @@ -95,12 +94,6 @@ NUMBER_OF_CORES_PER_NODE = 24 # We /4 because we can then run 4 pipelines, and -2 to reserve cores for TBBwriter DEFAULT_NUMBER_OF_TASKS = (NUMBER_OF_NODES // 4) * (NUMBER_OF_CORES_PER_NODE - 2) // DEFAULT_NUMBER_OF_CORES_PER_TASK -# todo: config file? 
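The hard-coded TMSS_URL/user/password constants removed just below are superseded by the TMSSsession client created in the handler; a minimal sketch of that client pattern, assuming only the methods this patch already calls (create_from_dbcreds_for_ldap, get_subtasks, set_subtask_status):

    from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession

    subtask_id = 123456                         # illustrative
    with TMSSsession.create_from_dbcreds_for_ldap() as tmss_client:
        scheduled_subtasks = tmss_client.get_subtasks(state="scheduled")
        tmss_client.set_subtask_status(subtask_id, "queued")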
-TMSS_URL = 'http://localhost:80/' -TMSS_USER = 'paulus' -TMSS_PASS = 'pauluspass' -TMSS_AUTHENTICATION_METHOD = TMSSsession.BASICAUTH - def runCommand(cmdline, input=None): logger.info("runCommand starting: %s", cmdline) @@ -353,20 +346,13 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler): logger.info('PipelineControlTMSS busname=%s', exchange) self.exchange = exchange self.slurm = Slurm() + self.tmss_client = TMSSsession.create_from_dbcreds_for_ldap() - def _setStatus(self, subtask_id, status): - with TMSSsession(TMSS_USER, TMSS_PASS, TMSS_URL, TMSS_AUTHENTICATION_METHOD) as session: - session.patch(url='%s/api/subtask/%s/' % (TMSS_URL, subtask_id), - data={'state': "%s/api/subtask_state/%s/" % (TMSS_URL, status)}) + def start_handling(self): + self.tmss_client.open() - def _getParset(self, subtask_id): - try: - with TMSSsession(TMSS_USER, TMSS_PASS, TMSS_URL, TMSS_AUTHENTICATION_METHOD) as session: - r = session.get(TMSS_URL + 'api/subtask/' + subtask_id + '/parset/') - return Parset(r) - except Exception as e: - logger.error("Cannot retrieve parset of task %s from TMSS: %s", subtask_id, e) - return None + def stop_handling(self): + self.tmss_client.close() def check_scheduled_pipelines(self): """ @@ -377,10 +363,9 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler): try: logger.info("Checking for already scheduled pipelines in TMSS...") - with TMSSsession(TMSS_USER, TMSS_PASS, TMSS_URL, TMSS_AUTHENTICATION_METHOD) as session: - r = session.get(TMSS_URL + 'api/subtask/?state__value=scheduled&format=json') + scheduled_subtasks = self.tmss_client.get_subtasks(state="scheduled") scheduled_pipelines = [] - for subtask in r['results']: + for subtask in scheduled_subtasks: bits = subtask['url'].split['/'] scheduled_pipelines.append(bits[bits.index("subtask") + 1]) @@ -389,7 +374,7 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler): for subtask_id in scheduled_pipelines: logger.info("Checking if scheduled pipeline subtask_id=%s can start.", subtask_id) try: - parset = self._getParset(subtask_id) + parset = Parset(self.tmss_client.get_subtask_parset(subtask_id)) if not parset or not self._shouldHandle(parset): continue self._startPipeline(subtask_id, parset) @@ -400,7 +385,7 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler): def onSubTaskScheduled(self, subtask_id: int, old_state: str, new_state: str): try: - parset = self._getParset(subtask_id) + parset = Parset(self.tmss_client.get_subtask_parset(subtask_id)) if parset and self._shouldHandle(parset): self._startPipeline(subtask_id, parset) except Exception as e: @@ -604,10 +589,10 @@ class PipelineControlTMSSHandler(TMSSSubTaskEventMessageHandler): logger.info("Scheduled SLURM job %s for abort trigger for subtask_id=%s", slurm_cancel_job_id, subtask_id) logger.info("Handed over pipeline %s to SLURM, setting status to QUEUED", subtask_id) - self._setStatus(subtask_id, "queued") + self.tmss_client.set_subtask_status(subtask_id, "queued") except Exception as e: logger.error(str(e)) - self._setStatus(subtask_id, "finished") + self.tmss_client.set_subtask_status(subtask_id, "finished") class PipelineControlHandler( OTDBEventMessageHandler): diff --git a/QA/QA_Common/bin/show_hdf5_info b/QA/QA_Common/bin/show_hdf5_info index 63c2f8c98dfdb1ce7d9c014a30ec0dbddd5d572f..1914d492a601afdb0018f1a16d89d9f196c1f8c1 100755 --- a/QA/QA_Common/bin/show_hdf5_info +++ b/QA/QA_Common/bin/show_hdf5_info @@ -38,7 +38,7 @@ if __name__ == '__main__': (options, args) = parser.parse_args() - if 
len(args) != 1: + if len(args) < 1: parser.print_help() exit(-1) diff --git a/QA/QA_Service/CMakeLists.txt b/QA/QA_Service/CMakeLists.txt index 3da6a2d95811cc7bc01fe5147727e1a6edf4d9c0..37e8061110e7450dc8549439a7aac2938c79931a 100644 --- a/QA/QA_Service/CMakeLists.txt +++ b/QA/QA_Service/CMakeLists.txt @@ -17,7 +17,7 @@ # $Id$ -lofar_package(QA_Service 1.0 DEPENDS QA_Common PyMessaging OTDB_Services pyparameterset) +lofar_package(QA_Service 1.0 DEPENDS QA_Common PyMessaging OTDB_Services pyparameterset TMSS TMSSClient) add_subdirectory(lib) add_subdirectory(bin) diff --git a/QA/QA_Service/lib/config.py b/QA/QA_Service/lib/config.py index 3c975678d40a758dd78468de7763cd822f18270d..6b1b1089f2170569e6ada3a9207573ec68eb3988 100644 --- a/QA/QA_Service/lib/config.py +++ b/QA/QA_Service/lib/config.py @@ -17,4 +17,4 @@ # $Id$ -DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX='QA' +DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX='QA.notification' diff --git a/QA/QA_Service/lib/qa_service.py b/QA/QA_Service/lib/qa_service.py index e4f5ba654d471f4a09eb5bc28fb158d169bc4f27..e8283195becfe50615448a3d5ed1d72f8716a2ad 100644 --- a/QA/QA_Service/lib/qa_service.py +++ b/QA/QA_Service/lib/qa_service.py @@ -18,57 +18,47 @@ # $Id: qa_service.py 43930 2019-08-30 07:57:17Z klazema $ import os.path -import logging -from subprocess import call, Popen, PIPE, STDOUT +import json +from subprocess import call from optparse import OptionParser, OptionGroup -from threading import Thread from lofar.common.util import waitForInterrupt -from lofar.sas.otdb.OTDBBusListener import OTDBBusListener, OTDBEventMessageHandler, DEFAULT_OTDB_NOTIFICATION_SUBJECT -from lofar.messaging import UsingToBusMixin, BusListener -from lofar.messaging.messages import EventMessage +from lofar.sas.otdb.OTDBBusListener import OTDBBusListener, OTDBEventMessageHandler +from lofar.sas.tmss.client.tmssbuslistener import TMSSSubTaskEventMessageHandler, TMSSSubTaskBusListener +from lofar.messaging import UsingToBusMixin, BusListener, ToBus, AbstractMessageHandler +from lofar.messaging.messages import EventMessage, CommandMessage from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME from lofar.qa.service.config import DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX from lofar.common.cep4_utils import * from lofar.parameterset import parameterset from lofar.sas.otdb.otdbrpc import OTDBRPC +from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession +import logging logger = logging.getLogger(__name__) -QA_LUSTRE_BASE_DIR = '/data/qa' -QA_NFS_BASE_DIR = '/qa' -DEFAULT_FILTERED_OTDB_NOTIFICATION_SUBJECT = "filtered.%s" % (DEFAULT_OTDB_NOTIFICATION_SUBJECT,) - -#TODO: idea: convert periodically while observing? 
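The QA command subjects introduced just below expand the template "QA.Command.%s", e.g. DEFAULT_DO_QAPLOTS_SUBJECT becomes "QA.Command.QAPlots", and DEFAULT_QA_COMMANDS_SUBJECT becomes "QA.Command.#" with '#' acting as the topic wildcard that the commands listener uses as its routing key. A minimal sketch of injecting such a command by hand, assuming a reachable broker and using only the ToBus/CommandMessage calls that appear in this file (exchange, broker and otdb id are illustrative):

    from lofar.messaging import ToBus, DEFAULT_BUSNAME, DEFAULT_BROKER
    from lofar.messaging.messages import CommandMessage

    tobus = ToBus(DEFAULT_BUSNAME, DEFAULT_BROKER)
    tobus.open()
    tobus.send(CommandMessage(subject="QA.Command.QAFileConversionAndPlots",
                              content={'otdb_id': 123456}))
    tobus.close()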
+_DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE = "QA.Command.%s" +DEFAULT_DO_QAFILE_CONVERSION_SUBJECT = _DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE % "QAFileConversion" +DEFAULT_DO_QAFILE_FINALIZE_SUBJECT = _DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE % "QAFileFinalize" +DEFAULT_DO_QAPLOTS_SUBJECT = _DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE % "QAPlots" +DEFAULT_DO_QAFILE_CONVERSION_AND_PLOTS_SUBJECT = _DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE % "QAFileConversionAndPlots" +DEFAULT_QA_COMMANDS_SUBJECT = _DEFAULT_QA_COMMAND_SUBJECT_TEMPLATE % "#" class QAFilteringOTDBBusListener(OTDBBusListener): class QAFilteringOTDBEventMessageHandler(UsingToBusMixin, OTDBEventMessageHandler): - def _send_filtered_event_message(self, otdb_id: int, modificationTime: datetime, state: str): - try: - with OTDBRPC.create(exchange=self.exchange, broker=self.broker, timeout=2) as otdbrpc: - parset = parameterset(otdbrpc.taskGetSpecification(otdb_id=otdb_id).get("specification", '')) - task_type = parset.get("ObsSW.Observation.processType") - priority = 6 if task_type == "Observation" else 2 - except Exception as e: - logger.warning('Could not determine task type for otdb_id=%s, using default priority=4: %s', otdb_id, e) - priority = 4 - + def _send_qa_command_message(self, otdb_id: int, command_subject: str): try: - content = {"treeID": otdb_id, - "state": state, - "time_of_change": modificationTime} - msg = EventMessage(subject=DEFAULT_FILTERED_OTDB_NOTIFICATION_SUBJECT, - content=content, - priority=priority) - logger.info('sending filtered event message subject:\'%s\' content: %s', msg.subject, content) + content = {"otdb_id": otdb_id } + msg = CommandMessage(subject=command_subject, content=content) + logger.info('sending command message subject:\'%s\' content: %s', msg.subject, content) self.send(msg) except Exception as e: logger.error('Could not send event message: %s', e) def onObservationCompleting(self, otdb_id, modificationTime): - self._send_filtered_event_message(otdb_id, modificationTime, 'completing') + self._send_qa_command_message(otdb_id, DEFAULT_DO_QAFILE_CONVERSION_AND_PLOTS_SUBJECT) def onObservationFinished(self, otdb_id, modificationTime): - self._send_filtered_event_message(otdb_id, modificationTime, 'finished') + self._send_qa_command_message(otdb_id, DEFAULT_DO_QAFILE_FINALIZE_SUBJECT) def __init__(self, exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER): super().__init__(handler_type=QAFilteringOTDBBusListener.QAFilteringOTDBEventMessageHandler, @@ -77,68 +67,131 @@ class QAFilteringOTDBBusListener(OTDBBusListener): broker=broker) -class QAFilteredOTDBBusListener(BusListener): - class QAFilteredOTDBEventMessageHandler(UsingToBusMixin, OTDBEventMessageHandler): - ''' - QAFilteredOTDBEventMessageHandler listens on the lofar otdb message bus for NotificationMessages and starts qa processes - upon observation/pipeline completion. The qa processes convert MS (measurement sets) to hdf5 qa files, - and then starts generating plots from the hdf5 file. 
- ''' - def __init__(self): +class QAFilteringTMSSSubTaskBusListener(TMSSSubTaskBusListener): + class QAFilteringTMSSSubTaskEventMessageHandler(UsingToBusMixin, TMSSSubTaskEventMessageHandler): + def _send_qa_command_message(self, subtask_id: int, command_subject: str): + with TMSSsession.create_from_dbcreds_for_ldap() as tmsssession: + tmsssession.set_subtask_status(subtask_id, 'queueing') + + try: + content = {"subtask_id": subtask_id } + msg = CommandMessage(subject=command_subject, content=content) + logger.info('sending command message subject:\'%s\' content: %s', msg.subject, content) + self.send(msg) + except Exception as e: + logger.error('Could not send event message: %s', e) + + tmsssession.set_subtask_status(subtask_id, 'queued') + + def onSubTaskScheduled(self, subtask_id: int, old_state: str, new_state:str): + with TMSSsession.create_from_dbcreds_for_ldap() as tmsssession: + subtask = tmsssession.get_subtask(subtask_id) + spec = tmsssession.get_url_as_json_object(subtask['specifications_template']) + if '/qa_files/' in spec['type']: + self._send_qa_command_message(subtask_id, DEFAULT_DO_QAFILE_CONVERSION_SUBJECT) + elif '/qa_plots/' in spec['type']: + self._send_qa_command_message(subtask_id, DEFAULT_DO_QAPLOTS_SUBJECT) + + def __init__(self, exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER): + super().__init__(handler_type=QAFilteringTMSSSubTaskBusListener.QAFilteringTMSSSubTaskEventMessageHandler, + exchange=exchange, + num_threads=1, + broker=broker) + + +class QACommandsBusListener(BusListener): + class QACommandsMessageHandler(AbstractMessageHandler): + def __init__(self, qa_service): super().__init__() - self._unfinished_otdb_id_map = {} + self.qa_service = qa_service - def onObservationCompleting(self, otdb_id, modificationTime): - ''' - this mehod is called automatically upon receiving a Completion NotificationMessage - :param int otdb_id: the task's otdb database id - :param datetime modificationTime: timestamp when the task's status changed to completing - :return: None - ''' - logger.info("task with otdb_id %s completed.", otdb_id) - - # immediately do qa when the obs is completing, because the data is already on disk... - # and do the handling of the feedback in onObservationFinished - self.do_qa(otdb_id=otdb_id) + def handle_message(self, msg: CommandMessage): + if not isinstance(msg, CommandMessage): + raise ValueError("%s: Ignoring non-CommandMessage: %s" % (self.__class__.__name__, msg)) - def onObservationFinished(self, otdb_id, modificationTime): - ''' - this mehod is called automatically upon receiving a Finished NotificationMessage - :param int otdb_id: the task's otdb database id - :param datetime modificationTime: timestamp when the task's status changed to finished - :return: None - ''' - logger.info("task with otdb_id %s finished. 
trying to add parset (with feedback) to h5 file", otdb_id) - - # lookup the hdf5_file_path for the given otdb_id - # and (re)add the parset to the file (which now includes feedback) - hdf5_file_path = self._unfinished_otdb_id_map.get(otdb_id) - if hdf5_file_path: - del self._unfinished_otdb_id_map[otdb_id] + logger.debug("%s.handleMessage: %s" % (self.__class__.__name__, str(msg))) - try: - cmd = ['add_parset_to_hdf5', hdf5_file_path] - cmd = wrap_command_for_docker(cmd, 'adder', 'latest') - cmd = wrap_command_in_cep4_random_node_ssh_call(cmd, partition=SLURM_CPU_PARTITION, via_head=True) + if msg.subject == DEFAULT_DO_QAFILE_CONVERSION_SUBJECT: + self.qa_service.do_qafile_conversion(otdb_id=msg.content.get('otdb_id'), subtask_id=msg.content.get('subtask_id')) + elif msg.subject == DEFAULT_DO_QAPLOTS_SUBJECT: + self.qa_service.do_qaplots(otdb_id=msg.content.get('otdb_id'), subtask_id=msg.content.get('subtask_id')) + elif msg.subject == DEFAULT_DO_QAFILE_CONVERSION_AND_PLOTS_SUBJECT: + if msg.content.get('subtask_id'): + raise ValueError("%s: cannot do qa file conversion and plotting in one call for TMSS subtask id=%s. These steps are modelled seperately" % (self.__class__.__name__, msg.content.get('subtask_id'))) - logger.info(' '.join(cmd)) - if call(cmd) == 0: - self._copy_hdf5_to_nfs_dir(hdf5_file_path) - except Exception as e: - logger.warning("Cannot add parset with feedback for otdb=%s. error: %s", otdb_id, e) + self.qa_service.do_qa(otdb_id=msg.content.get('otdb_id')) + elif msg.subject == DEFAULT_DO_QAFILE_FINALIZE_SUBJECT: + self.qa_service.finalize_qa(otdb_id=msg.content.get('otdb_id'), subtask_id=msg.content.get('subtask_id')) else: - logger.info("Could not find the h5 file for task with otdb_id %s to add the parset to.", otdb_id) + raise ValueError("%s: cannot handle CommandMessage with subject: %s" % (self.__class__.__name__, msg.subject)) - def do_qa(self, otdb_id): - ''' - try to do all qa (quality assurance) steps for the given otdb_id - resulting in an h5 MS-extract file and inspection plots - :param int otdb_id: observation/pipeline otdb id for which the conversion needs to be done. 
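The subject dispatch above hands each QA command to a method of the QAService class introduced further below; a minimal sketch of running that service, assuming only what this patch shows (QAService opens its TMSS session and starts its three bus listeners when used as a context manager; the exchange and broker values are illustrative):

    from lofar.qa.service.qa_service import QAService
    from lofar.common.util import waitForInterrupt

    with QAService(exchange='lofar', broker='localhost'):
        waitForInterrupt()                      # keep handling QA commands until interrupted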
- :return: None - ''' + def __init__(self, qa_service, exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER): + super().__init__(handler_type=QACommandsBusListener.QACommandsMessageHandler, + handler_kwargs={'qa_service': qa_service}, + exchange=exchange, + routing_key=DEFAULT_QA_COMMANDS_SUBJECT, + num_threads=1, + broker=broker) - hdf5_file_path = None +class QAService: + QA_LUSTRE_BASE_DIR = '/data/qa' + QA_NFS_BASE_DIR = '/qa' + def __init__(self, exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER): + """ + :param exchange: valid message exchange address + :param broker: valid broker host (default: None, which means localhost) + """ + self.exchange = exchange + self.broker = broker + self.tobus = ToBus(exchange, broker) + self.filtering_otdbbuslistener = QAFilteringOTDBBusListener(exchange = exchange, broker = broker) + self.filtering_tmssbuslistener = QAFilteringTMSSSubTaskBusListener(exchange = exchange, broker = broker) + self.commands_buslistener = QACommandsBusListener(qa_service=self, exchange = exchange, broker = broker) + self._unfinished_otdb_id_map = {} + self.tmsssession = None + + def __enter__(self): + self.tmsssession = TMSSsession.create_from_dbcreds_for_ldap() + self.tmsssession.open() + self.tobus.open() + self.filtering_otdbbuslistener.start_listening() + self.filtering_tmssbuslistener.start_listening() + self.commands_buslistener.start_listening() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.filtering_otdbbuslistener.stop_listening() + self.filtering_tmssbuslistener.stop_listening() + self.commands_buslistener.stop_listening() + self.tobus.close() + self.tmsssession.close() + + @staticmethod + def h5_lustre_filepath(observation_id) -> str: + h5_filename = 'L%s.MS_extract.h5' % observation_id + return os.path.join(QAService.QA_LUSTRE_BASE_DIR, 'qa_h5_files', h5_filename) + + @staticmethod + def plots_lustre_dirpath(observation_id) -> str: + plots_dirname = 'L%s' % observation_id + return os.path.join(QAService.QA_LUSTRE_BASE_DIR, 'plots', plots_dirname) + + def do_qa(self, otdb_id=None): + ''' + convert a MS or BeamFormed observation to a qa h5 file, and create plots. 
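For concreteness, with the default QA_LUSTRE_BASE_DIR of '/data/qa' the two path helpers defined above resolve as follows (the observation id 123456 is illustrative):

    QAService.h5_lustre_filepath(123456)        # -> '/data/qa/qa_h5_files/L123456.MS_extract.h5'
    QAService.plots_lustre_dirpath(123456)      # -> '/data/qa/plots/L123456'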
+ ''' + if self.do_qafile_conversion(otdb_id=otdb_id): + self.do_qaplots(otdb_id=otdb_id) + + def do_qafile_conversion(self, otdb_id=None, subtask_id=None): + ''' + convert a MS or BeamFormed observation to a qa h5 file + ''' + + hdf5_file_path = None + + if otdb_id: with OTDBRPC.create(exchange=self.exchange, broker=self.broker, timeout=5) as otdbrpc: parset = parameterset(otdbrpc.taskGetSpecification(otdb_id=otdb_id).get("specification", '')) @@ -153,279 +206,332 @@ class QAFilteredOTDBBusListener(BusListener): else: logger.info("No uv or cs dataproducts avaiblable to convert for otdb_id %s", otdb_id) return + elif subtask_id: + self.tmsssession.set_subtask_status(subtask_id, 'starting') + self.tmsssession.set_subtask_status(subtask_id, 'started') - if hdf5_file_path: - # keep a note of where the h5 file was stored for this unfinished otdb_id - self._unfinished_otdb_id_map[otdb_id] = hdf5_file_path + hdf5_file_path = self._convert_ms2hdf5(otdb_id=otdb_id, subtask_id=subtask_id) - # cluster it - self._cluster_h5_file(hdf5_file_path, otdb_id) + if hdf5_file_path: + # cluster it + self._cluster_h5_file(hdf5_file_path, otdb_id=otdb_id, subtask_id=subtask_id) - self._copy_hdf5_to_nfs_dir(hdf5_file_path) + self._copy_hdf5_to_nfs_dir(hdf5_file_path) - plot_dir_path = self._create_plots_for_h5_file(hdf5_file_path, otdb_id) - plot_dir_path = self._move_plots_to_nfs_dir(plot_dir_path) + if subtask_id: + self.tmsssession.set_subtask_status(subtask_id, 'finishing') + self.tmsssession.set_subtask_status(subtask_id, 'finished') - # and notify that we're finished - self._send_event_message('Finished', {'otdb_id': otdb_id, - 'hdf5_file_path': hdf5_file_path, - 'plot_dir_path': plot_dir_path or ''}) + return hdf5_file_path - def _send_event_message(self, subject_suffix, content): - try: - subject = '%s.%s' % (DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX, subject_suffix) - msg = EventMessage(subject=subject, content=content) - logger.info('sending event message %s: %s', subject, content) - self.send(msg) - except Exception as e: - logger.error('Could not send event message: %s', e) + if subtask_id: + self.tmsssession.set_subtask_status(subtask_id, 'error') - def _convert_ms2hdf5(self, otdb_id): - ''' - convert the MS for the given otdb_id to an h5 MS-extract file. - The conversion will run via ssh on cep4 with massive parellelization. - When running on cep4, it is assumed that a docker image called adder exists on head.cep4 - When running locally, it is assumed that ms2hdf5 is installed locally. - :param int otdb_id: observation/pipeline otdb id for which the conversion needs to be done. - :return string: path to the generated h5 file. 
- ''' - try: - # define default h5 filename use default cep4 qa output dir - h5_filename = 'L%s.MS_extract.h5' % otdb_id - h5_dir_path = os.path.join(QA_LUSTRE_BASE_DIR, 'ms_extract') - hdf5_path = os.path.join(h5_dir_path, h5_filename) + return None - cmd = ['ls', hdf5_path] - cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + def do_qaplots(self, otdb_id=None, subtask_id=None): + if subtask_id: + self.tmsssession.set_subtask_status(subtask_id, 'starting') + self.tmsssession.set_subtask_status(subtask_id, 'started') - if call(cmd) == 0: - logger.info('uv dataset with otdb_id %s was already converted to hdf5 file %s', otdb_id, hdf5_path) - return hdf5_path + hdf5_path = self.h5_lustre_filepath(otdb_id or subtask_id) + plot_dir_path = self._create_plots_for_h5_file(hdf5_path, otdb_id, subtask_id) - logger.info('trying to convert MS uv dataset with otdb_id %s if any', otdb_id) - cmd = ['ms2hdf5', '-o', str(otdb_id), '--cep4', '-p', '-20', '-t', '256'] - cmd += ['--output_dir', h5_dir_path] - cmd += ['--output_filename', h5_filename] + if plot_dir_path: + if subtask_id: + self.tmsssession.set_subtask_status(subtask_id, 'finishing') - # wrap the command in a cep4 docker ssh call - cmd = wrap_command_for_docker(cmd, 'adder', 'latest') - cmd = wrap_command_in_cep4_available_node_with_lowest_load_ssh_call(cmd, partition=SLURM_CPU_PARTITION, via_head=True) + plot_dir_path = self._move_plots_to_nfs_dir(plot_dir_path) - logger.info('starting ms2hdf5, executing: %s', ' '.join(cmd)) + if subtask_id: + self.tmsssession.set_subtask_status(subtask_id, 'finished') - if call(cmd) == 0: - logger.info('converted uv dataset with otdb_id %s to hdf5 file %s', otdb_id, hdf5_path) - self._send_event_message('ConvertedMS2Hdf5', {'otdb_id': otdb_id, 'hdf5_file_path': hdf5_path}) - return hdf5_path - else: - msg = 'could not convert dataset with otdb_id %s' % otdb_id - logger.error(msg) - self._send_event_message('Error', {'otdb_id': otdb_id, 'message': msg}) + # and notify that we're finished + self._send_event_message('Finished', {'otdb_id': otdb_id, + 'subtask_id': subtask_id, + 'hdf5_file_path': hdf5_path, + 'plot_dir_path': plot_dir_path or ''}) + else: + if subtask_id: + self.tmsssession.set_subtask_status(subtask_id, 'error') + + def finalize_qa(self, otdb_id=None, subtask_id=None): + ''' + this mehod is called automatically upon receiving a Finished NotificationMessage + :param int otdb_id: the task's otdb database id + :param datetime modificationTime: timestamp when the task's status changed to finished + :return: None + ''' + logger.info("task with otdb_id %s finished. trying to add parset (with feedback) to h5 file", otdb_id) + + # lookup the hdf5_file_path for the given otdb_id + # and (re)add the parset to the file (which now includes feedback) + hdf5_file_path = self._unfinished_otdb_id_map.get(otdb_id) + if hdf5_file_path: + del self._unfinished_otdb_id_map[otdb_id] - except Exception as e: - logging.exception('error in _convert_ms2hdf5: %s', e) - self._send_event_message('Error', {'otdb_id': otdb_id, 'message': str(e)}) - return None - - def _create_plots_for_h5_file(self, hdf5_path, otdb_id=None): - ''' - create plots for the given h5 file. The plots are created via an ssh call to cep4 - where the plots are created in parallel in the docker image. - :param hdf5_path: the full path to the hdf5 file for which we want the plots. 
- :param otdb_id: the otdb_id of the converted observation/pipeline (is used for logging only) - :return: the full directory path to the directory containing the created plots. - ''' try: - #use default cep4 qa output dir. - plot_dir_path = os.path.join(QA_LUSTRE_BASE_DIR, 'plots') - task_plot_dir_path = '' - all_plots_succeeded = True - - for plot_options in [['-1', '-acb'], # 'hot' autocor/crosscor, per baseline scaling with distinct polarization scales, in dB - ['-1', '-acg'], # 'complex' autocor/crosscor, all baseline scaling with same polarization scales, in dB - ['-1', '-acn', '--raw'], # normalized 'hot' autocor/crosscor, raw - ['-4']]: # delay-rate - cmd = ['plot_hdf5_dynamic_spectra', '-o %s' % (plot_dir_path,), '--force', '--cep4'] + plot_options + [hdf5_path] - - # wrap the command in a cep4 ssh call to docker container - cmd = wrap_command_for_docker(cmd, 'adder', 'latest') - cmd = wrap_command_in_cep4_available_node_with_lowest_load_ssh_call(cmd, partition=SLURM_CPU_PARTITION, via_head=True) - - logger.info('generating plots for otdb_id %s, executing: %s', otdb_id, ' '.join(cmd)) - - if call(cmd) == 0: - task_plot_dir_path = os.path.join(plot_dir_path, 'L%s' % otdb_id) - logger.info('generated plots for otdb_id %s in %s with command=%s', otdb_id, - task_plot_dir_path, - ' '.join(cmd)) - else: - all_plots_succeeded &= False - msg = 'could not generate plots for otdb_id %s cmd=%s' % (otdb_id, ' '.join(cmd)) - logger.error(msg) - self._send_event_message('Error', {'otdb_id': otdb_id, - 'message': msg}) - - - self._send_event_message('CreatedInspectionPlots', {'otdb_id': otdb_id, - 'hdf5_file_path': hdf5_path, - 'plot_dir_path': task_plot_dir_path}) - return task_plot_dir_path + cmd = ['add_parset_to_hdf5', hdf5_file_path] + cmd = wrap_command_for_docker(cmd, 'adder', 'latest') + cmd = wrap_command_in_cep4_random_node_ssh_call(cmd, partition=SLURM_CPU_PARTITION, via_head=True) + + logger.info(' '.join(cmd)) + + self._copy_hdf5_to_nfs_dir(hdf5_file_path) except Exception as e: - logging.exception('error in _create_plots_for_h5_file: %s', e) - self._send_event_message('Error', {'otdb_id': otdb_id, 'message': str(e)}) - return None - - def _convert_bf2hdf5(self, otdb_id): - ''' - convert the beamformed h5 dataset for the given otdb_id to an h5 MS-extract file. - When running on cep4, it is assumed that a docker image called adder exists on head.cep4 - When running locally, it is assumed that ms2hdf5 is installed locally. - :param int otdb_id: observation/pipeline otdb id for which the conversion needs to be done. - :return string: path to the generated h5 file. - ''' - try: - # define default h5 filename use default cep4 qa output dir - h5_filename = 'L%s.MS_extract.h5' % otdb_id - h5_dir_path = os.path.join(QA_LUSTRE_BASE_DIR, 'ms_extract') - hdf5_path = os.path.join(h5_dir_path, h5_filename) + logger.warning("Cannot add parset with feedback for otdb=%s. error: %s", otdb_id, e) + else: + logger.info("Could not find the h5 file for task with otdb_id %s to add the parset to.", otdb_id) + + def _send_event_message(self, subject_suffix, content): + try: + subject = '%s.%s' % (DEFAULT_QA_NOTIFICATION_SUBJECT_PREFIX, subject_suffix) + msg = EventMessage(subject=subject, content=content) + logger.info('sending event message %s: %s', subject, content) + self.tobus.send(msg) + except Exception as e: + logger.error('Could not send event message: %s', e) + + def _convert_ms2hdf5(self, otdb_id=None, subtask_id=None): + ''' + convert the MS for the given otdb_id to an h5 MS-extract file. 
+ The conversion will run via ssh on cep4 with massive parellelization. + When running on cep4, it is assumed that a docker image called adder exists on head.cep4 + When running locally, it is assumed that ms2hdf5 is installed locally. + :param int otdb_id: observation/pipeline otdb id for which the conversion needs to be done. + :return string: path to the generated h5 file. + ''' + try: + obs_id = otdb_id or subtask_id - cmd = ['ls', hdf5_path] - cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + # define default h5 filename use default cep4 qa output dir + hdf5_path = self.h5_lustre_filepath(obs_id) + h5_dir_path, h5_filename = os.path.split(hdf5_path) + + nr_of_timestamps = -1 + nr_of_subbands = -1 - if call(cmd, stdout=None, stderr=None) == 0: - logger.info('bf dataset with otdb_id %s was already converted to hdf5 file %s', otdb_id, hdf5_path) - return hdf5_path + if subtask_id: + subtask = self.tmsssession.get_subtask(subtask_id=subtask_id) + nr_of_timestamps = subtask['specifications_doc'].get('nr_of_timestamps', -1) + nr_of_subbands = subtask['specifications_doc'].get('nr_of_subbands', -1) - logger.info('trying to convert beamformed dataset with otdb_id %s if any', otdb_id) + logger.info('trying to convert MS uv dataset with otdb_id=%s subtask_id=%s if any', otdb_id, subtask_id) + cmd = ['ms2hdf5', '-o', str(obs_id), '--cep4', '-p', '-22', '-t', str(nr_of_timestamps), '-s', str(nr_of_subbands), ] + cmd += ['--output_dir', h5_dir_path] + cmd += ['--output_filename', h5_filename] - cmd = ['bf2hdf5', '-o', str(otdb_id)] - cmd += ['--output_dir', h5_dir_path] - cmd += ['--output_filename', h5_filename] + # wrap the command in a cep4 docker ssh call + cmd = wrap_command_for_docker(cmd, 'adder', 'latest') + cmd = wrap_command_in_cep4_available_node_with_lowest_load_ssh_call(cmd, partition=SLURM_CPU_PARTITION, via_head=True) - # wrap the command in a cep4 docker ssh call + logger.info('starting ms2hdf5, executing: %s', ' '.join(cmd)) + + if call(cmd) == 0: + logger.info('converted uv dataset with otdb_id=%s subtask_id=%s to hdf5 file %s', otdb_id, subtask_id, hdf5_path) + self._send_event_message('ConvertedMS2Hdf5', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'hdf5_file_path': hdf5_path}) + return hdf5_path + else: + msg = 'could not convert dataset with otdb_id=%s subtask_id=%s' % (otdb_id, subtask_id) + logger.error(msg) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'message': msg}) + + except Exception as e: + logging.exception('error in _convert_ms2hdf5: %s', e) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'message': str(e)}) + return None + + def _create_plots_for_h5_file(self, hdf5_path, otdb_id=None, subtask_id=None): + ''' + create plots for the given h5 file. The plots are created via an ssh call to cep4 + where the plots are created in parallel in the docker image. + :param hdf5_path: the full path to the hdf5 file for which we want the plots. + :param otdb_id: the otdb_id of the converted observation/pipeline (is used for logging only) + :return: the full directory path to the directory containing the created plots. + ''' + try: + #use default cep4 qa output dir. 
+ task_plot_dir_path = QAService.plots_lustre_dirpath(otdb_id or subtask_id) + base_plot_dir_path = os.path.dirname(task_plot_dir_path) + + if subtask_id: + subtask = self.tmsssession.get_subtask(subtask_id=subtask_id) + #TODO: use settings from subtask to tweak plot_hdf5_dynamic_spectra options + + for plot_options in [['-1', '-acb'], # 'hot' autocor/crosscor, per baseline scaling with distinct polarization scales, in dB + ['-1', '-acg'], # 'complex' autocor/crosscor, all baseline scaling with same polarization scales, in dB + ['-1', '-acn', '--raw'], # normalized 'hot' autocor/crosscor, raw + ['-4']]: # delay-rate + cmd = ['plot_hdf5_dynamic_spectra', '-o %s' % (base_plot_dir_path,), '--force', '--cep4'] + plot_options + [hdf5_path] + + # wrap the command in a cep4 ssh call to docker container cmd = wrap_command_for_docker(cmd, 'adder', 'latest') cmd = wrap_command_in_cep4_available_node_with_lowest_load_ssh_call(cmd, partition=SLURM_CPU_PARTITION, via_head=True) - logger.info('starting bf2hdf5, executing: %s', ' '.join(cmd)) + logger.info('generating plots for otdb_id=%s subtask_id=%s, executing: %s',otdb_id, subtask_id, ' '.join(cmd)) if call(cmd) == 0: - hdf5_path = os.path.join(h5_dir_path, h5_filename) - logger.info('converted bf dataset with otdb_id %s to hdf5 file %s', otdb_id, hdf5_path) - self._send_event_message('ConvertedBF2Hdf5', {'otdb_id': otdb_id, 'hdf5_file_path': hdf5_path}) - return hdf5_path + logger.info('generated plots for otdb_id=%s subtask_id=%s in %s with command=%s', otdb_id, subtask_id, + task_plot_dir_path, + ' '.join(cmd)) else: - msg = 'could not convert dataset with otdb_id %s' % otdb_id + msg = 'could not generate plots for otdb_id=%s subtask_id=%s cmd=%s' % (otdb_id, subtask_id, ' '.join(cmd)) logger.error(msg) - self._send_event_message('Error', {'otdb_id': otdb_id, 'message': msg}) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, + 'message': msg}) + return None + + self._send_event_message('CreatedInspectionPlots', {'otdb_id': otdb_id, 'subtask_id': subtask_id, + 'hdf5_file_path': hdf5_path, + 'plot_dir_path': task_plot_dir_path}) + return task_plot_dir_path + except Exception as e: + logging.exception('error in _create_plots_for_h5_file: %s', e) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'message': str(e)}) + return None + + def _convert_bf2hdf5(self, otdb_id=None, subtask_id=None): + ''' + convert the beamformed h5 dataset for the given otdb_id to an h5 MS-extract file. + When running on cep4, it is assumed that a docker image called adder exists on head.cep4 + When running locally, it is assumed that ms2hdf5 is installed locally. + :param int otdb_id: observation/pipeline otdb id for which the conversion needs to be done. + :return string: path to the generated h5 file. 
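Both conversion helpers use the same command-wrapping chain; a minimal sketch of that pattern as used in this module (the bare command is illustrative, the wrapper functions come from lofar.common.cep4_utils and call from subprocess, both already imported here, and their arguments are the ones already used above):

    cmd = ['bf2hdf5', '-o', '123456']                       # bare command to run on cep4
    cmd = wrap_command_for_docker(cmd, 'adder', 'latest')   # run it inside the adder docker image
    cmd = wrap_command_in_cep4_available_node_with_lowest_load_ssh_call(cmd, partition=SLURM_CPU_PARTITION, via_head=True)
    if call(cmd) == 0:                                      # 0 means the wrapped command succeeded
        pass                                                # e.g. send a notification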
+ ''' + try: + # define default h5 filename use default cep4 qa output dir + hdf5_path = self.h5_lustre_filepath(otdb_id or subtask_id) + h5_dir_path, h5_filename = os.path.split(hdf5_path) - except Exception as e: - logging.exception('error in _convert_ms2hdf5: %s', e) - self._send_event_message('Error', {'otdb_id': otdb_id, 'message': str(e)}) - return None + cmd = ['ls', hdf5_path] + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) - def _copy_hdf5_to_nfs_dir(self, hdf5_path): - try: - hdf5_filename = os.path.basename(hdf5_path) - hdf5_nfs_path = os.path.join(QA_NFS_BASE_DIR, 'h5', hdf5_filename) - cmd = ['cp', hdf5_path, hdf5_nfs_path] - cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + if call(cmd, stdout=None, stderr=None) == 0: + logger.info('bf dataset with otdb_id=%s subtask_id=%s was already converted to hdf5 file %s', otdb_id, subtask_id, hdf5_path) + return hdf5_path - logger.debug('copying h5 file to nfs dir: %s', ' '.join(cmd)) - if call(cmd) == 0: - logger.info('copied h5 file to nfs dir: %s -> %s', hdf5_path, hdf5_nfs_path) - return hdf5_nfs_path - except Exception as e: - logging.exception('error in _copy_hdf5_to_nfs_dir: %s', e) + logger.info('trying to convert beamformed dataset with otdb_id=%s subtask_id=%s if any', otdb_id, subtask_id) - def _move_plots_to_nfs_dir(self, plot_dir_path): - try: - plot_dir_name = os.path.basename(plot_dir_path) - plot_nfs_path = os.path.join(QA_NFS_BASE_DIR, 'plots', plot_dir_name) - cmd = ['cp', '-rf', plot_dir_path, plot_nfs_path] - cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + cmd = ['bf2hdf5', '-o', str(otdb_id)] + cmd += ['--output_dir', h5_dir_path] + cmd += ['--output_filename', h5_filename] - logger.info('copying plots: %s', ' '.join(cmd)) - if call(cmd) == 0: - logger.info('copied plots from %s to nfs dir: %s', plot_dir_path, plot_nfs_path) + # wrap the command in a cep4 docker ssh call + cmd = wrap_command_for_docker(cmd, 'adder', 'latest') + cmd = wrap_command_in_cep4_available_node_with_lowest_load_ssh_call(cmd, partition=SLURM_CPU_PARTITION, via_head=True) - cmd = ['rm', '-rf', plot_dir_path] - cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + logger.info('starting bf2hdf5, executing: %s', ' '.join(cmd)) - logger.debug('removing plots: %s', ' '.join(cmd)) - if call(cmd) == 0: - logger.info('removed plots from %s after they were copied to nfs dir %s', plot_dir_path, plot_nfs_path) - return plot_nfs_path - except Exception as e: - logging.exception('error in _copy_hdf5_to_nfs_dir: %s', e) - - def _cluster_h5_file(self, hdf5_path, otdb_id=None): - ''' - Try to cluster the baselines based on visibilities in the h5 file - using the clustering docker image developed by e-science. - This method assumes the adder_clustering docker image is available on cep4. If not, or if anything else - goes wrong, then the qa steps can just continue on the un-clustered h5 file. - The docker image can be build from the source on github: - https://github.com/NLeSC/lofar-predictive-maintenance - This is a private repo until the project has been published. At astron, jorrit has access. - In the future, we might incorporate the clustering code from the github repo in to the LOFAR source tree. - :param hdf5_path: the full path to the hdf5 file for which we want the plots. 
- :param otdb_id: the otdb_id of the converted observation/pipeline (is used for logging only) - :return: None - ''' - try: - cmd = ['show_hdf5_info', hdf5_path, '|', 'grep', 'clusters'] - cmd = wrap_command_for_docker(cmd, 'adder', 'latest') + if call(cmd) == 0: + hdf5_path = os.path.join(h5_dir_path, h5_filename) + logger.info('converted bf dataset with otdb_id=%s subtask_id=%s to hdf5 file %s', otdb_id, subtask_id, hdf5_path) + self._send_event_message('ConvertedBF2Hdf5', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'hdf5_file_path': hdf5_path}) + return hdf5_path + else: + msg = 'could not convert dataset with otdb_id=%s subtask_id=%s' % (otdb_id, subtask_id) + logger.error(msg) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'message': msg}) + + except Exception as e: + logging.exception('error in _convert_ms2hdf5: %s', e) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'message': str(e)}) + return None + + def _copy_hdf5_to_nfs_dir(self, h5_path): + try: + h5_org_dir_path, h5_org_filename = os.path.split(h5_path) + h5_nfs_path = os.path.join(QAService.QA_NFS_BASE_DIR, 'h5', h5_org_filename) + h5_nfs_dir, h5_nfs_filename = os.path.split(h5_nfs_path) + + # create nfs dir if needed + cmd = ['mkdir', '-p', h5_nfs_dir] + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + logger.info('create nfs dir if needed: %s', ' '.join(cmd)) + call(cmd) + + cmd = ['cp', '-f', h5_path, h5_nfs_path] + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + + logger.info('copying h5 file to nfs dir: %s', ' '.join(cmd)) + if call(cmd) == 0: + logger.info('copied h5 file to nfs dir: %s -> %s', h5_path, h5_nfs_path) + return h5_nfs_path + except Exception as e: + logging.exception('error in _copy_hdf5_to_nfs_dir: %s', e) + + def _move_plots_to_nfs_dir(self, plot_dir_path): + try: + plot_dir_name = os.path.basename(plot_dir_path) + plot_nfs_base_path = os.path.join(QAService.QA_NFS_BASE_DIR, 'plots') + + # create nfs dir if needed + cmd = ['mkdir', '-p', plot_nfs_base_path] + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + logger.info('create nfs dir if needed: %s', ' '.join(cmd)) + call(cmd) + + plot_nfs_path = os.path.join(plot_nfs_base_path, plot_dir_name) + cmd = ['cp', '-rf', plot_dir_path, plot_nfs_path] + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + + logger.info('copying plots: %s', ' '.join(cmd)) + if call(cmd) == 0: + logger.info('copied plots from %s to nfs dir: %s', plot_dir_path, plot_nfs_path) + + cmd = ['rm', '-rf', plot_dir_path] cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + logger.debug('removing plots: %s', ' '.join(cmd)) if call(cmd) == 0: - logger.info('hdf5 file %s otdb_id %s was already clustered', hdf5_path, otdb_id) - return + logger.info('removed plots from %s after they were copied to nfs dir %s', plot_dir_path, plot_nfs_path) + return plot_nfs_path - # the command to cluster the given h5 file (executed in the e-science adder docker image) - cmd = ['cluster_this.py', hdf5_path] - cmd = wrap_command_for_docker(cmd, 'adder_clustering', 'latest') - cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) + # move failed, so plots are still in original dir + return plot_dir_path + except Exception as e: + logging.exception('error in _copy_hdf5_to_nfs_dir: %s', e) - logger.info('clustering hdf5 file %s otdb_id %s, executing: %s', hdf5_path, otdb_id, ' '.join(cmd)) + def _cluster_h5_file(self, hdf5_path, otdb_id=None, subtask_id=None): + ''' + Try to cluster the baselines based on visibilities in the h5 
file + using the clustering docker image developed by e-science. + This method assumes the adder_clustering docker image is available on cep4. If not, or if anything else + goes wrong, then the qa steps can just continue on the un-clustered h5 file. + The docker image can be build from the source on github: + https://github.com/NLeSC/lofar-predictive-maintenance + This is a private repo until the project has been published. At astron, jorrit has access. + In the future, we might incorporate the clustering code from the github repo in to the LOFAR source tree. + :return: None + ''' + try: + cmd = ['show_hdf5_info', hdf5_path, '|', 'grep', 'clusters', '>&', '/dev/null'] + cmd = wrap_command_for_docker(cmd, 'adder', 'latest') + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) - if call(cmd) == 0: - logger.info('clustered hdf5 file %s otdb_id %s', hdf5_path, otdb_id) + if call(cmd) == 0: + logger.info('skipping clustering of hdf5 file %s otdb_id=%s subtask_id=%s which was already clustered', hdf5_path, otdb_id, subtask_id) + return - self._send_event_message('Clustered', {'otdb_id': otdb_id, - 'hdf5_file_path': hdf5_path}) - else: - msg = 'could not cluster hdf5 file %s otdb_id %s' % (hdf5_path, otdb_id) - logger.error(msg) - self._send_event_message('Error', {'otdb_id': otdb_id, 'message': msg}) - except Exception as e: - logging.exception('error in _cluster_h5_file: %s', e) - self._send_event_message('Error', {'otdb_id': otdb_id, 'message': str(e)}) + # the command to cluster the given h5 file (executed in the e-science adder docker image) + cmd = ['cluster_this.py', hdf5_path] + cmd = wrap_command_for_docker(cmd, 'adder_clustering', 'latest') + cmd = wrap_command_in_cep4_head_node_ssh_call(cmd) - def __init__(self, exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER): - super().__init__(handler_type=QAFilteredOTDBBusListener.QAFilteredOTDBEventMessageHandler, - handler_kwargs={}, - exchange=exchange, - routing_key="%s.#" % (DEFAULT_FILTERED_OTDB_NOTIFICATION_SUBJECT,), - num_threads=1, - broker=broker) + logger.info('clustering hdf5 file %s otdb_id=%s subtask_id=%s, executing: %s', hdf5_path, otdb_id, subtask_id, ' '.join(cmd)) -class QAService: - def __init__(self, exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER): - """ - :param exchange: valid message exchange address - :param broker: valid broker host (default: None, which means localhost) - """ - self.filtering_buslistener = QAFilteringOTDBBusListener(exchange = exchange, broker = broker) - self.filtered_buslistener = QAFilteredOTDBBusListener(exchange = exchange, broker = broker) + if call(cmd) == 0: + logger.info('clustered hdf5 file %s otdb_id=%s subtask_id=%s', hdf5_path, otdb_id, subtask_id) - def __enter__(self): - self.filtering_buslistener.start_listening() - self.filtered_buslistener.start_listening() - return self + self._send_event_message('Clustered', {'otdb_id': otdb_id, 'subtask_id': subtask_id, + 'hdf5_file_path': hdf5_path}) + else: + msg = 'could not cluster hdf5 file %s otdb_id=%s subtask_id=%s' % (hdf5_path, otdb_id, subtask_id) + logger.error(msg) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'message': msg}) + except Exception as e: + logging.exception('error in _cluster_h5_file: %s', e) + self._send_event_message('Error', {'otdb_id': otdb_id, 'subtask_id': subtask_id, 'message': str(e)}) - def __exit__(self, exc_type, exc_val, exc_tb): - self.filtering_buslistener.stop_listening() - self.filtered_buslistener.stop_listening() def main(): ''' @@ -437,11 
+543,11 @@ def main(): description='run the qa_service which listens for observations/pipelines finished events on ' 'the bus and then starts the QA (Quality Assurance) processes to convert MS to ' 'hdf5 files and generate inspection plots.') - group = OptionGroup(parser, 'QPid Messaging options') - group.add_option('-b', '--broker', dest='broker', type='string', default='localhost', help='Address of the qpid broker, default: %default') + group = OptionGroup(parser, 'Messaging options') + group.add_option('-b', '--broker', dest='broker', type='string', default='localhost', help='Address of the message broker, default: %default') group.add_option('-e', "--exchange", dest="exchange", type="string", default=DEFAULT_BUSNAME, - help="Bus or queue where the OTDB notifications are published. [default: %default]") + help="Bus or queue where the QA notifications are published. [default: %default]") parser.add_option_group(group) (options, args) = parser.parse_args() diff --git a/QA/QA_Service/test/t_qa_service.py b/QA/QA_Service/test/t_qa_service.py index a9a5b401b6e38dbfc3ee790882ddd964ea972107..3318d0cf092869223bd4382b34009c27a5082bee 100755 --- a/QA/QA_Service/test/t_qa_service.py +++ b/QA/QA_Service/test/t_qa_service.py @@ -26,6 +26,10 @@ import os from datetime import datetime import logging + +from lofar.sas.tmss.client.tmssbuslistener import DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX +from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment + logger = logging.getLogger(__name__) from lofar.qa.service.qa_service import QAService @@ -35,6 +39,7 @@ from lofar.messaging.messagebus import TemporaryExchange, BusListenerJanitor from lofar.messaging.messages import EventMessage from lofar.sas.otdb.config import DEFAULT_OTDB_NOTIFICATION_SUBJECT from lofar.common.test_utils import unit_test, integration_test +from lofar.common.json_utils import add_defaults_to_json_object_for_schema # the tests below test is multi threaded (even multi process) # define a SynchronizationQABusListener-derivative to handle synchronization (set the *_events) @@ -85,23 +90,33 @@ class TestQAService(unittest.TestCase): ''' Tests for the QAService class ''' + @classmethod + def setUpClass(cls) -> None: + cls.TEST_UUID = uuid.uuid1() + cls.TEST_OTDB_ID = 999999 + + cls.tmp_exchange = TemporaryExchange("%s_%s" % (cls.__name__, cls.TEST_UUID)) + cls.tmp_exchange.open() + + cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address) + cls.tmss_test_env.start() + + @classmethod + def tearDownClass(cls) -> None: + cls.tmss_test_env.stop() + cls.tmp_exchange.close() + + def setUp(self): ''' quite complicated setup to setup test message-exchanges/queues and mock away ssh calls to cep4 and mock away dockerized commands ''' - self.TEST_UUID = uuid.uuid1() - self.TEST_OTDB_ID = 999999 - - self.tmp_exchange = TemporaryExchange("%s_%s" % (__class__.__name__, self.TEST_UUID)) - self.tmp_exchange.open() - self.addCleanup(self.tmp_exchange.close) - # where to store the test results - self.TEST_DIR = '/tmp/qa_service_%s' % self.TEST_UUID - self.TEST_H5_FILE = 'L%s.MS_extract.h5' % (self.TEST_OTDB_ID,) - self.TEST_H5_PATH = os.path.join(self.TEST_DIR, 'ms_extract', self.TEST_H5_FILE) + self.TEST_DIR = '/tmp/test_qa_service_%s' % self.TEST_UUID + QAService.QA_LUSTRE_BASE_DIR = os.path.join(self.TEST_DIR, 'lustre') + QAService.QA_NFS_BASE_DIR = os.path.join(self.TEST_DIR, 'nfs') # mock the calls to ssh cep4 and docker def mocked_wrap_command_for_docker(cmd, image_name=None, image_label=None): @@ -109,12 +124,12 @@ class 
TestQAService(unittest.TestCase): return cmd def mocked_wrap_command_in_cep4_head_node_ssh_call(cmd): - logger.info('mocked_wrap_command_in_cep4_head_node_ssh_call returning original command: %s', ' '.join(cmd)) + logger.info('mocked_wrap_command_in_cep4_head_node_ssh_call returning original command (without ssh): %s', ' '.join(cmd)) return cmd def mocked_wrap_command_in_cep4_node_ssh_call(cmd, cpu_node_nr, partition, via_head): logger.info('mocked_wrap_command_in_cep4_node_ssh_call for %s node nr %s via head=%s ' \ - 'returning original command: %s', partition, cpu_node_nr, via_head, ' '.join(cmd)) + 'returning original command (without ssh): %s', partition, cpu_node_nr, via_head, ' '.join(cmd)) return cmd def mocked_get_cep4_available_nodes(partition): @@ -201,10 +216,9 @@ class TestQAService(unittest.TestCase): # by a call to the create_test_hypercube which fakes the ms2hdf5 conversion for this test. if 'ms2hdf5' in cmd: # the create_test_hypercube executable should be available in the PATH environment - create_test_hypercube_path = 'create_test_hypercube' - - mocked_cmd = [create_test_hypercube_path, '-s 4', '-S 8', '-t 16', - '-o', str(self.TEST_OTDB_ID), self.TEST_H5_PATH] + hdf5_path = QAService.h5_lustre_filepath(self.TEST_OTDB_ID) + mocked_cmd = ['create_test_hypercube', '-s 4', '-S 8', '-t 16', + '-o', str(self.TEST_OTDB_ID), hdf5_path] logger.info('''mocked_wrap_command_for_docker returning mocked command to create test h5 file: '%s', instead of original command: '%s' ''', ' '.join(mocked_cmd), ' '.join(cmd)) return mocked_cmd @@ -233,7 +247,7 @@ class TestQAService(unittest.TestCase): # start the QAService (the object under test) qaservice = QAService(exchange=self.tmp_exchange.address) - with qaservice, BusListenerJanitor(qaservice.filtering_buslistener), BusListenerJanitor(qaservice.filtered_buslistener): + with qaservice, BusListenerJanitor(qaservice.filtering_otdbbuslistener), BusListenerJanitor(qaservice.filtering_tmssbuslistener), BusListenerJanitor(qaservice.commands_buslistener): # start listening for QA event messages from the QAService with BusListenerJanitor(SynchronizationQABusListener(exchange=self.tmp_exchange.address)) as qa_listener: @@ -335,7 +349,7 @@ class TestQAService(unittest.TestCase): # start the QAService (the object under test) qaservice = QAService(exchange=self.tmp_exchange.address) - with qaservice, BusListenerJanitor(qaservice.filtering_buslistener), BusListenerJanitor(qaservice.filtered_buslistener): + with qaservice, BusListenerJanitor(qaservice.filtering_otdbbuslistener), BusListenerJanitor(qaservice.filtering_tmssbuslistener), BusListenerJanitor(qaservice.commands_buslistener): # start listening for QA event messages from the QAService with BusListenerJanitor(SynchronizationQABusListener(exchange=self.tmp_exchange.address)) as qa_listener: @@ -376,11 +390,11 @@ class TestQAService(unittest.TestCase): # replace the ms2hdf5 command which runs normally in the docker container # by a call to the create_test_hypercube which fakes the ms2hdf5 conversion for this test. 
# the create_test_hypercube executable should be available in the PATH environment - create_test_hypercube_path = 'create_test_hypercube' - mocked_cmd = [create_test_hypercube_path, '-s 4', '-S 8', '-t 16', - '-o', str(self.TEST_OTDB_ID), self.TEST_H5_PATH] - logger.info('mocked_wrap_command_for_docker returning mocked command to create test h5 file: %s', - ' '.join(mocked_cmd)) + hdf5_path = QAService.h5_lustre_filepath(self.TEST_OTDB_ID) + mocked_cmd = ['create_test_hypercube', '-s 4', '-S 8', '-t 16', + '-o', str(self.TEST_OTDB_ID), hdf5_path] + logger.info('''mocked_wrap_command_for_docker returning mocked command to create test h5 file: '%s', instead of original command: '%s' ''', + ' '.join(mocked_cmd), ' '.join(cmd)) return mocked_cmd if 'cluster_this.py' in cmd: @@ -405,7 +419,7 @@ class TestQAService(unittest.TestCase): # start the QAService (the object under test) qaservice = QAService(exchange=self.tmp_exchange.address) - with qaservice, BusListenerJanitor(qaservice.filtering_buslistener), BusListenerJanitor(qaservice.filtered_buslistener): + with qaservice, BusListenerJanitor(qaservice.filtering_otdbbuslistener), BusListenerJanitor(qaservice.filtering_tmssbuslistener), BusListenerJanitor(qaservice.commands_buslistener): # start listening for QA event messages from the QAService with BusListenerJanitor(SynchronizationQABusListener(exchange=self.tmp_exchange.address)) as qa_listener: @@ -452,13 +466,13 @@ class TestQAService(unittest.TestCase): def mocked_wrap_command_in_cep4_node_ssh_call(cmd, cpu_node_nr, partition, via_head): logger.info('mocked_wrap_command_in_cep4_node_ssh_call for cpu node nr %s via head=%s ' \ 'returning call to bash false', cpu_node_nr, via_head) - return ['false', ';'] + return ['false'] self.wrap_command_in_cep4_node_ssh_call_mock.side_effect = mocked_wrap_command_in_cep4_node_ssh_call # start the QAService (the object under test) qaservice = QAService(exchange=self.tmp_exchange.address) - with qaservice, BusListenerJanitor(qaservice.filtering_buslistener), BusListenerJanitor(qaservice.filtered_buslistener): + with qaservice, BusListenerJanitor(qaservice.filtering_otdbbuslistener), BusListenerJanitor(qaservice.filtering_tmssbuslistener), BusListenerJanitor(qaservice.commands_buslistener): # start listening for QA event messages from the QAService with BusListenerJanitor(SynchronizationQABusListener(exchange=self.tmp_exchange.address)) as qa_listener: @@ -482,6 +496,159 @@ class TestQAService(unittest.TestCase): self.ssh_cmd_list_mock1.assert_not_called() self.ssh_cmd_list_mock2.assert_not_called() + def test_05_qa_service_for_expected_behaviour_on_tmss_events(self): + ''' + This is a "duplicate" of test_01_qa_service_for_expected_behaviour, but then for TMSS. + This test starts a QAService, triggers a TMSS test observation finished event, + and tests if the generated h5 file and plots are as expected. + It is an end-to-end test which does not check the intermediate results. It is assumed that + the intermediate steps are tested in other tests/modules. 
+ ''' + logger.info(' -- test_05_qa_service_for_expected_behaviour_on_tmss_events -- ') + + from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator + from lofar.sas.tmss.tmss.tmssapp.subtasks import create_observation_to_qafile_subtask, create_qafile_to_qaplots_subtask + + tmss_client = self.tmss_test_env.create_tmss_client() + with tmss_client: + # make sure we have the proper templates in the database + self.assertIsNotNone(tmss_client.get_subtask_template(name="QA file conversion")) + self.assertIsNotNone(tmss_client.get_subtask_template(name="QA plots")) + + # override the mock behaviour from setUp for this specific test + def mocked_wrap_command_for_docker(cmd, image_name=None, image_label=None): + # replace the ms2hdf5 command which runs normally in the docker container + # by a call to the create_test_hypercube which fakes the ms2hdf5 conversion for this test. + if 'ms2hdf5' in cmd: + # the create_test_hypercube executable should be available in the PATH environment + hdf5_path = os.path.join(cmd[cmd.index('--output_dir')+1], cmd[cmd.index('--output_filename')+1]) + mocked_cmd = ['create_test_hypercube', '-s 4', '-S 8', '-t 16', hdf5_path] + logger.info('''mocked_wrap_command_for_docker returning mocked command to create test h5 file: '%s', instead of original command: '%s' ''', + ' '.join(mocked_cmd), ' '.join(cmd)) + return mocked_cmd + + if 'cluster_this.py' in cmd: + # replace the cluster command which runs normally in the docker container + # by a call to bash true, so the 'cluster_this' call returns 0 exit code + mocked_cmd = ['true'] + logger.info('''mocked_wrap_command_for_docker returning mocked command: '%s', instead of original command: '%s' ''', + ' '.join(mocked_cmd), ' '.join(cmd)) + return mocked_cmd + + #TODO: merge adder branch into trunk so we can use plot_hdf5_dynamic_spectra on the test-h5 file to create plots + if 'plot_hdf5_dynamic_spectra' in cmd: + # replace the plot_hdf5_dynamic_spectra command which runs normally in the docker container + # by a call to bash true, so the 'plot_hdf5_dynamic_spectra' call returns 0 exit code + mocked_cmd = ['true'] + logger.info('''mocked_wrap_command_for_docker returning mocked command: '%s', instead of original command: '%s' ''', + ' '.join(mocked_cmd), ' '.join(cmd)) + return mocked_cmd + + logger.info('''mocked_wrap_command_for_docker returning original command: '%s' ''', ' '.join(cmd)) + return cmd + + self.wrap_command_for_docker_mock.side_effect = mocked_wrap_command_for_docker + + # start the QAService (the object under test) + qaservice = QAService(exchange=self.tmp_exchange.address) + + with qaservice, tmss_client, BusListenerJanitor(qaservice.filtering_otdbbuslistener), BusListenerJanitor(qaservice.filtering_tmssbuslistener), BusListenerJanitor(qaservice.commands_buslistener): + # start listening for QA event messages from the QAService + with BusListenerJanitor(SynchronizationQABusListener(exchange=self.tmp_exchange.address)) as qa_listener: + tdc = TMSSRESTTestDataCreator(django_api_url=self.tmss_test_env.django_server.url, + auth=(self.tmss_test_env.client_credentials.dbcreds.user, + self.tmss_test_env.client_credentials.dbcreds.password)) + + qafile_subtask_template = tmss_client.get_subtask_template(name="QA file conversion") + qafile_subtask_spec_doc = add_defaults_to_json_object_for_schema({}, qafile_subtask_template['schema']) + + subtask_url = tdc.post_data_and_get_url(tdc.Subtask(specifications_template_url=qafile_subtask_template['url'], + specifications_doc=qafile_subtask_spec_doc), 
+ '/subtask/') + subtask_id = subtask_url.split('/')[-2] + + qaplots_subtask_template = tmss_client.get_subtask_template(name="QA plots") + qaplots_subtask_spec_doc = add_defaults_to_json_object_for_schema({}, qaplots_subtask_template['schema']) + + subtask_url2 = tdc.post_data_and_get_url(tdc.Subtask(specifications_template_url=qaplots_subtask_template['url'], + specifications_doc=qaplots_subtask_spec_doc), '/subtask/') + subtask_id2 = subtask_url2.split('/')[-2] + + # trigger a qa process by setting the tmss subtask to scheduled + # this will result in the QAService actually doing its magic + tmss_client.set_subtask_status(subtask_id, 'scheduled') + + # start waiting until ConvertedMS2Hdf5 event message received (or timeout) + qa_listener.converted_event.wait(30) + + # ConvertedMS2Hdf5 event message should have been sent, so converted_event should have been set + self.assertTrue(qa_listener.converted_event.is_set()) + + # check the converted_msg_content + self.assertTrue('subtask_id' in qa_listener.converted_msg_content) + self.assertTrue('hdf5_file_path' in qa_listener.converted_msg_content) + + + # start waiting until Clustered event message received (or timeout) + qa_listener.clustered_event.wait(30) + + + # Clustered event message should have been sent, so clustered_event should have been set + self.assertTrue(qa_listener.clustered_event.is_set()) + + # check the clustered_msg_content + self.assertTrue('subtask_id' in qa_listener.clustered_msg_content) + self.assertTrue('hdf5_file_path' in qa_listener.clustered_msg_content) + + tmss_client.set_subtask_status(subtask_id2, 'scheduled') + + # start waiting until CreatedInspectionPlots event message received (or timeout) + qa_listener.plotted_event.wait(30) + + # CreatedInspectionPlots event message should have been sent, so plotted_event should have been set + self.assertTrue(qa_listener.plotted_event.is_set()) + + # check the plotted_msg_content + self.assertTrue('otdb_id' in qa_listener.plotted_msg_content) + self.assertTrue('hdf5_file_path' in qa_listener.plotted_msg_content) + self.assertTrue('plot_dir_path' in qa_listener.plotted_msg_content) + + # TODO: merge adder branch into trunk so we can use plot_hdf5_dynamic_spectra on the test-h5 file to create plots, then re-enable the checks on created plots + # # check if the output dirs/files exist + # self.assertTrue(os.path.exists(qa_listener.plotted_msg_content['hdf5_file_path'])) + # logger.info(qa_listener.plotted_msg_content['plot_dir_path']) + # self.assertTrue(os.path.exists(qa_listener.plotted_msg_content['plot_dir_path'])) + # plot_file_names = [f for f in os.listdir(qa_listener.plotted_msg_content['plot_dir_path']) + # if f.endswith('png')] + # self.assertEqual(10, len(plot_file_names)) + # + # auto_correlation_plot_file_names = [f for f in plot_file_names + # if 'auto' in f] + # self.assertEqual(4, len(auto_correlation_plot_file_names)) + # + # complex_plot_file_names = [f for f in plot_file_names + # if 'complex' in f] + # self.assertEqual(6, len(complex_plot_file_names)) + + # start waiting until QAFinished event message received (or timeout) + qa_listener.finished_event.wait(30) + + # QAFinished event message should have been sent, so finished_event should have been set + self.assertTrue(qa_listener.finished_event.is_set()) + + # check the result_msg_content + self.assertTrue('otdb_id' in qa_listener.finished_msg_content) + self.assertTrue('hdf5_file_path' in qa_listener.finished_msg_content) + self.assertTrue('plot_dir_path' in qa_listener.finished_msg_content) + + 
self.wrap_command_for_docker_mock.assert_called() + self.wrap_command_in_cep4_node_ssh_call_mock.assert_called() + self.wrap_command_in_cep4_head_node_ssh_call_mock.assert_called() + self.get_cep4_available_cpu_nodes_mock.assert_called() + self.ssh_cmd_list_mock1.assert_not_called() + self.ssh_cmd_list_mock2.assert_not_called() + + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) diff --git a/SAS/TMSS/CMakeLists.txt b/SAS/TMSS/CMakeLists.txt index a26ff751debaddf70b1958efe95e15e98f7271fc..afc70e4bdb9d18cdc4bedface086d071fb769db6 100644 --- a/SAS/TMSS/CMakeLists.txt +++ b/SAS/TMSS/CMakeLists.txt @@ -1,6 +1,6 @@ -lofar_package(TMSS 0.1 DEPENDS PyCommon pyparameterset) +lofar_package(TMSS 0.1 DEPENDS PyCommon pyparameterset PyMessaging) add_subdirectory(src) add_subdirectory(bin) diff --git a/SAS/TMSS/bin/CMakeLists.txt b/SAS/TMSS/bin/CMakeLists.txt index 0822868224ad008e69af2a7b414687316f061398..447e457176ebedda5204f02f318a9f3cf22fb8fd 100644 --- a/SAS/TMSS/bin/CMakeLists.txt +++ b/SAS/TMSS/bin/CMakeLists.txt @@ -3,5 +3,3 @@ lofar_add_bin_scripts(tmss_test_database) lofar_add_bin_scripts(tmss_test_ldap) lofar_add_bin_scripts(tmss_test_environment) lofar_add_bin_scripts(tmss_manage_django) -lofar_add_bin_scripts(tmss_set_subtask_state) -lofar_add_bin_scripts(tmss_get_subtask_parset) diff --git a/SAS/TMSS/bin/tmss b/SAS/TMSS/bin/tmss index b4898cee0919ae362c6bad77d5e10d222c5bf2a6..078fab655d8c0313eabb965a6c83d9a1687bb97a 100755 --- a/SAS/TMSS/bin/tmss +++ b/SAS/TMSS/bin/tmss @@ -24,18 +24,27 @@ PORT=8008 CREDENTIALS="tmss" LDAP_CREDENTIALS="tmss_ldap" +EXCHANGE="lofar" +BROKER="localhost" # Parse args: -while getopts "p:C:L:h" opt; do +while getopts "p:C:L:e:b:h" opt; do case ${opt} in p ) PORT=${OPTARG} ;; C ) CREDENTIALS=${OPTARG} ;; L ) LDAP_CREDENTIALS=${OPTARG} ;; + e ) EXCHANGE=${OPTARG} ;; + b ) BROKER=${OPTARG} ;; h ) echo "usage: tmss [OPTIONS]" echo " where options are:" echo " -p <port> the port where django runs the rest http interface on. default=$PORT" echo " -C <credentials-name> the name of the database credentials in ~/.lofar/dbcredentials. default=$CREDENTIALS" echo " -L <credentials-name> the name of the ldap credentials in ~/.lofar/dbcredentials. default=$LDAP_CREDENTIALS" + echo "" + echo " Messaging options:" + echo " -b BROKER, Address of the message broker, default: $BROKER" + echo " -e EXCHANGE Bus or queue where the TMSS messages are published. [default: $EXCHANGE]" + exit 0 ;; esac @@ -43,10 +52,12 @@ done echo "!!! This tmss application is for testing only, properly deploy in Nginx or Apache for production use !!!" 
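+# Example invocation with explicit messaging options (hypothetical broker host, other values are the defaults above):
+#   tmss -p 8008 -C tmss -L tmss_ldap -e lofar -b mybroker.example.com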
-echo "Using Django port=$PORT credentials=$CREDENTIALS ldap_credentials=$LDAP_CREDENTIALS" +echo "Using Django port=$PORT credentials=$CREDENTIALS ldap_credentials=$LDAP_CREDENTIALS exchange=$EXCHANGE broker=$BROKER" export TMSS_DBCREDENTIALS=$CREDENTIALS export TMSS_LDAPCREDENTIALS=$LDAP_CREDENTIALS +export TMSS_EXCHANGE=$EXCHANGE +export TMSS_BROKER=$BROKER DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" diff --git a/SAS/TMSS/client/CMakeLists.txt b/SAS/TMSS/client/CMakeLists.txt index 362c98b257a327565da9aa21cb9832d9e2e10759..573d9749e4fa2ac258540634672514382c6a4e38 100644 --- a/SAS/TMSS/client/CMakeLists.txt +++ b/SAS/TMSS/client/CMakeLists.txt @@ -2,12 +2,5 @@ lofar_package(TMSSClient 0.1 DEPENDS PyCommon pyparameterset PyMessaging) lofar_find_package(PythonInterp 3.4 REQUIRED) -include(PythonInstall) - -set(_py_files - tmssbuslistener.py - ) - -python_install(${_py_files} - DESTINATION lofar/sas/tmss/client) - +add_subdirectory(lib) +add_subdirectory(bin) diff --git a/SAS/TMSS/client/bin/CMakeLists.txt b/SAS/TMSS/client/bin/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..501bc8f8b66f37c17a4b74a29fd3f29478bd9287 --- /dev/null +++ b/SAS/TMSS/client/bin/CMakeLists.txt @@ -0,0 +1,4 @@ +lofar_add_bin_scripts(tmss_set_subtask_state) +lofar_add_bin_scripts(tmss_get_subtask_parset) +lofar_add_bin_scripts(tmss_get_subtask) +lofar_add_bin_scripts(tmss_get_subtasks) diff --git a/SAS/TMSS/client/bin/tmss_get_subtask b/SAS/TMSS/client/bin/tmss_get_subtask new file mode 100755 index 0000000000000000000000000000000000000000..61ad27dbca05a70f447f9f28357c95247018658f --- /dev/null +++ b/SAS/TMSS/client/bin/tmss_get_subtask @@ -0,0 +1,23 @@ +#!/usr/bin/python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ +from lofar.sas.tmss.client.mains import main_get_subtask + +if __name__ == "__main__": + main_get_subtask() diff --git a/SAS/TMSS/bin/tmss_get_subtask_parset b/SAS/TMSS/client/bin/tmss_get_subtask_parset similarity index 94% rename from SAS/TMSS/bin/tmss_get_subtask_parset rename to SAS/TMSS/client/bin/tmss_get_subtask_parset index d335874da67e69eb22bfede64d2be3ded3637cbf..56cd6b8ff47d52e444ec1869f85af8ce7f3e31e3 100755 --- a/SAS/TMSS/bin/tmss_get_subtask_parset +++ b/SAS/TMSS/client/bin/tmss_get_subtask_parset @@ -20,7 +20,7 @@ # Script to create, setup, and run a temporary ldap service with fixtures for easy functional testing -from lofar.sas.tmss.query import main_get_subtask_parset +from lofar.sas.tmss.client.mains import main_get_subtask_parset if __name__ == "__main__": main_get_subtask_parset() diff --git a/SAS/TMSS/client/bin/tmss_get_subtasks b/SAS/TMSS/client/bin/tmss_get_subtasks new file mode 100755 index 0000000000000000000000000000000000000000..88d6233b52a7bcdb79e396fc4bb72c69d497477d --- /dev/null +++ b/SAS/TMSS/client/bin/tmss_get_subtasks @@ -0,0 +1,23 @@ +#!/usr/bin/python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
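+# Thin command-line wrapper: argument parsing and output formatting live in
+# lofar.sas.tmss.client.mains (main_get_subtasks in SAS/TMSS/client/lib/mains.py).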
+ +from lofar.sas.tmss.client.mains import main_get_subtasks + +if __name__ == "__main__": + main_get_subtasks() diff --git a/SAS/TMSS/bin/tmss_set_subtask_state b/SAS/TMSS/client/bin/tmss_set_subtask_state similarity index 94% rename from SAS/TMSS/bin/tmss_set_subtask_state rename to SAS/TMSS/client/bin/tmss_set_subtask_state index d8eeb7971e2d8dcbd7c80f50198885222a3b4417..0f63ad453bfa681b5ff5766340a5808e9e53a54b 100755 --- a/SAS/TMSS/bin/tmss_set_subtask_state +++ b/SAS/TMSS/client/bin/tmss_set_subtask_state @@ -20,7 +20,7 @@ # Script to create, setup, and run a temporary ldap service with fixtures for easy functional testing -from lofar.sas.tmss.query import main_set_subtask_state +from lofar.sas.tmss.client.mains import main_set_subtask_state if __name__ == "__main__": main_set_subtask_state() diff --git a/SAS/TMSS/client/lib/CMakeLists.txt b/SAS/TMSS/client/lib/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..94606c743637ebf74951b6d15efd87ec369737eb --- /dev/null +++ b/SAS/TMSS/client/lib/CMakeLists.txt @@ -0,0 +1,12 @@ +lofar_find_package(PythonInterp 3.4 REQUIRED) +include(PythonInstall) + +set(_py_files + tmssbuslistener.py + mains.py + tmss_http_rest_client.py + ) + +python_install(${_py_files} + DESTINATION lofar/sas/tmss/client) + diff --git a/SAS/TMSS/client/lib/mains.py b/SAS/TMSS/client/lib/mains.py new file mode 100644 index 0000000000000000000000000000000000000000..46927a69392a3d5c46a689d7e6346e37d87c0a70 --- /dev/null +++ b/SAS/TMSS/client/lib/mains.py @@ -0,0 +1,64 @@ +import json +import argparse +from pprint import pprint +from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession +from lofar.common.datetimeutils import parseDatetime + + +def main_get_subtask_parset(): + parser = argparse.ArgumentParser() + parser.add_argument("subtask_id", help="The ID of the TMSS subtask to get the parset from") + args = parser.parse_args() + + with TMSSsession.create_from_dbcreds_for_ldap() as session: + print(session.get_subtask_parset(args.subtask_id)) + + +def main_get_subtask(): + parser = argparse.ArgumentParser() + parser.add_argument("subtask_id", type=int, help="The ID of the TMSS subtask to get") + args = parser.parse_args() + + with TMSSsession.create_from_dbcreds_for_ldap() as session: + pprint(session.get_subtask(args.subtask_id)) + + +def main_get_subtasks(): + parser = argparse.ArgumentParser() + parser.add_argument('-s', '--state', help="only get subtasks with this state") + parser.add_argument('--start_time_less_then', help="only get subtasks with a start time less then this timestamp") + parser.add_argument('--start_time_greater_then', help="only get subtasks with a start time greater then this timestamp") + parser.add_argument('--stop_time_less_then', help="only get subtasks with a stop time less then this timestamp") + parser.add_argument('--stop_time_greater_then', help="only get subtasks with a stop time greater then this timestamp") + args = parser.parse_args() + + with TMSSsession.create_from_dbcreds_for_ldap() as session: + result = session.get_subtasks(state=args.state, + start_time_less_then=parseDatetime(args.start_time_less_then) if args.start_time_less_then else None, + start_time_greater_then=parseDatetime(args.start_time_greater_then) if args.start_time_greater_then else None, + stop_time_less_then=parseDatetime(args.stop_time_less_then) if args.stop_time_less_then else None, + stop_time_greater_then=parseDatetime(args.stop_time_greater_then) if args.stop_time_greater_then else None) + 
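+    # Hypothetical command-line usage of this entry point (the timestamp format must be one accepted by parseDatetime):
+    #   tmss_get_subtasks -s scheduled --start_time_less_then '2020-06-01 12:00:00'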
pprint(result['results']) + + +def main_set_subtask_state(): + parser = argparse.ArgumentParser() + parser.add_argument("subtask_id", type=int, help="The ID of the TMSS subtask to set the status on") + parser.add_argument("state", help="The state to set") + args = parser.parse_args() + + with TMSSsession.create_from_dbcreds_for_ldap() as session: + result = session.set_subtask_status(args.subtask_id, args.state) + result_obj = json.loads(result.content.decode('utf-8')) + print("%s now has state %s" % (result_obj['url'], result_obj['state'])) + + +def main_specify_observation_task(): + """ + Ask user for parameter 'taskid' and execute API-call to specify observation + """ + parser = argparse.ArgumentParser() + parser.add_argument("task_id", help="The ID of the TMSS task to specify for observation") + args = parser.parse_args() + with TMSSsession.create_from_dbcreds_for_ldap() as session: + result = session.specify_observation_task(args.task_id) diff --git a/SAS/TMSS/client/lib/tmss_http_rest_client.py b/SAS/TMSS/client/lib/tmss_http_rest_client.py new file mode 100644 index 0000000000000000000000000000000000000000..adf98398ae50fcc4b29a22f85e648fef3c43ba55 --- /dev/null +++ b/SAS/TMSS/client/lib/tmss_http_rest_client.py @@ -0,0 +1,163 @@ +import logging +logger = logging.getLogger(__file__) + +import requests +import os +import json +from datetime import datetime +from lofar.common.datetimeutils import formatDatetime + +# usage example: +# +# with TMSSsession('paulus', 'pauluspass', 'localhost', 8000) as tmsssession: +# response = tmsssession.session.get(url='http://localhost/api/task_draft/') +# print(response) + + +#TODO: add unittests! +class TMSSsession(object): + + OPENID = "openid" + BASICAUTH = "basicauth" + + def __init__(self, username, password, host, port: int=8000, authentication_method=OPENID): + self.session = requests.session() + self.username = username + self.password = password + self.base_url = "http://%s:%d/api" % (host, port) + self.authentication_method = authentication_method + + @staticmethod + def create_from_dbcreds_for_ldap(dbcreds_name: str=None): + '''Factory method to create a TMSSSession object which uses the credentials in the ~/.lofar/dbcredentials/<dbcreds_name>.ini file + (mis)use the DBCredentials to get a url with user/pass for tmss + the contents below are used to contruct a url like this: http://localhost:8000/api + [database:TMSS] + host=localhost + user=<username> + password=<password> + type=http + port=8000 + ''' + if dbcreds_name is None: + dbcreds_name = os.environ.get("TMSS_CLIENT_DBCREDENTIALS", "TMSSClient") + + from lofar.common.dbcredentials import DBCredentials + dbcreds = DBCredentials().get(dbcreds_name) + return TMSSsession(username=dbcreds.user, password=dbcreds.password, + host=dbcreds.host, + port=dbcreds.port, + authentication_method=TMSSsession.BASICAUTH) + + def __enter__(self): + self.open() + + # return the request session for use within the context + return self + + def __exit__(self, type, value, traceback): + self.close() + + def open(self): + '''open the request session and login''' + self.session.__enter__() + self.session.verify = False + + if self.authentication_method == self.OPENID: + # get authentication page of OIDC through TMSS redirect + response = self.session.get(self.base_url.replace('/api', '/oidc/authenticate/'), allow_redirects=True) + csrftoken = self.session.cookies['csrftoken'] + + # post user credentials to login page, also pass csrf token + data = {'username': self.username, 'password': self.password, 
'csrfmiddlewaretoken': csrftoken}
+            response = self.session.post(url=response.url, data=data, allow_redirects=True)
+
+            # raise when something went wrong
+            if "The username and/or password you specified are not correct" in response.content.decode('utf8'):
+                raise ValueError("The username and/or password you specified are not correct")
+            if response.status_code != 200:
+                raise ConnectionError(response.content.decode('utf8'))
+
+        if self.authentication_method == self.BASICAUTH:
+            self.session.auth = (self.username, self.password)
+
+    def close(self):
+        '''close the request session and logout'''
+        try:
+            # logout user
+            self.session.get(self.base_url + '/logout/', allow_redirects=True)
+            self.session.close()
+        except:
+            pass
+
+    def set_subtask_status(self, subtask_id: int, status: str) -> requests.Response:
+        '''set the status for the given subtask'''
+        result = self.session.patch(url='%s/subtask/%s/' % (self.base_url, subtask_id),
+                                    json={'state': "%s/subtask_state/%s/" % (self.base_url, status)})
+        return result
+
+    def get_subtask_parset(self, subtask_id) -> str:
+        '''get the lofar parameterset (as text) for the given subtask'''
+        result = self.session.get(url='%s/subtask/%s/parset' % (self.base_url, subtask_id))
+        if result.status_code >= 200 and result.status_code < 300:
+            return result.content.decode('utf-8')
+        raise Exception("Could not get parameterset for subtask %s.\nResponse: %s" % (subtask_id, result))
+
+    def get_subtask(self, subtask_id: int) -> dict:
+        '''get the subtask as dict for the given subtask'''
+        path = 'subtask/%s' % (subtask_id,)
+        return self.get_path_as_json_object(path)
+
+    def get_subtasks(self, state: str=None,
+                     start_time_less_then: datetime=None, start_time_greater_then: datetime=None,
+                     stop_time_less_then: datetime = None, stop_time_greater_then: datetime = None) -> list:
+        '''get subtasks (as list of dicts) filtered by the given parameters'''
+        clauses = {}
+        if state is not None:
+            clauses["state__value"] = state
+        if start_time_less_then is not None:
+            clauses["start_time__lt"] = formatDatetime(start_time_less_then)
+        if start_time_greater_then is not None:
+            clauses["start_time__gt"] = formatDatetime(start_time_greater_then)
+        if stop_time_less_then is not None:
+            clauses["stop_time__lt"] = formatDatetime(stop_time_less_then)
+        if stop_time_greater_then is not None:
+            clauses["stop_time__gt"] = formatDatetime(stop_time_greater_then)
+
+        return self.get_path_as_json_object("subtask", clauses)
+
+    def get_path_as_json_object(self, path: str, params={}) -> dict:
+        '''get resource at the given path, interpret it as json, and return it as a native object'''
+        full_url = '%s/%s/' % (self.base_url, path)
+        return self.get_url_as_json_object(full_url, params=params)
+
+    def get_url_as_json_object(self, full_url: str, params={}) -> dict:
+        '''get resource at the given full url (including http://<base_url>), interpret it as json, and return it as a native object'''
+        if "format=json" not in full_url or params.get("format") != "json":
+            params['format'] = 'json'
+
+        result = self.session.get(url=full_url, params=params)
+        if result.status_code >= 200 and result.status_code < 300:
+            return json.loads(result.content.decode('utf-8'))
+        raise Exception("Could not get %s.\nResponse: %s" % (full_url, result))
+
+    def get_subtask_template(self, name: str, version: str=None) -> dict:
+        '''get the subtask_template as dict for the given name (and version)'''
+        clauses = {}
+        if name is not None:
+            clauses["name"] = name
+        if version is not None:
+            clauses["version"] = version
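+        # The clauses dict is sent as HTTP query parameters (via get_path_as_json_object),
+        # so these keys filter the subtask_template endpoint in the same way the state__value /
+        # start_time__lt / stop_time__gt clauses filter the subtask endpoint in get_subtasks().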
+        result = self.get_path_as_json_object('subtask_template', clauses)
+        if result['count'] > 1:
+            raise ValueError("Found more than one SubtaskTemplate for clauses: %s" % (clauses,))
+        elif result['count'] == 1:
+            return result['results'][0]
+        return None
+
+    def specify_observation_task(self, task_id: int) -> str:
+        """specify observation for the given draft task by just doing a REST API call"""
+        result = self.session.get(url='%s/task/%s/specify_observation' % (self.base_url, task_id))
+        if result.status_code >= 200 and result.status_code < 300:
+            return result.content.decode('utf-8')
+        raise Exception("Could not specify observation for task %s.\nResponse: %s" % (task_id, result))
diff --git a/SAS/TMSS/client/tmssbuslistener.py b/SAS/TMSS/client/lib/tmssbuslistener.py
similarity index 98%
rename from SAS/TMSS/client/tmssbuslistener.py
rename to SAS/TMSS/client/lib/tmssbuslistener.py
index 456fda768284f8a81315f89fb901b5d1a226cf04..30c49bb7ce9ae2bd70093821088dbd1a4667e607 100644
--- a/SAS/TMSS/client/tmssbuslistener.py
+++ b/SAS/TMSS/client/lib/tmssbuslistener.py
@@ -72,8 +72,8 @@ class TMSSSubTaskEventMessageHandler(AbstractMessageHandler):
             self.onSubTaskStarted(**msg.content)
         elif stripped_subject == 'Finishing':
             self.onSubTaskFinishing(**msg.content)
-        elif stripped_subject == 'Finishing':
-            self.onSubTaskDefined(**msg.content)
+        elif stripped_subject == 'Finished':
+            self.onSubTaskFinished(**msg.content)
         elif stripped_subject == 'Cancelling':
             self.onSubTaskCancelling(**msg.content)
         elif stripped_subject == 'Cancelled':
@@ -211,7 +211,7 @@ if __name__ == '__main__':
     class ExampleTMSSSubTaskEventMessageHandler(TMSSSubTaskEventMessageHandler):
         def onSubTaskDefined(self, **kwargs):
-            logger.info("MyTMSSSubTaskEventMessageHandler.onSubTaskDefined(%s)", kwargs)
+            logger.debug("MyTMSSSubTaskEventMessageHandler.onSubTaskDefined(%s)", kwargs)

     with TMSSSubTaskBusListener(handler_type=ExampleTMSSSubTaskEventMessageHandler):
         waitForInterrupt()
diff --git a/SAS/TMSS/src/CMakeLists.txt b/SAS/TMSS/src/CMakeLists.txt
index 72e8a771833bc25214aa2066583f04cace5aa359..fd5a8389a74c27f43c3def1fadb5a87813d9212f 100644
--- a/SAS/TMSS/src/CMakeLists.txt
+++ b/SAS/TMSS/src/CMakeLists.txt
@@ -26,8 +26,6 @@ find_python_module(swagger_spec_validator REQUIRED) # pip install swagger-spec-v
 set(_py_files
     manage.py
     remakemigrations.py
-    util.py
-    query.py
     )

 python_install(${_py_files}
diff --git a/SAS/TMSS/src/query.py b/SAS/TMSS/src/query.py
deleted file mode 100644
index d8b352d7b82648af1834f32876bc2e08e0207142..0000000000000000000000000000000000000000
--- a/SAS/TMSS/src/query.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import argparse
-from .util import TMSSsession
-
-TMSS_USER = 'paulus'
-TMSS_PASS = 'pauluspass'
-TMSS_HOST = 'http://localhost:8008'
-TMSS_AUTHENTICATION_METHOD = TMSSsession.BASICAUTH
-
-def get_subtask_parset(subtask_id: int):
-    with TMSSsession(TMSS_USER, TMSS_PASS, TMSS_HOST, TMSS_AUTHENTICATION_METHOD) as session:
-        return session.get(url=TMSS_HOST+'/api/subtask/%s/parset' % subtask_id)
-
-
-def main_get_subtask_parset():
-
-    parser = argparse.ArgumentParser()
-    parser.add_argument("subtask_id", help="The ID of the TMSS subtask to get the parset from")
-    args = parser.parse_args()
-    print(get_subtask_parset(args.subtask_id).content.decode('utf-8'))
-
-
-def set_subtask_state(subtask_id: int, state: str):
-    with TMSSsession(TMSS_USER, TMSS_PASS, TMSS_HOST, TMSS_AUTHENTICATION_METHOD) as session:
-        return session.patch(url=TMSS_HOST+'/api/subtask/%s/' % subtask_id,
-                             data={'state':
TMSS_HOST+"/api/subtask_state/%s/" % state}) - - -def main_set_subtask_state(): - - parser = argparse.ArgumentParser() - parser.add_argument("subtask_id", help="The ID of the TMSS subtask to set the status on") - parser.add_argument("state", help="The state to set") - args = parser.parse_args() - set_subtask_state(args.subtask_id, args.state) \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/settings.py b/SAS/TMSS/src/tmss/settings.py index 12df0bce00548cfc02dd5e7122dbdc1403ac11c4..580814f99d223c52ffe4cfa7255c02346efd001a 100644 --- a/SAS/TMSS/src/tmss/settings.py +++ b/SAS/TMSS/src/tmss/settings.py @@ -30,7 +30,10 @@ LOGGING = { 'formatters': { 'django.server': { '()': 'django.utils.log.ServerFormatter', - 'format': '[%(server_time)s] %(message)s', + 'format': '%(asctime)s %(levelname)s %(message)s', + }, + 'lofar': { + 'format': '%(asctime)s %(levelname)s %(message)s', }, }, 'handlers': { @@ -49,6 +52,11 @@ LOGGING = { 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' }, + 'lofar': { + 'level': 'DEBUG', + 'class': 'logging.StreamHandler', + 'formatter': 'lofar', + }, }, 'loggers': { 'django': { @@ -69,6 +77,10 @@ LOGGING = { 'level': 'DEBUG', # change debug level as appropiate 'propagate': False, }, + 'lofar': { + 'handlers': ['lofar'], + 'level': 'INFO', + }, } } @@ -253,7 +265,7 @@ if "OIDC_RP_CLIENT_ID" in os.environ.keys(): AUTHENTICATION_BACKENDS += ('mozilla_django_oidc.auth.OIDCAuthenticationBackend',) MIDDLEWARE.append('mozilla_django_oidc.middleware.SessionRefresh') -if len(AUTHENTICATION_BACKENDS) is 1: +if len(AUTHENTICATION_BACKENDS) == 1: REST_FRAMEWORK['DEFAULT_PERMISSION_CLASSES'].append('rest_framework.permissions.AllowAny') # todo: Whoo! This seems unsafe! Maybe we should at least have users explicitly disable authentication on startup?! logger.warning("No authentication configured! 
please set either OIDC_RP_CLIENT_ID or TMSS_LDAPCREDENTIALS environment variable.") diff --git a/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt b/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt index aae86f69ee7e51a1c1339d3007b88287ffa5778a..3a7daaa829f09722bd78609d1ec653ec2240d133 100644 --- a/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/tmssapp/CMakeLists.txt @@ -9,6 +9,7 @@ set(_py_files populate.py validation.py subtasks.py + tasks.py ) python_install(${_py_files} diff --git a/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py b/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py index 738ac5ffacf91493ff58caf3b6bba110c1db367f..457231416786fbd2ab24a2827ef990eb3d8e2978 100644 --- a/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py +++ b/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py @@ -22,6 +22,7 @@ from lofar.parameterset import parameterset from lofar.common.datetimeutils import formatDatetime from lofar.common.json_utils import add_defaults_to_json_object_for_schema from lofar.sas.tmss.tmss.exceptions import * +from datetime import datetime def _convert_to_parset_for_observationcontrol_schema(subtask: models.Subtask) -> parameterset: # make sure the spec is complete (including all non-filled in properties with default) @@ -244,7 +245,7 @@ def _convert_to_parset_for_pipelinecontrol_schema(subtask: models.Subtask) -> pa parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.type"] = "demixer" parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.steps"] = "[%s]" % ",".join(dppp_steps) - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msout.storagemanager.name"] = spec["storagemanager"] + parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msout.storagemanager.name"] = spec["storagemanager"] # todo: needs to be emptystring when standard/basic/non-dysco? # Dataproducts parset["ObsSW.Observation.DataProducts.Input_Correlated.enabled"] = "true" @@ -313,4 +314,7 @@ def convert_to_parset(subtask: models.Subtask) -> parameterset: except KeyError: raise ConversionException("Cannot convert subtask id=%d to parset. 
No conversion routine available for specifications_template='%s'" % ( subtask.id, subtask.specifications_template.name)) - return convertor(subtask) + + parset_str = convertor(subtask) + header = "# THIS PARSET WAS GENERATED BY TMSS FROM THE SPECIFICATION OF SUBTASK ID=%d ON %s\n" % (subtask.pk, formatDatetime(datetime.utcnow())) + return header + parset_str \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py index 8916d0ee2eee37f56a3515982c54e7c62fe36e8c..59f8844375a6d721e5a569c6ef3bd919ed86d94d 100644 --- a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py +++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 2.2.10 on 2020-04-17 08:37 +# Generated by Django 2.2.12 on 2020-05-27 09:15 from django.conf import settings import django.contrib.postgres.fields @@ -26,6 +26,22 @@ class Migration(migrations.Migration): 'abstract': False, }, ), + migrations.CreateModel( + name='AntennaSet', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('rcus', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=128)), + ('inputs', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, size=128)), + ], + options={ + 'abstract': False, + }, + ), migrations.CreateModel( name='Cluster', fields=[ @@ -94,7 +110,21 @@ class Migration(migrations.Migration): ('expected_size', models.BigIntegerField(help_text='Expected size of dataproduct size, in bytes. Used for scheduling purposes. NULL if size is unknown (NULLable).', null=True)), ('size', models.BigIntegerField(help_text='Dataproduct size, in bytes. Used for accounting purposes. 
NULL if size is (yet) unknown (NULLable).', null=True)), ('feedback_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Dataproduct properties, as reported by the producing process.')), - ('dataformat', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DataproductArchiveInfo', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('storage_ticket', models.CharField(help_text='Archive-system identifier.', max_length=128)), + ('public_since', models.DateTimeField(help_text='Dataproduct is available for public download since this moment, or NULL if dataproduct is not (NULLable).', null=True)), + ('corrupted_since', models.DateTimeField(help_text='Earliest timestamp from which this dataproduct is known to be partially or fully corrupt, or NULL if dataproduct is not known to be corrupt (NULLable).', null=True)), ], options={ 'abstract': False, @@ -112,6 +142,16 @@ class Migration(migrations.Migration): ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), ], + ), + migrations.CreateModel( + name='DataproductHash', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('hash', models.CharField(help_text='Hash value.', max_length=128)), + ], options={ 'abstract': False, }, @@ -128,6 +168,16 @@ class Migration(migrations.Migration): ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), ], + ), + migrations.CreateModel( + name='DataproductTransform', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('identity', models.BooleanField(help_text='TRUE if this transform only copies, tars, or losslessly compresses its input, FALSE if the transform changes the data. 
Allows for efficient reasoning about data duplication.')), + ], options={ 'abstract': False, }, @@ -141,6 +191,99 @@ class Migration(migrations.Migration): 'abstract': False, }, ), + migrations.CreateModel( + name='DefaultDataproductSpecificationsTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DefaultGeneratorTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DefaultSchedulingUnitTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DefaultSubtaskTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='DefaultTaskTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + 
name='DefaultWorkRelationSelectionTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(max_length=128, unique=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Filesystem', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('capacity', models.BigIntegerField(help_text='Capacity in bytes')), + ], + options={ + 'abstract': False, + }, + ), migrations.CreateModel( name='GeneratorTemplate', fields=[ @@ -154,9 +297,6 @@ class Migration(migrations.Migration): ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), ('create_function', models.CharField(help_text='Python function to call to execute the generator.', max_length=128)), ], - options={ - 'abstract': False, - }, ), migrations.CreateModel( name='Project', @@ -171,7 +311,26 @@ class Migration(migrations.Migration): ('private_data', models.BooleanField(default=True, help_text='True if data of this project is sensitive. 
Sensitive data is not made public.')), ('expert', models.BooleanField(default=False, help_text='Expert projects put more responsibility on the PI.')), ('filler', models.BooleanField(default=False, help_text='Use this project to fill up idle telescope time.')), - ('cycle', models.ForeignKey(help_text='Cycle(s) to which this project belongs (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='projects', to='tmssapp.Cycle')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ProjectQuota', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('value', models.FloatField(help_text='Resource Quota value')), + ], + ), + migrations.CreateModel( + name='ResourceType', + fields=[ + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)), ], options={ 'abstract': False, @@ -250,8 +409,6 @@ class Migration(migrations.Migration): ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), ('requirements_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Scheduling and/or quality requirements for this run.')), ('generator_instance_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Parameter value that generated this run draft (NULLable).', null=True)), - ('copies', models.ForeignKey(help_text='Source reference, if we are a copy (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='copied_from', to='tmssapp.SchedulingUnitDraft')), - ('copy_reason', models.ForeignKey(help_text='Reason why source was copied (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.CopyReason')), ], options={ 'abstract': False, @@ -269,9 +426,6 @@ class Migration(migrations.Migration): ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), ], - options={ - 'abstract': False, - }, ), migrations.CreateModel( name='StationType', @@ -294,23 +448,19 @@ class Migration(migrations.Migration): ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Final specifications, as input for the controller.')), ('do_cancel', models.DateTimeField(help_text='Timestamp when the subtask has been ordered to cancel (NULLable).', null=True)), ('priority', models.IntegerField(help_text='Absolute priority of this subtask (higher value means more important).')), - ('cluster', models.ForeignKey(help_text='Where the Subtask is scheduled to run (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Cluster')), - ('created_or_updated_by_user', models.ForeignKey(editable=False, help_text='The user who created / updated the subtask.', null=True, on_delete=django.db.models.deletion.PROTECT, 
to=settings.AUTH_USER_MODEL)), - ('schedule_method', models.ForeignKey(help_text='Which method to use for scheduling this Subtask. One of (MANUAL, BATCH, DYNAMIC).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ScheduleMethod')), ], options={ 'abstract': False, }, ), migrations.CreateModel( - name='SubtaskConnector', + name='SubtaskInput', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('dataformats', models.ManyToManyField(blank=True, to='tmssapp.Dataformat')), - ('datatype', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Datatype')), + ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter to apply to the dataproducts of the producer, to derive input dataproducts when scheduling.')), ], options={ 'abstract': False, @@ -328,6 +478,15 @@ class Migration(migrations.Migration): ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), ], + ), + migrations.CreateModel( + name='SubtaskOutput', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ], options={ 'abstract': False, }, @@ -341,6 +500,34 @@ class Migration(migrations.Migration): 'abstract': False, }, ), + migrations.CreateModel( + name='SubtaskStateLog', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + ('user_identifier', models.CharField(editable=False, help_text='The ID of the user who changed the state of the subtask.', max_length=128, null=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SubtaskTemplate', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), + ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), + 
('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), + ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), + ('queue', models.BooleanField(default=False)), + ('realtime', models.BooleanField(default=False)), + ], + ), migrations.CreateModel( name='SubtaskType', fields=[ @@ -381,8 +568,6 @@ class Migration(migrations.Migration): ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('dataformats', models.ManyToManyField(blank=True, to='tmssapp.Dataformat')), - ('datatype', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Datatype')), ], options={ 'abstract': False, @@ -398,98 +583,174 @@ class Migration(migrations.Migration): ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Specifications for this task.')), - ('copies', models.ForeignKey(help_text='Source reference, if we are a copy (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='copied_from', to='tmssapp.TaskDraft')), - ('copy_reason', models.ForeignKey(help_text='Reason why source was copied (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.CopyReason')), - ('scheduling_unit_draft', models.ForeignKey(help_text='Scheduling Unit draft to which this task draft belongs.', on_delete=django.db.models.deletion.CASCADE, related_name='task_drafts', to='tmssapp.SchedulingUnitDraft')), ], options={ 'abstract': False, }, ), migrations.CreateModel( - name='TaskTemplate', + name='TaskRelationBlueprint', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), - ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), - ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), - ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), - ('validation_code_js', models.CharField(help_text='JavaScript code for additional (complex) validation.', max_length=128)), + ('selection_doc', 
django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')), ], options={ 'abstract': False, }, ), migrations.CreateModel( - name='WorkRelationSelectionTemplate', + name='TaskRelationDraft', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), - ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), - ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), - ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), + ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')), ], options={ 'abstract': False, }, ), migrations.CreateModel( - name='TaskRelationDraft', + name='TaskTemplate', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')), - ('consumer', models.ForeignKey(help_text='Task Draft that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskDraft')), - ('dataformat', models.ForeignKey(help_text='Selected data format to use. One of (MS, HDF5).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat')), - ('input', models.ForeignKey(help_text='Input connector of consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs_task_relation_draft', to='tmssapp.TaskConnector')), - ('output', models.ForeignKey(help_text='Output connector of producer.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs_task_relation_draft', to='tmssapp.TaskConnector')), - ('producer', models.ForeignKey(help_text='Task Draft that has the output connector. 
NOTE: The producer does typically, but not necessarily, belong to the same Scheduling Unit (or even the same Project) as the consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskDraft')), - ('selection_template', models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.WorkRelationSelectionTemplate')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), + ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), + ('validation_code_js', models.CharField(help_text='JavaScript code for additional (complex) validation.', max_length=128)), ], - options={ - 'abstract': False, - }, ), migrations.CreateModel( - name='TaskRelationBlueprint', + name='WorkRelationSelectionTemplate', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter for selecting dataproducts from the output role.')), - ('consumer', models.ForeignKey(help_text='Task Blueprint that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskBlueprint')), - ('dataformat', models.ForeignKey(help_text='Selected data format to use.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat')), - ('draft', models.ForeignKey(help_text='Task Relation Draft which this work request instantiates.', on_delete=django.db.models.deletion.CASCADE, related_name='related_task_relation_blueprint', to='tmssapp.TaskRelationDraft')), - ('input', models.ForeignKey(help_text='Input connector of consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs_task_relation_blueprint', to='tmssapp.TaskConnector')), - ('output', models.ForeignKey(help_text='Output connector of producer.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs_task_relation_blueprint', to='tmssapp.TaskConnector')), - ('producer', models.ForeignKey(help_text='Task Blueprint that has the output connector.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskBlueprint')), - ('selection_template', models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.WorkRelationSelectionTemplate')), + ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), + ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), + ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), + ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the 
configurable parameters needed to use this template.')), ], - options={ - 'abstract': False, - }, + ), + migrations.AddConstraint( + model_name='workrelationselectiontemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='WorkRelationSelectionTemplate_unique_name_version'), + ), + migrations.AddConstraint( + model_name='tasktemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='TaskTemplate_unique_name_version'), ), migrations.AddField( - model_name='taskdraft', - name='specifications_template', - field=models.ForeignKey(help_text='Schema used for requirements_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.TaskTemplate'), + model_name='taskrelationdraft', + name='consumer', + field=models.ForeignKey(help_text='Task Draft that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskDraft'), ), migrations.AddField( - model_name='taskconnector', - name='input_of', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='inputs', to='tmssapp.TaskTemplate'), + model_name='taskrelationdraft', + name='dataformat', + field=models.ForeignKey(help_text='Selected data format to use. One of (MS, HDF5).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat'), ), migrations.AddField( - model_name='taskconnector', + model_name='taskrelationdraft', + name='input', + field=models.ForeignKey(help_text='Input connector of consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs_task_relation_draft', to='tmssapp.TaskConnector'), + ), + migrations.AddField( + model_name='taskrelationdraft', + name='output', + field=models.ForeignKey(help_text='Output connector of producer.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs_task_relation_draft', to='tmssapp.TaskConnector'), + ), + migrations.AddField( + model_name='taskrelationdraft', + name='producer', + field=models.ForeignKey(help_text='Task Draft that has the output connector. 
NOTE: The producer does typically, but not necessarily, belong to the same Scheduling Unit (or even the same Project) as the consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskDraft'), + ), + migrations.AddField( + model_name='taskrelationdraft', + name='selection_template', + field=models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.WorkRelationSelectionTemplate'), + ), + migrations.AddField( + model_name='taskrelationblueprint', + name='consumer', + field=models.ForeignKey(help_text='Task Blueprint that has the input connector.', on_delete=django.db.models.deletion.CASCADE, related_name='consumed_by', to='tmssapp.TaskBlueprint'), + ), + migrations.AddField( + model_name='taskrelationblueprint', + name='dataformat', + field=models.ForeignKey(help_text='Selected data format to use.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat'), + ), + migrations.AddField( + model_name='taskrelationblueprint', + name='draft', + field=models.ForeignKey(help_text='Task Relation Draft which this work request instantiates.', on_delete=django.db.models.deletion.CASCADE, related_name='related_task_relation_blueprint', to='tmssapp.TaskRelationDraft'), + ), + migrations.AddField( + model_name='taskrelationblueprint', + name='input', + field=models.ForeignKey(help_text='Input connector of consumer.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs_task_relation_blueprint', to='tmssapp.TaskConnector'), + ), + migrations.AddField( + model_name='taskrelationblueprint', + name='output', + field=models.ForeignKey(help_text='Output connector of producer.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs_task_relation_blueprint', to='tmssapp.TaskConnector'), + ), + migrations.AddField( + model_name='taskrelationblueprint', + name='producer', + field=models.ForeignKey(help_text='Task Blueprint that has the output connector.', on_delete=django.db.models.deletion.CASCADE, related_name='produced_by', to='tmssapp.TaskBlueprint'), + ), + migrations.AddField( + model_name='taskrelationblueprint', + name='selection_template', + field=models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.WorkRelationSelectionTemplate'), + ), + migrations.AddField( + model_name='taskdraft', + name='copies', + field=models.ForeignKey(help_text='Source reference, if we are a copy (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='copied_from', to='tmssapp.TaskDraft'), + ), + migrations.AddField( + model_name='taskdraft', + name='copy_reason', + field=models.ForeignKey(help_text='Reason why source was copied (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.CopyReason'), + ), + migrations.AddField( + model_name='taskdraft', + name='scheduling_unit_draft', + field=models.ForeignKey(help_text='Scheduling Unit draft to which this task draft belongs.', on_delete=django.db.models.deletion.CASCADE, related_name='task_drafts', to='tmssapp.SchedulingUnitDraft'), + ), + migrations.AddField( + model_name='taskdraft', + name='specifications_template', + field=models.ForeignKey(help_text='Schema used for requirements_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.TaskTemplate'), + ), + migrations.AddField( + model_name='taskconnector', + name='dataformats', + field=models.ManyToManyField(blank=True, to='tmssapp.Dataformat'), + ), + 
migrations.AddField( + model_name='taskconnector', + name='datatype', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Datatype'), + ), + migrations.AddField( + model_name='taskconnector', + name='input_of', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='inputs', to='tmssapp.TaskTemplate'), + ), + migrations.AddField( + model_name='taskconnector', name='output_of', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='outputs', to='tmssapp.TaskTemplate'), ), @@ -513,89 +774,79 @@ class Migration(migrations.Migration): name='specifications_template', field=models.ForeignKey(help_text='Schema used for specifications_doc (IMMUTABLE).', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.TaskTemplate'), ), - migrations.CreateModel( - name='SubtaskTemplate', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), - ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), - ('version', models.CharField(help_text='Version of this template (with respect to other templates of the same name).', max_length=128)), - ('schema', django.contrib.postgres.fields.jsonb.JSONField(help_text='Schema for the configurable parameters needed to use this template.')), - ('queue', models.BooleanField(default=False)), - ('realtime', models.BooleanField(default=False)), - ('type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskType')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='subtasktemplate', + name='type', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskType'), ), - migrations.CreateModel( - name='SubtaskStateLog', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('user_identifier', models.CharField(editable=False, help_text='The ID of the user who changed the state of the subtask.', max_length=128, null=True)), - ('new_state', models.ForeignKey(editable=False, help_text='Subtask state after update (see Subtask State Machine).', on_delete=django.db.models.deletion.PROTECT, related_name='is_new_state_of', to='tmssapp.SubtaskState')), - ('old_state', models.ForeignKey(editable=False, help_text='Subtask state before update (see Subtask State Machine).', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='is_old_state_of', to='tmssapp.SubtaskState')), - ('subtask', models.ForeignKey(editable=False, help_text='Subtask to which this state change refers.', 
on_delete=django.db.models.deletion.CASCADE, to='tmssapp.Subtask')), - ('user', models.ForeignKey(editable=False, help_text='The user who changed the state of the subtask.', null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='subtaskstatelog', + name='new_state', + field=models.ForeignKey(editable=False, help_text='Subtask state after update (see Subtask State Machine).', on_delete=django.db.models.deletion.PROTECT, related_name='is_new_state_of', to='tmssapp.SubtaskState'), ), - migrations.CreateModel( - name='SubtaskOutput', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('connector', models.ForeignKey(help_text='Which connector this Subtask Output implements.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.SubtaskConnector')), - ('subtask', models.ForeignKey(help_text='Subtask to which this output specification refers.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.Subtask')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='subtaskstatelog', + name='old_state', + field=models.ForeignKey(editable=False, help_text='Subtask state before update (see Subtask State Machine).', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='is_old_state_of', to='tmssapp.SubtaskState'), ), - migrations.CreateModel( - name='SubtaskInput', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('selection_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Filter to apply to the dataproducts of the producer, to derive input dataproducts when scheduling.')), - ('connector', models.ForeignKey(help_text='Which connector this Task Input implements.', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.SubtaskConnector')), - ('dataproducts', models.ManyToManyField(help_text='The Dataproducts resulting from application of the filter at time of scheduling Although the dataproducts are simply the result of applying the filter on immutable data, the filter application could change over time. 
We thus store the result of this filtering directly to retain which input was specified for the task..', to='tmssapp.Dataproduct')), - ('producer', models.ForeignKey(help_text='The Subtask Output providing the input dataproducts.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskOutput')), - ('selection_template', models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskInputSelectionTemplate')), - ('subtask', models.ForeignKey(help_text='Subtask to which this input specification refers.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.Subtask')), - ('task_relation_blueprint', models.ForeignKey(help_text='Task Relation Blueprint which this Subtask Input implements (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.TaskRelationBlueprint')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='subtaskstatelog', + name='subtask', + field=models.ForeignKey(editable=False, help_text='Subtask to which this state change refers.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.Subtask'), ), migrations.AddField( - model_name='subtaskconnector', - name='input_of', - field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.PROTECT, related_name='outputs', to='tmssapp.SubtaskTemplate'), + model_name='subtaskstatelog', + name='user', + field=models.ForeignKey(editable=False, help_text='The user who changed the state of the subtask.', null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL), ), migrations.AddField( - model_name='subtaskconnector', - name='output_of', - field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.PROTECT, related_name='inputs', to='tmssapp.SubtaskTemplate'), + model_name='subtaskoutput', + name='subtask', + field=models.ForeignKey(help_text='Subtask to which this output specification refers.', on_delete=django.db.models.deletion.CASCADE, related_name='outputs', to='tmssapp.Subtask'), + ), + migrations.AddConstraint( + model_name='subtaskinputselectiontemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='SubtaskInputSelectionTemplate_unique_name_version'), ), migrations.AddField( - model_name='subtaskconnector', - name='role', - field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Role'), + model_name='subtaskinput', + name='dataproducts', + field=models.ManyToManyField(help_text='The Dataproducts resulting from application of the filter at time of scheduling Although the dataproducts are simply the result of applying the filter on immutable data, the filter application could change over time. 
We thus store the result of this filtering directly to retain which input was specified for the task..', to='tmssapp.Dataproduct'), + ), + migrations.AddField( + model_name='subtaskinput', + name='producer', + field=models.ForeignKey(help_text='The SubtaskOutput producing the input dataproducts for this SubtaskInput.', on_delete=django.db.models.deletion.PROTECT, related_name='consumers', to='tmssapp.SubtaskOutput'), + ), + migrations.AddField( + model_name='subtaskinput', + name='selection_template', + field=models.ForeignKey(help_text='Schema used for selection_doc.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskInputSelectionTemplate'), + ), + migrations.AddField( + model_name='subtaskinput', + name='subtask', + field=models.ForeignKey(help_text='Subtask to which this input specification refers.', on_delete=django.db.models.deletion.CASCADE, related_name='inputs', to='tmssapp.Subtask'), + ), + migrations.AddField( + model_name='subtaskinput', + name='task_relation_blueprint', + field=models.ForeignKey(help_text='Task Relation Blueprint which this Subtask Input implements (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, to='tmssapp.TaskRelationBlueprint'), + ), + migrations.AddField( + model_name='subtask', + name='cluster', + field=models.ForeignKey(help_text='Where the Subtask is scheduled to run (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Cluster'), + ), + migrations.AddField( + model_name='subtask', + name='created_or_updated_by_user', + field=models.ForeignKey(editable=False, help_text='The user who created / updated the subtask.', null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='subtask', + name='schedule_method', + field=models.ForeignKey(help_text='Which method to use for scheduling this Subtask. 
One of (MANUAL, BATCH, DYNAMIC).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ScheduleMethod'), ), migrations.AddField( model_name='subtask', @@ -612,6 +863,20 @@ class Migration(migrations.Migration): name='task_blueprint', field=models.ForeignKey(help_text='Task Blueprint to which this Subtask belongs.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='subtasks', to='tmssapp.TaskBlueprint'), ), + migrations.AddConstraint( + model_name='schedulingunittemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='SchedulingUnitTemplate_unique_name_version'), + ), + migrations.AddField( + model_name='schedulingunitdraft', + name='copies', + field=models.ForeignKey(help_text='Source reference, if we are a copy (NULLable).', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='copied_from', to='tmssapp.SchedulingUnitDraft'), + ), + migrations.AddField( + model_name='schedulingunitdraft', + name='copy_reason', + field=models.ForeignKey(help_text='Reason why source was copied (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.CopyReason'), + ), migrations.AddField( model_name='schedulingunitdraft', name='requirements_template', @@ -647,174 +912,102 @@ class Migration(migrations.Migration): name='project', field=models.ForeignKey(help_text='Project to which this scheduling set belongs.', on_delete=django.db.models.deletion.PROTECT, related_name='scheduling_sets', to='tmssapp.Project'), ), - migrations.CreateModel( - name='ResourceType', - fields=[ - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), - ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)), - ('resource_unit', models.ForeignKey(help_text='Unit of current resource.', on_delete=django.db.models.deletion.PROTECT, related_name='resource_types', to='tmssapp.ResourceUnit')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='resourcetype', + name='resource_unit', + field=models.ForeignKey(help_text='Unit of current resource.', on_delete=django.db.models.deletion.PROTECT, related_name='resource_types', to='tmssapp.ResourceUnit'), ), - migrations.CreateModel( - name='ProjectQuota', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('value', models.FloatField(help_text='Resource Quota value')), - ('project', models.ForeignKey(help_text='Project to wich this quota belongs.', on_delete=django.db.models.deletion.PROTECT, related_name='project_quota', to='tmssapp.Project')), - ('resource_type', models.ForeignKey(help_text='Resource type.', on_delete=django.db.models.deletion.PROTECT, related_name='resource_type', to='tmssapp.ResourceType')), - ], + migrations.AddField( + model_name='projectquota', + name='project', + field=models.ForeignKey(help_text='Project to wich this quota belongs.', on_delete=django.db.models.deletion.PROTECT, related_name='project_quota', to='tmssapp.Project'), ), - migrations.CreateModel( - name='Filesystem', - fields=[ - 
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), - ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), - ('capacity', models.BigIntegerField(help_text='Capacity in bytes')), - ('cluster', models.ForeignKey(help_text='Cluster hosting this filesystem.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Cluster')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='projectquota', + name='resource_type', + field=models.ForeignKey(help_text='Resource type.', on_delete=django.db.models.deletion.PROTECT, related_name='resource_type', to='tmssapp.ResourceType'), ), - migrations.CreateModel( - name='DefaultWorkRelationSelectionTemplate', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(max_length=128, unique=True)), - ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.WorkRelationSelectionTemplate')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='project', + name='cycle', + field=models.ForeignKey(help_text='Cycle(s) to which this project belongs (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='projects', to='tmssapp.Cycle'), ), - migrations.CreateModel( - name='DefaultTaskTemplate', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(max_length=128, unique=True)), - ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.TaskTemplate')), - ], - options={ - 'abstract': False, - }, + migrations.AddConstraint( + model_name='generatortemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='GeneratorTemplate_unique_name_version'), ), - migrations.CreateModel( - name='DefaultSubtaskTemplate', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', 
models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(max_length=128, unique=True)), - ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskTemplate')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='filesystem', + name='cluster', + field=models.ForeignKey(help_text='Cluster hosting this filesystem.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Cluster'), ), - migrations.CreateModel( - name='DefaultSchedulingUnitTemplate', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(max_length=128, unique=True)), - ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingUnitTemplate')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='defaultworkrelationselectiontemplate', + name='template', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.WorkRelationSelectionTemplate'), ), - migrations.CreateModel( - name='DefaultGeneratorTemplate', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(max_length=128, unique=True)), - ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.GeneratorTemplate')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='defaulttasktemplate', + name='template', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.TaskTemplate'), ), - migrations.CreateModel( - name='DefaultDataproductSpecificationsTemplate', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(max_length=128, unique=True)), - ('template', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.DataproductSpecificationsTemplate')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='defaultsubtasktemplate', + name='template', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskTemplate'), ), - 
migrations.CreateModel( - name='DataproductTransform', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('identity', models.BooleanField(help_text='TRUE if this transform only copies, tars, or losslessly compresses its input, FALSE if the transform changes the data. Allows for efficient reasoning about data duplication.')), - ('input', models.ForeignKey(help_text='A dataproduct that was the input of a transformation.', on_delete=django.db.models.deletion.PROTECT, related_name='inputs', to='tmssapp.Dataproduct')), - ('output', models.ForeignKey(help_text='A dataproduct that was produced from the input dataproduct.', on_delete=django.db.models.deletion.PROTECT, related_name='outputs', to='tmssapp.Dataproduct')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='defaultschedulingunittemplate', + name='template', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingUnitTemplate'), ), - migrations.CreateModel( - name='DataproductHash', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('hash', models.CharField(help_text='Hash value.', max_length=128)), - ('algorithm', models.ForeignKey(help_text='Algorithm used (MD5, AES256).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Algorithm')), - ('dataproduct', models.ForeignKey(help_text='The dataproduct to which this hash refers.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataproduct')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='defaultgeneratortemplate', + name='template', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.GeneratorTemplate'), ), - migrations.CreateModel( - name='DataproductArchiveInfo', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('storage_ticket', models.CharField(help_text='Archive-system identifier.', max_length=128)), - ('public_since', models.DateTimeField(help_text='Dataproduct is available for public download since this moment, or NULL if dataproduct is not (NULLable).', null=True)), - ('corrupted_since', models.DateTimeField(help_text='Earliest timestamp from which this dataproduct is known to be partially or fully corrupt, or NULL 
if dataproduct is not known to be corrupt (NULLable).', null=True)), - ('dataproduct', models.ForeignKey(help_text='A dataproduct residing in the archive.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataproduct')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='defaultdataproductspecificationstemplate', + name='template', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.DataproductSpecificationsTemplate'), + ), + migrations.AddField( + model_name='dataproducttransform', + name='input', + field=models.ForeignKey(help_text='A dataproduct that was the input of a transformation.', on_delete=django.db.models.deletion.PROTECT, related_name='inputs', to='tmssapp.Dataproduct'), + ), + migrations.AddField( + model_name='dataproducttransform', + name='output', + field=models.ForeignKey(help_text='A dataproduct that was produced from the input dataproduct.', on_delete=django.db.models.deletion.PROTECT, related_name='outputs', to='tmssapp.Dataproduct'), + ), + migrations.AddConstraint( + model_name='dataproductspecificationstemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='DataproductSpecificationsTemplate_unique_name_version'), + ), + migrations.AddField( + model_name='dataproducthash', + name='algorithm', + field=models.ForeignKey(help_text='Algorithm used (MD5, AES256).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Algorithm'), + ), + migrations.AddField( + model_name='dataproducthash', + name='dataproduct', + field=models.ForeignKey(help_text='The dataproduct to which this hash refers.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataproduct'), + ), + migrations.AddConstraint( + model_name='dataproductfeedbacktemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='DataproductFeedbackTemplate_unique_name_version'), + ), + migrations.AddField( + model_name='dataproductarchiveinfo', + name='dataproduct', + field=models.ForeignKey(help_text='A dataproduct residing in the archive.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataproduct'), + ), + migrations.AddField( + model_name='dataproduct', + name='dataformat', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Dataformat'), ), migrations.AddField( model_name='dataproduct', @@ -824,29 +1017,17 @@ class Migration(migrations.Migration): migrations.AddField( model_name='dataproduct', name='producer', - field=models.ForeignKey(help_text='Subtask Output which generates this dataproduct.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SubtaskOutput'), + field=models.ForeignKey(help_text='Subtask Output which generates this dataproduct.', on_delete=django.db.models.deletion.PROTECT, related_name='dataproducts', to='tmssapp.SubtaskOutput'), ), migrations.AddField( model_name='dataproduct', name='specifications_template', field=models.ForeignKey(help_text='Schema used for specifications_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.DataproductSpecificationsTemplate'), ), - migrations.CreateModel( - name='AntennaSet', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - 
('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128)), - ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), - ('rcus', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=128)), - ('inputs', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, size=128)), - ('station_type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.StationType')), - ], - options={ - 'abstract': False, - }, + migrations.AddField( + model_name='antennaset', + name='station_type', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='tmssapp.StationType'), ), migrations.AddIndex( model_name='taskrelationdraft', @@ -858,7 +1039,11 @@ class Migration(migrations.Migration): ), migrations.AddIndex( model_name='taskconnector', - index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_0ebd6d_gin'), + index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_tas_tags_a12728_gin'), + ), + migrations.AddConstraint( + model_name='subtasktemplate', + constraint=models.UniqueConstraint(fields=('name', 'version'), name='SubtaskTemplate_unique_name_version'), ), migrations.AddIndex( model_name='subtaskstatelog', @@ -872,10 +1057,6 @@ class Migration(migrations.Migration): model_name='subtaskinput', index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sub_tags_fb9960_gin'), ), - migrations.AddIndex( - model_name='subtaskconnector', - index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sub_tags_60e299_gin'), - ), migrations.AddIndex( model_name='subtask', index=django.contrib.postgres.indexes.GinIndex(fields=['tags'], name='tmssapp_sub_tags_d2fc43_gin'), diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py index 7a25dab177badfe0cab6eeaecee51d3d1fdfee19..e33461a92950831b9b30d7cec2b13a718689035e 100644 --- a/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py +++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py @@ -17,6 +17,5 @@ class Migration(migrations.Migration): # Start SubTask id with 2 000 000 to avoid overlap with 'old' (test/production) OTDB operations = [ migrations.RunSQL('ALTER SEQUENCE tmssapp_SubTask_id_seq RESTART WITH 2000000;'), migrations.RunPython(populate_choices), - migrations.RunPython(populate_resources), migrations.RunPython(populate_misc), migrations.RunPython(populate_lofar_json_schemas) ] diff --git a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py index 20825f3e6ac048a15eb2d518e74fe7ff3b635cf4..b067845f2a6c8c6219f35fa8bfdcc622779d918b 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py @@ -2,11 +2,12 @@ This file contains the database models """ +import os import logging logger = logging.getLogger(__name__) from django.db.models import ForeignKey, CharField, DateTimeField, BooleanField, IntegerField, BigIntegerField, \ - ManyToManyField, CASCADE, SET_NULL, PROTECT + ManyToManyField, CASCADE, SET_NULL, PROTECT, UniqueConstraint from django.contrib.postgres.fields import ArrayField, JSONField from django.contrib.auth.models import User from .specification import AbstractChoice, BasicCommon, Template, NamedCommon # , 
<TaskBlueprint @@ -15,7 +16,8 @@ from rest_framework.serializers import HyperlinkedRelatedField from django.dispatch import receiver from lofar.sas.tmss.tmss.tmssapp.validation import validate_json_against_schema -from lofar.messaging.messagebus import ToBus +from lofar.messaging.messagebus import ToBus, DEFAULT_BROKER, DEFAULT_BUSNAME from lofar.messaging.messages import EventMessage from lofar.sas.tmss.client.tmssbuslistener import DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX @@ -23,18 +24,6 @@ from lofar.sas.tmss.client.tmssbuslistener import DEFAULT_TMSS_SUBTASK_NOTIFICAT # I/O # -class SubtaskConnector(BasicCommon): - """ - Represents the relation between input and output of the Subtasks. Some of these relations implement the Task - Relations. An input is tied to an output of another Subtask, and allows a filter to be specified. - """ - role = ForeignKey('Role', null=False, on_delete=PROTECT) - datatype = ForeignKey('Datatype', null=False, on_delete=PROTECT) - dataformats = ManyToManyField('Dataformat', blank=True) - output_of = ForeignKey('SubtaskTemplate', related_name='inputs', blank=True, on_delete=PROTECT) - input_of = ForeignKey('SubtaskTemplate', related_name='outputs', blank=True, on_delete=PROTECT) - - # # Choices # @@ -66,6 +55,8 @@ class SubtaskType(AbstractChoice): PIPELINE = "pipeline" COPY = "copy" INSPECTION = "inspection" + QA_FILES = "qa_files" # task which creates "adder" QA h5 file(s) from a MeasurementSet of beamformed data + QA_PLOTS = "qa_plots" # task which creates "adder" QA plots from an "adder" QA h5 file DELETION = "deletion" MANUAL = 'manual' OTHER = 'other' @@ -109,6 +100,10 @@ class SubtaskTemplate(Template): queue = BooleanField(default=False) realtime = BooleanField(default=False) + class Meta: + # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%*class)s_unique_name_version) + constraints = [UniqueConstraint(fields=['name', 'version'], name='SubtaskTemplate_unique_name_version')] + class DefaultSubtaskTemplate(BasicCommon): name = CharField(max_length=128, unique=True) @@ -116,7 +111,9 @@ class DefaultSubtaskTemplate(BasicCommon): class DataproductSpecificationsTemplate(Template): - pass + class Meta: + # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%*class)s_unique_name_version) + constraints = [UniqueConstraint(fields=['name', 'version'], name='DataproductSpecificationsTemplate_unique_name_version')] class DefaultDataproductSpecificationsTemplate(BasicCommon): @@ -125,13 +122,17 @@ class DefaultDataproductSpecificationsTemplate(BasicCommon): class SubtaskInputSelectionTemplate(Template): - pass + class Meta: + # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%*class)s_unique_name_version) + constraints = [UniqueConstraint(fields=['name', 'version'], name='SubtaskInputSelectionTemplate_unique_name_version')] # todo: do we need to specify a default? class DataproductFeedbackTemplate(Template): - pass + class Meta: + # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%*class)s_unique_name_version) + constraints = [UniqueConstraint(fields=['name', 'version'], name='DataproductFeedbackTemplate_unique_name_version')] # todo: do we need to specify a default? 
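Aside (not part of the patch): the recurring "TODO: move up to the abstract base class" comments above all point at the same future simplification. Below is a minimal sketch of that consolidation, assuming Django >= 3.0 (which interpolates the '%(class)s' placeholder in constraint names declared on an abstract base) and the NamedCommon base class defined elsewhere in specification.py. Note that '%(class)s' expands to the lower-cased model name (e.g. 'subtasktemplate_unique_name_version'), so the generated names would differ from the CamelCase names used in this patch.

from django.contrib.postgres.fields import JSONField
from django.db.models import BooleanField, CharField, UniqueConstraint

class Template(NamedCommon):  # NamedCommon: abstract base defined earlier in specification.py
    version = CharField(max_length=128, help_text='Version of this template (with respect to other templates of the same name).')
    schema = JSONField(help_text='Schema for the configurable parameters needed to use this template.')

    class Meta:
        abstract = True
        # Declared once here; Django >= 3.0 materialises one uniqueness constraint per concrete
        # subclass, making the per-subclass Meta blocks in this patch unnecessary.
        constraints = [UniqueConstraint(fields=['name', 'version'],
                                        name='%(class)s_unique_name_version')]

class SubtaskTemplate(Template):
    # extra fields as in the patch; no Meta needed here, the constraint is inherited
    queue = BooleanField(default=False)
    realtime = BooleanField(default=False)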
@@ -165,7 +166,8 @@ class Subtask(BasicCommon): @staticmethod def _send_state_change_event_message(subtask_id:int, old_state: str, new_state: str): - with ToBus() as tobus: #TODO: do we want to connect to the bus for each new message, or have some global tobus? + with ToBus(exchange=os.environ.get("TMSS_EXCHANGE", DEFAULT_BUSNAME), + broker=os.environ.get("TMSS_BROKER", DEFAULT_BROKER)) as tobus: #TODO: do we want to connect to the bus for each new message, or have some global tobus? msg = EventMessage(subject="%s.%s" % (DEFAULT_TMSS_SUBTASK_NOTIFICATION_PREFIX, new_state.capitalize()), content={'subtask_id': subtask_id, 'old_state': old_state, 'new_state': new_state}) tobus.send(msg) @@ -193,6 +195,8 @@ class Subtask(BasicCommon): except Exception as e: logger.error("Could not send state change to messagebus: %s", e) + # update the previous state value + self.__original_state = self.state class SubtaskStateLog(BasicCommon): """ @@ -214,8 +218,7 @@ class SubtaskStateLog(BasicCommon): class SubtaskInput(BasicCommon): subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, related_name='inputs', help_text='Subtask to which this input specification refers.') task_relation_blueprint = ForeignKey('TaskRelationBlueprint', null=True, on_delete=SET_NULL, help_text='Task Relation Blueprint which this Subtask Input implements (NULLable).') - connector = ForeignKey('SubtaskConnector', null=True, on_delete=SET_NULL, help_text='Which connector this Task Input implements.') - producer = ForeignKey('SubtaskOutput', on_delete=PROTECT, help_text='The Subtask Output providing the input dataproducts.') + producer = ForeignKey('SubtaskOutput', on_delete=PROTECT, related_name='consumers', help_text='The SubtaskOutput producing the input dataproducts for this SubtaskInput.') dataproducts = ManyToManyField('Dataproduct', help_text='The Dataproducts resulting from application of the filter at time of scheduling Although the dataproducts are simply the result of applying the filter on immutable data, the filter application could change over time. We thus store the result of this filtering directly to retain which input was specified for the task..') selection_doc = JSONField(help_text='Filter to apply to the dataproducts of the producer, to derive input dataproducts when scheduling.') selection_template = ForeignKey('SubtaskInputSelectionTemplate', on_delete=PROTECT, help_text='Schema used for selection_doc.') @@ -229,7 +232,6 @@ class SubtaskInput(BasicCommon): class SubtaskOutput(BasicCommon): subtask = ForeignKey('Subtask', null=False, on_delete=CASCADE, related_name='outputs', help_text='Subtask to which this output specification refers.') - connector = ForeignKey('SubtaskConnector', null=True, on_delete=SET_NULL, help_text='Which connector this Subtask Output implements.') class Dataproduct(BasicCommon): @@ -246,7 +248,7 @@ class Dataproduct(BasicCommon): pinned_since = DateTimeField(null=True, help_text='When this dataproduct was pinned to disk, that is, forbidden to be removed, or NULL if not pinned (NULLable).') specifications_doc = JSONField(help_text='Dataproduct properties (f.e. 
beam, subband), to distinguish them when produced by the same task, and to act as input for selections in the Task Input and Work Request Relation Blueprint objects.') specifications_template = ForeignKey('DataproductSpecificationsTemplate', null=False, on_delete=CASCADE, help_text='Schema used for specifications_doc.') - producer = ForeignKey('SubtaskOutput', on_delete=PROTECT, related_name='dataproducts', help_text='Subtask Output which generates this dataproduct.') + producer = ForeignKey('SubtaskOutput', on_delete=PROTECT, related_name="dataproducts", help_text='Subtask Output which generates this dataproduct.') do_cancel = DateTimeField(null=True, help_text='When this dataproduct was cancelled (NULLable). Cancelling a dataproduct triggers cleanup if necessary.') expected_size = BigIntegerField(null=True, help_text='Expected size of dataproduct size, in bytes. Used for scheduling purposes. NULL if size is unknown (NULLable).') size = BigIntegerField(null=True, help_text='Dataproduct size, in bytes. Used for accounting purposes. NULL if size is (yet) unknown (NULLable).') diff --git a/SAS/TMSS/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/src/tmss/tmssapp/models/specification.py index 8faade80752e09d6b68974621721a0edbc82f946..9dca008d148704b32906959cf5115b13fb8c679b 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/specification.py @@ -2,7 +2,7 @@ This file contains the database models """ -from django.db.models import Model, CharField, DateTimeField, BooleanField, ForeignKey, CASCADE, IntegerField, FloatField, SET_NULL, PROTECT, ManyToManyField +from django.db.models import Model, CharField, DateTimeField, BooleanField, ForeignKey, CASCADE, IntegerField, FloatField, SET_NULL, PROTECT, ManyToManyField, UniqueConstraint from django.contrib.postgres.fields import ArrayField, JSONField from django.contrib.postgres.indexes import GinIndex from enum import Enum @@ -118,6 +118,8 @@ class Dataformat(AbstractChoice): class Choices(Enum): MEASUREMENTSET = "MeasurementSet" BEAMFORMED = "Beamformed" + QA_HDF5 = "QA_HDF5" + QA_PLOTS = "QA_Plots" class CopyReason(AbstractChoice): @@ -150,12 +152,18 @@ class Template(NamedCommon): class Meta: abstract = True + # TODO: remove all <class>_unique_name_version UniqueConstraint's from the subclasses and replace by this line below when we start using django 3.0 + # constraints = [UniqueConstraint(fields=['name', 'version'], name='%(class)s_unique_name_version')] # concrete models class GeneratorTemplate(Template): create_function = CharField(max_length=128, help_text='Python function to call to execute the generator.') + class Meta: + # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%*class)s_unique_name_version) + constraints = [UniqueConstraint(fields=['name', 'version'], name='GeneratorTemplate_unique_name_version')] + class DefaultGeneratorTemplate(BasicCommon): name = CharField(max_length=128, unique=True) @@ -163,7 +171,9 @@ class DefaultGeneratorTemplate(BasicCommon): class SchedulingUnitTemplate(Template): - pass + class Meta: + # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... 
name='%*class)s_unique_name_version) + constraints = [UniqueConstraint(fields=['name', 'version'], name='SchedulingUnitTemplate_unique_name_version')] class DefaultSchedulingUnitTemplate(BasicCommon): @@ -174,6 +184,9 @@ class DefaultSchedulingUnitTemplate(BasicCommon): class TaskTemplate(Template): validation_code_js = CharField(max_length=128, help_text='JavaScript code for additional (complex) validation.') + class Meta: + # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%*class)s_unique_name_version) + constraints = [UniqueConstraint(fields=['name', 'version'], name='TaskTemplate_unique_name_version')] class DefaultTaskTemplate(BasicCommon): name = CharField(max_length=128, unique=True) @@ -181,7 +194,9 @@ class DefaultTaskTemplate(BasicCommon): class WorkRelationSelectionTemplate(Template): - pass + class Meta: + # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%*class)s_unique_name_version) + constraints = [UniqueConstraint(fields=['name', 'version'], name='WorkRelationSelectionTemplate_unique_name_version')] class DefaultWorkRelationSelectionTemplate(BasicCommon): diff --git a/SAS/TMSS/src/tmss/tmssapp/populate.py b/SAS/TMSS/src/tmss/tmssapp/populate.py index 0dd510d7a4e44986904ec2195ab971afd8095498..bdca44d14b1435fa3a2e74dcb02b9e73e3f4dd7a 100644 --- a/SAS/TMSS/src/tmss/tmssapp/populate.py +++ b/SAS/TMSS/src/tmss/tmssapp/populate.py @@ -14,13 +14,16 @@ class Migration(migrations.Migration): """ +import logging +logger = logging.getLogger(__name__) + import json -from lofar.sas.tmss.tmss.tmssapp.models.specification import Role, Datatype, Dataformat, CopyReason, TaskTemplate, \ - ResourceType, ResourceUnit -from lofar.sas.tmss.tmss.tmssapp.models.scheduling import SubtaskState, SubtaskType, SubtaskTemplate, Subtask, \ - StationType, Algorithm, ScheduleMethod, Cluster, Filesystem +from lofar.sas.tmss.tmss.tmssapp.subtasks import * +from lofar.sas.tmss.tmss.tmssapp.models.specification import * +from lofar.sas.tmss.tmss.tmssapp.models.scheduling import * from lofar.common.json_utils import * -from lofar.sas.tmss.tmss.tmssapp.subtasks import connect_observation_subtask_to_preprocessing_subtask +from lofar.common import isTestEnvironment, isDevelopmentEnvironment + def populate_choices(apps, schema_editor): ''' @@ -34,13 +37,41 @@ def populate_choices(apps, schema_editor): def populate_lofar_json_schemas(apps, schema_editor): + _populate_subtask_input_selection_templates() _populate_correlator_calibrator_schema() _populate_obscontrol_schema() _populate_stations_schema() _populate_pipelinecontrol_schema() _populate_preprocessing_schema() - _populate_example_data() + _populate_qa_files_subtask_template() + _populate_qa_plots_subtask_template() + + # Chain of Blueprint Task and subtasks should be instantiated with an API call + # so only create a Test Observation Draft Task + _populate_task_draft_example() + + +def _populate_task_draft_example(): + """ + Create a Task Draft 'Target Observation' + :return: + """ + try: + from datetime import datetime + from lofar.sas.tmss.tmss.tmssapp import models + from lofar.sas.tmss.test.tmss_test_data_django_models import TaskDraft_test_data + + if isTestEnvironment() or isDevelopmentEnvironment(): + task_template = models.TaskTemplate.objects.get(name='correlator schema') + task_draft_data = TaskDraft_test_data(name="Test Target Observation", specifications_template=task_template) + models.TaskDraft.objects.create(**task_draft_data) + + task_template = 
models.TaskTemplate.objects.get(name='preprocessing schema') + task_draft_data = TaskDraft_test_data(name="Test PreProcessingPipeline Task", specifications_template=task_template) + models.TaskDraft.objects.create(**task_draft_data) + except ImportError: + pass def populate_resources(apps, schema_editor): @@ -48,12 +79,8 @@ def populate_resources(apps, schema_editor): ru_hours = ResourceUnit.objects.create(name="hours", description="duration in hours") ResourceType.objects.create(name="lta_storage", description="Amount of storage in LTA", resource_unit=ru_bytes) - ResourceType.objects.create(name="cep_storage", description="Amount of storage at CEP processing cluster", - resource_unit=ru_bytes) - ResourceType.objects.create(name="cep_processing_hours", - description="Number of processing hours for CEP processing cluster", - resource_unit=ru_hours) - + ResourceType.objects.create(name="cep_storage", description="Amount of storage at CEP processing cluster", resource_unit=ru_bytes) + ResourceType.objects.create(name="cep_processing_hours", description="Number of processing hours for CEP processing cluster", resource_unit=ru_hours) def populate_misc(apps, schema_editor): cluster = Cluster.objects.create(name="CEP4", location="CIT") @@ -174,6 +201,70 @@ def _populate_correlator_calibrator_schema(): "CEP4", "DragNet" ] + }, + "QA": { + "type": "object", + "title": "Quality Assurance", + "default": {}, + "description": "Specify Quality Assurance steps for this observation", + "properties": { + "file_conversion": { + "type": "object", + "title": "File Conversion", + "default": {}, + "description": "Create a QA file for the observation", + "properties": { + "enabled": { + "type": "boolean", + "title": "enabled", + "default": true, + "description": "Do/Don't create a QA file for the observation" + }, + "nr_of_subbands": { + "type": "integer", + "title": "#subbands", + "default": -1, + "description": "Keep this number of subbands from the observation in the QA file, or all if -1" + }, + "nr_of_timestamps": { + "type": "integer", + "title": "#timestamps", + "default": 256, + "minimum": 1, + "description": "Extract this number of timestamps from the observation in the QA file (equidistantanly sampled, no averaging/interpolation)" + } + }, + "additionalProperties": false + }, + "plots": { + "type": "object", + "title": "Plots", + "default": {}, + "description": "Create dynamic spectrum plots", + "properties": { + "enabled": { + "type": "boolean", + "title": "enabled", + "default": true, + "description": "Do/Don't create plots from the QA file from the observation" + }, + "autocorrelation": { + "type": "boolean", + "title": "autocorrelation", + "default": true, + "description": "Create autocorrelation plots for all stations" + }, + "crosscorrelation": { + "type": "boolean", + "title": "crosscorrelation", + "default": true, + "description": "Create crosscorrelation plots for all baselines" + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false } } }'''), "tags": []} @@ -181,73 +272,6 @@ def _populate_correlator_calibrator_schema(): TaskTemplate.objects.create(**task_template_data) -def _populate_example_data(): - try: - from datetime import datetime - from lofar.sas.tmss.tmss.tmssapp import models - from lofar.sas.tmss.test.tmss_test_data_django_models import TaskDraft_test_data, TaskBlueprint_test_data, \ - SubtaskOutput_test_data, Dataproduct_test_data, Subtask_test_data - - cluster = Cluster.objects.get(name="CEP4") - - for i in range(10): - task_template = 
models.TaskTemplate.objects.get(name='correlator schema') - task_draft_data = TaskDraft_test_data(name="my test obs", specifications_template=task_template) - task_draft = models.TaskDraft.objects.create(**task_draft_data) - - task_blueprint_data = TaskBlueprint_test_data(task_draft=task_draft) - task_blueprint = models.TaskBlueprint.objects.create(**task_blueprint_data) - - subtask_template = models.SubtaskTemplate.objects.get(name='observationcontrol schema') - specifications_doc = { - "stations": {"station_list": ["RS106", "RS205"], - "antenna_set": "HBA_DUAL_INNER", - "filter": "HBA_110_190", - "analog_pointing": {"direction_type": "J2000", - "angle1": 0.4262457643630986, - "angle2": 0.5787463318245085}, - "digital_pointings": [{"name": "3C48", - "pointing": {"direction_type": "J2000", - "angle1": 0.4262457643630986, - "angle2": 0.5787463318245085}, - "subbands": list(range(0, 244)) - }] - } - } - - specifications_doc = add_defaults_to_json_object_for_schema(specifications_doc, subtask_template.schema) - subtask_data = Subtask_test_data(task_blueprint=task_blueprint, subtask_template=subtask_template, - specifications_doc=specifications_doc, cluster=cluster) - subtask = models.Subtask.objects.create(**subtask_data) - - subtask_output = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=subtask)) - for sb_nr in specifications_doc['stations']['digital_pointings'][0]['subbands']: - models.Dataproduct.objects.create(**Dataproduct_test_data(producer=subtask_output, - directory="CEP4:/data/test-projects/TMSS_test/L%d/uv/" % ( - subtask.id,), - filename="L%d_SB%03d_uv.MS" % ( - subtask.id, sb_nr))) - - task_template = models.TaskTemplate.objects.get(name='preprocessing schema') - task_draft_data = TaskDraft_test_data(name="my test pipeline", specifications_template=task_template) - task_draft = models.TaskDraft.objects.create(**task_draft_data) - - task_blueprint_data = TaskBlueprint_test_data(task_draft=task_draft) - task_blueprint = models.TaskBlueprint.objects.create(**task_blueprint_data) - - subtask_template = models.SubtaskTemplate.objects.get(name='pipelinecontrol schema') - specifications_doc = {} - specifications_doc = add_defaults_to_json_object_for_schema(specifications_doc, subtask_template.schema) - subtask_data = Subtask_test_data(task_blueprint=task_blueprint, subtask_template=subtask_template, - specifications_doc=specifications_doc, cluster=cluster) - pipe_subtask = models.Subtask.objects.create(**subtask_data) - - connect_observation_subtask_to_preprocessing_subtask(subtask, pipe_subtask) - - except ImportError: - pass - - def _populate_obscontrol_schema(): subtask_template_data = {"type": SubtaskType.objects.get(value='observation'), "name": "observationcontrol schema", @@ -825,6 +849,90 @@ def _populate_stations_schema(): TaskTemplate.objects.create(**task_template_data) +def _populate_subtask_input_selection_templates(): + selection_template_data = { "name": "All", + "description": 'Select all, apply no filtering.', + "version": '1', + "schema": json.loads('''{ + "$id": "http://example.com/example.json", + "type": "object", + "$schema": "http://json-schema.org/draft-06/schema#", + "definitions": {}, + "additionalProperties": false, + "properties": {} }'''), + "tags": []} + + SubtaskInputSelectionTemplate.objects.create(**selection_template_data) + + + +def _populate_qa_files_subtask_template(): + subtask_template_data = {"type": SubtaskType.objects.get(value=SubtaskType.Choices.QA_FILES.value), + "name": "QA file conversion", + "description": 'QA file 
conversion subtask template', + "version": '0.1', + "schema": json.loads(''' +{ + "$id": "http://example.com/example.json", + "type": "object", + "$schema": "http://json-schema.org/draft-06/schema#", + "definitions": {}, + "additionalProperties": false, + "properties": { + "nr_of_subbands": { + "type": "integer", + "title": "#subbands", + "default": -1, + "description": "Keep this number of subbands from the observation in the QA file, or all if -1" + }, + "nr_of_timestamps": { + "type": "integer", + "title": "#timestamps", + "default": 256, + "minimum": 1, + "description": "Extract this number of timestamps from the observation in the QA file (equidistantanly sampled, no averaging/interpolation)" + } + } +}'''), + "realtime": False, + "queue": True, + "tags": []} + + SubtaskTemplate.objects.create(**subtask_template_data) + +def _populate_qa_plots_subtask_template(): + subtask_template_data = {"type": SubtaskType.objects.get(value=SubtaskType.Choices.QA_PLOTS.value), + "name": "QA plots", + "description": 'QA plots subtask template', + "version": '0.1', + "schema": json.loads(''' +{ + "$id": "http://example.com/example.json", + "type": "object", + "$schema": "http://json-schema.org/draft-06/schema#", + "definitions": {}, + "additionalProperties": false, + "properties": { + "autocorrelation": { + "type": "boolean", + "title": "autocorrelation", + "default": true, + "description": "Create autocorrelation plots for all stations" + }, + "crosscorrelation": { + "type": "boolean", + "title": "crosscorrelation", + "default": true, + "description": "Create crosscorrelation plots for all baselines" + } + } +}'''), + "realtime": False, + "queue": True, + "tags": []} + + SubtaskTemplate.objects.create(**subtask_template_data) + def _populate_preprocessing_schema(): task_template_data = {"name": "preprocessing schema", "description": 'preprocessing settings', diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py index 037d5b980011ba80af4ba14674d0077b2fb9e4f1..4baf35cddd3651872cbbc4765ad107df0e93ad11 100644 --- a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py @@ -10,12 +10,6 @@ from .. import models from .specification import RelationalHyperlinkedModelSerializer from .widgets import JSONEditorField -class SubtaskConnectorSerializer(serializers.HyperlinkedModelSerializer): - class Meta: - model = models.SubtaskConnector - fields = '__all__' - - class SubtaskStateSerializer(serializers.ModelSerializer): class Meta: model = models.SubtaskState @@ -111,10 +105,11 @@ class SubtaskInputSerializer(serializers.HyperlinkedModelSerializer): fields = '__all__' -class SubtaskOutputSerializer(serializers.HyperlinkedModelSerializer): +class SubtaskOutputSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.SubtaskOutput fields = '__all__' + #extra_fields = ['dataproducts', 'consumers'] #TODO: how can we make the inputs and outputs visible in the rest view without making them required for POSTs? class DataproductSerializer(serializers.HyperlinkedModelSerializer): @@ -194,4 +189,5 @@ class SubtaskSerializerJSONeditorOnline(RelationalHyperlinkedModelSerializer): class Meta: model = models.Subtask #fields = '__all__' + # extra_fields = ['inputs', 'outputs'] #TODO: how can we make the inputs and outputs visible in the rest view without making them required for POSTs? 
exclude = ('created_or_updated_by_user',) diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py index b6cb63cd98fd57f3632fd4eab317ebf68f3b544c..577c00b2b292a0fba731956687cc902ab43efe77 100644 --- a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py @@ -157,8 +157,7 @@ class SchedulingSetSerializer(RelationalHyperlinkedModelSerializer): try: self.fields['generator_doc'] = JSONEditorField(self.instance.generator_template.schema) except Exception as e: - print('Could not initialize JSONEditorField (%s), hence no fancy JSON form. This is expected for e.g. the list view.' % e) - # todo: Shall we use the schema for one of the default templates in this case instead? + pass class Meta: model = models.SchedulingSet @@ -175,8 +174,7 @@ class SchedulingUnitDraftSerializer(RelationalHyperlinkedModelSerializer): try: self.fields['requirements_doc'] = JSONEditorField(self.instance.requirements_template.schema) except Exception as e: - print('Could not initialize JSONEditorField (%s), hence no fancy JSON form. This is expected for e.g. the list view.' % e) - # todo: Shall we use the schema for one of the default templates in this case instead? + pass class Meta: model = models.SchedulingUnitDraft @@ -193,8 +191,7 @@ class SchedulingUnitBlueprintSerializer(serializers.HyperlinkedModelSerializer): try: self.fields['requirements_doc'] = JSONEditorField(self.instance.requirements_template.schema) except Exception as e: - print('Could not initialize JSONEditorField (%s), hence no fancy JSON form. This is expected for e.g. the list view.' % e) - # todo: Shall we use the schema for one of the default templates in this case instead? + pass class Meta: model = models.SchedulingUnitBlueprint @@ -210,8 +207,7 @@ class TaskDraftSerializer(RelationalHyperlinkedModelSerializer): try: self.fields['specifications_doc'] = JSONEditorField(self.instance.specifications_template.schema) except Exception as e: - print('Could not initialize JSONEditorField (%s), hence no fancy JSON form. This is expected for e.g. the list view.' % e) - # todo: Shall we use the schema for one of the default templates in this case instead? + pass class Meta: model = models.TaskDraft @@ -228,8 +224,7 @@ class TaskBlueprintSerializer(RelationalHyperlinkedModelSerializer): try: self.fields['specifications_doc'] = JSONEditorField(self.instance.specifications_template.schema) except Exception as e: - print('Could not initialize JSONEditorField (%s), hence no fancy JSON form. This is expected for e.g. the list view.' % e) - # todo: Shall we use the schema for one of the default templates in this case instead? + pass class Meta: model = models.TaskBlueprint @@ -246,8 +241,7 @@ class TaskRelationDraftSerializer(RelationalHyperlinkedModelSerializer): try: self.fields['selection_doc'] = JSONEditorField(self.instance.selection_template.schema) except Exception as e: - print('Could not initialize JSONEditorField (%s), hence no fancy JSON form. This is expected for e.g. the list view.' % e) - # todo: Shall we use the schema for one of the default templates in this case instead? + pass class Meta: model = models.TaskRelationDraft @@ -264,55 +258,11 @@ class TaskRelationBlueprintSerializer(serializers.HyperlinkedModelSerializer): try: self.fields['selection_doc'] = JSONEditorField(self.instance.selection_template.schema) except Exception as e: - print('Could not initialize JSONEditorField (%s), hence no fancy JSON form. 
This is expected for e.g. the list view.' % e) - # todo: Shall we use the schema for one of the default templates in this case instead? + pass class Meta: model = models.TaskRelationBlueprint fields = '__all__' -# ----- JSON -# Note: I feel a bit uneasy with this since I feel there should be a more straight-forward solution than -# ...intercepting the init process to determine the schema (or template uri or so) for the style attribute. -# ...Hoewever, I did not manage to simply pass the value(!) of e.g. the template field as a style attribute -# ...of the JSONField via a SerializerMethodField or similar, although I feel that should be possible. -# todo: can we trigger recreation of the widget somehow when the referred template changes in the form, so the schema -# gets updated in there? Probably this is just a limitation of the static DRF forms we have to live with...? - -class TaskBlueprintSerializerJSONeditorOnline(RelationalHyperlinkedModelSerializer): - - # Create a JSON editor form to replace the simple text field based on the schema in the template that this - # blueprint refers to. If that fails, the JSONField remains a standard text input. - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - try: - self.fields['specifications_doc'] = JSONEditorField(self.instance.specifications_template.schema) - except Exception as e: - print('Could not initialize JSONEditorField (%s), hence no fancy JSON form. This is expected for e.g. the list view.' % e) - # todo: Shall we use the schema for one of the default templates in this case instead? - - class Meta: - model = models.TaskBlueprint - fields = '__all__' - extra_fields = ['subtasks', 'produced_by', 'consumed_by'] - - -class TaskDraftSerializerJSONeditorOnline(RelationalHyperlinkedModelSerializer): - - # Create a JSON editor form to replace the simple text field based on the schema in the template that this - # draft refers to. If that fails, the JSONField remains a standard text input. - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - try: - self.fields['specifications_doc'] = JSONEditorField(self.instance.specifications_template.schema) - except Exception as e: - print('Could not initialize JSONEditorField (%s), hence no fancy JSON form. This is expected for e.g. the list view.' % e) - # todo: Shall we use the schema for one of the default templates in this case instead? 
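# Illustrative sketch, not part of the patch: every serializer above repeats the same __init__
# override (try to build a JSONEditorField from the instance's template schema, silently fall back
# to the plain JSON text field for e.g. list views). One way to factor that out is a small mixin;
# the mixin name and the 'json_editor_fields' attribute below are assumptions for illustration,
# only JSONEditorField itself comes from this module's existing .widgets import.
class JSONEditorFieldMixin(object):
    # maps a serializer field name to the attribute path on the instance that holds its JSON schema,
    # e.g. {'specifications_doc': 'specifications_template.schema'}
    json_editor_fields = {}

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for field_name, schema_path in self.json_editor_fields.items():
            try:
                schema = self.instance
                for attr in schema_path.split('.'):
                    schema = getattr(schema, attr)
                self.fields[field_name] = JSONEditorField(schema)
            except Exception:
                # no single instance (list view) or no template set yet: keep the default field
                pass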
- - class Meta: - model = models.TaskDraft - fields = '__all__' - extra_fields = ['related_task_blueprint', 'produced_by', 'consumed_by'] - diff --git a/SAS/TMSS/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/src/tmss/tmssapp/subtasks.py index 8ff5c341f225ab110eacf8c7d58061a1b7579d9a..95f4873754ff9c972ee083a234e6543a1de089b4 100644 --- a/SAS/TMSS/src/tmss/tmssapp/subtasks.py +++ b/SAS/TMSS/src/tmss/tmssapp/subtasks.py @@ -1,12 +1,217 @@ +import logging +logger = logging.getLogger(__name__) + +from lofar.common.json_utils import add_defaults_to_json_object_for_schema, get_default_json_object_for_schema + +from lofar.sas.tmss.tmss.tmssapp.models.specification import * +from lofar.sas.tmss.tmss.tmssapp.models.scheduling import * + from datetime import datetime, timedelta from lofar.common.datetimeutils import parseDatetime -from lofar.sas.tmss.tmss.tmssapp.models.specification import Dataformat, Role, Datatype -from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Subtask, SubtaskType, SubtaskState, ScheduleMethod,\ - SubtaskTemplate, SubtaskInput, SubtaskOutput, SubtaskConnector, SubtaskInputSelectionTemplate, \ - Dataproduct, DataproductSpecificationsTemplate, DataproductFeedbackTemplate, DataproductTransform +from lofar.common.json_utils import add_defaults_to_json_object_for_schema + +from lofar.sas.tmss.tmss.tmssapp.models.specification import * +from lofar.sas.tmss.tmss.tmssapp.models.scheduling import * + +def create_observation_to_qafile_subtask(observation_subtask: Subtask): + ''' Create a subtask to convert the observation output to a QA h5 file. + This method implements "Instantiate subtasks" step from the "Specification Flow" + https://support.astron.nl/confluence/display/TMSS/Specification+Flow + ''' + # step 0: check pre-requisites + if observation_subtask.specifications_template.type.value != SubtaskType.Choices.OBSERVATION.value: + raise ValueError("Cannot create %s subtask for subtask id=%d type=%s because it is not an %s" % ( + SubtaskType.Choices.QA_FILES.value, observation_subtask.pk, + observation_subtask.specifications_template.type, SubtaskType.Choices.OBSERVATION.value)) + + if observation_subtask.state.value == SubtaskState.Choices.DEFINING.value: + raise ValueError("Cannot create %s subtask for subtask id=%d because it is not DEFINED yet" % ( + SubtaskType.Choices.QA_FILES.value, observation_subtask.pk)) + + obs_task_spec = observation_subtask.task_blueprint.specifications_doc + obs_task_qafile_spec = obs_task_spec.get("QA", {}).get("file_conversion", {}) + + if not obs_task_qafile_spec.get("enabled", False): + logger.debug("Skipping creation of qafile_subtask because QA.file_conversion is not enabled") + return None + + # step 1: create subtask in defining state, with filled-in subtask_template + qafile_subtask_template = SubtaskTemplate.objects.get(name="QA file conversion") + qafile_subtask_spec = add_defaults_to_json_object_for_schema({}, qafile_subtask_template.schema) + qafile_subtask_spec['nr_of_subbands'] = obs_task_qafile_spec.get("nr_of_subbands") + qafile_subtask_spec['nr_of_timestamps'] = obs_task_qafile_spec.get("nr_of_timestamps") + validate_json_against_schema(qafile_subtask_spec, qafile_subtask_template.schema) + + qafile_subtask_data = { "start_time": None, + "stop_time": None, + "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value), + "task_blueprint": observation_subtask.task_blueprint, + "specifications_template": qafile_subtask_template, + "specifications_doc": qafile_subtask_spec, + "priority": 1, + "schedule_method": 
ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), + "cluster": observation_subtask.cluster} + qafile_subtask = Subtask.objects.create(**qafile_subtask_data) + + # step 2: create and link subtask input/output + qafile_subtask_input = SubtaskInput.objects.create(subtask=qafile_subtask, + producer=observation_subtask.outputs.first(), # TODO: determine proper producer based on spec in task_relation_blueprint + selection_doc="{}", + selection_template=SubtaskInputSelectionTemplate.objects.get(name="All")) + qafile_subtask_output = SubtaskOutput.objects.create(subtask=qafile_subtask) + + # step 3: set state to DEFINED + qafile_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) + qafile_subtask.save() + + # done, now return the subtask, and allow the system to wait for the predecessors to be finished before we schedule this qa_file_subtask + return qafile_subtask + +def schedule_qafile_subtask(qafile_subtask: Subtask): + ''' Schedule the given qafile_subtask (which converts the observation output to a QA h5 file) + This method should typically be called upon the event of the observation_subtask being finished. + This method implements "Scheduling subtasks" step from the "Specification Flow" + https://support.astron.nl/confluence/display/TMSS/Specification+Flow + ''' + + # step 0: check pre-requisites + if qafile_subtask.state.value != SubtaskState.Choices.DEFINED.value: + raise ValueError("Cannot schedule subtask id=%d because it is not DEFINED yet. state=%s" % (qafile_subtask.pk, + qafile_subtask.state.value)) + + if qafile_subtask.specifications_template.type.value != SubtaskType.Choices.QA_FILES.value: + raise ValueError("Cannot schedule subtask id=%d type=%s but type should be %s" % (qafile_subtask.pk, + qafile_subtask.specifications_template.type, SubtaskType.Choices.QA_FILES.value)) + + # step 1: set state to SCHEDULING + qafile_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULING.value) + qafile_subtask.save() + + # step 2: link input dataproducts + for input in qafile_subtask.inputs.all(): + input.dataproducts.set(input.producer.dataproducts.all()) + + # step 3: resource assigner + # is a no-op for QA + + # step 4: create output dataproducts, and link these to the output + # TODO: Should the output and/or dataproduct be determined by the specification in task_relation_blueprint? + qafile_subtask_dataproduct = Dataproduct.objects.create(filename="L%d_QA.h5" % (qafile_subtask.id,), + directory="/data/qa/qa_files", + dataformat=Dataformat.objects.get(value=Dataformat.Choices.QA_HDF5.value), + producer=qafile_subtask.outputs.first(), + specifications_doc="", + specifications_template=DataproductSpecificationsTemplate.objects.first(), # ????? + feedback_doc="", + feedback_template=DataproductFeedbackTemplate.objects.first() # ????? + ) + + # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it) + qafile_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value) + qafile_subtask.save() + + return qafile_subtask + +def create_qafile_to_qaplots_subtask(qafile_subtask: Subtask): + ''' Create a subtask to create inspection plots from the QA h5 file. 
+ This method implements "Instantiate subtasks" step from the "Specification Flow" + https://support.astron.nl/confluence/display/TMSS/Specification+Flow + ''' + # step 0: check pre-requisites + if qafile_subtask.specifications_template.type.value != SubtaskType.Choices.QA_FILES.value: + raise ValueError("Cannot create %s subtask for subtask id=%d type=%s because it is not an %s" % ( + SubtaskType.Choices.QA_PLOTS.value, qafile_subtask.pk, + qafile_subtask.specifications_template.type, SubtaskType.Choices.QA_FILES.value)) + + if qafile_subtask.state.value == SubtaskState.Choices.DEFINING.value: + raise ValueError("Cannot create %s subtask for subtask id=%d because it is not DEFINED yet" % ( + SubtaskType.Choices.QA_PLOTS.value, qafile_subtask.pk)) + + obs_task_spec = qafile_subtask.task_blueprint.specifications_doc + obs_task_qaplots_spec = obs_task_spec.get("QA", {}).get("plots", {}) + + if not obs_task_qaplots_spec.get("enabled", False): + logger.debug("Skipping creation of qaplots_subtask because QA.plots is not enabled") + return None + + # step 1: create subtask in defining state, with filled-in subtask_template + qaplots_subtask_template = SubtaskTemplate.objects.get(name="QA plots") + qaplots_subtask_spec_doc = add_defaults_to_json_object_for_schema({}, qaplots_subtask_template.schema) + qaplots_subtask_spec_doc['autocorrelation'] = obs_task_qaplots_spec.get("autocorrelation") + qaplots_subtask_spec_doc['crosscorrelation'] = obs_task_qaplots_spec.get("crosscorrelation") + validate_json_against_schema(qaplots_subtask_spec_doc, qaplots_subtask_template.schema) + + qaplots_subtask_data = { "start_time": None, + "stop_time": None, + "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value), + "task_blueprint": qafile_subtask.task_blueprint, + "specifications_template": qaplots_subtask_template, + "specifications_doc": qaplots_subtask_spec_doc, + "priority": 1, + "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), + "cluster": qafile_subtask.cluster} + qaplots_subtask = Subtask.objects.create(**qaplots_subtask_data) + + # step 2: create and link subtask input/output + qaplots_subtask_input = SubtaskInput.objects.create(subtask=qaplots_subtask, + producer=qafile_subtask.outputs.first(), + selection_doc="{}", + selection_template=SubtaskInputSelectionTemplate.objects.get(name="All")) + qaplots_subtask_output = SubtaskOutput.objects.create(subtask=qaplots_subtask) + + # step 3: set state to DEFINED + qaplots_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) + qaplots_subtask.save() + + # done, now return the subtask, and allow the system to wait for the predecessors to be finished before we schedule this qaplots_subtask + return qaplots_subtask + +def schedule_qaplots_subtask(qaplots_subtask: Subtask): + ''' Schedule the given qaplots_subtask (which creates inspection plots from a QA h5 file) + This method should typically be called upon the event of the qafile_subtask being finished. + This method implements "Scheduling subtasks" step from the "Specification Flow" + https://support.astron.nl/confluence/display/TMSS/Specification+Flow + ''' + + # step 0: check pre-requisites + if qaplots_subtask.state.value != SubtaskState.Choices.DEFINED.value: + raise ValueError("Cannot schedule subtask id=%d because it is not DEFINED yet. 
state=%s" % (qaplots_subtask.pk, + qaplots_subtask.state.value)) + + if qaplots_subtask.specifications_template.type.value != SubtaskType.Choices.QA_PLOTS.value: + raise ValueError("Cannot schedule subtask id=%d type=%s but type should be %s" % (qaplots_subtask.pk, + qaplots_subtask.specifications_template.type, + SubtaskType.Choices.QA_PLOTS.value)) + + # step 1: set state to SCHEDULING + qaplots_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULING.value) + qaplots_subtask.save() + + # step 2: link input dataproducts + # this should typically be a single input with a single dataproduct (the qa h5 file) + for input in qaplots_subtask.inputs.all(): + input.dataproducts.set(input.producer.dataproducts.all()) + + # step 3: resource assigner + # is a no-op for QA + + # step 4: create output dataproducts, and link these to the output + # TODO: Should the output and/or dataproduct be determined by the specification in task_relation_blueprint? + qaplots_subtask_dataproduct = Dataproduct.objects.create(directory="/data/qa/plots/L%d" % (qaplots_subtask.id,), + dataformat=Dataformat.objects.get(value=Dataformat.Choices.QA_PLOTS.value), + producer=qaplots_subtask.outputs.first(), + specifications_doc="", + specifications_template=DataproductSpecificationsTemplate.objects.first(), # ????? + feedback_doc="", + feedback_template=DataproductFeedbackTemplate.objects.first() # ????? + ) + + # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it) + qaplots_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value) + qaplots_subtask.save() + + return qaplots_subtask -import logging -logger = logging.getLogger(__name__) def connect_observation_subtask_to_preprocessing_subtask(observation_subtask: Subtask, pipeline_subtask: Subtask): if observation_subtask.specifications_template.type.value != SubtaskType.Choices.OBSERVATION.value or \ @@ -59,4 +264,113 @@ def connect_observation_subtask_to_preprocessing_subtask(observation_subtask: Su output_dps.append(output_dp) pipeline_subtask_output.dataproducts.set(output_dps) - # todo: specify a SubtaskConnector? TaskRelation \ No newline at end of file + # todo: specify a SubtaskConnector? TaskRelation + + +def create_subtasks_from_task_blueprint(task_blueprint: TaskBlueprint): + generator_mapping = {'preprocessing schema': _create_subtasks_from_preprocessing_task_blueprint } + template_name = task_blueprint.specifications_template.name + if template_name in generator_mapping: + generator = generator_mapping[template_name] + return generator(task_blueprint) + else: + raise ValueError('Cannot create subtasks for task id=%s since no generator exists for its schema name=%s' % (task_blueprint.pk, template_name)) + + +def _create_subtasks_from_preprocessing_task_blueprint(task_blueprint: TaskBlueprint): + # todo: check whether already created to avoid duplication? + + subtask_template = SubtaskTemplate.objects.get(name='pipelinecontrol schema') + default_subtask_specs = get_default_json_object_for_schema(subtask_template.schema) + subtasks = [] + subtask_specs = _generate_subtask_specs_from_preprocessing_task_specs(task_blueprint.specifications_doc, + default_subtask_specs) + subtask = create_subtask(subtask_template, subtask_specs) + subtask.task_blueprint = task_blueprint + subtask.cluster = Cluster.objects.get(name="CEP4") # todo: probably should not be hardcoded? Can be optional in parset? 
+    subtask.save()
+    subtasks.append(subtask)
+    SubtaskTemplate.objects.get(name='pipelinecontrol schema')
+
+    return {'subtasks_created': [s.pk for s in subtasks]}
+
+    # todo: determine observation subtask, then call connect_observation_subtask_to_preprocessing_subtask to create inputs (not sure where exactly this should happen)
+
+
+def _generate_subtask_specs_from_preprocessing_task_specs(preprocessing_task_specs, default_subtask_specs):
+    # preprocessing task default spec: {
+    #   "storagemanager": "dysco",
+    #   "flag": {"outerchannels": true, "autocorrelations": true, "rfi_strategy": "auto"},
+    #   "demix": {"frequency_steps": 64, "time_steps": 10, "ignore_target": false, "sources": {}},
+    #   "average": {"frequency_steps": 4, "time_steps": 1}}
+    # pipelinecontrol subtask default spec: {
+    #   "storagemanager": "dysco",
+    #   "demixer": {"baselines": "CS*,RS*&", "frequency_steps": 4, "time_steps": 1, "demix_frequency_steps": 4,
+    #               "demix_time_steps": 1, "ignore_target": false, "demix_always": [], "demix_if_needed": []},
+    #   "aoflagger": {"strategy": "HBAdefault"},
+    #   "preflagger0": {"channels": "0..nchan/32-1,31*nchan/32..nchan-1"},
+    #   "preflagger1": {"corrtype": "auto"}}
+
+    # todo: check that this is actually how these need to be translated
+    # todo: especially check when defaults are NOT supposed to be set because the task implies to not include them
+
+    # todo: translate task "sources": {} - I guess this is demix_always/demix_if_needed?
+    # todo: set subtask demixer properties "baselines": "CS*,RS*&", "demix_always": [], "demix_if_needed": []
+
+    subtask_specs = {}
+    subtask_specs['storagemanager'] = preprocessing_task_specs.get('storagemanager',
+                                                                   default_subtask_specs.get('storagemanager'))
+
+    # todo: we depend on valid json here with knowledge about required properties. To generalize, we need to expect things to not be there.
+    if 'demix' in preprocessing_task_specs or 'average' in preprocessing_task_specs:
+        # todo: should we exclude defaults in subtask.demixer if only one of these is defined on the task?
+ subtask_specs['demixer'] = default_subtask_specs['demixer'] + if 'demix' in preprocessing_task_specs: + subtask_specs['demixer'].update({ + "demix_frequency_steps": preprocessing_task_specs['demix']['frequency_steps'], + "demix_time_steps": preprocessing_task_specs['demix']['time_steps'], + "ignore_target": preprocessing_task_specs['demix']['ignore_target'] + }), + if 'average' in preprocessing_task_specs: + subtask_specs['demixer'].update({ + "demix_frequency_steps": preprocessing_task_specs['demix']['frequency_steps'], + "frequency_steps": preprocessing_task_specs['average']['frequency_steps'], + "demix_time_steps": preprocessing_task_specs['demix']['time_steps'], + "time_steps": preprocessing_task_specs['average']['time_steps'], + "ignore_target": preprocessing_task_specs['demix']['ignore_target'] + }), + if 'flag' in preprocessing_task_specs: + if preprocessing_task_specs["flag"]["rfi_strategy"] != 'none': + subtask_specs.update({"aoflagger": {"strategy": preprocessing_task_specs["flag"]["rfi_strategy"]}}) + + if preprocessing_task_specs["flag"]["rfi_strategy"] == 'auto': + # todo: handle 'auto' properly: we need to determine input dataproduct type and set LBA or HBA accordingly + # either here or allow 'auto' in subtask json and translate it when we connect obs to pipe subtask + default_strategy = default_subtask_specs['aoflagger']['strategy'] + subtask_specs.update({"aoflagger": {"strategy": default_strategy}}) + logger.warning('Translating aoflagger "auto" strategy to "%s" without knowing whether that makes sense!' % default_strategy) + + if preprocessing_task_specs["flag"]["outerchannels"]: + subtask_specs.update({"preflagger0": {"channels": "0..nchan/32-1,31*nchan/32..nchan-1"}}) + + if preprocessing_task_specs["flag"]["autocorrelations"]: + subtask_specs.update({"preflagger1": {"corrtype": "auto"}}) + + return subtask_specs + + +def create_subtask(subtask_template: SubtaskTemplate, subtask_specifications): + subtask_data = { + "tags": [], + "specifications_doc": subtask_specifications, + "do_cancel": None, + "priority": 0, + "state": SubtaskState.objects.get(value="defining"), + "task_blueprint": None, + "specifications_template": subtask_template, + "schedule_method": ScheduleMethod.objects.get(value="dynamic"), + "cluster": None + } + + return Subtask.objects.create(**subtask_data) + diff --git a/SAS/TMSS/src/tmss/tmssapp/tasks.py b/SAS/TMSS/src/tmss/tmssapp/tasks.py new file mode 100644 index 0000000000000000000000000000000000000000..843fce85273c8a9c8acbd862b77a49c5e386af39 --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/tasks.py @@ -0,0 +1,183 @@ +import datetime +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.common.datetimeutils import formatDatetime +from lofar.common.json_utils import * +from lofar.sas.tmss.tmss.exceptions import * +from lofar.sas.tmss.tmss.tmssapp.models.specification import TaskDraft, TaskBlueprint, TaskRelationBlueprint, \ + TaskRelationDraft, SchedulingUnitBlueprint +from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Subtask, SubtaskType, SubtaskInput, SubtaskOutput, \ + SubtaskTemplate, SubtaskState, ScheduleMethod, SubtaskInputSelectionTemplate +from lofar.sas.tmss.tmss.tmssapp.subtasks import connect_observation_subtask_to_preprocessing_subtask, \ + create_qafile_to_qaplots_subtask, create_observation_to_qafile_subtask + +import logging +logger = logging.getLogger(__name__) + + +def create_task_blueprint_from_task_draft_and_instantiate_subtasks_from_template(task_draft: models.TaskDraft): + """ + Create a task_blueprint from the 
task_draft + For every subtask specified in task blueprint: + - create subtask and set to DEFINING + - create subtask input and outputs and link + - link subtask inputs to predecessor outputs + - set subtask to DEFINED + """ + logger.debug("create_task_blueprint_from_task_draft_and_instantiate_subtasks_from_template(task_draft.id=%s)...", task_draft.pk) + task_blueprint = create_task_blueprint_from_task_draft(task_draft) + + obs_subtask = create_subtask_observation_control(task_blueprint) + pipe_subtask = create_subtask_pipeline_control(task_blueprint) + connect_observation_subtask_to_preprocessing_subtask(obs_subtask, pipe_subtask) + + if task_blueprint.specifications_doc.get("QA",{}).get("file_conversion",{}).get("enabled", False): + qa_file_subtask = create_observation_to_qafile_subtask(obs_subtask) + + if qa_file_subtask is not None and task_blueprint.specifications_doc.get("QA", {}).get("plots", {}).get("enabled", False): + qa_plots_subtask = create_qafile_to_qaplots_subtask(qa_file_subtask) + + return task_blueprint + + +def create_task_blueprint_from_task_draft(task_draft: models.TaskDraft): + """ + Create a task_blueprint from the task_draft + :raises Exception if instantiate fails. + """ + logger.debug("create_task_blueprint_from_task_draft(task_draft.id=%s)", task_draft.pk) + + # Get scheduling unit blueprint from scheduling unit draft, but that is a multi object relation + # so which one is related to this task_draft? + # Therefore I (RGOE) do NOT care about the relation with the Scheduling Unit BluePrint, that should be solved later + # with another Story/Task + scheduling_unit_blueprint = SchedulingUnitBlueprint.objects.create( + name="Temporary Dummy Scheduling Unit Blueprint", + description="", + requirements_doc={}, + do_cancel=False, + draft=task_draft.scheduling_unit_draft, + requirements_template=models.SchedulingUnitTemplate.objects.first() # because we have multiple now with the same name, and I dont care for now + ) + + description_str = "Task Blueprint " + task_draft.description + name_str = "Task Blueprint of " + task_draft.name + task_blueprint = TaskBlueprint.objects.create( + description=description_str, + name=name_str, + do_cancel=False, + draft=task_draft, + scheduling_unit_blueprint=scheduling_unit_blueprint, + specifications_doc=task_draft.specifications_doc, + specifications_template=task_draft.specifications_template + ) + + logger.info("create_task_blueprint_from_task_draft(task_draft.id=%s) created task_blueprint: %s", task_draft.pk, task_blueprint.pk) + return task_blueprint + + +def create_subtask_observation_control(task_blueprint: models.TaskBlueprint): + """ + Create a subtask observation control. + This method implements "Instantiate subtasks" step from the "Specification Flow" + https://support.astron.nl/confluence/display/TMSS/Specification+Flow + """ + # step 0: check pre-requisites + if task_blueprint.do_cancel: + raise ValueError("Cancel create subtasks from blueprint task id=%d, because its explicit set to cancel" % + task_blueprint.id) + + # step 1: create subtask in defining state + subtask_template = SubtaskTemplate.objects.get(name='observationcontrol schema') + # This is some 'extra' specification to add to subtask ... where should it comes from, + # currently not defined in task ? 
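# Illustrative sketch, not part of the patch: the hand-written specification below is merged with the
# template's schema defaults via add_defaults_to_json_object_for_schema a few lines further down.
# A toy, self-contained example of that call (the schema here is made up for illustration):
from lofar.common.json_utils import add_defaults_to_json_object_for_schema

toy_schema = {"type": "object",
              "properties": {"duration": {"type": "integer", "default": 600},
                             "stations": {"type": "object", "default": {}}}}
toy_spec = add_defaults_to_json_object_for_schema({"stations": {"station_list": ["RS106"]}}, toy_schema)
# expected result: toy_spec keeps the given stations and gains the schema default duration=600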
+ extra_specifications_doc = { + "stations": {"station_list": ["RS106", "RS205"], + "antenna_set": "HBA_DUAL_INNER", + "filter": "HBA_110_190", + "analog_pointing": {"direction_type": "J2000", + "angle1": 0.4262457643630986, + "angle2": 0.5787463318245085}, + "digital_pointings": [{"name": "3C48", + "pointing": {"direction_type": "J2000", + "angle1": 0.4262457643630986, + "angle2": 0.5787463318245085}, + "subbands": list(range(0, 8)) + }] + } + } + specifications_doc = add_defaults_to_json_object_for_schema(extra_specifications_doc, subtask_template.schema) + cancel = datetime.datetime.utcnow().isoformat() # I dont understand why this should be a dateformat and not a boolean ? + cluster_name = task_blueprint.specifications_doc.get("storage_cluster", "CEP4") + subtask_data = { "start_time": None, + "stop_time": None, + "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value), + "specifications_doc": specifications_doc, + "task_blueprint": task_blueprint, + "specifications_template": subtask_template, + "tags": [], + "do_cancel": cancel, + "priority": 1, + "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), + "cluster": models.Cluster.objects.get(name=cluster_name) + } + subtask = Subtask.objects.create(**subtask_data) + + # step 2: create and link subtask input/output + # an observation has no input, it just produces output data + subtask_output = SubtaskOutput.objects.create(subtask=subtask) + + # step 3: set state to DEFINED + subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) + subtask.save() + return subtask + + +def create_subtask_pipeline_control(task_blueprint: models.TaskBlueprint): + """ + Create a subtask preprocessing pipeline control. + This method implements "Instantiate subtasks" step from the "Specification Flow" + https://support.astron.nl/confluence/display/TMSS/Specification+Flow + + This function is almost the same as the create_subtask_observation_control + will be refactored later + """ + # step 0: check pre-requisites + + # step 1: create subtask in defining state + subtask_template = SubtaskTemplate.objects.get(name='pipelinecontrol schema') + specifications_doc = add_defaults_to_json_object_for_schema({}, subtask_template.schema) + cancel = datetime.datetime.utcnow().isoformat() # I dont understand why this should be a dateformat and not a boolean ? 
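# Illustrative sketch, not part of the patch: the docstring above notes that this function largely
# duplicates create_subtask_observation_control and "will be refactored later". A minimal shape such
# a shared helper could take (the helper name is an assumption; the fields mirror the subtask_data
# dicts used elsewhere in this patch):
def _create_defining_subtask(task_blueprint, template_name, specifications_doc):
    template = SubtaskTemplate.objects.get(name=template_name)
    return Subtask.objects.create(start_time=None,
                                  stop_time=None,
                                  state=SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value),
                                  specifications_doc=add_defaults_to_json_object_for_schema(specifications_doc, template.schema),
                                  task_blueprint=task_blueprint,
                                  specifications_template=template,
                                  tags=[],
                                  do_cancel=None,
                                  priority=1,
                                  schedule_method=ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value),
                                  cluster=models.Cluster.objects.get(name=task_blueprint.specifications_doc.get("storage_cluster", "CEP4")))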
+ cluster_name = task_blueprint.specifications_doc.get("storage_cluster", "CEP4") + + subtask_data = { "start_time": None, + "stop_time": None, + "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value), + "specifications_doc": specifications_doc, + "task_blueprint": task_blueprint, + "specifications_template": subtask_template, + "tags": [], + "do_cancel": cancel, + "priority": 1, + "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), + "cluster": models.Cluster.objects.get(name=cluster_name) + } + subtask = models.Subtask.objects.create(**subtask_data) + + # step 2: create and link subtask input/output + # subtask_input = SubtaskInput.objects.create(subtask=subtask, + # producer=task_blueprint.produced_by, + # selection_doc="{}", + # selection_template=SubtaskInputSelectionTemplate.objects.get(name="All")) + # subtask_output = SubtaskOutput.objects.create(subtask=subtask) + + # step 3: set state to DEFINED + subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) + subtask.save() + return subtask + + + + + + + diff --git a/SAS/TMSS/src/tmss/tmssapp/views.py b/SAS/TMSS/src/tmss/tmssapp/views.py index f307b42d8aec201596f8f7ad23710d7d98941d07..5463bb1613602ede856f9ab38884e3a96bed939c 100644 --- a/SAS/TMSS/src/tmss/tmssapp/views.py +++ b/SAS/TMSS/src/tmss/tmssapp/views.py @@ -5,20 +5,25 @@ from lofar.sas.tmss.tmss.tmssapp import models from lofar.common.json_utils import get_default_json_object_for_schema from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset + def subtask_template_default_specification(request, subtask_template_pk:int): subtask_template = get_object_or_404(models.SubtaskTemplate, pk=subtask_template_pk) spec = get_default_json_object_for_schema(subtask_template.schema) return JsonResponse(spec) + def task_template_default_specification(request, task_template_pk:int): task_template = get_object_or_404(models.TaskTemplate, pk=task_template_pk) spec = get_default_json_object_for_schema(task_template.schema) return JsonResponse(spec) -""" + def subtask_parset(request, subtask_pk:int): subtask = get_object_or_404(models.Subtask, pk=subtask_pk) parset = convert_to_parset(subtask) return HttpResponse(str(parset), content_type='text/plain') -""" \ No newline at end of file + +def task_specify_observation(request, pk=None): + task = get_object_or_404(models.TaskDraft, pk=pk) + return HttpResponse("response", content_type='text/plain') \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py index 65e908b4a59e594f0b733b55989acc6958defbfe..b09cd70270271d7a426b7de3284c111c76f577a8 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py @@ -48,11 +48,6 @@ class subTaskFilter(filters.FilterSet): 'cluster__name': ['exact', 'icontains'], } -class SubtaskConnectorViewSet(LOFARViewSet): - queryset = models.SubtaskConnector.objects.all() - serializer_class = serializers.SubtaskConnectorSerializer - - class SubtaskStateViewSet(LOFARViewSet): queryset = models.SubtaskState.objects.all() serializer_class = serializers.SubtaskStateSerializer @@ -96,9 +91,18 @@ class ScheduleMethodViewSet(LOFARViewSet): serializer_class = serializers.ScheduleMethodSerializer +class SubtaskTemplateFilter(filters.FilterSet): + class Meta: + model = models.SubtaskTemplate + fields = { + 'name': ['exact'], + 'version': ['lt', 'gt', 'exact'] + } + class 
SubtaskTemplateViewSet(LOFARViewSet): queryset = models.SubtaskTemplate.objects.all() serializer_class = serializers.SubtaskTemplateSerializer + filter_class = SubtaskTemplateFilter def get_queryset(self): queryset = models.SubtaskTemplate.objects.all() @@ -110,6 +114,14 @@ class SubtaskTemplateViewSet(LOFARViewSet): return queryset + @swagger_auto_schema(responses={200: 'The schema as a JSON object', + 403: 'forbidden'}, + operation_description="Get the schema as a JSON object.") + @action(methods=['get'], detail=True) + def schema(self, request, pk=None): + subtask_template = get_object_or_404(models.SubtaskTemplate, pk=pk) + return JsonResponse(subtask_template.schema) + @swagger_auto_schema(responses={200: 'JSON object with all the defaults from the schema filled in', 403: 'forbidden'}, operation_description="Get a JSON object with all the defaults from the schema filled in.") @@ -151,14 +163,17 @@ class SubtaskViewSet(LOFARViewSet): filter_backends = (filters.DjangoFilterBackend,) filter_class = subTaskFilter - @swagger_auto_schema(responses={200: 'A LOFAR parset for this subtask', - 403: 'forbidden'}, - operation_description="Get a a LOFAR parset for the specifications of this subtask") - @action(methods=['get'], detail=True) - def default_specification(self, request, pk=None): + @swagger_auto_schema(auto_schema=TextPlainAutoSchema, + responses={200: 'A LOFAR parset for this subtask (as plain text)', + 403: 'forbidden', + 404: 'Not found'}, + produces='text/plain', + operation_description="Get a LOFAR parset for the specifications of this subtask") + @action(methods=['get'], detail=True, renderer_classes=[PlainTextRenderer]) + def parset(self, request, pk=None): subtask = get_object_or_404(models.Subtask, pk=pk) parset = convert_to_parset(subtask) - return HttpResponse(str(parset), content_type='text/plain') + return HttpResponse(parset, content_type='text/plain') class SubtaskNestedViewSet(LOFARNestedViewSet): @@ -171,17 +186,7 @@ class SubtaskNestedViewSet(LOFARNestedViewSet): if 'task_blueprint_id' in self.kwargs: task_blueprint = get_object_or_404(models.TaskBlueprint, pk=self.kwargs['task_blueprint_id']) return task_blueprint.subtasks.all() - -""" - @swagger_auto_schema(responses={200: 'A LOFAR parset for this subtask', - 403: 'forbidden'}, - operation_description="Get a a LOFAR parset for the specifications of this subtask") - @action(methods=['get'], detail=True) - def default_specification(self, request, pk=None): - subtask = get_object_or_404(models.Subtask, pk=pk) - parset = convert_to_parset(subtask) - return HttpResponse(str(parset), content_type='text/plain') -""" + class SubtaskInputViewSet(LOFARViewSet): queryset = models.SubtaskInput.objects.all() @@ -228,45 +233,3 @@ class DataproductHashViewSet(LOFARViewSet): serializer_class = serializers.DataproductHashSerializer -# --- JSON - -class SubtaskViewSetJSONeditorOnline(LOFARViewSet): - queryset = models.Subtask.objects.all() - serializer_class = serializers.SubtaskSerializerJSONeditorOnline - filter_backends = (filters.DjangoFilterBackend, OrderingFilter,) - filter_class = subTaskFilter - ordering = ('start_time',) - - - def get_view_name(self): # override name because DRF auto-naming dot_tmssapp_scheduling_djangoes not produce something usable here - name = "Subtask" - if self.suffix: - name += ' ' + self.suffix - return name - - def get_queryset(self): - if 'task_blueprint_id' in self.kwargs: - task_blueprint = get_object_or_404(models.TaskBlueprint, pk=self.kwargs['task_blueprint_id']) - return 
task_blueprint.subtasks.all() - else: - return models.Subtask.objects.all() - - - #renderer_classes = [PlainTextRenderer] - @swagger_auto_schema( - auto_schema=TextPlainAutoSchema, - responses={200: 'A LOFAR parset for this subtask (as plain text, not json)', - 403: 'forbidden', - 404: 'Not found'}, - produces='text/plain', - operation_description="Get a LOFAR parset for the specifications of this subtask") - @action(methods=['get'], detail=True, renderer_classes=[PlainTextRenderer]) - def parset(self, request, pk=None): - subtask = get_object_or_404(models.Subtask, pk=pk) - parset = convert_to_parset(subtask) - parset_str = "# THIS PARSET WAS GENERATED BY TMSS FROM THE SPECICATION OF SUBTASK ID=%d ON %s url: %s\n%s" % ( - subtask.pk, - formatDatetime(datetime.utcnow()), - request._request.get_raw_uri(), - parset,) - return HttpResponse(str(parset_str), content_type='text/plain') diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py index 52000bd56b5546e008688504a144c76df74db7b7..b873d55f99e92645deab3414c693c91de4c7a830 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py @@ -6,6 +6,8 @@ from django.shortcuts import get_object_or_404 from django.http import JsonResponse from django.contrib.auth.models import User from rest_framework.viewsets import ReadOnlyModelViewSet +from rest_framework import status +from rest_framework.response import Response from rest_framework.decorators import permission_classes from rest_framework.permissions import IsAuthenticatedOrReadOnly, DjangoModelPermissions @@ -17,8 +19,11 @@ from lofar.sas.tmss.tmss.tmssapp.viewsets.lofar_viewset import LOFARViewSet, LOF from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.tmss.tmssapp import serializers +from datetime import datetime from lofar.common.json_utils import get_default_json_object_for_schema - +from lofar.common.datetimeutils import formatDatetime +from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprint_from_task_draft_and_instantiate_subtasks_from_template +from lofar.sas.tmss.tmss.tmssapp.subtasks import create_subtasks_from_task_blueprint @@ -59,6 +64,14 @@ class TaskTemplateViewSet(LOFARViewSet): queryset = models.TaskTemplate.objects.all() serializer_class = serializers.TaskTemplateSerializer + @swagger_auto_schema(responses={200: 'The schema as a JSON object', + 403: 'forbidden'}, + operation_description="Get the schema as a JSON object.") + @action(methods=['get'], detail=True) + def schema(self, request, pk=None): + template = get_object_or_404(models.TaskTemplate, pk=pk) + return JsonResponse(template.schema) + @swagger_auto_schema(responses={200: 'JSON object with all the defaults from the schema filled in', 403: 'forbidden'}, operation_description="Get a JSON object with all the defaults from the schema filled in.") @@ -131,7 +144,8 @@ class ProjectNestedViewSet(LOFARNestedViewSet): if 'cycle_id' in self.kwargs: cycle = get_object_or_404(models.Cycle, pk=self.kwargs['cycle_id']) return cycle.projects.all() - + else: + return models.Project.objects.all() class ProjectQuotaViewSet(LOFARViewSet): queryset = models.ProjectQuota.objects.all() @@ -161,6 +175,9 @@ class SchedulingUnitDraftNestedViewSet(LOFARNestedViewSet): if 'scheduling_set_id' in self.kwargs: scheduling_set = get_object_or_404(models.SchedulingSet, pk=self.kwargs['scheduling_set_id']) return scheduling_set.scheduling_unit_drafts.all() + else: + return 
models.SchedulingUnitDraft.objects.all() + class SchedulingUnitBlueprintViewSet(LOFARViewSet): queryset = models.SchedulingUnitBlueprint.objects.all() @@ -175,12 +192,31 @@ class SchedulingUnitBlueprintNestedViewSet(LOFARNestedViewSet): if 'scheduling_unit_draft_id' in self.kwargs: scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, pk=self.kwargs['scheduling_unit_draft_id']) return scheduling_unit_draft.related_scheduling_unit_blueprint.all() - + else: + return models.SchedulingUnitBlueprint.objects.all() + class TaskDraftViewSet(LOFARViewSet): - queryset = models.TaskDraft.objects.all() - serializer_class = serializers.TaskDraftSerializer + queryset = models.TaskDraft.objects.all() + serializer_class = serializers.TaskDraftSerializer + @swagger_auto_schema(responses={201: 'Created task blueprint, see Location in Response header', + 403: 'forbidden'}, + operation_description="Carve this draft task specification in stone, and make an (uneditable) blueprint out of it.") + @action(methods=['get'], detail=True, url_name="create_task_blueprint") + def create_task_blueprint(self, request, pk=None): + task_draft = get_object_or_404(models.TaskDraft, pk=pk) + task_blueprint = create_task_blueprint_from_task_draft_and_instantiate_subtasks_from_template(task_draft) + + # url path magic to construct the new task_blueprint_path url + task_draft_path = request._request.path + base_path = task_draft_path[:task_draft_path.find('/task_draft')] + task_blueprint_path = '%s/task_blueprint/%s/' % (base_path, task_blueprint.id,) + + # return a response with the new serialized TaskBlueprint, and a Location to the new instance in the header + return Response(serializers.TaskBlueprintSerializer(task_blueprint, context={'request':request}).data, + status=status.HTTP_201_CREATED, + headers={'Location': task_blueprint_path}) class TaskDraftNestedViewSet(LOFARNestedViewSet): queryset = models.TaskDraft.objects.all() @@ -190,19 +226,35 @@ class TaskDraftNestedViewSet(LOFARNestedViewSet): if 'scheduling_unit_draft_id' in self.kwargs: scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, pk=self.kwargs['scheduling_unit_draft_id']) return scheduling_unit_draft.task_drafts.all() + else: + return models.TaskDraft.objects.all() + class TaskBlueprintViewSet(LOFARViewSet): queryset = models.TaskBlueprint.objects.all() serializer_class = serializers.TaskBlueprintSerializer + @swagger_auto_schema(responses={200: 'Create subtasks from this task blueprint', + 403: 'forbidden'}, + operation_description="Create subtasks from this task blueprint") + @action(methods=['get'], detail=True) + def create_subtasks(self, request, pk=None): + task_blueprint = get_object_or_404(models.TaskBlueprint, pk=pk) + subtasks = create_subtasks_from_task_blueprint(task_blueprint) + return JsonResponse(subtasks) + class TaskBlueprintNestedViewSet(LOFARNestedViewSet): queryset = models.TaskBlueprint.objects.all() serializer_class = serializers.TaskBlueprintSerializer + def get_queryset(self): if 'task_draft_id' in self.kwargs: task_draft = get_object_or_404(models.TaskDraft, pk=self.kwargs['task_draft_id']) return task_draft.related_task_blueprint.all() + else: + return models.TaskBlueprint.objects.all() + class TaskRelationDraftViewSet(LOFARViewSet): queryset = models.TaskRelationDraft.objects.all() @@ -217,7 +269,8 @@ class TaskRelationDraftNestedViewSet(LOFARNestedViewSet): if 'task_draft_id' in self.kwargs: task_draft = get_object_or_404(models.TaskDraft, pk=self.kwargs['task_draft_id']) return 
task_draft.produced_by.all() | task_draft.consumed_by.all() - + else: + return models.TaskRelationDraft.objects.all() class TaskRelationBlueprintViewSet(LOFARViewSet): @@ -238,34 +291,6 @@ class TaskRelationBlueprintNestedViewSet(LOFARNestedViewSet): elif 'task_relation_draft_id' in self.kwargs: task_relation_draft = get_object_or_404(models.TaskRelationDraft, pk=self.kwargs['task_relation_draft_id']) return task_relation_draft.related_task_relation_blueprint.all() - - - -# --- JSON - -class TaskBlueprintViewSetJSONeditorOnline(LOFARViewSet): - queryset = models.TaskBlueprint.objects.all() - serializer_class = serializers.TaskBlueprintSerializerJSONeditorOnline - - def get_view_name(self): # override name because DRF auto-naming does not produce something usable here - name = "Task Blueprint" - if self.suffix: - name += ' ' + self.suffix - return name - -class TaskDraftViewSetJSONeditorOnline(LOFARViewSet): - queryset = models.TaskDraft.objects.all() - serializer_class = serializers.TaskDraftSerializerJSONeditorOnline - - def get_view_name(self): # override name because DRF auto-naming does not produce something usable here - name = "Task Draft" - if self.suffix: - name += ' ' + self.suffix - return name - - def get_queryset(self): - if 'scheduling_unit_draft_id' in self.kwargs: - scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, pk=self.kwargs['scheduling_unit_draft_id']) - return scheduling_unit_draft.task_drafts.all() else: - return models.TaskDraft.objects.all() \ No newline at end of file + return models.TaskRelationBlueprint.objects.all() + diff --git a/SAS/TMSS/src/tmss/urls.py b/SAS/TMSS/src/tmss/urls.py index 8933556ecf8ae888fff7915c5acd91ffd62ffb8d..a3416d788b232a18dc2bea5324d9043e9c403945 100644 --- a/SAS/TMSS/src/tmss/urls.py +++ b/SAS/TMSS/src/tmss/urls.py @@ -119,7 +119,6 @@ router.register(r'algorithm', viewsets.AlgorithmViewSet) router.register(r'schedule_method', viewsets.ScheduleMethodViewSet) # templates -router.register(r'subtask_connector', viewsets.SubtaskConnectorViewSet) router.register(r'subtask_template', viewsets.SubtaskTemplateViewSet) router.register(r'dataproduct_specifications_template', viewsets.DataproductSpecificationsTemplateViewSet) router.register(r'default_subtask_template', viewsets.DefaultSubtaskTemplateViewSet) @@ -144,10 +143,10 @@ router.register(r'user', viewsets.UserViewSet) # --- # JSON -router.register(r'task_draft', viewsets.TaskDraftViewSetJSONeditorOnline) +router.register(r'task_draft', viewsets.TaskDraftViewSet) #removed TMSS-177 #router.register(r'scheduling_unit_draft/(?P<scheduling_unit_draft_id>\d+)/task_draft', viewsets.TaskDraftViewSetJSONeditorOnline) -router.register(r'subtask', viewsets.SubtaskViewSetJSONeditorOnline) +router.register(r'subtask', viewsets.SubtaskViewSet) urlpatterns.extend(router.urls) diff --git a/SAS/TMSS/src/util.py b/SAS/TMSS/src/util.py deleted file mode 100644 index 629b1fe7e1a207d64ee1fb7493c3bb2d71236d98..0000000000000000000000000000000000000000 --- a/SAS/TMSS/src/util.py +++ /dev/null @@ -1,58 +0,0 @@ -import logging -import requests - -logger = logging.getLogger(__file__) - -# usage example: -# -# with TMSSsession('paulus', 'pauluspass', 'http://localhost') as session: -# response = session.get(url='http://localhost/api/task_draft/') -# print(response) - - - -class TMSSsession(object): - - OPENID = "openid" - BASICAUTH = "basicauth" - - def __init__(self, username, password, host, authentication_method=OPENID): - self.session = requests.session() - self.username = username - 
self.password = password - self.host = host - self.authentication_method = authentication_method - - def __enter__(self): - self.session.__enter__() - self.session.verify = False - - if self.authentication_method == self.OPENID: - # get authentication page of OIDC through TMSS redirect - response = self.session.get(self.host + '/oidc/authenticate/', allow_redirects=True) - csrftoken = self.session.cookies['csrftoken'] - - # post user credentials to login page, also pass csrf token - data = {'username': self.username, 'password': self.password, 'csrfmiddlewaretoken': csrftoken} - response = self.session.post(url=response.url, data=data, allow_redirects=True) - - # raise when sth went wrong - if "The username and/or password you specified are not correct" in response.content.decode('utf8'): - raise ValueError("The username and/or password you specified are not correct") - if response.status_code != 200: - raise ConnectionError(response.content.decode('utf8')) - - if self.authentication_method == self.BASICAUTH: - self.session.auth = (self.username, self.password) - - # return the authenticated session as user context - return self.session - - def __exit__(self, type, value, traceback): - try: - # logout user - self.session.get(self.host + '/api/logout/', allow_redirects=True) - self.session.__exit__(self, type, value, traceback) - except: - pass - diff --git a/SAS/TMSS/test/CMakeLists.txt b/SAS/TMSS/test/CMakeLists.txt index 19041224c4f4e355b3eaa0ba112e3c17bed1c56e..e50560117d6cd157ea8a17fdd9ca2ec56ab2a1ae 100644 --- a/SAS/TMSS/test/CMakeLists.txt +++ b/SAS/TMSS/test/CMakeLists.txt @@ -27,6 +27,7 @@ if(BUILD_TESTING) lofar_add_test(t_subtask_validation) lofar_add_test(t_tmssapp_specification_permissions) lofar_add_test(t_tmss_session_auth) + lofar_add_test(t_specify_observation) set_tests_properties(t_tmssapp_scheduling_REST_API PROPERTIES TIMEOUT 300) set_tests_properties(t_tmssapp_specification_REST_API PROPERTIES TIMEOUT 300) diff --git a/SAS/TMSS/test/ldap_test_service.py b/SAS/TMSS/test/ldap_test_service.py index 59eb0b60c041495333804830b90075ed0d472baa..6cb6921e83745cedcff267a5e139b6669963a7a7 100644 --- a/SAS/TMSS/test/ldap_test_service.py +++ b/SAS/TMSS/test/ldap_test_service.py @@ -5,6 +5,9 @@ logger = logging.getLogger(__name__) logging_already_configured = len(logging.root.handlers)>0 from ldap_test import LdapServer +from ldap_test.server import DEFAULT_GATEWAY_PORT, DEFAULT_PYTHON_PROXY_PORT +from py4j.java_gateway import Py4JNetworkError +from datetime import datetime, timedelta if not logging_already_configured: # the 3rd party ldap_test module erroneously does a logging.basicConfig upon module import... @@ -18,11 +21,13 @@ from optparse import OptionParser from lofar.common.util import waitForInterrupt, find_free_port from lofar.common.testing.dbcredentials import TemporaryCredentials +from lofar.common.locking import NamedAtomicLock class TestLDAPServer(): ''' A helper class which instantiates a running LDAP server (not interfering with any other test/production LDAP servers) Best used in a 'with'-context so the server is stoped automagically. 
''' + _named_lock = NamedAtomicLock('TestLDAPServer') def __init__(self, user: str = 'test', password: str = 'test') -> None: self._tmp_creds = TemporaryCredentials(user=user, password=password) @@ -55,61 +60,71 @@ class TestLDAPServer(): '''instantiate the isolated postgres server''' logger.info('creating test-LDAP instance...') - self._tmp_creds.dbcreds.type = 'LDAP' - self._tmp_creds.dbcreds.host = '127.0.0.1' - self._tmp_creds.dbcreds.port = find_free_port() - self._tmp_creds.create() - - logger.info("Using dbcreds '%s' to start and configure LDAP server: %s", - self.dbcreds_id, self.dbcreds.stringWithHiddenPassword()) - - self._server = LdapServer({'port': self.dbcreds.port, - 'base': {'objectclass': ['domain'], - 'dn': 'o=lofar,c=eu', - 'attributes': {'o': 'lofar'}}, - 'entries': [ - {'objectclass': 'organizationUnit', - 'dn': 'ou=Users,o=lofar,c=eu', - 'attributes': {'ou': 'Users'}}, - {'objectclass': 'lofarPerson', - 'dn': 'cn=paulus,ou=users,o=lofar,c=eu', - 'attributes': {'cn': 'paulus', - 'userPassword': 'pauluspass', - 'mail': 'paulus@boskabouter.nl', - 'givenName': 'Paulus', - 'sn': 'Boskabouter', - 'lofarPersonSystemrole': 'cn=support,ou=Roles,o=lofar,c=eu'}}, - {'objectclass': 'lofarPerson', - 'dn': 'cn=paula,ou=users,o=lofar,c=eu', - 'attributes': {'cn': 'paula', - 'userPassword': 'paulapass', - 'mail': 'paulus@boskabouter.nl', - 'givenName': 'Paulus', - 'sn': 'Boskabouter', - 'lofarPersonSystemrole': 'cn=user,ou=Roles,o=lofar,c=eu'}}, - {'objectclass': 'lofarPerson', - 'dn': 'cn=%s,ou=users,o=lofar,c=eu' % self.dbcreds.user, - 'attributes': {'cn': self.dbcreds.user, - 'userPassword': self.dbcreds.password, - 'mail': '%s@lofar.test' % self.dbcreds.user, - 'givenName': self.dbcreds.user, - 'sn': 'lofar_test'}}, - {'objectclass': 'organizationUnit', - 'dn': 'ou=Roles,o=lofar,c=eu', - 'attributes': {'ou': 'Roles'}}, - {'objectclass': 'lofarSystemrole', - 'dn': 'cn=user,ou=roles,o=lofar,c=eu', - 'attributes': {'cn': 'user'}}, - {'objectclass': 'lofarSystemrole', - 'dn': 'cn=support,ou=roles,o=lofar,c=eu', - 'attributes': {'cn': 'support'}}, - ] - }) - - self._server.start() - os.environ["TMSS_LDAPCREDENTIALS"] = self.dbcreds_id - logger.info('LDAP server running and listening on port %s...', self.dbcreds.port) - logger.info('LDAP test user/pass: %s %s...', self.dbcreds.user, self.dbcreds.password) + with self._named_lock: + self._tmp_creds.dbcreds.type = 'LDAP' + self._tmp_creds.dbcreds.host = '127.0.0.1' + self._tmp_creds.dbcreds.port = find_free_port() + self._tmp_creds.create() + + logger.info("Using dbcreds '%s' to start and configure LDAP server: %s", + self.dbcreds_id, self.dbcreds.stringWithHiddenPassword()) + + start_time = datetime.utcnow() + while datetime.utcnow()-start_time < timedelta(minutes=1): + try: + self._server = LdapServer(java_gateway_port=find_free_port(DEFAULT_GATEWAY_PORT), + python_proxy_port=find_free_port(DEFAULT_PYTHON_PROXY_PORT), + config={'port': self.dbcreds.port, + 'base': {'objectclass': ['domain'], + 'dn': 'o=lofar,c=eu', + 'attributes': {'o': 'lofar'}}, + 'entries': [ + {'objectclass': 'organizationUnit', + 'dn': 'ou=Users,o=lofar,c=eu', + 'attributes': {'ou': 'Users'}}, + {'objectclass': 'lofarPerson', + 'dn': 'cn=paulus,ou=users,o=lofar,c=eu', + 'attributes': {'cn': 'paulus', + 'userPassword': 'pauluspass', + 'mail': 'paulus@boskabouter.nl', + 'givenName': 'Paulus', + 'sn': 'Boskabouter', + 'lofarPersonSystemrole': 'cn=support,ou=Roles,o=lofar,c=eu'}}, + {'objectclass': 'lofarPerson', + 'dn': 'cn=paula,ou=users,o=lofar,c=eu', + 
'attributes': {'cn': 'paula', + 'userPassword': 'paulapass', + 'mail': 'paulus@boskabouter.nl', + 'givenName': 'Paulus', + 'sn': 'Boskabouter', + 'lofarPersonSystemrole': 'cn=user,ou=Roles,o=lofar,c=eu'}}, + {'objectclass': 'lofarPerson', + 'dn': 'cn=%s,ou=users,o=lofar,c=eu' % self.dbcreds.user, + 'attributes': {'cn': self.dbcreds.user, + 'userPassword': self.dbcreds.password, + 'mail': '%s@lofar.test' % self.dbcreds.user, + 'givenName': self.dbcreds.user, + 'sn': 'lofar_test'}}, + {'objectclass': 'organizationUnit', + 'dn': 'ou=Roles,o=lofar,c=eu', + 'attributes': {'ou': 'Roles'}}, + {'objectclass': 'lofarSystemrole', + 'dn': 'cn=user,ou=roles,o=lofar,c=eu', + 'attributes': {'cn': 'user'}}, + {'objectclass': 'lofarSystemrole', + 'dn': 'cn=support,ou=roles,o=lofar,c=eu', + 'attributes': {'cn': 'support'}}, + ] + }) + + self._server.start() + os.environ["TMSS_LDAPCREDENTIALS"] = self.dbcreds_id + logger.info('LDAP server running and listening on port %s...', self.dbcreds.port) + logger.info('LDAP test user/pass: %s %s...', self.dbcreds.user, self.dbcreds.password) + return + except Py4JNetworkError as e: + logger.warning("TestLDAPServer could not be started, retrying with next free port. Error: %s", e) + raise TimeoutError("%s could not be started within 60 seconds. bailing out..." % self.__class__.__name__) def stop(self): '''stop the running postgres server''' diff --git a/SAS/TMSS/test/t_specify_observation.py b/SAS/TMSS/test/t_specify_observation.py new file mode 100755 index 0000000000000000000000000000000000000000..f80b138af737d0b6d9ef852e24c4342470aeeaa1 --- /dev/null +++ b/SAS/TMSS/test/t_specify_observation.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
+ + # $Id: $ + + import os + import unittest + import requests + + import logging + logger = logging.getLogger(__name__) + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + + # Do Mandatory setup step: + # use setup/teardown magic for tmss test database, ldap server and django server + # (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module) + from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import * + + from lofar.sas.tmss.test.tmss_test_data_django_models import * + + # import and setup rest test data creator + from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator + rest_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH) + + from lofar.sas.tmss.tmss.tmssapp import models + from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprint_from_task_draft_and_instantiate_subtasks_from_template + + + class SpecifyObservationFromTaskDraftTest(unittest.TestCase): + def test_create_task_blueprint(self): + """ + Use the 'default' task draft (ID=1) to specify an observation + Check that the task draft name is equal to the task draft name referenced by the created task blueprint + Check with a REST call that 4 subtasks are created and that these subtasks have state value 'defined' + """ + task_draft = models.TaskDraft.objects.get(id=1) + res_task_draft = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_draft/1/', 200) + task_blueprint = create_task_blueprint_from_task_draft_and_instantiate_subtasks_from_template(task_draft) + self.assertEqual(task_draft.name, task_blueprint.draft.name) + res_task_blueprint = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_blueprint/1/', 200) + self.assertEqual(len(res_task_blueprint['subtasks']), 4) + self.assertEqual(res_task_blueprint['specifications_template'], res_task_draft['specifications_template']) + for subtask_url in res_task_blueprint['subtasks']: + res_subtask = GET_and_assert_equal_expected_code(self, subtask_url, 200) + state_value = GET_and_assert_equal_expected_code(self, res_subtask['state'], 200)['value'] + self.assertEqual(state_value, "defined") + + + if __name__ == "__main__": + os.environ['TZ'] = 'UTC' + unittest.main() diff --git a/SAS/TMSS/test/t_specify_observation.run b/SAS/TMSS/test/t_specify_observation.run new file mode 100755 index 0000000000000000000000000000000000000000..d563b37623a3f667cb891d7872bd230ed2d88f6e --- /dev/null +++ b/SAS/TMSS/test/t_specify_observation.run @@ -0,0 +1,6 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "*tmss*" t_specify_observation.py + diff --git a/SAS/TMSS/test/t_specify_observation.sh b/SAS/TMSS/test/t_specify_observation.sh new file mode 100755 index 0000000000000000000000000000000000000000..dd467716958fac3d617aca0642fd6dff0daee501 --- /dev/null +++ b/SAS/TMSS/test/t_specify_observation.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_specify_observation \ No newline at end of file diff --git a/SAS/TMSS/test/t_tmss_session_auth.py b/SAS/TMSS/test/t_tmss_session_auth.py index 644f517c05dfa8426dfcaa7eccbf2b8b000bb541..423a1cdf048041efa12c6ecdd886fd1d6f0134dd 100755 --- a/SAS/TMSS/test/t_tmss_session_auth.py +++ b/SAS/TMSS/test/t_tmss_session_auth.py @@ -38,7 +38,7 @@ from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import * from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator test_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH) -from lofar.sas.tmss.util import TMSSsession +from 
lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession from lofar.common.test_utils import integration_test @@ -97,7 +97,7 @@ class OIDCSession(unittest.TestCase): @integration_test def test_success_using_correct_credentials(self): - with TMSSsession(AUTH.username, AUTH.password, BASE_URL.replace('/api', '')) as session: + with TMSSsession(AUTH.username, AUTH.password, BASE_URL.replace('/api', '')).session as session: r = session.get(BASE_URL + '/task_draft/?format=api') self.assertEqual(r.status_code, 200) self.assertTrue("Task Draft List" in r.content.decode('utf8')) diff --git a/SAS/TMSS/test/t_tmss_test_database.py b/SAS/TMSS/test/t_tmss_test_database.py index 3e99b742d72f12c0f4456f6191aff3f05b7153df..708dcd2f4724181214093099fbc1cf96a3f883b5 100755 --- a/SAS/TMSS/test/t_tmss_test_database.py +++ b/SAS/TMSS/test/t_tmss_test_database.py @@ -24,7 +24,7 @@ import unittest import logging from datetime import datetime -logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG) +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) from lofar.common.postgres import PostgresDatabaseConnection, FETCH_ONE from lofar.sas.tmss.test.test_utils import TMSSPostgresTestMixin diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py b/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py index 6009c974f02a906550ff61ab181fe18f82be3408..611b2ea8abcb98dc07d2092deb91db1dbcdf99c2 100755 --- a/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py +++ b/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py @@ -33,6 +33,10 @@ import logging logger = logging.getLogger(__name__) logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) +from lofar.common.test_utils import skip_integration_tests +if skip_integration_tests(): + exit(3) + # Do Mandatory setup step: # use setup/teardown magic for tmss test database, ldap server and django server # (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module) @@ -288,6 +292,14 @@ class DefaultSubtaskTemplatesTestCase(unittest.TestCase): class SubtaskTestCase(unittest.TestCase): @classmethod def setUpClass(cls) -> None: + # we should not depend on "previous" data + models.SubtaskInput.objects.all().delete() + models.DataproductHash.objects.all().delete() + models.DataproductArchiveInfo.objects.all().delete() + models.DataproductTransform.objects.all().delete() + models.Dataproduct.objects.all().delete() + models.Subtask.objects.all().delete() + cls.cluster_url = test_data_creator.post_data_and_get_url(test_data_creator.Cluster(), '/cluster/') cls.task_blueprint_data = test_data_creator.TaskBlueprint() cls.task_blueprint_url = test_data_creator.post_data_and_get_url(cls.task_blueprint_data, '/task_blueprint/') @@ -594,145 +606,6 @@ class DataproductTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/dataproduct/%s/' % id2, test_data_2) -class SubtaskConnectorTestCase(unittest.TestCase): - def test_subtask_connector_list_apiformat(self): - r = requests.get(BASE_URL + '/subtask_connector/?format=api', auth=AUTH) - self.assertEqual(r.status_code, 200) - self.assertTrue("Subtask Connector List" in r.content.decode('utf8')) - - def test_subtask_connector_GET_nonexistant_raises_error(self): - GET_and_assert_equal_expected_code(self, BASE_URL + '/subtask_connector/1234321/', 404) - - def test_subtask_connector_POST_and_GET(self): - stc_test_data = test_data_creator.SubtaskConnector() - - # 
POST and GET a new item and assert correctness - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 201, stc_test_data) - url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, stc_test_data) - - def test_subtask_connector_PUT_invalid_raises_error(self): - stc_test_data = test_data_creator.SubtaskConnector() - - PUT_and_assert_expected_response(self, BASE_URL + '/subtask_connector/9876789876/', stc_test_data, 404, {}) - - def test_subtask_connector_PUT(self): - stc_test_data = test_data_creator.SubtaskConnector() - stc_test_data2 = test_data_creator.SubtaskConnector() - - # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 201, stc_test_data) - url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, stc_test_data) - - # PUT new values, verify - PUT_and_assert_expected_response(self, url, stc_test_data2, 200, stc_test_data2) - GET_OK_and_assert_equal_expected_response(self, url, stc_test_data2) - - def test_subtask_connector_PATCH(self): - stc_test_data = test_data_creator.SubtaskConnector() - - # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 201, stc_test_data) - url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, stc_test_data) - - test_patch = {"role": BASE_URL + '/role/calibrator/', - "datatype": BASE_URL + '/datatype/quality/', } - - # PATCH item and verify - PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) - expected_data = dict(stc_test_data) - expected_data.update(test_patch) - GET_OK_and_assert_equal_expected_response(self, url, expected_data) - - def test_subtask_connector_DELETE(self): - stc_test_data = test_data_creator.SubtaskConnector() - - # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', stc_test_data, 201, stc_test_data) - url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, stc_test_data) - - # DELETE and check it's gone - DELETE_and_assert_gone(self, url) - - def test_subtask_connector_PROTECT_behavior_on_role_deleted(self): - stc_test_data = test_data_creator.SubtaskConnector() - - # create dependency that is safe to delete (enums are not populated / re-established between tests) - role_data = {'value': 'kickme'} - POST_and_assert_expected_response(self, BASE_URL + '/role/', role_data, 201, role_data) - role_url = BASE_URL + '/role/kickme/' - - - # POST new item and verify - test_data = dict(stc_test_data) - test_data['role'] = role_url - url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', test_data, 201, test_data)['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data) - - # Try to DELETE dependency, verify that was not successful - # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... 
- response = requests.delete(role_url, auth=AUTH) - self.assertEqual(500, response.status_code) - self.assertTrue("ProtectedError" in str(response.content)) - GET_OK_and_assert_equal_expected_response(self, role_url, role_data) - - def test_subtask_connector_PROTECT_behavior_on_datatype_deleted(self): - stc_test_data = test_data_creator.SubtaskConnector() - - # create new dependency that is safe to delete (enums are not populated / re-established between tests) - datatype_data = {'value': 'kickme'} - POST_and_assert_expected_response(self, BASE_URL + '/datatype/', datatype_data, 201, datatype_data) - datatype_url = BASE_URL + '/datatype/kickme/' - - # POST new item and verify - test_data = dict(stc_test_data) - test_data['datatype'] = datatype_url - url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_connector/', test_data, 201, test_data)['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data) - - # Try to DELETE dependency, verify that was not successful - # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... - response = requests.delete(datatype_url, auth=AUTH) - self.assertEqual(500, response.status_code) - self.assertTrue("ProtectedError" in str(response.content)) - GET_OK_and_assert_equal_expected_response(self, datatype_url, datatype_data) - - def test_GET_SubtaskConnector_list_view_shows_entry(self): - - test_data_1 = SubtaskConnector_test_data() - models.SubtaskConnector.objects.create(**test_data_1) - nbr_results = models.SubtaskConnector.objects.count() - GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/subtask_connector/', test_data_1, nbr_results) - - def test_GET_SubtaskConnector_view_returns_correct_entry(self): - - # setup - test_data_1 = SubtaskConnector_test_data() - test_data_2 = SubtaskConnector_test_data() - id1 = models.SubtaskConnector.objects.create(**test_data_1).id - id2 = models.SubtaskConnector.objects.create(**test_data_2).id - # assert - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_connector/%s/' % id1, test_data_1) - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_connector/%s/' % id2, test_data_2) - - def test_SubtaskConnector_allows_setting_dataformats(self): - """ - Other then through the API view, we cannot assign ManyToMany on creation, but have to set it later - """ - test_data_1 = dict(SubtaskConnector_test_data()) - test_data_1['inputs'] = None - test_data_2 = SubtaskConnector_test_data() - tior = models.SubtaskConnector.objects.create(**test_data_2) - tior.dataformats.set([models.Dataformat.objects.get(value='Beamformed'), - models.Dataformat.objects.get(value='MeasurementSet')]) - tior.save() - # assert - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_connector/%s' % tior.id, test_data_2) - - class SubtaskInputTestCase(unittest.TestCase): @classmethod def setUpClass(cls) -> None: @@ -741,7 +614,6 @@ class SubtaskInputTestCase(unittest.TestCase): cls.task_relation_blueprint_data = test_data_creator.TaskRelationBlueprint() cls.task_relation_blueprint_url = test_data_creator.post_data_and_get_url(cls.task_relation_blueprint_data, '/task_relation_blueprint/') cls.dataproduct_urls = [test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(), '/dataproduct/'), test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(), '/dataproduct/')] - cls.subtask_connector_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskConnector(), '/subtask_connector/') 
cls.subtask_output_data = test_data_creator.SubtaskOutput() cls.subtask_output_url = test_data_creator.post_data_and_get_url(cls.subtask_output_data, '/subtask_output/') cls.subtask_input_selection_template_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskInputSelectionTemplate(), '/subtask_input_selection_template/') @@ -755,7 +627,7 @@ class SubtaskInputTestCase(unittest.TestCase): GET_and_assert_equal_expected_code(self, BASE_URL + '/subtask_input/1234321/', 404) def test_subtask_input_POST_and_GET(self): - sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) @@ -763,12 +635,12 @@ class SubtaskInputTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, sti_test_data) def test_subtask_input_PUT_invalid_raises_error(self): - sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) PUT_and_assert_expected_response(self, BASE_URL + '/subtask_input/9876789876/', sti_test_data, 404, {}) def test_subtask_input_PUT(self): - sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) @@ -776,12 +648,12 @@ class SubtaskInputTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, sti_test_data) # PUT new values, verify - sti_test_data2 = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, 
subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data2 = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) PUT_and_assert_expected_response(self, url, sti_test_data2, 200, sti_test_data2) GET_OK_and_assert_equal_expected_response(self, url, sti_test_data2) def test_subtask_input_PATCH(self): - sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) @@ -804,7 +676,7 @@ class SubtaskInputTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_subtask_input_DELETE(self): - sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data) @@ -820,7 +692,7 @@ class SubtaskInputTestCase(unittest.TestCase): task_blueprint_url=self.subtask_data['task_blueprint'], specifications_template_url=self.subtask_data['specifications_template'], specifications_doc=self.subtask_data['specifications_doc']), '/subtask/') - sti_test_data = test_data_creator.SubtaskInput(subtask_url=subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(subtask_url=subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) # POST new item, verify url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] @@ -832,28 +704,12 @@ class SubtaskInputTestCase(unittest.TestCase): # assert item gone 
GET_and_assert_equal_expected_code(self, url, 404) - def test_subtask_input_SET_NULL_behavior_on_connector_deleted(self): - subtask_connector_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskConnector(), '/subtask_connector/') - sti_test_data = test_data_creator.SubtaskInput(subtask_connector_url=subtask_connector_url, subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) - - # POST new item, verify - url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] - GET_OK_and_assert_equal_expected_response(self, url, sti_test_data) - - # DELETE dependency and check it's gone - DELETE_and_assert_gone(self, subtask_connector_url) - - # assert item reference is set null - expected_data = dict(sti_test_data) - expected_data['connector'] = None - GET_OK_and_assert_equal_expected_response(self, url, expected_data) - def test_subtask_input_SET_NULL_behavior_on_task_relation_blueprint_deleted(self): # make new task_relation_blueprint instance, but reuse related data for speed task_relation_blueprint_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskRelationBlueprint(draft_url=self.task_relation_blueprint_data['draft'], template_url=self.task_relation_blueprint_data['selection_template'], input_url=self.task_relation_blueprint_data['input'], output_url=self.task_relation_blueprint_data['output'], consumer_url=self.task_relation_blueprint_data['consumer'], producer_url=self.task_relation_blueprint_data['producer']), '/task_relation_blueprint/') - sti_test_data = test_data_creator.SubtaskInput(task_relation_blueprint_url=task_relation_blueprint_url, subtask_url=self.subtask_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + sti_test_data = test_data_creator.SubtaskInput(task_relation_blueprint_url=task_relation_blueprint_url, subtask_url=self.subtask_url, dataproduct_urls=self.dataproduct_urls, subtask_output_url=self.subtask_output_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) # POST new item, verify url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] @@ -869,8 +725,8 @@ class SubtaskInputTestCase(unittest.TestCase): def test_subtask_input_PROTECT_behavior_on_producer_deleted(self): # make new subtask_output_url instance, but reuse related data for speed - subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=self.subtask_output_data['subtask'], subtask_connector_url=self.subtask_output_data['connector']), '/subtask_output/') - sti_test_data = test_data_creator.SubtaskInput(subtask_output_url=subtask_output_url, subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_connector_url=self.subtask_connector_url, subtask_input_selection_template_url=self.subtask_input_selection_template_url) + subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=self.subtask_output_data['subtask']), '/subtask_output/') + sti_test_data = 
test_data_creator.SubtaskInput(subtask_output_url=subtask_output_url, subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, subtask_input_selection_template_url=self.subtask_input_selection_template_url) # POST with dependency url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_input/', sti_test_data, 201, sti_test_data)['url'] @@ -889,7 +745,6 @@ class SubtaskInputTestCase(unittest.TestCase): subtask_url=self.subtask_url, task_relation_blueprint_url=self.task_relation_blueprint_url, dataproduct_urls=self.dataproduct_urls, - subtask_connector_url=self.subtask_connector_url, subtask_output_url=self.subtask_output_url) # POST with dependency @@ -940,8 +795,6 @@ class SubtaskOutputTestCase(unittest.TestCase): def setUpClass(cls) -> None: cls.subtask_data = test_data_creator.Subtask() cls.subtask_url = test_data_creator.post_data_and_get_url(cls.subtask_data, '/subtask/') - cls.subtask_connector_data = test_data_creator.SubtaskConnector() - cls.subtask_connector_url = test_data_creator.post_data_and_get_url(cls.subtask_connector_data, '/subtask_connector/') def test_subtask_output_list_apiformat(self): r = requests.get(BASE_URL + '/subtask_output/?format=api', auth=AUTH) @@ -952,7 +805,7 @@ class SubtaskOutputTestCase(unittest.TestCase): GET_and_assert_equal_expected_code(self, BASE_URL + '/subtask_output/1234321/', 404) def test_subtask_output_POST_and_GET(self): - sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) + sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url) # POST and GET a new item and assert correctness r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, @@ -961,12 +814,12 @@ class SubtaskOutputTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, sto_test_data) def test_subtask_output_PUT_invalid_raises_error(self): - sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) + sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url) PUT_and_assert_expected_response(self, BASE_URL + '/subtask_output/9876789876/', sto_test_data, 404, {}) def test_subtask_output_PUT(self): - sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) - sto_test_data2 = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) + sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url) + sto_test_data2 = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201,sto_test_data) @@ -978,8 +831,8 @@ class SubtaskOutputTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, sto_test_data2) def test_subtask_output_PATCH(self): - sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) - sto_test_data2 = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) + sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url) + sto_test_data2 = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url) # POST new item, 
verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, @@ -997,7 +850,7 @@ class SubtaskOutputTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, url, expected_data) def test_subtask_output_DELETE(self): - sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) + sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url) # POST new item, verify r_dict = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, @@ -1011,7 +864,7 @@ class SubtaskOutputTestCase(unittest.TestCase): def test_subtask_output_CASCADE_behavior_on_subtask_deleted(self): # make new subtask_url instance, but reuse related data for speed subtask_url = test_data_creator.post_data_and_get_url(self.subtask_data, '/subtask/') - sto_test_data = test_data_creator.SubtaskOutput(subtask_url=subtask_url, subtask_connector_url=self.subtask_connector_url) + sto_test_data = test_data_creator.SubtaskOutput(subtask_url=subtask_url) # POST new item, verify url = POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, sto_test_data)['url'] @@ -1023,23 +876,6 @@ class SubtaskOutputTestCase(unittest.TestCase): # assert item gone GET_and_assert_equal_expected_code(self, url, 404) - def test_subtask_output_SET_NULL_behavior_on_connector_deleted(self): - sto_test_data = test_data_creator.SubtaskOutput(subtask_url=self.subtask_url, subtask_connector_url=self.subtask_connector_url) - - # POST new item, verify - url = \ - POST_and_assert_expected_response(self, BASE_URL + '/subtask_output/', sto_test_data, 201, sto_test_data)[ - 'url'] - GET_OK_and_assert_equal_expected_response(self, url, sto_test_data) - - # DELETE dependency and check it's gone - DELETE_and_assert_gone(self, sto_test_data['connector']) - - # assert item reference is set null - expected_data = dict(sto_test_data) - expected_data['connector'] = None - GET_OK_and_assert_equal_expected_response(self, url, expected_data) - def test_GET_SubtaskOutput_list_view_shows_entry(self): test_data_1 = SubtaskOutput_test_data() @@ -1706,7 +1542,7 @@ class DataproductArchiveInfoTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/dataproduct_archive_info/%s/' % id2, test_data_2) -class SubtaskQuery(unittest.TestCase): +class SubtaskQueryTestCase(unittest.TestCase): """ Test queries on the subtask REST api: - query cluster only @@ -1746,7 +1582,7 @@ class SubtaskQuery(unittest.TestCase): Create multiple subtasks for a given number of days with start_time 2 hours from now and stop_time 4 hours from now """ - cluster = SubtaskQuery.create_cluster(cluster_name) + cluster = SubtaskQueryTestCase.create_cluster(cluster_name) for day_idx in range(0, total_number): start_time = datetime.now() + timedelta(hours=2, days=day_idx) stop_time = datetime.now() + timedelta(hours=4, days=day_idx) @@ -1765,11 +1601,19 @@ class SubtaskQuery(unittest.TestCase): clusterB 50 subtasks with start 2hr and stop time 4hr from now, recurring 'every day' clusterC 30 subtasks with start 2hr and stop time 4hr from now, recurring 'every day' """ - cluster = SubtaskQuery.create_cluster("clusterA") + # we're counting (filtered) subtasks, so we should not depend on "previous" data + models.SubtaskInput.objects.all().delete() + models.DataproductHash.objects.all().delete() + models.DataproductArchiveInfo.objects.all().delete() + 
models.DataproductTransform.objects.all().delete() + models.Dataproduct.objects.all().delete() + models.Subtask.objects.all().delete() + + cluster = SubtaskQueryTestCase.create_cluster("clusterA") subtask_data = Subtask_test_data(cluster=cluster) models.Subtask.objects.create(**subtask_data) - for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items(): - SubtaskQuery.create_multiple_subtask_object(period_length_in_days, cluster_name) + for cluster_name, period_length_in_days in SubtaskQueryTestCase.subtasks_test_data_with_start_stop_time.items(): + SubtaskQueryTestCase.create_multiple_subtask_object(period_length_in_days, cluster_name) def test_query_cluster_only(self): @@ -1780,7 +1624,7 @@ class SubtaskQuery(unittest.TestCase): response = requests.get(BASE_URL + '/subtask/?cluster__name=clusterA', auth=AUTH) self.check_response_OK_and_result_count(response, 1) - for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items(): + for cluster_name, period_length_in_days in SubtaskQueryTestCase.subtasks_test_data_with_start_stop_time.items(): logger.info("Check query on %s" % cluster_name) response = requests.get(BASE_URL + '/subtask/?cluster__name=%s' % cluster_name, auth=AUTH) self.check_response_OK_and_result_count(response, period_length_in_days) @@ -1790,7 +1634,7 @@ class SubtaskQuery(unittest.TestCase): Check if I can query on the start and stop time and cluster name (B and C) over a period Check status code and response length """ - for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items(): + for cluster_name, period_length_in_days in SubtaskQueryTestCase.subtasks_test_data_with_start_stop_time.items(): start_time = datetime.now() stop_time = start_time + timedelta(days=period_length_in_days) expected_count = period_length_in_days @@ -1845,7 +1689,7 @@ class SubtaskQuery(unittest.TestCase): Check if I can query on the start time and cluster name (B and C) over a period Check status code and response length """ - for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items(): + for cluster_name, period_length_in_days in SubtaskQueryTestCase.subtasks_test_data_with_start_stop_time.items(): start_time = datetime.now() expected_count = period_length_in_days logger.info("Check query greater than start_time (%s) for %s " % @@ -1868,7 +1712,7 @@ class SubtaskQuery(unittest.TestCase): Check if I can query on the stop time and cluster name (B and C) over a period Check status code and response length """ - for cluster_name, period_length_in_days in SubtaskQuery.subtasks_test_data_with_start_stop_time.items(): + for cluster_name, period_length_in_days in SubtaskQueryTestCase.subtasks_test_data_with_start_stop_time.items(): stop_time = datetime.now() + timedelta(days=period_length_in_days) logger.info("Check query less than stop_time (%s) for %s " % (formatDatetime(stop_time), cluster_name)) @@ -1898,7 +1742,7 @@ class SubtaskQuery(unittest.TestCase): self.check_response_OK_and_result_count(response, 0) # Check how many is 'ALL' - total_subtasks = SubtaskQuery.get_total_number_of_subtasks() + total_subtasks = SubtaskQueryTestCase.get_total_number_of_subtasks() response = requests.get(BASE_URL + '/subtask/?cluster__error_in_query=clusterA', auth=AUTH) self.check_response_OK_and_result_count(response, total_subtasks) @@ -1922,11 +1766,12 @@ class SubtaskQuery(unittest.TestCase): def test_query_state_only(self): """ Check the 
query on state value. Check status code and response length - All states are scheduling, None are defined + All states are defining (by default), None are defined """ - logger.info("Check query on state scheduling") - response = requests.get(BASE_URL + '/subtask/?state__value=scheduling', auth=AUTH) - self.check_response_OK_and_result_count(response, SubtaskQuery.get_total_number_of_subtasks()) + logger.info("Check query on state defining") + total_number_of_subtasks = SubtaskQueryTestCase.get_total_number_of_subtasks() + response = requests.get(BASE_URL + '/subtask/?state__value=defining', auth=AUTH) + self.check_response_OK_and_result_count(response, total_number_of_subtasks) response = requests.get(BASE_URL + '/subtask/?state__value=defined', auth=AUTH) self.check_response_OK_and_result_count(response, 0) @@ -1941,7 +1786,7 @@ class SubtaskQuery(unittest.TestCase): """ logger.info("Check query on ordering ascending start time") response = requests.get(BASE_URL + '/subtask/?ordering=start_time', auth=AUTH) - self.check_response_OK_and_result_count(response, SubtaskQuery.get_total_number_of_subtasks()) + self.check_response_OK_and_result_count(response, SubtaskQueryTestCase.get_total_number_of_subtasks()) previous_start_time = "2000-01-01T00:00:00" for item in response.json().get('results'): start_time = item['start_time'] @@ -1951,7 +1796,7 @@ class SubtaskQuery(unittest.TestCase): logger.info("Check query on ordering descending start time") response = requests.get(BASE_URL + '/subtask/?ordering=-start_time', auth=AUTH) - self.check_response_OK_and_result_count(response, SubtaskQuery.get_total_number_of_subtasks()) + self.check_response_OK_and_result_count(response, SubtaskQueryTestCase.get_total_number_of_subtasks()) previous_start_time = "2100-01-01T00:00:00" for item in response.json().get('results'): start_time = item['start_time'] diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py b/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py index 9fa9a987f1e380d231159f80a43897d0c6435be9..78fd1a0d54856d4b8d7a7b814972301c95518305 100755 --- a/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py +++ b/SAS/TMSS/test/t_tmssapp_scheduling_django_API.py @@ -26,7 +26,7 @@ from datetime import datetime import logging logger = logging.getLogger(__name__) -logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG) +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) # todo: Tags? -> Decide how to deal with them first. 
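Editorial aside, not part of the patch itself: the SubtaskQueryTestCase changes earlier in this patch exercise the subtask REST endpoint's filter and ordering query parameters (cluster__name, state__value, ordering). Below is a minimal standalone sketch of those same queries. It assumes BASE_URL points at a running TMSS API and AUTH holds valid credentials; in the tests above both come from tmss_test_environment_unittest_setup. The placeholder values here are hypothetical, not part of this change set.

import requests

# Hypothetical placeholders; in the tests these are provided by the TMSS test environment.
BASE_URL = 'http://localhost:8000/api'
AUTH = ('test', 'test')

# Filter subtasks by cluster name (cf. test_query_cluster_only).
response = requests.get(BASE_URL + '/subtask/?cluster__name=clusterA', auth=AUTH)
print(response.status_code, response.json().get('count'))

# Filter by state value; freshly created subtasks default to 'defining' (cf. test_query_state_only).
response = requests.get(BASE_URL + '/subtask/?state__value=defining', auth=AUTH)
print(response.status_code, response.json().get('count'))

# Order results by ascending start_time; use '?ordering=-start_time' for descending (cf. the ordering test).
response = requests.get(BASE_URL + '/subtask/?ordering=start_time', auth=AUTH)
for item in response.json().get('results', []):
    print(item['start_time'])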
@@ -249,32 +249,6 @@ class DataproductTest(unittest.TestCase): models.Dataproduct.objects.create(**test_data) -class SubtaskConnectorTest(unittest.TestCase): - def test_SubtaskConnector_gets_created_with_correct_creation_timestamp(self): - - # setup - before = datetime.utcnow() - entry = models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()) - - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.created_at) - self.assertGreater(after, entry.created_at) - - def test_SubtaskConnector_update_timestamp_gets_changed_correctly(self): - - # setup - entry = models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()) - before = datetime.utcnow() - entry.save() - after = datetime.utcnow() - - # assert - self.assertLess(before, entry.updated_at) - self.assertGreater(after, entry.updated_at) - - class AntennaSetTest(unittest.TestCase): def test_AntennaSet_gets_created_with_correct_creation_timestamp(self): diff --git a/SAS/TMSS/test/t_tmssapp_specification_REST_API.py b/SAS/TMSS/test/t_tmssapp_specification_REST_API.py index 4878da5ef38f1c84e8a238fa7d901e48b2165c25..a9f0b6c410b3fc3cc164d0e19be8b7f57f82f9d0 100755 --- a/SAS/TMSS/test/t_tmssapp_specification_REST_API.py +++ b/SAS/TMSS/test/t_tmssapp_specification_REST_API.py @@ -33,6 +33,10 @@ import logging logger = logging.getLogger(__name__) logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) +from lofar.common.test_utils import skip_integration_tests +if skip_integration_tests(): + exit(3) + # Do Mandatory setup step: # use setup/teardown magic for tmss test database, ldap server and django server # (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module) @@ -65,30 +69,35 @@ class GeneratorTemplateTestCase(unittest.TestCase): def test_generator_template_POST_and_GET(self): # POST and GET a new item and assert correctness - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate()) + test_data = test_data_creator.GeneratorTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.GeneratorTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) def test_generator_template_PUT_invalid_raises_error(self): - PUT_and_assert_expected_response(self, BASE_URL + '/generator_template/9876789876/', test_data_creator.GeneratorTemplate(), 404, {}) + test_data = test_data_creator.GeneratorTemplate() + PUT_and_assert_expected_response(self, BASE_URL + '/generator_template/9876789876/', test_data, 404, {}) def test_generator_template_PUT(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate()) + test_data = test_data_creator.GeneratorTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.GeneratorTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # PUT new values, verify - PUT_and_assert_expected_response(self, url, test_data_creator.GeneratorTemplate("generatortemplate2"), 200, 
test_data_creator.GeneratorTemplate("generatortemplate2")) - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.GeneratorTemplate("generatortemplate2")) + test_data2 = test_data_creator.GeneratorTemplate("generatortemplate2") + PUT_and_assert_expected_response(self, url, test_data2, 200, test_data2) + GET_OK_and_assert_equal_expected_response(self, url, test_data2) def test_generator_template_PATCH(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate()) + test_data = test_data_creator.GeneratorTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.GeneratorTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) test_patch = {"version": 'v6.28318530718', "schema": {"mykey": "my better value"}} @@ -102,9 +111,10 @@ class GeneratorTemplateTestCase(unittest.TestCase): def test_generator_template_DELETE(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data_creator.GeneratorTemplate(), 201, test_data_creator.GeneratorTemplate()) + test_data = test_data_creator.GeneratorTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.GeneratorTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -131,30 +141,35 @@ class SchedulingUnitTemplateTestCase(unittest.TestCase): def test_scheduling_unit_template_POST_and_GET(self): # POST and GET a new item and assert correctness - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate()) + test_data = test_data_creator.SchedulingUnitTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url+'?format=json', test_data_creator.SchedulingUnitTemplate()) + GET_OK_and_assert_equal_expected_response(self, url+'?format=json', test_data) def test_scheduling_unit_template_PUT_invalid_raises_error(self): - PUT_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/9876789876/', test_data_creator.SchedulingUnitTemplate(), 404, {}) + test_data = test_data_creator.SchedulingUnitTemplate() + PUT_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/9876789876/', test_data, 404, {}) def test_scheduling_unit_template_PUT(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate()) + test_data = test_data_creator.SchedulingUnitTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.SchedulingUnitTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # PUT new values, verify - 
PUT_and_assert_expected_response(self, url, test_data_creator.SchedulingUnitTemplate("schedulingunittemplate2"), 200, test_data_creator.SchedulingUnitTemplate("schedulingunittemplate2")) - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.SchedulingUnitTemplate("schedulingunittemplate2")) + test_data2 = test_data_creator.SchedulingUnitTemplate("schedulingunittemplate2") + PUT_and_assert_expected_response(self, url, test_data2, 200, test_data2) + GET_OK_and_assert_equal_expected_response(self, url, test_data2) def test_scheduling_unit_template_PATCH(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate()) + test_data = test_data_creator.SchedulingUnitTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.SchedulingUnitTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) test_patch = {"version": 'v6.28318530718', "schema": {"mykey": "my better value"}} @@ -168,9 +183,10 @@ class SchedulingUnitTemplateTestCase(unittest.TestCase): def test_scheduling_unit_template_DELETE(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data_creator.SchedulingUnitTemplate(), 201, test_data_creator.SchedulingUnitTemplate()) + test_data = test_data_creator.SchedulingUnitTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.SchedulingUnitTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -197,31 +213,33 @@ class TaskTemplateTestCase(unittest.TestCase): def test_task_template_POST_and_GET(self): # POST and GET a new item and assert correctness - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201, - test_data_creator.TaskTemplate()) + test_data = test_data_creator.TaskTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url + '?format=json', test_data_creator.TaskTemplate()) + GET_OK_and_assert_equal_expected_response(self, url + '?format=json', test_data) def test_task_template_PUT_invalid_raises_error(self): - PUT_and_assert_expected_response(self, BASE_URL + '/task_template/9876789876/', test_data_creator.TaskTemplate(), 404, {}) + test_data = test_data_creator.TaskTemplate() + PUT_and_assert_expected_response(self, BASE_URL + '/task_template/9876789876/', test_data, 404, {}) def test_task_template_PUT(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201, - test_data_creator.TaskTemplate()) + test_data = test_data_creator.TaskTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.TaskTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # PUT 
new values, verify - PUT_and_assert_expected_response(self, url, test_data_creator.TaskTemplate("tasktemplate2"), 200, test_data_creator.TaskTemplate("tasktemplate2")) - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.TaskTemplate("tasktemplate2")) + test_data2 = test_data_creator.TaskTemplate("tasktemplate2") + PUT_and_assert_expected_response(self, url, test_data2, 200, test_data2) + GET_OK_and_assert_equal_expected_response(self, url, test_data2) def test_task_template_PATCH(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201, - test_data_creator.TaskTemplate()) + test_data = test_data_creator.TaskTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.TaskTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) test_patch = {"version": 'v6.28318530718', "schema": {"mykey": "my better value"}, @@ -234,10 +252,10 @@ class TaskTemplateTestCase(unittest.TestCase): def test_task_template_DELETE(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201, - test_data_creator.TaskTemplate()) + test_data = test_data_creator.TaskTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.TaskTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -264,30 +282,35 @@ class WorkRelationSelectionTemplateTestCase(unittest.TestCase): def test_work_relation_selection_template_POST_and_GET(self): # POST and GET a new item and assert correctness - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate()) + test_data = test_data_creator.WorkRelationSelectionTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url+'?format=json', test_data_creator.WorkRelationSelectionTemplate()) + GET_OK_and_assert_equal_expected_response(self, url+'?format=json', test_data) def test_work_relation_selection_template_PUT_invalid_raises_error(self): - PUT_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/9876789876/', test_data_creator.WorkRelationSelectionTemplate(), 404, {}) + test_data = test_data_creator.WorkRelationSelectionTemplate() + PUT_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/9876789876/', test_data, 404, {}) def test_work_relation_selection_template_PUT(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate()) + test_data = test_data_creator.WorkRelationSelectionTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data, 201, test_data) url = r_dict['url'] - 
GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # PUT new values, verify - PUT_and_assert_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate("workrelationselectiontemplate2"), 200, test_data_creator.WorkRelationSelectionTemplate("workrelationselectiontemplate2")) - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate("workrelationselectiontemplate2")) + test_data2 = test_data_creator.WorkRelationSelectionTemplate("workrelationselectiontemplate2") + PUT_and_assert_expected_response(self, url, test_data2, 200, test_data2) + GET_OK_and_assert_equal_expected_response(self, url, test_data2) def test_work_relation_selection_template_PATCH(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate()) + test_data = test_data_creator.WorkRelationSelectionTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) test_patch = {"version": 'v6.28318530718', "schema": {"mykey": "my better value"}, @@ -302,9 +325,10 @@ class WorkRelationSelectionTemplateTestCase(unittest.TestCase): def test_work_relation_selection_template_DELETE(self): # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data_creator.WorkRelationSelectionTemplate(), 201, test_data_creator.WorkRelationSelectionTemplate()) + test_data = test_data_creator.WorkRelationSelectionTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', test_data, 201, test_data) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, test_data_creator.WorkRelationSelectionTemplate()) + GET_OK_and_assert_equal_expected_response(self, url, test_data) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -383,7 +407,8 @@ class TaskConnectorTestCase(unittest.TestCase): def test_task_connector_POST_existing_outputs_works(self): # First POST a new item to reference - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data_creator.TaskTemplate(), 201, test_data_creator.TaskTemplate()) + test_data = test_data_creator.TaskTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', test_data, 201, test_data) url = r_dict['url'] # POST a new item with correct reference @@ -472,9 +497,8 @@ class TaskConnectorTestCase(unittest.TestCase): class DefaultTemplates(unittest.TestCase): def test_default_generator_template_POST(self): - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', - test_data_creator.GeneratorTemplate(), 201, - test_data_creator.GeneratorTemplate()) + test_data = test_data_creator.GeneratorTemplate() + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', test_data, 201, test_data) url = r_dict['url'] test_data_1 = dict(test_data_creator.DefaultTemplates()) @@ -482,9 +506,10 @@ class DefaultTemplates(unittest.TestCase): 
POST_and_assert_expected_response(self, BASE_URL + '/default_generator_template/', test_data_1, 201, test_data_1) def test_default_scheduling_unit_template_POST(self): + test_data = test_data_creator.SchedulingUnitTemplate() r_dict = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', - test_data_creator.SchedulingUnitTemplate(), 201, - test_data_creator.SchedulingUnitTemplate()) + test_data, 201, + test_data) url = r_dict['url'] test_data_1 = dict(test_data_creator.DefaultTemplates()) @@ -492,9 +517,10 @@ class DefaultTemplates(unittest.TestCase): POST_and_assert_expected_response(self, BASE_URL + '/default_scheduling_unit_template/', test_data_1, 201, test_data_1) def test_default_task_template_POST(self): + test_data = test_data_creator.TaskTemplate() r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', - test_data_creator.TaskTemplate(), 201, - test_data_creator.TaskTemplate()) + test_data, 201, + test_data) url = r_dict['url'] test_data_1 = dict(test_data_creator.DefaultTemplates()) @@ -502,9 +528,10 @@ class DefaultTemplates(unittest.TestCase): POST_and_assert_expected_response(self, BASE_URL + '/default_task_template/', test_data_1, 201, test_data_1) def test_default_work_relation_selection_template_POST(self): + test_data = test_data_creator.WorkRelationSelectionTemplate() r_dict = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', - test_data_creator.WorkRelationSelectionTemplate(), 201, - test_data_creator.WorkRelationSelectionTemplate()) + test_data, 201, + test_data) url = r_dict['url'] test_data_1 = dict(test_data_creator.DefaultTemplates()) @@ -514,74 +541,78 @@ class DefaultTemplates(unittest.TestCase): def test_default_generator_template_PROTECT_behavior_on_template_deleted(self): # POST with dependency + test_data = test_data_creator.GeneratorTemplate() template_url = POST_and_assert_expected_response(self, BASE_URL + '/generator_template/', - test_data_creator.GeneratorTemplate(), 201, - test_data_creator.GeneratorTemplate())['url'] - test_data = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) - test_data['template'] = template_url + test_data, 201, + test_data)['url'] + test_data2 = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) + test_data2['template'] = template_url POST_and_assert_expected_response(self, BASE_URL + '/default_generator_template/', - test_data, 201, test_data) + test_data2, 201, test_data2) # Try to DELETE dependency, verify that was not successful # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... 
response = requests.delete(template_url, auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_OK_and_assert_equal_expected_response(self, template_url, test_data_creator.GeneratorTemplate()) + GET_OK_and_assert_equal_expected_response(self, template_url, test_data) def test_default_scheduling_unit_template_PROTECT_behavior_on_template_deleted(self): # POST with dependency + test_data = test_data_creator.SchedulingUnitTemplate() template_url = POST_and_assert_expected_response(self, BASE_URL + '/scheduling_unit_template/', - test_data_creator.SchedulingUnitTemplate(), 201, - test_data_creator.SchedulingUnitTemplate())['url'] - test_data = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) - test_data['template'] = template_url + test_data, 201, + test_data)['url'] + test_data2 = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) + test_data2['template'] = template_url POST_and_assert_expected_response(self, BASE_URL + '/default_scheduling_unit_template/', - test_data, 201, test_data) + test_data2, 201, test_data2) # Try to DELETE dependency, verify that was not successful # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... response = requests.delete(template_url, auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_OK_and_assert_equal_expected_response(self, template_url, test_data_creator.SchedulingUnitTemplate()) + GET_OK_and_assert_equal_expected_response(self, template_url, test_data) def test_default_task_template_PROTECT_behavior_on_template_deleted(self): # POST with dependency + test_data = test_data_creator.TaskTemplate() template_url = POST_and_assert_expected_response(self, BASE_URL + '/task_template/', - test_data_creator.TaskTemplate(), 201, - test_data_creator.TaskTemplate())['url'] - test_data = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) - test_data['template'] = template_url + test_data, 201, + test_data)['url'] + test_data2 = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) + test_data2['template'] = template_url POST_and_assert_expected_response(self, BASE_URL + '/default_task_template/', - test_data, 201, test_data) + test_data2, 201, test_data2) # Try to DELETE dependency, verify that was not successful # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... 
response = requests.delete(template_url, auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_OK_and_assert_equal_expected_response(self, template_url, test_data_creator.TaskTemplate()) + GET_OK_and_assert_equal_expected_response(self, template_url, test_data) def test_default_work_relation_selection_template_PROTECT_behavior_on_template_deleted(self): # POST with dependency + test_data = test_data_creator.WorkRelationSelectionTemplate() template_url = POST_and_assert_expected_response(self, BASE_URL + '/work_relation_selection_template/', - test_data_creator.WorkRelationSelectionTemplate(), 201, - test_data_creator.WorkRelationSelectionTemplate())['url'] - test_data = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) - test_data['template'] = template_url + test_data, 201, + test_data)['url'] + test_data2 = dict(test_data_creator.DefaultTemplates("defaulttemplate2")) + test_data2['template'] = template_url POST_and_assert_expected_response(self, BASE_URL + '/default_work_relation_selection_template/', - test_data, 201, test_data) + test_data2, 201, test_data2) # Try to DELETE dependency, verify that was not successful # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... response = requests.delete(template_url, auth=AUTH) self.assertEqual(500, response.status_code) self.assertTrue("ProtectedError" in str(response.content)) - GET_OK_and_assert_equal_expected_response(self, template_url, test_data_creator.WorkRelationSelectionTemplate()) + GET_OK_and_assert_equal_expected_response(self, template_url, test_data) class CycleTestCase(unittest.TestCase): diff --git a/SAS/TMSS/test/t_tmssapp_specification_django_API.py b/SAS/TMSS/test/t_tmssapp_specification_django_API.py index 6e0e78cc389bcd6e891055cf5ee9a9e956fbf7b1..175e48e305ab6f7de886c7af3de5920bbc96b5ce 100755 --- a/SAS/TMSS/test/t_tmssapp_specification_django_API.py +++ b/SAS/TMSS/test/t_tmssapp_specification_django_API.py @@ -26,7 +26,7 @@ import uuid import logging logger = logging.getLogger(__name__) -logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.DEBUG) +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) # todo: Tags? -> Decide how to deal with them first. # todo: Immutability of Blueprints on db level? 
@@ -39,6 +39,7 @@ from lofar.sas.tmss.test.tmss_database_unittest_setup import * from lofar.sas.tmss.test.tmss_test_data_django_models import * from django.db.utils import IntegrityError +from django.core.exceptions import ValidationError class GeneratorTemplateTest(unittest.TestCase): @@ -130,6 +131,21 @@ class TaskTemplateTest(unittest.TestCase): self.assertLess(before, entry.updated_at) self.assertGreater(after, entry.updated_at) + def test_TaskTemplate_name_version_unique(self): + test_data = TaskTemplate_test_data(name="my_name", version="1") + entry1 = models.TaskTemplate.objects.create(**test_data) + + with self.assertRaises(IntegrityError): + entry2 = models.TaskTemplate.objects.create(**test_data) + + test_data2 = dict(**test_data) + test_data2['version'] = "2" + entry2 = models.TaskTemplate.objects.create(**test_data2) + + with self.assertRaises(IntegrityError): + entry2.version = '1' + entry2.save() + class WorkRelationSelectionTemplateTest(unittest.TestCase): def test_WorkRelationSelectionTemplate_gets_created_with_correct_creation_timestamp(self): diff --git a/SAS/TMSS/test/t_tmssapp_specification_permissions.py b/SAS/TMSS/test/t_tmssapp_specification_permissions.py index cc356399963a8d553c330d54d09135dc00ed8808..0e8ebd686bd17a53a0746993d73ec7e4127604d6 100755 --- a/SAS/TMSS/test/t_tmssapp_specification_permissions.py +++ b/SAS/TMSS/test/t_tmssapp_specification_permissions.py @@ -56,6 +56,8 @@ class CyclePermissionTestCase(unittest.TestCase): # refresh user to update cache, see: https://docs.djangoproject.com/en/3.0/topics/auth/default/#permission-caching user = User.objects.get(username='paulus') + while user.has_perm('tmssapp.add_cycle'): + user = User.objects.get(username='paulus') self.assertFalse(user.has_perm('tmssapp.add_cycle')) @@ -69,6 +71,8 @@ class CyclePermissionTestCase(unittest.TestCase): # refresh user to update cache, see: https://docs.djangoproject.com/en/3.0/topics/auth/default/#permission-caching user = User.objects.get(username='paulus') + while not user.has_perm('tmssapp.add_cycle'): + user = User.objects.get(username='paulus') self.assertTrue(user.has_perm('tmssapp.add_cycle')) @@ -82,6 +86,8 @@ class CyclePermissionTestCase(unittest.TestCase): # refresh user to update cache, see: https://docs.djangoproject.com/en/3.0/topics/auth/default/#permission-caching user = User.objects.get(username='paulus') + while not user.has_perm('tmssapp.add_cycle'): + user = User.objects.get(username='paulus') # add count = len(models.Cycle.objects.all()) @@ -100,6 +106,8 @@ class CyclePermissionTestCase(unittest.TestCase): # refresh user to update cache, see: https://docs.djangoproject.com/en/3.0/topics/auth/default/#permission-caching user = User.objects.get(username='paulus') + while not user.has_perm('tmssapp.add_cycle'): + user = User.objects.get(username='paulus') # add count = len(models.Cycle.objects.all()) diff --git a/SAS/TMSS/test/test_utils.py b/SAS/TMSS/test/test_utils.py index 34e44c6f384073421611136ed7c6d8e7c24b39c2..88a250084b484b67931a20a822bac8b38655462b 100644 --- a/SAS/TMSS/test/test_utils.py +++ b/SAS/TMSS/test/test_utils.py @@ -32,6 +32,9 @@ from lofar.common.dbcredentials import Credentials, DBCredentials from lofar.common.util import find_free_port, waitForInterrupt from lofar.sas.tmss.test.ldap_test_service import TestLDAPServer from lofar.sas.tmss.tmss.exceptions import TMSSException +from lofar.messaging.config import DEFAULT_BROKER, DEFAULT_BUSNAME +from lofar.common.testing.dbcredentials import TemporaryCredentials +from 
lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession def assertDataWithUrls(self, data, expected): """ @@ -113,12 +116,15 @@ class TMSSPostgresTestMixin(PostgresTestMixin): class TMSSDjangoServerInstance(): ''' Creates a running django TMSS server at the requested port with the requested database credentials. ''' - def __init__(self, db_dbcreds_id: str="TMSS", ldap_dbcreds_id: str="TMSS_LDAP", host: str='127.0.0.1', port: int=8000): + def __init__(self, db_dbcreds_id: str="TMSS", ldap_dbcreds_id: str="TMSS_LDAP", host: str='127.0.0.1', port: int=8000, + exchange: str=os.environ.get("TMSS_EXCHANGE", DEFAULT_BUSNAME), broker: str=os.environ.get("TMSS_BROKER", DEFAULT_BROKER)): self._db_dbcreds_id = db_dbcreds_id self._ldap_dbcreds_id = ldap_dbcreds_id self.host = host self.port = port self._server_process = None + self._exchange = exchange + self._broker = broker @property def address(self): @@ -160,6 +166,8 @@ class TMSSDjangoServerInstance(): # set these here, run django setup, and start the server os.environ["TMSS_LDAPCREDENTIALS"] = self.ldap_dbcreds_id os.environ["TMSS_DBCREDENTIALS"] = self.database_dbcreds_id + os.environ["TMSS_EXCHANGE"] = self._exchange + os.environ["TMSS_BROKER"] = self._broker os.environ["DJANGO_SETTINGS_MODULE"] = "lofar.sas.tmss.tmss.settings" django.setup() @@ -241,19 +249,32 @@ class TMSSDjangoServerInstance(): class TMSSTestEnvironment: '''Create and run a test django TMSS server against a newly created test database and a test ldap server (and cleanup automagically)''' - def __init__(self, host: str='127.0.0.1', preferred_django_port: int=8000): + def __init__(self, host: str='127.0.0.1', preferred_django_port: int=8000, + exchange: str=os.environ.get("TMSS_EXCHANGE", DEFAULT_BUSNAME), broker: str=os.environ.get("TMSS_BROKER", DEFAULT_BROKER)): self.ldap_server = TestLDAPServer(user='test', password='test') self.database = TMSSTestDatabaseInstance() self.django_server = TMSSDjangoServerInstance(db_dbcreds_id=self.database.dbcreds_id, ldap_dbcreds_id=self.ldap_server.dbcreds_id, host=host, - port=find_free_port(preferred_django_port)) + port=find_free_port(preferred_django_port), + exchange=exchange, + broker=broker) + self.client_credentials = TemporaryCredentials(user=self.ldap_server.dbcreds.user, + password=self.ldap_server.dbcreds.password) def start(self): self.ldap_server.start() self.database.create() self.django_server.start() + # store client credentials in the TemporaryCredentials file... + self.client_credentials.dbcreds.host = self.django_server.host + self.client_credentials.dbcreds.port = self.django_server.port + self.client_credentials.dbcreds.type = "http" + self.client_credentials.create() + # ...
and set TMSS_CLIENT_DBCREDENTIALS environment variable, so anybody or anything (any test) can use it automagically + os.environ['TMSS_CLIENT_DBCREDENTIALS'] = self.client_credentials.dbcreds_id + # apart from the running django server with a REST API, # it is also convenient to provide a working django setup for the 'normal' django API (via models.objects) # so: do setup_django @@ -271,6 +292,7 @@ class TMSSTestEnvironment: self.django_server.stop() self.ldap_server.stop() self.database.destroy() + self.client_credentials.destroy() def __enter__(self): try: @@ -284,6 +306,8 @@ class TMSSTestEnvironment: def __exit__(self, exc_type, exc_val, exc_tb): self.stop() + def create_tmss_client(self): + return TMSSsession.create_from_dbcreds_for_ldap(self.client_credentials.dbcreds_id) def main_test_database(): """instantiate, run and destroy a test postgress django database""" @@ -307,7 +331,7 @@ def main_test_database(): def main_test_environment(): """instantiate, run and destroy a full tmss test environment (postgress database, ldap server, django server)""" - from optparse import OptionParser + from optparse import OptionParser, OptionGroup os.environ['TZ'] = 'UTC' parser = OptionParser('%prog [options]', @@ -316,11 +340,15 @@ def main_test_environment(): help="expose the TMSS Django REST API via this host. [default=%default]") parser.add_option("-p", "--port", dest="port", type="int", default=find_free_port(8000), help="try to use this port for the DJANGO REST API. If not available, then a random free port is used and logged. [default=%default]") + group = OptionGroup(parser, 'Messaging options') + group.add_option('-b', '--broker', dest='broker', type='string', default=DEFAULT_BROKER, help='Address of the message broker, default: %default') + group.add_option('-e', "--exchange", dest="exchange", type="string", default=DEFAULT_BUSNAME, help="Bus or queue where the TMSS messages are published. [default: %default]") + parser.add_option_group(group) (options, args) = parser.parse_args() logging.basicConfig(format = '%(asctime)s %(levelname)s %(message)s', level = logging.INFO) - with TMSSTestEnvironment(host=options.host, preferred_django_port=options.port) as instance: + with TMSSTestEnvironment(host=options.host, preferred_django_port=options.port, exchange=options.exchange, broker=options.broker) as instance: # print some nice info for the user to use the test servers... # use print instead of log for clean lines.
for h in logging.root.handlers: @@ -332,12 +360,16 @@ def main_test_environment(): print("*****************************************************") print("DB Credentials ID: %s" % (instance.database.dbcreds_id, )) print("LDAP Credentials ID: %s" % (instance.django_server.ldap_dbcreds_id, )) + print("TMSS Client Credentials ID: %s" % (instance.client_credentials.dbcreds_id, )) print("Django URL: %s" % (instance.django_server.url)) print() print("Example cmdlines to run tmss or tmss_manage_django:") print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss" % (instance.database.dbcreds_id, instance.django_server.ldap_dbcreds_id)) print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss_manage_django" % (instance.database.dbcreds_id, instance.django_server.ldap_dbcreds_id)) print() + print("Example cmdline to run tmss client call:") + print("TMSS_CLIENT_DBCREDENTIALS=%s tmss_set_subtask_state <id> <state>" % (instance.client_credentials.dbcreds_id, )) + print() print("Press Ctrl-C to exit (and remove the test database and django server automatically)") waitForInterrupt() diff --git a/SAS/TMSS/test/tmss_test_data_django_models.py b/SAS/TMSS/test/tmss_test_data_django_models.py index 99b98d2e69f43e418cae2114d4ba32aa95968bee..155c1b780f8635cbf69a403f3794736aad27c570 100644 --- a/SAS/TMSS/test/tmss_test_data_django_models.py +++ b/SAS/TMSS/test/tmss_test_data_django_models.py @@ -34,10 +34,13 @@ from datetime import datetime import uuid import json -def GeneratorTemplate_test_data(name="my_GeneratorTemplate") -> dict: +def GeneratorTemplate_test_data(name="my_GeneratorTemplate", version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": name, "description": 'My one observation', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "create_function": 'Funky', "tags": ["TMSS", "TESTING"]} @@ -47,25 +50,34 @@ def DefaultGeneratorTemplate_test_data(name=None, template=None) -> dict: 'template': template, 'tags':[]} -def SchedulingUnitTemplate_test_data(name="my_SchedulingUnitTemplate") -> dict: +def SchedulingUnitTemplate_test_data(name="my_SchedulingUnitTemplate", version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": name, "description": 'My SchedulingUnitTemplate description', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} -def TaskTemplate_test_data(name="my TaskTemplate") -> dict: +def TaskTemplate_test_data(name="my TaskTemplate", version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"validation_code_js":"", "name": name, "description": 'My TaskTemplate description', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} -def WorkRelationSelectionTemplate_test_data(name="my_WorkRelationSelectionTemplate") -> dict: +def WorkRelationSelectionTemplate_test_data(name="my_WorkRelationSelectionTemplate", version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": name, "description": 'My WorkRelationSelectionTemplate description', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} @@ -197,48 +209,52 @@ def TaskRelationBlueprint_test_data() -> dict: "consumer": models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())} -def SubtaskTemplate_test_data(schema: object=None) -> dict: +def SubtaskTemplate_test_data(schema: 
object=None, version:str=None) -> dict: if schema is None: schema = {} + if version is None: + version = str(uuid.uuid4()) + return {"type": models.SubtaskType.objects.get(value='copy'), "name": "observation", "description": 'My one observation', - "version": 'v0.314159265359', + "version": version, "schema": schema, "realtime": True, "queue": False, "tags": ["TMSS", "TESTING"]} -def DataproductSpecificationsTemplate_test_data() -> dict: +def DataproductSpecificationsTemplate_test_data(version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": "data", "description": 'My one date', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} -def DataproductFeedbackTemplate_test_data() -> dict: +def DataproductFeedbackTemplate_test_data(version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": "data", "description": 'My one date', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} -def SubtaskOutput_test_data(subtask: models.Subtask=None, connector: models.SubtaskConnector=None) -> dict: +def SubtaskOutput_test_data(subtask: models.Subtask=None) -> dict: if subtask is None: subtask = models.Subtask.objects.create(**Subtask_test_data()) - if connector is None: - connector = models.SubtaskConnector.objects.create(**SubtaskConnector_test_data(output_of=subtask.specifications_template, input_of=subtask.specifications_template)) - return {"subtask": subtask, - "connector": connector, "tags":[]} def SubtaskInput_test_data() -> dict: return {"subtask": models.Subtask.objects.create(**Subtask_test_data()), "task_relation_blueprint": models.TaskRelationBlueprint.objects.create(**TaskRelationBlueprint_test_data()), - "connector": models.SubtaskConnector.objects.create(**SubtaskConnector_test_data()), "producer": models.SubtaskOutput.objects.create(**SubtaskOutput_test_data()), #"dataproducts": models.Dataproduct.objects.create(**dpt.get_test_data()), "selection_doc": {}, @@ -268,7 +284,7 @@ def Subtask_test_data(task_blueprint: models.TaskBlueprint=None, subtask_templat return { "start_time": start_time, "stop_time": stop_time, - "state": models.SubtaskState.objects.get(value='scheduling'), + "state": models.SubtaskState.objects.get(value='defining'), "specifications_doc": specifications_doc, "task_blueprint": task_blueprint, "specifications_template": subtask_template, @@ -303,19 +319,6 @@ def Dataproduct_test_data(producer: models.SubtaskOutput=None, "feedback_doc": {}, "feedback_template": models.DataproductFeedbackTemplate.objects.create(**DataproductFeedbackTemplate_test_data())} -def SubtaskConnector_test_data(output_of: models.SubtaskTemplate=None, input_of: models.SubtaskTemplate=None) -> dict: - if output_of is None: - output_of = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data()) - - if input_of is None: - input_of = models.SubtaskTemplate.objects.create(**SubtaskTemplate_test_data()) - - return {"role": models.Role.objects.get(value='calibrator'), - "datatype": models.Datatype.objects.get(value='instrument model'), - "output_of": output_of, - "input_of": input_of, - "tags": []} - def AntennaSet_test_data() -> dict: return {"name": "observation", "description": 'My one observation', @@ -354,10 +357,13 @@ def DataproductHash_test_data() -> dict: "hash": "myhash_1", "tags": ['tmss', 'testing']} -def SubtaskInputSelectionTemplate_test_data() -> dict: +def 
SubtaskInputSelectionTemplate_test_data(version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": "data", "description": 'My one date', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} diff --git a/SAS/TMSS/test/tmss_test_data_rest.py b/SAS/TMSS/test/tmss_test_data_rest.py index 75d6152fde9f92e617b054411357885c1d6d1e2c..065a5614956f21b96a2dfccdd66a26a666cac063 100644 --- a/SAS/TMSS/test/tmss_test_data_rest.py +++ b/SAS/TMSS/test/tmss_test_data_rest.py @@ -45,33 +45,45 @@ class TMSSRESTTestDataCreator(): ####################################################### - def GeneratorTemplate(self, name="generatortemplate"): + def GeneratorTemplate(self, name="generatortemplate", version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": name, "description": 'My one observation', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "create_function": 'Funky', "tags": ["TMSS", "TESTING"]} - def SchedulingUnitTemplate(self, name="schedulingunittemplate1"): + def SchedulingUnitTemplate(self, name="schedulingunittemplate1", version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return { "name": name, "description": 'My description', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} - def TaskTemplate(self, name="tasktemplate1"): + def TaskTemplate(self, name="tasktemplate1", version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": name, "description": 'My one observation', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"], "validation_code_js": "???"} - def WorkRelationSelectionTemplate(self, name="workrelationselectiontemplate1"): + def WorkRelationSelectionTemplate(self, name="workrelationselectiontemplate1", version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": name, "description": 'My one observation', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} @@ -294,30 +306,42 @@ class TMSSRESTTestDataCreator(): "producer": producer_url, "consumer": consumer_url} - def SubtaskTemplate(self, name="subtask1", schema=None): + def SubtaskTemplate(self, name="subtask_template_1", schema=None, subtask_type_url: str=None, version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + if schema is None: schema = {} - return {"type": self.django_api_url + '/subtask_type/copy/', + if subtask_type_url is None: + subtask_type_url = self.django_api_url + '/subtask_type/observation/' + + return {"type": subtask_type_url, "name": name, "description": 'My one observation', - "version": 'v0.314159265359', + "version": version, "schema": schema, "realtime": True, "queue": False, "tags": ["TMSS", "TESTING"]} - def DataproductSpecificationsTemplate(self, name="my_DataproductSpecificationsTemplate"): + def DataproductSpecificationsTemplate(self, name="my_DataproductSpecificationsTemplate", version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": name, "description": 'My one date', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} - def DataproductFeedbackTemplate(self, name="my_DataproductFeedbackTemplate"): + def 
DataproductFeedbackTemplate(self, name="my_DataproductFeedbackTemplate", version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": name, "description": 'My one date', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} @@ -346,11 +370,11 @@ class TMSSRESTTestDataCreator(): specifications_template_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/subtask_template/') if specifications_doc is None: - specifications_doc = "{}" + specifications_doc = requests.get(specifications_template_url + 'default_specification/', auth=self.auth).content.decode('utf-8') return {"start_time": datetime.utcnow().isoformat(), "stop_time": datetime.utcnow().isoformat(), - "state": self.django_api_url + '/subtask_state/scheduling/', + "state": self.django_api_url + '/subtask_state/defining/', "specifications_doc": specifications_doc, "task_blueprint": task_blueprint_url, "specifications_template": specifications_template_url, @@ -360,31 +384,14 @@ class TMSSRESTTestDataCreator(): "schedule_method": self.django_api_url + '/schedule_method/manual/', "cluster": cluster_url} - def SubtaskOutput(self, subtask_url=None, subtask_connector_url=None): + def SubtaskOutput(self, subtask_url=None): if subtask_url is None: subtask_url = self.post_data_and_get_url(self.Subtask(), '/subtask/') - if subtask_connector_url is None: - subtask_connector_url = self.post_data_and_get_url(self.SubtaskConnector(), '/subtask_connector/') - + return {"subtask": subtask_url, - "connector": subtask_connector_url, "tags": []} - - def SubtaskConnector(self, input_of_url=None, output_of_url=None): - if input_of_url is None: - input_of_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/subtask_template/') - - if output_of_url is None: - output_of_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/subtask_template/') - - return {"role": self.django_api_url + '/role/correlator/', - "datatype": self.django_api_url + '/datatype/image/', - "dataformats": [self.django_api_url + '/dataformat/Beamformed/'], - "output_of": output_of_url, - "input_of": input_of_url, - "tags": []} - + def Dataproduct(self, filename="my_filename", specifications_template_url=None, subtask_output_url=None, dataproduct_feedback_template_url=None): if specifications_template_url is None: specifications_template_url = self.post_data_and_get_url(self.SubtaskTemplate(), '/dataproduct_specifications_template/') @@ -454,14 +461,17 @@ class TMSSRESTTestDataCreator(): "corrupted_since": datetime.utcnow().isoformat(), "tags": ['tmss', 'testing']} - def SubtaskInputSelectionTemplate(self, name="my_SubtaskInputSelectionTemplate"): + def SubtaskInputSelectionTemplate(self, name="my_SubtaskInputSelectionTemplate", version:str=None) -> dict: + if version is None: + version = str(uuid.uuid4()) + return {"name": name, "description": 'My one date', - "version": 'v0.314159265359', + "version": version, "schema": {"mykey": "my value"}, "tags": ["TMSS", "TESTING"]} - def SubtaskInput(self, subtask_url=None, task_relation_blueprint_url=None, dataproduct_urls=None, subtask_connector_url=None, subtask_output_url=None, subtask_input_selection_template_url=None): + def SubtaskInput(self, subtask_url=None, task_relation_blueprint_url=None, dataproduct_urls=None, subtask_output_url=None, subtask_input_selection_template_url=None): if subtask_url is None: subtask_url = self.post_data_and_get_url(self.Subtask(), '/subtask/') @@ -472,9 +482,7 @@ class TMSSRESTTestDataCreator(): 
dataproduct_urls = [self.post_data_and_get_url(self.Dataproduct(), '/dataproduct/'), self.post_data_and_get_url(self.Dataproduct(), '/dataproduct/')] - if subtask_connector_url is None: - subtask_connector_url = self.post_data_and_get_url(self.SubtaskConnector(), '/subtask_connector/') - + if subtask_output_url is None: subtask_output_url = self.post_data_and_get_url(self.SubtaskOutput(), '/subtask_output/') @@ -483,7 +491,6 @@ class TMSSRESTTestDataCreator(): return {"subtask": subtask_url, "task_relation_blueprint": task_relation_blueprint_url, - "connector": subtask_connector_url, "producer": subtask_output_url, "dataproducts": dataproduct_urls, "selection_doc": "{}",