diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000000000000000000000000000000000000..cad7657dfa543e02eca53f1ecc7545c92bc0a550 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "cmake.configureOnOpen": false +} \ No newline at end of file diff --git a/CMake/LofarPackageList.cmake b/CMake/LofarPackageList.cmake index db28a087be704c2b09c85cd66fea45146d617029..d4f6966e12814caac01dda87311fdbea2535433f 100644 --- a/CMake/LofarPackageList.cmake +++ b/CMake/LofarPackageList.cmake @@ -1,7 +1,7 @@ # - Create for each LOFAR package a variable containing the absolute path to # its source directory. # -# Generated by gen_LofarPackageList_cmake.sh at do 29 okt 2020 7:42:34 CET +# Generated by gen_LofarPackageList_cmake.sh at do 28 mei 2020 11:22:44 CEST # # ---- DO NOT EDIT ---- # @@ -207,7 +207,7 @@ if(NOT DEFINED LOFAR_PACKAGE_LIST_INCLUDED) set(TaskPrescheduler_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/ResourceAssignment/TaskPrescheduler) set(RACommon_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/ResourceAssignment/Common) set(TMSSClient_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/client) - set(TMSSSubtaskSchedulingService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/subtask_scheduling) + set(TMSSSchedulingService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/scheduling) set(TMSSFeedbackHandlingService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/feedback_handling) set(TMSSPostgresListenerService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/tmss_postgres_listener) set(TriggerEmailServiceCommon_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TriggerEmailService/Common) diff --git a/CMake/variants/variants.lcs157 b/CMake/variants/variants.lcs157 index 1e70de71e0b9d86b4c0e0f32fbdba10342bd8c1d..1c7d26eaa4309cb4a3444e443326ae5b4c03aca1 100644 --- a/CMake/variants/variants.lcs157 +++ b/CMake/variants/variants.lcs157 @@ -2,6 +2,7 @@ # AS: put under comment as LOFAR general rule is to use shared libs now. #option(BUILD_SHARED_LIBS "Build shared libraries" OFF) +set(PYTHON_EXECUTABLE "/usr/bin/python3.6" CACHE FILEPATH "") set(WINCC_ROOT_DIR /opt/WinCC_OA/3.16) set(CASACORE_ROOT_DIR "/opt/casacore") set(CASAREST_ROOT_DIR "/opt/casarest") diff --git a/Docker/lofar-base/Dockerfile.tmpl b/Docker/lofar-base/Dockerfile.tmpl index 9ba56ab2373f0094e6ee0194f91790e608f64403..b32d6d4e736eb922c7bf65b62b0b007ef992bed5 100644 --- a/Docker/lofar-base/Dockerfile.tmpl +++ b/Docker/lofar-base/Dockerfile.tmpl @@ -202,6 +202,16 @@ RUN aptitude install -y libqpid-proton8 libqpid-proton-cpp8 python3-qpid-proton # ******************* RUN aptitude install -y python3-kombu +# ******************* +# Unconsumed build arguments +# ******************* + +# Apply a fingerprint to force a rebuild if the source code changes: supply a unique ID here to trigger one. +ARG LOFAR_FINGERPRINT=whatever + +# we do not use this ourselves, but the build is configured to supply it +ARG LOFAR_BASE_IMAGE_VERSION=latest + # # entry # diff --git a/Docker/lofar-ci/Dockerfile_ci_mac b/Docker/lofar-ci/Dockerfile_ci_mac index b23bda761611ba93ddb5fa6d9bba1cc05cc40078..ceb9978ef6d2516e6f1a16d8d6ade53a08b381df 100644 --- a/Docker/lofar-ci/Dockerfile_ci_mac +++ b/Docker/lofar-ci/Dockerfile_ci_mac @@ -11,7 +11,7 @@ ARG BASE_VERSION=latest FROM ci_base:$BASE_VERSION RUN echo "Installing packages for MAC..."
&& \ - yum -y install readline-devel boost-python36-devel hdf5-devel blas-devel lapack-devel cfitsio-devel wcslib-devel autogen postgresql-devel cmake3 libpqxx-devel qpid-cpp-server qpid-cpp-client-devel qpid-tools unittest-cpp-devel jsoncpp-devel jsoncpp libcurl-devel libcurl && \ + yum -y install readline-devel boost-python36-devel hdf5-devel blas-devel lapack-devel cfitsio-devel wcslib-devel autogen postgresql-devel cmake3 libpqxx-devel qpid-cpp-server qpid-cpp-client-devel unittest-cpp-devel jsoncpp-devel jsoncpp libcurl-devel libcurl && \ pip3 install psycopg2 testing.postgresql lxml mock numpy kombu requests python-dateutil fabric RUN echo "Installing WinCC3.14 build and Demo App from Nexus repo..." && \ diff --git a/Docker/lofar-outputproc/Dockerfile.tmpl b/Docker/lofar-outputproc/Dockerfile.tmpl index 6c5ffa7b4a9707b9b2478e47615dacc58efc3b99..e0969b5d78a69578232c18fa9861bcb60ef4f9e5 100644 --- a/Docker/lofar-outputproc/Dockerfile.tmpl +++ b/Docker/lofar-outputproc/Dockerfile.tmpl @@ -5,6 +5,10 @@ FROM lofar-base:${LOFAR_TAG} RUN apt-get update && apt-get install -y git python python3 g++ make +# Consume superfluous build variables +ARG DOCKER_IMAGE_BUILD_DATE=now +ENV DOCKER_IMAGE_BUILD_DATE=${DOCKER_IMAGE_BUILD_DATE} + # # ******************* # Blitz @@ -24,13 +28,15 @@ RUN apt-get update && apt-get install -y git python && \ # ******************* # -ENV DAL_VERSION=v3.3.1 +ENV DAL_VERSION=v3.3.2 # Run-time dependencies RUN aptitude install -y libhdf5-${LIBHDF5_VERSION} python3 && \ aptitude clean && \ aptitude autoclean +ARG J=6 + RUN export BUILD_PACKAGES="git cmake g++ swig3.0 python3-setuptools python3-dev libhdf5-dev" && \ aptitude install -y ${BUILD_PACKAGES} && \ mkdir -p ${INSTALLDIR}/DAL/build && \ @@ -54,8 +60,11 @@ RUN export BUILD_PACKAGES="git cmake g++ swig3.0 python3-setuptools python3-dev RUN apt-get update && apt-get install -y binutils liblog4cplus-1.1-9 libxml2 libboost-thread${BOOST_VERSION}.1 libboost-filesystem${BOOST_VERSION}.1 libboost-date-time${BOOST_VERSION}.1 libpng16-16 libsigc++-2.0-dev libxml++2.6-2v5 libboost-regex${BOOST_VERSION}.1 libreadline${READLINE_VERSION} # Tell image build information -ENV LOFAR_BRANCH=${LOFAR_VERSION} \ - LOFAR_BUILDVARIANT=gnucxx11_opt +ARG LOFAR_VERSION=master +ENV LOFAR_BRANCH=${LOFAR_VERSION} + +ARG LOFAR_BUILDVARIANT=gnucxx11_opt +ENV LOFAR_BUILDVARIANT=${LOFAR_BUILDVARIANT} # Install RUN apt-get update && apt-get install -y git cmake g++ gfortran bison flex autogen liblog4cplus-dev libhdf5-dev libboost-dev boost-python${BOOST_VERSION}-dev libxml2-dev pkg-config libpng-dev libfftw3-dev libunittest++-dev libxml++2.6-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev libboost-regex${BOOST_VERSION}-dev binutils-dev libopenblas-dev libcfitsio-dev wcslib-dev libcap2-bin libreadline-dev && \ diff --git a/LCS/PyCommon/datetimeutils.py b/LCS/PyCommon/datetimeutils.py index 93841f255b159cd4d967e208f4eb8b93a6ff2967..fb1df8788220bf9af22515b5903c63c8c6a1147f 100644 --- a/LCS/PyCommon/datetimeutils.py +++ b/LCS/PyCommon/datetimeutils.py @@ -140,11 +140,33 @@ def from_milliseconds_since_unix_epoch(nr_of_milliseconds_since_epoch): ''' return from_seconds_since_unix_epoch(nr_of_milliseconds_since_epoch/1000.0) -def round_to_millisecond_precision(timestamp): +def round_to_millisecond_precision(timestamp: datetime) -> datetime: """ returns the given timestamp rounded to the nearest millisecond :param timestamp: datetime a python datetime timestamp :return: the given 
timestamp rounded to the nearest millisecond """ diff_to_rounded_millisecond = timestamp.microsecond - 1000*round(timestamp.microsecond/1000) - return timestamp - timedelta(microseconds=diff_to_rounded_millisecond) \ No newline at end of file + return timestamp - timedelta(microseconds=diff_to_rounded_millisecond) + +def round_to_second_precision(timestamp: datetime) -> datetime: + """ + returns the given timestamp rounded to the nearest second + :param timestamp: datetime a python datetime timestamp + :return: the given timestamp rounded to the nearest second + """ + if timestamp.microsecond < 500000: + return timestamp + timedelta(microseconds=-timestamp.microsecond) + else: + return timestamp + timedelta(microseconds=-timestamp.microsecond, seconds=1) + +def round_to_minute_precision(timestamp: datetime) -> datetime: + """ + returns the given timestamp rounded to the nearest minute + :param timestamp: datetime a python datetime timestamp + :return: the given timestamp rounded to the nearest minute + """ + if timestamp.second < 30: + return timestamp + timedelta(seconds=-timestamp.second, microseconds=-timestamp.microsecond) + else: + return timestamp + timedelta(minutes=1, seconds=-timestamp.second, microseconds=-timestamp.microsecond) diff --git a/LCS/PyCommon/json_utils.py b/LCS/PyCommon/json_utils.py index 956fd3b0a29c34bc25bc3e204ff877943e266ca1..232ba7841c4d588378ab01fb192d8d25b59577bc 100644 --- a/LCS/PyCommon/json_utils.py +++ b/LCS/PyCommon/json_utils.py @@ -72,17 +72,38 @@ def _extend_with_required(validator_class): _DefaultValidatingDraft6Validator = _extend_with_default(jsonschema.Draft6Validator) _DefaultValidatingDraft6Validator = _extend_with_required(_DefaultValidatingDraft6Validator) +# storage for validators, for fast caching of ref resolved urls. +_schema_validators = {} +_schema__defaults_addding_validators = {} + +def get_validator_for_schema(schema: dict, add_defaults: bool=False): + '''get a json validator for the given schema. + If the schema is already known in the cache by its $id, then the validator from the cache is returned. + This saves many lookups and ref resolving.
+ the 'add_defaults' parameter indicates if we want the validator to add defaults while validating or not.''' + if isinstance(schema, str): + schema = json.loads(schema) + + validators_cache = _schema__defaults_addding_validators if add_defaults else _schema_validators + + if '$id' in schema: + if schema['$id'] not in validators_cache: + validators_cache[schema['$id']] = _DefaultValidatingDraft6Validator(schema) if add_defaults else jsonschema.Draft6Validator(schema=schema) + validator = validators_cache[schema['$id']] + else: + validator = _DefaultValidatingDraft6Validator(schema) if add_defaults else jsonschema.Draft6Validator(schema=schema) + + validator.schema = schema + return validator def get_default_json_object_for_schema(schema: str) -> dict: '''return a valid json object for the given schema with all properties with their default values''' return add_defaults_to_json_object_for_schema({}, schema) - def add_defaults_to_json_object_for_schema(json_object: dict, schema: str) -> dict: '''return a copy of the json object with defaults filled in according to the schema for all the missing properties''' copy_of_json_object = deepcopy(json_object) - #TODO: investigate if we want to use a 'common'/singleton validator and use (remote) schema caching for faster validation - _DefaultValidatingDraft6Validator(schema).validate(copy_of_json_object) + get_validator_for_schema(schema, add_defaults=True).validate(copy_of_json_object) return copy_of_json_object def replace_host_in_urls(schema, new_base_url: str, keys=['$id', '$ref', '$schema']): @@ -202,7 +223,7 @@ def validate_json_object_with_schema(json_object, schema): """ Validate the given json_object with schema """ - jsonschema.Draft6Validator(schema=schema).validate(json_object) + get_validator_for_schema(schema, add_defaults=False).validate(json_object) diff --git a/LCS/PyCommon/postgres.py b/LCS/PyCommon/postgres.py index 9c6d36e6e4369f722c807b198ae07b34b0924d06..b04e99e4cadcea254e8fb4925edfc1aea508798f 100644 --- a/LCS/PyCommon/postgres.py +++ b/LCS/PyCommon/postgres.py @@ -40,28 +40,29 @@ from lofar.common.dbcredentials import DBCredentials logger = logging.getLogger(__name__) -def makePostgresNotificationQueries(schema, table, action, column_name='id'): +def makePostgresNotificationQueries(schema, table, action, column_name=None, quote_column_value:bool=True, id_column_name='id', quote_id_value:bool=False): action = action.upper() if action not in ('INSERT', 'UPDATE', 'DELETE'): raise ValueError('''trigger_type '%s' not in ('INSERT', 'UPDATE', 'DELETE')''' % action) change_name = '''{table}_{action}'''.format(table=table, action=action) - if column_name != 'id': + if column_name is not None and column_name != id_column_name: change_name += '_column_' + column_name function_name = '''NOTIFY_{change_name}'''.format(change_name=change_name) - if action == 'UPDATE': - if column_name == 'id': - select_payload = '''SELECT '{"id": ' || CAST(NEW.id AS text) || '}' INTO payload;''' - else: - select_payload = '''SELECT '{"id": ' || CAST(NEW.id AS text) || ', "''' + column_name + '''": "' || CAST(NEW.''' + column_name + ''' AS text) || '"}' INTO payload;''' - elif action == 'INSERT': - select_payload = '''SELECT '{"id": ' || CAST(NEW.id AS text) || '}' INTO payload;''' - elif action == 'DELETE': - select_payload = '''SELECT '{"id": ' || CAST(OLD.id AS text) || '}' INTO payload;''' + # build query string selecting the id:value (and col:col_value) into a json formatted object string + select_payload = '''SELECT '{po}"{id_column_name}": 
{id_value_quote}' || CAST({new_or_old}.{id_column_name} AS text) || '{id_value_quote}{column_key_value}{pc}' INTO payload;'''.format( + po="{", + id_column_name=id_column_name, + id_value_quote='"' if quote_id_value else '', + new_or_old='OLD' if action=='DELETE' else 'NEW', + column_key_value=''', "{column_name}": {column_value_quote}' || CAST(NEW.{column_name} AS text) || '{column_value_quote}'''.format( + column_name=column_name, + column_value_quote='"' if quote_column_value else '') if column_name else '', + pc = "}") if action == 'UPDATE': - begin_update_check = 'IF ROW(NEW.{what}) IS DISTINCT FROM ROW(OLD.{what}) THEN'.format(what='*' if column_name == 'id' else column_name) + begin_update_check = 'IF ROW(NEW.{what}) IS DISTINCT FROM ROW(OLD.{what}) THEN'.format(what='*' if column_name is None or column_name == id_column_name else column_name) end_update_check = 'END IF;' else: begin_update_check = '' @@ -83,9 +84,8 @@ def makePostgresNotificationQueries(schema, table, action, column_name='id'): function_name=function_name, table=table, action=action, - old_or_new=('OLD' if action == 'DELETE' else 'NEW') + '.' + column_name, value='OLD' if action == 'DELETE' else 'NEW', - change_name=change_name.lower(), + change_name=change_name[:63].lower(), # postgres limits channel names to 63 chars begin_update_check=begin_update_check, select_payload=select_payload, end_update_check=end_update_check) @@ -441,7 +441,7 @@ class PostgresListener(PostgresDatabaseConnection): self.connect() - logger.info("Started listening to %s" % ', '.join([str(x) for x in list(self.__callbacks.keys())])) + logger.info("Started listening to %s on database %s", ', '.join([str(x) for x in list(self.__callbacks.keys())]), self.dbcreds.stringWithHiddenPassword()) def eventLoop(): while self.isListening(): @@ -477,7 +477,7 @@ class PostgresListener(PostgresDatabaseConnection): self.__thread.join() self.__thread = None - logger.info("Stopped listening") + logger.info("Stopped listening for notifications on database %s", self.dbcreds.stringWithHiddenPassword()) self.stopWaiting() self.disconnect() diff --git a/LCS/PyCommon/test/postgres.py b/LCS/PyCommon/test/postgres.py index 51e3be001e05424dea7358c5aa4f239e02140faf..104a43a7508372829b25ddce531534b2cf3fce90 100755 --- a/LCS/PyCommon/test/postgres.py +++ b/LCS/PyCommon/test/postgres.py @@ -70,7 +70,7 @@ class PostgresTestDatabaseInstance(): def create(self): '''instantiate the isolated postgres server''' - logger.info('creating test-database instance...') + logger.info('%s creating test-database instance...', self.__class__.__name__) with self._named_lock: start_time = datetime.utcnow() @@ -90,9 +90,9 @@ class PostgresTestDatabaseInstance(): # make the user known in the new test database self._create_superuser(dsn) - logger.info('Created test-database instance. It is available at: %s', self.dbcreds.stringWithHiddenPassword()) + logger.info('%s created test-database instance. 
It is available at: %s', self.__class__.__name__, self.dbcreds.stringWithHiddenPassword()) - logger.info('Applying test-database schema...') + logger.info('%s applying test-database schema...', self.__class__.__name__) self.apply_database_schema() return except Exception as e: @@ -117,9 +117,9 @@ class PostgresTestDatabaseInstance(): '''destroy the running postgres server''' try: if self._postgresql: - logger.info('removing test-database instance at %s', self.dbcreds.stringWithHiddenPassword()) + logger.info('%s removing test-database instance at %s', self.__class__.__name__, self.dbcreds.stringWithHiddenPassword()) self._postgresql.stop() - logger.info('test-database instance removed') + logger.info('%s test-database instance removed', self.__class__.__name__) except Exception as e: logger.info('error while removing test-database instance at %s: %s', self.dbcreds.stringWithHiddenPassword(), e) diff --git a/LCU/StationTest/rspctlprobe.py b/LCU/StationTest/rspctlprobe.py old mode 100755 new mode 100644 index 1d254e6ad388452474771e25d72e34e82f82573f..96dab5d98679c2736d0312ea9e344db579fdf07f --- a/LCU/StationTest/rspctlprobe.py +++ b/LCU/StationTest/rspctlprobe.py @@ -19,6 +19,31 @@ from functools import reduce name = __name__ if __name__ != '__main__' else 'rspctlprobe' logger = logging.getLogger(name) +_NUM_HBA_ELEMENTS = 16 + +# Optimum element calculation done by M.Brentjes (Dec 2015) +_OptimumElements_Int = [0, 5, 3, 1, 8, 3, 12, 15, 10, 13, 11, 5, 12, 12, 5, 2, 10, 8, 0, 3, 5, 1, 4, 0, 11, 6, 2, 4, 9, + 14, 15, 3, 7, 5, 13, 15, 5, 6, 5, 12, 15, 7, 1, 1, 14, 9, 4, 9, 3, 9, 3, 13, 7, 14, 7, 14, 2, 8, + 8, 0, 1, 4, 2, 2, 12, 15, 5, 7, 6, 10, 12, 3, 3, 12, 7, 4, 6, 0, 5, 9, 1, 10, 10, 11, 5, 11, 7, + 9, 7, 6, 4, 4, 15, 4, 1, 15] +_OptimumElements_Core = [0, 10, 4, 3, 14, 0, 5, 5, 3, 13, 10, 3, 12, 2, 7, 15, 6, 14, 7, 5, 7, 9, 0, 15, 0, 10, 4, 3, + 14, 0, 5, 5, 3, 13, 10, 3, 12, 2, 7, 15, 6, 14, 7, 5, 7, 9, 0, 15] +_OptimumElements_Remote = [0, 13, 12, 4, 11, 11, 7, 8, 2, 7, 11, 2, 10, 2, 6, 3, 8, 3, 1, 7, 1, 15, 13, 1, 11, 1, 12, 7, + 10, 15, 8, 2, 12, 13, 9, 13, 4, 5, 5, 12, 5, 5, 9, 11, 15, 12, 2, 15] + +_NUM_TILES = {'core': 48, + 'remote': 48, + 'international': 96} +_OptimumElements = {'core': _OptimumElements_Core, + 'remote': _OptimumElements_Remote, + 'international': _OptimumElements_Int} +_SLEEP_TIME_SINGLE_ELEMENT_SELECTION = 2. 
# in units of s +STATION_TYPE = 'Unknown' +_HBA_MODES = (5, 6, 7) +_ELEMENT_OFF_CODE = '2' +_ELEMENT_ON_ZERO_DELAY = '128' + + # --------------------------------NICE PRINTOUT def table_maxlength_per_column(column): """ @@ -28,7 +53,8 @@ def table_maxlength_per_column(column): """ return reduce(max, list(map(len, column))) -def compute_table_width(data, margin = 1): + +def compute_table_width(data, margin=1): """ Compute the column width in characters :param data: table made of a list of columns @@ -39,6 +65,7 @@ def compute_table_width(data, margin = 1): """ return [x + 2 * margin for x in list(map(table_maxlength_per_column, data))] + def table_fix_string_length(string, length): """ Reformat each string to have the same character width @@ -48,7 +75,8 @@ def table_fix_string_length(string, length): :type length: str :return: a formatted string with the request character size """ - return '{:^{width}}'.format(string, width = length) + return '{:^{width}}'.format(string, width=length) + def table_format_column(column, length): """ @@ -60,6 +88,7 @@ def table_format_column(column, length): """ return [table_fix_string_length(x, length) for x in column] + def table_transpose(table): """ Transpose a list of rows in a list of columns and viceversa @@ -69,7 +98,8 @@ def table_transpose(table): """ return list(zip(*table)) -def table_format(table, separator = "|", margin_size = 1): + +def table_format(table, separator="|", margin_size=1): """ Format a table of values :param table: table of values @@ -84,6 +114,7 @@ def table_format(table, separator = "|", margin_size = 1): # transpose the list of columns in list of rows and concatenate the values to obtain rows using the separator return [separator.join(row) for row in table_transpose(formatted_columns)] + def table_print_out_table(write_function, table): """ Calls the write function for each row in the new formatted table @@ -97,6 +128,7 @@ def table_print_out_table(write_function, table): except Exception as e: logger.error("Error formatting table: %s", e) + # ---------------------------------UTILITIES def issue_rspctl_command(cmd): """ @@ -109,7 +141,8 @@ def issue_rspctl_command(cmd): cmd = ["rspctl"] + cmd try: - proc = subprocess.Popen(cmd, stdout = subprocess.PIPE, stderr = subprocess.PIPE) + logging.debug('executing command: %s', cmd) + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = proc.communicate() if proc.returncode == 0: @@ -122,13 +155,15 @@ def issue_rspctl_command(cmd): except OSError as e: raise Exception("Error executing " + " ".join(cmd) + ":" + e.strerror) + def list_mode(l): """ Return the most frequent element in the list :param l: input list :return: the most frequent element """ - return max(set(l), key = l.count) + return max(set(l), key=l.count) + # ----------------------------------COMMANDS # -------Clock @@ -152,6 +187,7 @@ def parse_clock_output(out, err): "STDOUT: %s\n" % out + "STDERR: %s\n" % err) + def query_clock(): """ Execute the command rspctl --clock and and parses the result @@ -161,19 +197,20 @@ def query_clock(): out, err = issue_rspctl_command(['--clock']) return parse_clock_output(out, err) + class RCUBoard: """ This class describes the properties of a RCUBoard """ - def __init__(self, - identifier = -1, - status = None, - mode = None, - delay = None, - attenuation = None, - sub_bands = None, - xcsub_bands = None): + def __init__(self, + identifier=-1, + status=None, + mode=None, + delay=None, + attenuation=None, + sub_bands=None, + xcsub_bands=None): self.id = 
identifier self.status = status self.mode = mode @@ -195,6 +232,7 @@ class RCUBoard: def __getitem__(self, item): return getattr(self, item) + # -------RCU mode def parse_rcu_output(out, err): """ @@ -211,21 +249,21 @@ def parse_rcu_output(out, err): :rtype: dict """ rcu_values = out[1:] - rcu_by_id = {} # list of RCUs listed by ID + rcu_by_id = {} # list of RCUs listed by ID for rcu_value in rcu_values: - match = re.search("RCU\[\s*(?P<RCU_id>\d+)\].control=" + # parsing id - "\d+x\w+\s=>\s*(?P<status>\w+)," + # parsing status - "\smode:(?P<mode>\-?\d)," + # parsing mode - "\sdelay=(?P<delay>\d+)," + # parsing delay - "\satt=(?P<attenuation>\d+)", rcu_value) # parsing attenuation + match = re.search("RCU\[\s*(?P<RCU_id>\d+)\].control=" + # parsing id + "\d+x\w+\s=>\s*(?P<status>\w+)," + # parsing status + "\smode:(?P<mode>-?\d)," + # parsing mode + "\sdelay=(?P<delay>\d+)," + # parsing delay + "\satt=(?P<attenuation>\d+)", rcu_value) # parsing attenuation if match: rcu_id = int(match.group('RCU_id')) - rcu_board = RCUBoard(identifier = rcu_id, - status = match.group('status'), - mode = match.group('mode'), - delay = match.group('delay'), - attenuation = match.group('attenuation') + rcu_board = RCUBoard(identifier=rcu_id, + status=match.group('status'), + mode=match.group('mode'), + delay=match.group('delay'), + attenuation=match.group('attenuation') ) rcu_by_id[rcu_id] = rcu_board @@ -235,6 +273,7 @@ def parse_rcu_output(out, err): "STDERR: %s\n" % err) return rcu_by_id + def query_rcu_mode(): """ Execute the command rspctl --rcu and parses the result @@ -244,6 +283,7 @@ def query_rcu_mode(): out, err = issue_rspctl_command(['--rcu']) return parse_rcu_output(out, err) + # -------Subbands def parse_subbands_output(out, err): """ @@ -270,9 +310,9 @@ def parse_subbands_output(out, err): i_row = 0 while i_row < len(rcu_values): value = rcu_values[i_row] - match = re.search("RCU\[\s*(?P<RCU_id>\d+)\]" + # parsing RCU id - ".subbands=\(\d+,(?P<n_rows>\d)\)\s+x\s+\(0," + # parsing the number of rows - "(?P<n_elements>\d+)\)\s*", # parsing the number of elements + match = re.search("RCU\[\s*(?P<RCU_id>\d+)\]" + # parsing RCU id + ".subbands=\(\d+,(?P<n_rows>\d)\)\s+x\s+\(0," + # parsing the number of rows + "(?P<n_elements>\d+)\)\s*", # parsing the number of elements value) if match: rcu_id = int(match.group('RCU_id')) @@ -287,15 +327,17 @@ def parse_subbands_output(out, err): sub_band_list = [] for i in range(n_rows): # Parsing the string [ 143 145 ... or ... 
122 123] into a list of integers - row = list(map(int, [_f for _f in rcu_values[i_row + i + 1].strip().lstrip('[').rstrip(']').split(' ') if _f])) + row = list( + map(int, [_f for _f in rcu_values[i_row + i + 1].strip().lstrip('[').rstrip(']').split(' ') if _f])) sub_band_list.append(row) - i_row = i_row + n_rows + 1 # ADVANCE + i_row = i_row + n_rows + 1 # ADVANCE rcu_by_id[rcu_id] = sub_band_list return rcu_by_id + def query_sub_bands_mode(): """ Execute the command rspctl --subbands and parses the result @@ -305,6 +347,7 @@ def query_sub_bands_mode(): out, err = issue_rspctl_command(['--subbands']) return parse_subbands_output(out, err) + # -------XCSub bands def parse_xcsub_bands_output(out, err): """ @@ -342,7 +385,7 @@ def parse_xcsub_bands_output(out, err): :return: a dict indexed by the rcu board id containing the list of xcsub bands used :rtype: dict """ - rcu_values= out[1:] + rcu_values = out[1:] rcu_by_id = {} i_row = 0 @@ -362,10 +405,11 @@ def parse_xcsub_bands_output(out, err): xcsub_bands_list = [] for i in range(n_rows): # Parsing the string [ 143 145 ... or ... 122 123] into a list of integers - row = list(map(int, [_f for _f in rcu_values[i_row + i + 1].strip().lstrip('[').rstrip(']').split(' ') if _f])) + row = list( + map(int, [_f for _f in rcu_values[i_row + i + 1].strip().lstrip('[').rstrip(']').split(' ') if _f])) xcsub_bands_list.append(row) - i_row = i_row + n_rows + 1 # ADVANCE + i_row = i_row + n_rows + 1 # ADVANCE # concatenates the two rows -> computes the max xcsub_band and returns the value # [NOTE max accepts only a couple of values] val = reduce(lambda x, a: max(x, a), reduce(lambda x, a: x + a, xcsub_bands_list)) @@ -377,6 +421,7 @@ def parse_xcsub_bands_output(out, err): rcu_by_id[rcu_id] = val return rcu_by_id + def query_xcsub_bands_mode(): """ Execute the command rspctl --subbands and parses the result @@ -386,6 +431,7 @@ def query_xcsub_bands_mode(): out, err = issue_rspctl_command(['--xcsubband']) return parse_xcsub_bands_output(out, err) + # -------Spectral inversion def parse_spinv_output(out, err): """ @@ -450,6 +496,7 @@ def parse_spinv_output(out, err): return rcu_by_id + def query_spinv_mode(): """ Execute the command rspctl --spinv and parses the result @@ -459,6 +506,7 @@ def query_spinv_mode(): out, err = issue_rspctl_command(['--specinv']) return parse_spinv_output(out, err) + def execute_xcstatistics_mode(parameters): """ Execute the command rspclt --xcstatistics from a dict of parameters @@ -481,11 +529,12 @@ def execute_xcstatistics_mode(parameters): cmd_list.append('--integration=%d' % parameters['integration']) if 'directory' in parameters: cmd_list.append('--directory=%s' % parameters['directory']) - if 'select'in parameters: + if 'select' in parameters: cmd_list.append('--select=%s' % parameters['select']) issue_rspctl_command(cmd_list) + # ----------------------------------Merging information def query_status(): @@ -542,6 +591,7 @@ def query_status(): return res + def dump_info_file(path, res): """ Dump the information collected in json format into the directory specified in path @@ -553,7 +603,8 @@ def dump_info_file(path, res): file_path = os.path.join(path, "infos") with open(file_path, 'w') as fout: - fout.write(json.dumps(res, indent = 4, separators = (',', ': '))) + fout.write(json.dumps(res, indent=4, separators=(',', ': '))) + def query_xcstatistics(options): """ @@ -576,7 +627,7 @@ def query_xcstatistics(options): filename = "_mode_%s_xst_sb%0.3d.dat" % (mode, subband) - temporary_output_directory = tempfile.mkdtemp(prefix 
= "rspctlprobe_tmp") + temporary_output_directory = tempfile.mkdtemp(prefix="rspctlprobe_tmp") options['directory'] = temporary_output_directory integration = options['integration'] @@ -600,7 +651,7 @@ def query_xcstatistics(options): rcus = res["rcus"] header = ["RCUID", "delay", "attenuation", "mode", "status", "xcsub_bands"] - ids = [[header[0]] + list(map(str, list(rcus.keys())))] # Create the id column of the file + ids = [[header[0]] + list(map(str, list(rcus.keys())))] # Create the id column of the file table = [[key] + [str(rcus[i][key]) for i in rcus] for key in header[1:]] table = ids + table @@ -618,6 +669,7 @@ def query_xcstatistics(options): return res + def query_most_common_mode(): """ Return the most frequent mode that the RCUs have @@ -627,9 +679,10 @@ def query_most_common_mode(): rcus_mode = [rcus_mode[rcu] for rcu in rcus_mode] return int(list_mode([x['mode'] for x in rcus_mode])) + def set_mode(mode): """ - Set the mode on all the rsp boards + Set the mode on all the rcu boards :param mode: the mode to be set :type mode: int @@ -645,11 +698,128 @@ def set_mode(mode): for i in range(10): time.sleep(3) outmode = query_most_common_mode() - logger.info('current rsp mode is {}'.format(outmode)) + logger.info('current rcu mode is {}'.format(outmode)) if mode == outmode: logger.info('mode changed correctly to {}'.format(outmode)) return True - raise Exception('Cannot change rsp mode') + raise Exception('Cannot change rcu mode') + + +def _single_element_hba_delay_string(element_id): + """ + Generate the HBA delay string to select a single element id + :param element_id: the element id to be selected + :return: the element id string + >>> _single_element_hba_delay_string(0) + '128,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2' + >>> _single_element_hba_delay_string(15) + '2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,128' + >>> _single_element_hba_delay_string(8) + '2,2,2,2,2,2,2,2,128,2,2,2,2,2,2,2' + >>> _single_element_hba_delay_string(-1) + Traceback (most recent call last): + ... + ValueError: the element id -1 out of range [0, 15] + >>> _single_element_hba_delay_string(18) + Traceback (most recent call last): + ... + ValueError: the element id 18 out of range [0, 15] + """ + if element_id < 0 or element_id > _NUM_HBA_ELEMENTS: + raise ValueError('the element id %d out of range [0, 15]' % element_id) + + return ",".join([_ELEMENT_OFF_CODE for _ in range(element_id)] + + [_ELEMENT_ON_ZERO_DELAY] + + [_ELEMENT_OFF_CODE for _ in range(element_id + 1, _NUM_HBA_ELEMENTS)]) + + +def _tile_to_rcu_ids(tile_id): + """ + RCU ids for a given tile id (both polarizations) + :param tile_id: the id of the tile + :return: the list of the rcu ids corresponding to the tile_id + + >>> _tile_to_rcu_ids(1) + [2,3] + >>> _tile_to_rcu_ids(4) + [8,9] + """ + return [2 * tile_id, 2 * tile_id + 1] + + +def _rcu_selection_string(element_id, station_type): + """ + Generate the rcu selection string to select a series of rcu to be set for the HBA single element mode + :param element_id: the element id to be selected + :return: the element id string + >>> _rcu_selection_string(0, 'remote') + '0,1' + >>> _rcu_selection_string(8, 'remote') + '14,15,32,33,60,61' + >>> _rcu_selection_string(15, 'remote') + '42,43,58,59,88,89,94,95' + >>> _rcu_selection_string(8, 'international') + '8,9,34,35,114,115,116,117' + >>> _rcu_selection_string(9, 'core') + '42,43,90,91' + >>> _rcu_selection_string(8, 'core') + '' + >>> _rcu_selection_string(-1, 'core') + Traceback (most recent call last): + ... 
+ ValueError: the element id -1 out of range [0, 15] + >>> _rcu_selection_string(18, 'core') + Traceback (most recent call last): + ... + ValueError: the element id 18 out of range [0, 15] + """ + if element_id < 0 or element_id >= _NUM_HBA_ELEMENTS: + raise ValueError('the element id %d out of range [0, 15]' % element_id) + + num_tiles = _NUM_TILES[station_type] + elements_list = _OptimumElements[station_type] + rcu_to_be_selected = [] + for tile in range(num_tiles): + if elements_list[tile] == element_id: + # convert tile number to RCU number + rcu_to_be_selected += _tile_to_rcu_ids(tile) + + rcu_ctrl_string = ','.join(map(str, rcu_to_be_selected)) + return rcu_ctrl_string + + +def detect_station_type(): + hostname = socket.gethostname() + if hostname.startswith('RS'): + station_type = 'remote' + elif hostname.startswith('CS'): + station_type = 'core' + else: + station_type = 'international' + logger.info('Station type detected is %s', station_type) + return station_type + + +def set_single_hba_element(station_type): + """ + Activate a single element in the HBA tile + + :return: None + """ + if station_type not in _NUM_TILES: + raise ValueError('the station type "%s" does not exist' % station_type) + + logger.info('selecting a single element only') + for element_id in range(_NUM_HBA_ELEMENTS): + rcu_to_select = _rcu_selection_string(element_id, station_type) + if rcu_to_select == '': + continue + delay_to_set = _single_element_hba_delay_string(element_id) + + issue_rspctl_command(['--hbadelay={}'.format(delay_to_set), + '--select={}'.format(rcu_to_select)]) + time.sleep(_SLEEP_TIME_SINGLE_ELEMENT_SELECTION) + def set_xcsubband(subband): """ @@ -659,7 +829,7 @@ :type subband: string """ logger.info('switching rcu xcsubband to %d', subband) - issue_rspctl_command(["--xcsubband={}".format(subband)]) + issue_rspctl_command(['--xcsubband={}'.format(subband)]) logger.debug('xcsubband change command issued') for i in range(10): time.sleep(1) @@ -670,7 +840,8 @@ return True raise Exception('Cannot change rsp xcsubband to {}'.format(subband)) -def produce_xcstatistics(integration_time = 1, duration = 1, add_options = None, output_directory = "./"): + +def produce_xcstatistics(integration_time=1, duration=1, add_options=None, output_directory="./"): """ Execute the command to compute the xcstatistics with a given integration and duration. It is also possible to specify an output directory and additional options. @@ -690,13 +861,15 @@ res = query_xcstatistics(add_options) return res + def batch_produce_xcstatistics(integration_time, duration, - wait_time = None, - xcsub_bands = None, - mode = None, - add_options = None, - output_directory = "./"): + wait_time=None, + xcsub_bands=None, + mode=None, + add_options=None, + output_directory="./", + select_single_element=False): """ Produces the xcstatistics for a list of integration_times durations and wait_times on the given set of xcsubband storing everything in the output directory.
@@ -719,6 +892,9 @@ if mode != -2: set_mode(mode) + if select_single_element: + set_single_hba_element(station_type=STATION_TYPE) + for ind, (i, d, w) in enumerate(zip(integration_time, duration, wait_time)): if not xcsub_bands: produce_xcstatistics(i, d, add_options, output_directory) @@ -729,51 +905,79 @@ time.sleep(w) + # ----------------------------------MAIN CODE LOGIC -def setup_logging(): +def setup_logging(log_level): """ Setup the logging system """ logging.basicConfig( - format = '%(asctime)s - %(name)s: %(message)s', - datefmt = "%m/%d/%Y %I:%M:%S %p", - level = logging.DEBUG) - - + format='%(asctime)s - %(name)s: %(message)s', + datefmt="%m/%d/%Y %I:%M:%S %p", + level=log_level) + + __MODE_NOT_SET_DEFAULT = -2 -def init(): +def init(log_level=logging.DEBUG): """ Init phase of the program """ - setup_logging() + global STATION_TYPE + setup_logging(log_level=log_level) + STATION_TYPE = detect_station_type() + def setup_command_argument_parser(): parser = argparse.ArgumentParser( - formatter_class=argparse.RawDescriptionHelpFormatter, - description = "es: rspctlprobe.py --mode 3 --xcstatistics --xcsubband 100:400:50 --integration 5 --duration 5 --wait 3600 --loops 24 --directory /localhome/data/") - - parser.add_argument('--xcstatistics', action = 'store_true') - parser.add_argument('--integration', type = int, default = [1], nargs = '+') - parser.add_argument('--duration', type = int, default = [1], nargs = '+') - parser.add_argument('--xcangle', default = 'False') - parser.add_argument('--directory', default = os.getcwd()) - parser.add_argument('--wait', type = int, default = [0], nargs = '+') - parser.add_argument('--xcsubband', type = str, default = "") - parser.add_argument('--loops', type = int, default = 1) - parser.add_argument('--mode', type = int, default = __MODE_NOT_SET_DEFAULT) + formatter_class=argparse.RawDescriptionHelpFormatter, + description="Example complete tile: rspctlprobe.py --mode 5 --xcstatistics --xcsubband 100:400:25 --integration 5 --duration 5 --wait 3600 --loops 24 --directory /localhome/data/ \n\n" + "Example single element: rspctlprobe.py --mode 5 --single --xcstatistics --xcsubband 100:400:25 --integration 5 --duration 5 --wait 3600 --loops 24 --directory /localhome/data/") + + parser.add_argument('--xcstatistics', action='store_true') + parser.add_argument('--integration', type=int, default=[1], nargs='+') + parser.add_argument('--duration', type=int, default=[1], nargs='+') + parser.add_argument('--xcangle', default='False') + parser.add_argument('--directory', default=os.getcwd()) + parser.add_argument('--wait', type=int, default=[0], nargs='+') + parser.add_argument('--xcsubband', type=str, default="") + parser.add_argument('--loops', type=int, default=1) + parser.add_argument('--mode', type=int, default=__MODE_NOT_SET_DEFAULT) + parser.add_argument('--single', action='store_true', help='select a single HBA element') return parser + +def check_input_validity(arguments): + if arguments.single: + current_mode = query_most_common_mode() + if current_mode not in _HBA_MODES and arguments.mode == __MODE_NOT_SET_DEFAULT: + logger.error('single element selection requires an HBA mode (5, 6, 7): current mode is %d', + current_mode) + raise SystemExit('single element selection requires an HBA mode (5, 6, 7)') + + +def xcsubband_specification_to_list(xcsubbands_string): + if ":" in xcsubbands_string: + start, end, step = map(int,
xcsubbands_string.split(":")) + xcsub_bands = [int(i) for i in range(start, end + step, step)] + elif "," in xcsubbands_string: + xcsub_bands = [int(i) for i in xcsubbands_string.split(",")] + else: + xcsub_bands = [int(xcsubbands_string)] + return xcsub_bands + + def parse_and_execute_command_arguments(): """ Parses the command line arguments and execute the procedure linked :return: :rtype: """ + global STATION_TYPE parser = setup_command_argument_parser() program_arguments = parser.parse_args() - + check_input_validity(program_arguments) if program_arguments.xcstatistics: options = {} if program_arguments.xcangle: @@ -781,34 +985,31 @@ def parse_and_execute_command_arguments(): try: if program_arguments.xcsubband: - if ":" in program_arguments.xcsubband: - start, end, step = map(int, program_arguments.xcsubband.split(":")) - xcsub_bands = [int(i) for i in range(start, end+step, step)] - elif "," in program_arguments.xcsubband: - xcsub_bands = [int(i) for i in program_arguments.xcsubband.split(",")] - else: - xcsub_bands = [int(program_arguments.xcsubband)] + xcsub_bands = xcsubband_specification_to_list(program_arguments.xcsubband) for i in range(program_arguments.loops): batch_produce_xcstatistics(program_arguments.integration, program_arguments.duration, - wait_time = program_arguments.wait, - xcsub_bands = xcsub_bands, - mode = program_arguments.mode, - add_options = options, - output_directory = program_arguments.directory) + wait_time=program_arguments.wait, + xcsub_bands=xcsub_bands, + mode=program_arguments.mode, + add_options=options, + output_directory=program_arguments.directory, + select_single_element=program_arguments.single) else: for i in range(program_arguments.loops): batch_produce_xcstatistics(program_arguments.integration, program_arguments.duration, - wait_time = program_arguments.wait, - mode = program_arguments.mode, - add_options = options, - output_directory = program_arguments.directory) + wait_time=program_arguments.wait, + mode=program_arguments.mode, + add_options=options, + output_directory=program_arguments.directory, + select_single_element=program_arguments.single) + if program_arguments.mode != __MODE_NOT_SET_DEFAULT: # SWITCH BACK TO MODE 0 AT THE END IF MODE SWITCH WAS SET - set_mode(0) + set_mode(0) except Exception as e: logger.error('error executing rspctl : %s', e) logger.error('traceback \n%s', traceback.format_exc()) @@ -816,11 +1017,13 @@ def parse_and_execute_command_arguments(): else: parser.error('please specify a task') + def main(): - init() - logging.basicConfig(format = '%(asctime)s ' + socket.gethostname() + ' %(levelname)s %(message)s', - level = logging.INFO) + init(log_level=logging.INFO) + logging.basicConfig(format='%(asctime)s ' + socket.gethostname() + ' %(levelname)s %(message)s', + level=logging.INFO) parse_and_execute_command_arguments() + if __name__ == '__main__': main() diff --git a/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc b/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc index a2462a0717583c1669d07e6ead0ab81c00c3c330..5a9bc3f4bea1cadc352584deeb3ff09fba52e036 100644 --- a/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc +++ b/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc @@ -127,9 +127,17 @@ std::string TMSSBridge::getParsetAsText(int subtask_id) bool TMSSBridge::setSubtaskState(int subtask_id, const string& state) { string queryStr = "/api/subtask/" + to_string(subtask_id) + "/"; + string json_doc = "{ \"state\": \"/api/subtask_state/" + state +"/\""; + if(state == "finishing") { + // set stop_time to 'now' upon finished to 
get an actual record of when the observation stopped + ptime now = from_time_t(time(0)); + json_doc += ", \"stop_time\": \"" + to_iso_extended_string(now) + "\""; + } + json_doc += " }"; + string result; - if(httpQuery(queryStr, result, "PATCH", "{ \"state\": \"/api/subtask_state/" + state +"/\" }")) { - LOG_INFO_STR("Updated subtask id=" << subtask_id << " to status=" << state); + if(httpQuery(queryStr, result, "PATCH", json_doc)) { + LOG_INFO_STR("Updated subtask state id=" << subtask_id << " with patch: " << json_doc); return true; } diff --git a/MAC/Deployment/data/StaticMetaData/CableDelays/CS030-CableDelays.conf b/MAC/Deployment/data/StaticMetaData/CableDelays/CS030-CableDelays.conf index 204a334e12b246e4f005a907e37f7efe939fd414..cfc24531fd144ec059c9175a806576c27127163f 100644 --- a/MAC/Deployment/data/StaticMetaData/CableDelays/CS030-CableDelays.conf +++ b/MAC/Deployment/data/StaticMetaData/CableDelays/CS030-CableDelays.conf @@ -16,6 +16,10 @@ # 115m 465.5254 # 130m 530.6981 # +# T25 has 122 meter coax instead of 115 meter. New delay in table added (M.J. Norden, 24-9-2020) +#50 115 465.5254 80 326.9640 115 493.8617 +#51 115 465.5254 80 326.9640 115 493.8617 +# # LBL LBH HBA #RCUnr len delay len delay len delay #----------------------------------------------------------------------- @@ -69,8 +73,8 @@ 47 115 465.5254 80 326.9640 115 465.5254 48 115 465.5254 80 326.9640 115 465.5254 49 115 465.5254 80 326.9640 115 465.5254 -50 115 465.5254 80 326.9640 115 465.5254 -51 115 465.5254 80 326.9640 115 465.5254 +50 115 465.5254 80 326.9640 120 493.8617 +51 115 465.5254 80 326.9640 120 493.8617 52 115 465.5254 115 465.5254 115 465.5254 53 115 465.5254 115 465.5254 115 465.5254 54 80 326.9640 115 465.5254 115 465.5254 diff --git a/QA/QA_Service/lib/qa_service.py b/QA/QA_Service/lib/qa_service.py index 18bd13f9c1f44378b90bdd5e6f99919627123012..992ddb000178fcf1fff3cb93cedc9b7d5a91ac25 100644 --- a/QA/QA_Service/lib/qa_service.py +++ b/QA/QA_Service/lib/qa_service.py @@ -69,6 +69,10 @@ class QAFilteringOTDBBusListener(OTDBBusListener): class QAFilteringTMSSSubTaskBusListener(TMSSBusListener): class QAFilteringTMSSSubTaskEventMessageHandler(UsingToBusMixin, TMSSEventMessageHandler): + def __init__(self): + UsingToBusMixin.__init__(self) + TMSSEventMessageHandler.__init__(self) + def _send_qa_command_message(self, subtask_id: int, command_subject: str): with TMSSsession.create_from_dbcreds_for_ldap() as tmsssession: tmsssession.set_subtask_status(subtask_id, 'queueing') diff --git a/QA/QA_Service/test/t_qa_service.py b/QA/QA_Service/test/t_qa_service.py index fe5bfc908acd25b0225f6a6747277302564efa44..8daf86ce36f8c27fb947bac2a843051e2204e205 100755 --- a/QA/QA_Service/test/t_qa_service.py +++ b/QA/QA_Service/test/t_qa_service.py @@ -96,7 +96,8 @@ class TestQAService(unittest.TestCase): cls.tmp_exchange = TemporaryExchange("%s_%s" % (cls.__name__, cls.TEST_UUID)) cls.tmp_exchange.open() - cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address) + cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address, + start_postgres_listener=True, start_ra_test_environment=True) cls.tmss_test_env.start() cls.tmss_test_env.populate_schemas() diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/rarpc.py b/SAS/ResourceAssignment/ResourceAssigner/lib/rarpc.py index 75fe6059ed1fc2f9098c774c600d3439e7810960..f5711b4db9753551debdda209e418df0c022cdc2 100755 --- a/SAS/ResourceAssignment/ResourceAssigner/lib/rarpc.py +++ b/SAS/ResourceAssignment/ResourceAssigner/lib/rarpc.py @@ -30,7
+30,7 @@ to assign resources to these tasks. import logging logger = logging.getLogger(__name__) -from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME +from lofar.messaging.config import DEFAULT_BROKER, DEFAULT_BUSNAME from lofar.messaging.rpc import RPCClientContextManagerMixin, RPCClient diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py b/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py index e97993b95a5533e282a1c2b106dbd514abc9b071..6875b8004895b7302e338e31d3a1e32df31aefe2 100755 --- a/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py +++ b/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py @@ -158,7 +158,7 @@ class ResourceAssigner(object): if spec.status == 'approved': # Only needed to send misc field info (storagemanager) to OTDB logger.info('Task otdb_id=%s tmss_id=%s is only approved, no resource assignment needed yet' % (otdb_id, tmss_id)) self._send_task_status_notification(spec, 'approved') - return + return True #TODO have Specification propagate to the estimator? if self._schedule_resources(spec, specification_tree): # Cleanup the data of any previous run of the task diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py b/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py index ea374250236b38524742631147c1c98879f7867b..a2d282ce204f6d1361443d231c8f13b0865a26df 100644 --- a/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py +++ b/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py @@ -283,13 +283,21 @@ class BasicScheduler(object): # up more resources as a by-product, in which case other conflicts can simply be shifted to those newly freed # resources. conflict_claims = self.radb.getResourceClaims(task_ids=[self.task_id], status="conflict", extended=True) - logger.info("Resulting claims in conflict before resolution: %s", conflict_claims) - - if conflict_claims and not any([self._resolve_conflict(c) for c in conflict_claims]): - if need_all or len(conflict_claims) == len(tentative_claims): - # Could not resolve any conflict - raise ScheduleException("Could not resolve one or more conflicting claims: #tentative_claims=%s #conflict_claims=%s conflict_claims=%s" % ( - len(tentative_claims), len(conflict_claims), conflict_claims)) + if conflict_claims: + for conflict_claim in conflict_claims: + logger.warning("conflicting_claim: %s\nresource:%s\noverlapping_claims:%s\noverlapping_tasks:%s", conflict_claim, + self.radb.getResources(resource_ids=[conflict_claim['resource_id']], + include_availability=True, + claimable_capacity_lower_bound=conflict_claim['starttime'], + claimable_capacity_upper_bound=conflict_claim['endtime'])[0], + self.radb.get_overlapping_claims(conflict_claim['id']), + self.radb.get_overlapping_tasks(conflict_claim['id'])) + + if not any([self._resolve_conflict(c) for c in conflict_claims]): + if need_all or len(conflict_claims) == len(tentative_claims): + # Could not resolve any conflict + raise ScheduleException("Could not resolve one or more conflicting claims: #tentative_claims=%s #conflict_claims=%s conflict_claims=%s" % ( + len(tentative_claims), len(conflict_claims), conflict_claims)) # remove conflicting claims (allowing the next iteration to propose alternatives). Note that _handle_conflicts # could have reduced the number of conflicting claims. 
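A side note on the rewritten conflict check in schedulers.py above: any([self._resolve_conflict(c) for c in conflict_claims]) feeds any() a list comprehension, so a resolution is attempted for every conflicting claim before the results are inspected, whereas a bare generator expression would short-circuit after the first successful resolution. A minimal standalone Python sketch of that difference follows; resolve() and the claim ids are hypothetical stand-ins for illustration, not code from this patch:

def resolve(claim_id):
    # hypothetical stand-in for BasicScheduler._resolve_conflict
    print("attempting to resolve claim", claim_id)
    return claim_id % 2 == 0

claims = [1, 2, 3]
any([resolve(c) for c in claims])  # eager: attempts claims 1, 2 and 3
any(resolve(c) for c in claims)    # lazy: attempts claims 1 and 2, then short-circuits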
diff --git a/SAS/ResourceAssignment/ResourceAssigner/test/ra_test_environment.py b/SAS/ResourceAssignment/ResourceAssigner/test/ra_test_environment.py index 5046b4eb97f63c4354418a1352c9e4803c641054..fc053cfb2674659aac93c62c0861fa436d109ca9 100644 --- a/SAS/ResourceAssignment/ResourceAssigner/test/ra_test_environment.py +++ b/SAS/ResourceAssignment/ResourceAssigner/test/ra_test_environment.py @@ -41,7 +41,7 @@ class RATestEnvironment: exchange: str=os.environ.get("RA_EXCHANGE", DEFAULT_BUSNAME), broker: str=os.environ.get("RA_BROKER", DEFAULT_BROKER)): self.radb_test_instance = RADBTestDatabaseInstance() self.radb = self.radb_test_instance.create_database_connection() - self.radb_service = createRADBService(dbcreds=self.radb_test_instance.dbcreds, exchange=exchange, broker=broker) + self.radb_service = createRADBService(dbcreds=self.radb_test_instance.dbcreds, exchange=exchange, broker=broker, num_threads=1) self.re_service = createEstimatorService(exchange=exchange, broker=broker) self.ra_service = RAService(radbcreds=self.radb_test_instance.dbcreds, exchange=exchange, broker=broker) diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py index 29358de27d660b822a48c48a705f5dd0ec6ff135..285933e110ad53c2a94c719bc15bb8386932059d 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py @@ -659,7 +659,7 @@ class RADatabase(PostgresDatabaseConnection): return self._cursor.rowcount > 0 - def _to_fields_and_value_placeholders_strings(self, fields: collections.Iterable) -> (str, str): + def _to_fields_and_value_placeholders_strings(self, fields: collections.abc.Iterable) -> (str, str): """convert a list of fields (column names) into a tuple of a comma-seperated string and a comma-seperated placeholder string For usage with prepared statements (postgres mogrify)""" fields_str = ', '.join(fields) diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbbuslistener.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbbuslistener.py index c0412d60c699d267af1993a0e185c43904ff2165..7a833e499e9449b557333504e3bf394e5515353d 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbbuslistener.py +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbbuslistener.py @@ -124,8 +124,12 @@ if __name__ == '__main__': logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + class ExampleRADBEventMessageHandler(RADBEventMessageHandler): + def onTaskUpdated(self, updated_task): + logger.info("Example task updated: %s", updated_task) + from lofar.messaging import BusListenerJanitor - with BusListenerJanitor(RADBEventMessageBusListener()): + with BusListenerJanitor(RADBEventMessageBusListener(handler_type=ExampleRADBEventMessageHandler)): waitForInterrupt() __all__ = ["RADBEventMessageBusListener", "RADBEventMessageHandler"] diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py index 6a1786252db82892e650c2e24899cc6836046570..e0e853db3c9267aa8e46e3a12263ab24f36ee671 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py @@ -122,14 +122,13 @@ class RADBPGListener(PostgresListener): r = {k:r[k] for k in ['id', 'total_capacity', 'available_capacity', 'used_capacity']} 
self._sendNotification('ResourceCapacityUpdated', r) - def __enter__(self): - super(RADBPGListener, self).__enter__() + def start(self): + super(RADBPGListener, self).start() self.radb.connect() self.event_bus.open() - return self - def __exit__(self, exc_type, exc_val, exc_tb): - super(RADBPGListener, self).__exit__(exc_type, exc_val, exc_tb) + def stop(self): + super(RADBPGListener, self).stop() self.radb.disconnect() self.event_bus.close() diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py index 8ecc6fb4b4ebbb380d168e83e695431d4db9af91..422174462c3ddb222fc7437c38c3548c7eefb5ed 100755 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py @@ -55,8 +55,14 @@ class RADBTestDatabaseInstance(PostgresTestDatabaseInstance): for sql_path in sql_createdb_paths: logger.debug("setting up database. applying sql file: %s", sql_path) with open(sql_path) as sql: + # temporarily suppress logging of queries to prevent the log from being spammed with the entire sql schema + logging.getLogger('lofar.common.postgres').disabled = True + db.executeQuery(sql.read()) + # revert temporarily suppressed logging + logging.getLogger('lofar.common.postgres').disabled = False + def create_database_connection(self) -> RADatabase: self.radb = RADatabase(self.dbcreds) return self.radb diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py index 399b826974ae0275845fc6f639a66be40dddd980..f1ec6d530f2797ee805874072688562d4f103c21 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py @@ -161,12 +161,12 @@ class ResourceEstimatorHandler(ServiceMessageHandler): return self.get_subtree_estimate(specification_tree) -def createService(exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER): +def createService(exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER, num_threads=1): return RPCService(service_name=DEFAULT_RESOURCEESTIMATOR_SERVICENAME, handler_type=ResourceEstimatorHandler, exchange=exchange, broker=broker, - num_threads=1) + num_threads=num_threads) def main(): diff --git a/SAS/ResourceAssignment/ResourceAssignmentService/service.py b/SAS/ResourceAssignment/ResourceAssignmentService/service.py index 732404dbadbe236fb8668ae75ed32b62e021a6c9..0ec23d0a9638704c53a74677b4599fc6f91605cf 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentService/service.py +++ b/SAS/ResourceAssignment/ResourceAssignmentService/service.py @@ -386,13 +386,13 @@ class RADBServiceMessageHandler(ServiceMessageHandler): return { 'resource_claimable_capacity': resource_claimable_capacity} -def createService(exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER, dbcreds=None): +def createService(exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER, dbcreds=None, num_threads=4): return RPCService(DEFAULT_RADB_SERVICENAME, RADBServiceMessageHandler, handler_kwargs={'dbcreds': dbcreds}, exchange=exchange, broker=broker, - num_threads=4) + num_threads=num_threads) def main(): # make sure we run in UTC timezone diff --git a/SAS/TMSS/client/lib/populate.py b/SAS/TMSS/client/lib/populate.py index ed05353790cb9db1ccdeeed71f0b5589201ca502..bb39f6967617e077aa4c8d00f425534cbfc4d95c 100644 --- a/SAS/TMSS/client/lib/populate.py +++ b/SAS/TMSS/client/lib/populate.py @@ -5,6 +5,7 
@@ import json from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession from lofar.common import json_utils import os +from concurrent.futures import ThreadPoolExecutor def populate_schemas_main(): from optparse import OptionParser @@ -34,7 +35,9 @@ def populate_schemas(schema_dir: str=None, templates_filename: str=None): with TMSSsession.create_from_dbcreds_for_ldap() as client: base_url = client.base_url.rstrip('/').rstrip('api').rstrip('/') - for template in templates: + + # define upload method for parallel execution (see below) + def upload_template(template): try: with open(os.path.join(schema_dir, template.pop('file_name'))) as schema_file: try: @@ -69,7 +72,7 @@ else: template['schema'] = json_schema - logger.info("Uploading template template='%s' name='%s' version='%s'", template, name, version) + logger.info("Uploading template name='%s' version='%s'", name, version) client.post_template(template_path=template_name, name=name, @@ -81,3 +84,13 @@ except Exception as e: logger.error(e) + # TODO: make parallel upload work. Right now it fails sometimes due to interdependencies and non-deterministic upload order. + # do parallel upload + # with ThreadPoolExecutor() as executor: + # executor.map(upload_template, templates) + + # for now, do sequential upload + for template in templates: + upload_template(template) + + diff --git a/SAS/TMSS/client/lib/tmss_http_rest_client.py b/SAS/TMSS/client/lib/tmss_http_rest_client.py index 48df33a1cab208a1892a4f5aa47ed11e73b066e0..6d8a1f647edac91a3b3974a5551788ec98de9850 100644 --- a/SAS/TMSS/client/lib/tmss_http_rest_client.py +++ b/SAS/TMSS/client/lib/tmss_http_rest_client.py @@ -5,7 +5,7 @@ import requests from http.client import responses import os import json -from datetime import datetime +from datetime import datetime, timedelta from lofar.common.datetimeutils import formatDatetime # usage example: @@ -94,8 +94,12 @@ class TMSSsession(object): def set_subtask_status(self, subtask_id: int, status: str) -> {}: '''set the status for the given subtask, and return the subtask with its new state, or raise on error''' + json_doc = {'state': "%s/subtask_state/%s/" % (self.base_url, status)} + if status == 'finishing': + json_doc['stop_time'] = datetime.utcnow().isoformat() + response = self.session.patch(url='%s/subtask/%s/' % (self.base_url, subtask_id), - json={'state': "%s/subtask_state/%s/" % (self.base_url, status)}, + json=json_doc, params={'format':'json'}) if response.status_code >= 200 and response.status_code < 300: @@ -106,7 +110,8 @@ def get_subtask_parset(self, subtask_id) -> str: '''get the lofar parameterset (as text) for the given subtask''' - result = self.session.get(url='%s/subtask/%s/parset' % (self.base_url, subtask_id)) + result = self.session.get(url=self.get_full_url_for_path('/subtask/%s/parset' % (subtask_id,)), + headers={'Accept': 'text/plain'}) if result.status_code >= 200 and result.status_code < 300: return result.content.decode('utf-8') raise Exception("Could not get parameterset for subtask %s.\nResponse: %s" % (subtask_id, result)) @@ -167,7 +172,9 @@ def get_url_as_json_object(self, full_url: str, params={}) -> object: '''get resource at the given full url (including http://<base_url>, interpret it as json, and return it as as native object (usually a dict or a list of dicts)''' response =
self.session.get(url=full_url, params=params, timeout=100000) - logger.info("%s %s %s on %s", response.request.method.upper(), response.status_code, responses.get(response.status_code), response.request.url) + logger.info("%s %s %s in %.1fms%s on %s", response.request.method.upper(), response.status_code, responses.get(response.status_code), + response.elapsed.total_seconds()*1000, ' SLOW!' if response.elapsed > timedelta(seconds=1) else '', + response.request.url) if response.status_code >= 200 and response.status_code < 300: result = json.loads(response.content.decode('utf-8')) @@ -239,7 +246,7 @@ class TMSSsession(object): def specify_observation_task(self, task_id: int) -> requests.Response: """specify observation for the given draft task by just doing a REST API call """ - result = self.session.get(url='%s/api/task/%s/specify_observation' % (self.base_url, task_id)) + result = self.session.get(url=self.get_full_url_for_path('/task/%s/specify_observation' % (task_id,))) if result.status_code >= 200 and result.status_code < 300: return result.content.decode('utf-8') raise Exception("Could not specify observation for task %s.\nResponse: %s" % (task_id, result)) @@ -257,7 +264,7 @@ class TMSSsession(object): def get_setting(self, setting_name: str) -> {}: """get the value of a TMSS setting. returns the setting value upon success, or raises.""" - response = self.session.get(url='%s/setting/%s/' % (self.base_url, setting_name), + response = self.session.get(url=self.get_full_url_for_path('/setting/%s/' % (setting_name,)), params={'format': 'json'}) if response.status_code >= 200 and response.status_code < 300: @@ -269,7 +276,7 @@ class TMSSsession(object): def set_setting(self, setting_name: str, setting_value: bool) -> {}: """set a value for a TMSS setting. returns the setting value upon success, or raises.""" - response = self.session.patch(url='%s/setting/%s/' % (self.base_url, setting_name), + response = self.session.patch(url=self.get_full_url_for_path('/setting/%s/' % (setting_name,)), json={'value': setting_value}) if response.status_code >= 200 and response.status_code < 300: @@ -289,7 +296,7 @@ class TMSSsession(object): json_data['template'] = json.loads(template) if isinstance(template, str) else template json_data.update(**kwargs) - response = self.session.post(url='%s/%s/' % (self.base_url, template_path), json=json_data) + response = self.session.post(url=self.get_full_url_for_path(template_path), json=json_data) if response.status_code == 201: logger.info("created new template: %s", json.loads(response.text)['url']) else: @@ -302,7 +309,7 @@ class TMSSsession(object): new_feedback = feedback else: new_feedback = "%s\n%s" % (existing_feedback, feedback) - response = self.session.patch(url='%s/subtask/%s/' % (self.base_url, subtask_id), + response = self.session.patch(url=self.get_full_url_for_path('/subtask/%s/' % (subtask_id,)), json={'raw_feedback': new_feedback}, params={'format': 'json'}) @@ -316,7 +323,7 @@ class TMSSsession(object): def process_subtask_feedback_and_set_finished(self, subtask_id: int) -> {}: '''process the raw_feedback of a given subtask and set the subtask to finished on success.
Return the subtask with its new state, or raise an error''' - response = self.session.post(url='%s/subtask/%s/process_feedback_and_set_finished' % (self.base_url, subtask_id), + response = self.session.post(url=self.get_full_url_for_path('/subtask/%s/process_feedback_and_set_finished' % (subtask_id,)), params={'format': 'json'}) if response.status_code >= 200 and response.status_code < 300: diff --git a/SAS/TMSS/client/lib/tmssbuslistener.py b/SAS/TMSS/client/lib/tmssbuslistener.py index 81448e9a16c97e4cfb5f91213a218dde91f9edaf..75d63297e8d5dfff5403d560c6cbc3843ffcd71e 100644 --- a/SAS/TMSS/client/lib/tmssbuslistener.py +++ b/SAS/TMSS/client/lib/tmssbuslistener.py @@ -45,6 +45,7 @@ TMSS_TASKDRAFT_OBJECT_EVENT_PREFIX = _TMSS_EVENT_PREFIX_TEMPLATE % TMSS_SCHEDULINGUNITBLUEPRINT_OBJECT_EVENT_PREFIX = _TMSS_EVENT_PREFIX_TEMPLATE % 'SchedulingUnitBlueprint.Object' TMSS_SCHEDULINGUNITBLUEPRINT_STATUS_EVENT_PREFIX = _TMSS_EVENT_PREFIX_TEMPLATE % 'SchedulingUnitBlueprint.Status' TMSS_SCHEDULINGUNITDRAFT_OBJECT_EVENT_PREFIX = _TMSS_EVENT_PREFIX_TEMPLATE % 'SchedulingUnitDraft.Object' +TMSS_SETTING_OBJECT_EVENT_PREFIX = _TMSS_EVENT_PREFIX_TEMPLATE % 'Setting.Object' TMSS_ALL_OBJECT_EVENTS_FILTER = _TMSS_EVENT_PREFIX_TEMPLATE % '.*.Object.#' TMSS_ALL_STATUS_EVENTS_FILTER = _TMSS_EVENT_PREFIX_TEMPLATE % '.*.Status.#' TMSS_ALL_EVENTS_FILTER = _TMSS_EVENT_PREFIX_TEMPLATE % '#' @@ -55,13 +56,20 @@ class TMSSEventMessageHandler(AbstractMessageHandler): Base-type messagehandler for handling all TMSS event messages. Typical usage is to derive your own subclass from TMSSEventMessageHandler and implement the specific on<SomeMessage> methods that you are interested in. ''' + + def __init__(self, log_event_messages: bool=False) -> None: + self.log_event_messages = log_event_messages + super().__init__() + + def handle_message(self, msg: EventMessage): if not isinstance(msg, EventMessage): raise ValueError("%s: Ignoring non-EventMessage: %s" % (self.__class__.__name__, msg)) stripped_subject = msg.subject.replace(_TMSS_EVENT_PREFIX_TEMPLATE%('',), '') - logger.info("%s %s: %s" % (self.__class__.__name__, stripped_subject, single_line_with_single_spaces(msg.content))) + if self.log_event_messages: + logger.info("%s %s: %s" % (self.__class__.__name__, stripped_subject, single_line_with_single_spaces(msg.content))) # sorry, very big if/elif/else tree. # it just maps all possible event subjects for all possible objects and statuses onto handler methods. 
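As a side note for reviewers: a minimal sketch of how a client could subscribe to the new Setting.Object.Updated events and the new opt-in event logging introduced above. The handler class and its on... methods come from this change set; the logger setup and the reaction logic are illustrative assumptions.

import logging
logger = logging.getLogger(__name__)

from lofar.sas.tmss.client.tmssbuslistener import TMSSEventMessageHandler

class SettingAwareHandler(TMSSEventMessageHandler):
    def __init__(self):
        # opt in to the (now optional) logging of each received event message
        super().__init__(log_event_messages=True)

    def onSettingUpdated(self, name: str, value):
        # called for each Setting.Object.Updated event
        logger.info("TMSS setting '%s' was updated to '%s'", name, value)

    def onSchedulingUnitDraftConstraintsUpdated(self, id: int, scheduling_constraints_doc: dict):
        # called when the constraints document of a SchedulingUnitDraft changes
        logger.info("constraints of scheduling unit draft %s were updated", id)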
@@ -93,6 +101,8 @@ class TMSSEventMessageHandler(AbstractMessageHandler): self.onSchedulingUnitDraftCreated(**msg.content) elif stripped_subject == 'SchedulingUnitDraft.Object.Updated': self.onSchedulingUnitDraftUpdated(**msg.content) + elif stripped_subject == 'SchedulingUnitDraft.Object.Constraints.Updated': + self.onSchedulingUnitDraftConstraintsUpdated(**msg.content) elif stripped_subject == 'SchedulingUnitDraft.Object.Deleted': self.onSchedulingUnitDraftDeleted(**msg.content) elif stripped_subject.startswith('SubTask.Status.'): @@ -101,6 +111,8 @@ class TMSSEventMessageHandler(AbstractMessageHandler): self.onTaskBlueprintStatusChanged(**msg.content) elif stripped_subject.startswith('SchedulingUnitBlueprint.Status.'): self.onSchedulingUnitBlueprintStatusChanged(**msg.content) + elif stripped_subject == 'Setting.Object.Updated': + self.onSettingUpdated(**msg.content) else: raise MessageHandlerUnknownSubjectError("TMSSBusListener.handleMessage: unknown subject: %s" % msg.subject) @@ -192,6 +204,12 @@ class TMSSEventMessageHandler(AbstractMessageHandler): ''' pass + def onSchedulingUnitDraftConstraintsUpdated(self, id: int, scheduling_constraints_doc: dict): + '''onSchedulingUnitDraftConstraintsUpdated is called upon receiving a SchedulingUnitDraft.Object.Constraints.Updated message, which is sent when the constraints on a SchedulingUnitDraft are updated. + :param id: the TMSS id of the SchedulingUnitDraft + :param scheduling_constraints_doc: the updated scheduling constraints document + ''' + pass + def onSchedulingUnitDraftDeleted(self, id: int): '''onSchedulingUnitDraftDeleted is called upon receiving a SchedulingUnitDraft.Object.Deleted message, which is sent when a SchedulingUnitDraft was deleted. :param id: the TMSS id of the SchedulingUnitDraft @@ -216,6 +234,13 @@ class TMSSEventMessageHandler(AbstractMessageHandler): ''' pass + def onSettingUpdated(self, name: str, value): + '''onSettingUpdated is called upon receiving a Setting.Object.Updated message, which is sent when a Setting was updated.
+ :param name: the name of the Setting + :param value: the new value of the Setting + ''' + pass + + class TMSSBusListener(BusListener): def __init__(self, diff --git a/SAS/TMSS/docker-compose-scu199.yml b/SAS/TMSS/docker-compose-scu199.yml index 0778331fa0f4cbdbc15cf49c1c3c88273b98b4db..85cfd2d27d6fd292129294551405937b511a07bf 100644 --- a/SAS/TMSS/docker-compose-scu199.yml +++ b/SAS/TMSS/docker-compose-scu199.yml @@ -7,7 +7,7 @@ services: env_file: - ./.env network_mode: "host" - command: bash -c 'source /opt/lofar/lofarinit.sh && ALLOWED_HOSTS=* tmss_test_environment -H 0.0.0.0 -P `hostname -f` -p 8008 --data' + command: bash -c 'source /opt/lofar/lofarinit.sh && ALLOWED_HOSTS=* tmss_test_environment -H 0.0.0.0 -P `hostname -f` -p 8008 -sSd' ports: - "8008:8008" testprovider: diff --git a/SAS/TMSS/frontend/tmss_webapp/package.json b/SAS/TMSS/frontend/tmss_webapp/package.json index 8230388bc6f2c1f16805cf9a25ba4f05f6b6b000..705a5183e8801882a780f93350b30333e5bd3582 100644 --- a/SAS/TMSS/frontend/tmss_webapp/package.json +++ b/SAS/TMSS/frontend/tmss_webapp/package.json @@ -37,6 +37,7 @@ "react-calendar-timeline": "^0.27.0", "react-dom": "^16.13.1", "react-frame-component": "^4.1.2", + "react-json-to-table": "^0.1.7", "react-json-view": "^1.19.1", "react-loader-spinner": "^3.1.14", "react-router-dom": "^5.2.0", diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js b/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js index e9c0b245f5eaeeaf5bd579a4c42c6b67e46eae44..1a4f0d4290cde1e5be6ce84333dad9622678693a 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js @@ -173,15 +173,15 @@ function Jeditor(props) { } editor = new JSONEditor(element, editorOptions); // editor.getEditor('root').disable(); - if (props.disabled) { - editor.on('ready',() => { - editor.disable(); - }); - } - if (props.parentFunction) { - props.parentFunction(editorFunction); - } editorRef.current = editor; + editor.on('ready',() => { + if (props.disabled) { + editor.disable(); + } + if (props.parentFunction) { + props.parentFunction(editorFunction); + } + }); editor.on('change', () => {setEditorOutput()}); }; @@ -208,8 +208,13 @@ function Jeditor(props) { /** * Function called by the parent component to perform certain action in JEditor */ - function editorFunction() { - editorRef.current.destroy(); + function editorFunction(name, params) { + if (name === "setValue") { + const newValue = updateInput(_.cloneDeep(params[0])); + editorRef.current.setValue(newValue); + } else { + editorRef.current.destroy(); + } } /** diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js b/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js index d611b7857cf365ed32c8eee56b0936e8eb7e5b87..180744f4df30abb3edb16851ca9db17313ef356d 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js @@ -157,6 +157,12 @@ export class CalendarTimeline extends Component { setInterval(function(){setCurrentUTC(true)}, 60000); // Update UTC clock every second to keep the clock display live setInterval(function(){setCurrentUTC()}, 1000); + if (this.state.viewType === UIConstants.timeline.types.WEEKVIEW) { + this.addWeekSunTimes(this.state.defaultStartTime, this.state.defaultEndTime, this.state.group, this.state.items) + .then(items => { + this.setState({items: items}); + }); + } } shouldComponentUpdate() { @@
-251,22 +257,21 @@ export class CalendarTimeline extends Component { if (startMonth !== endMonth) { monthDuration = `(${startMonth}-${endMonth})`; } - return (<div {...getRootProps()} - style={{color: '#ffffff', textAlign: "right", width: `${this.state.sidebarWidth}px`, - paddingRight: '10px', backgroundColor: '#8ba7d9'}}> - <div style={{height:'30px'}}>{this.state.viewType===UIConstants.timeline.types.NORMAL? + return (<div {...getRootProps()} className="sidebar-header" + style={{width: `${this.state.sidebarWidth}px`}}> + <div className="sidebar-header-row">{this.state.viewType===UIConstants.timeline.types.NORMAL? (this.state.dayHeaderVisible?`Day${monthDuration}`:`Week${monthDuration}`) :`Week (${this.state.timelineStartDate.week()}) / Day`}</div> - <div style={{height:'30px'}}>{this.state.dayHeaderVisible?`UTC(Hr)`:`UTC(Day)`}</div> - <div style={{height:'30px'}}>{this.state.dayHeaderVisible?`LST(Hr)`:`LST(Day)`}</div> - {this.state.viewType === UIConstants.timeline.types.NORMAL && - <div className="p-grid" - style={{height:this.props.showSunTimings?'30px':'0px', paddingTop:'10px', paddingLeft:'10px'}}> - <div className="col-4" style={{marginTop:'2px', paddingLeft:'5px', backgroundColor:'yellow', color: '#212529'}}>Sunrise</div> - <div className="col-4" style={{marginTop:'2px', paddingLeft:'5px', backgroundColor:'orange', color: '#212529'}}>Sunset</div> - <div className="col-4" style={{marginTop:'2px', paddingLeft:'5px', backgroundColor:'blue'}}>Night</div> + <div className="sidebar-header-row">{this.state.dayHeaderVisible?`UTC(Hr)`:`UTC(Day)`}</div> + <div className="sidebar-header-row">{this.state.dayHeaderVisible?`LST(Hr)`:`LST(Day)`}</div> + {/* {this.state.viewType === UIConstants.timeline.types.NORMAL && */} + <div className="p-grid legend-row" + style={{height:this.props.showSunTimings?'30px':'0px'}}> + <div className="col-4 legend-suntime legend-sunrise">Sunrise</div> + <div className="col-4 legend-suntime legend-sunset">Sunset</div> + <div className="col-4 legend-suntime legend-night">Night</div> </div> - } + {/* } */} </div> ); } @@ -449,8 +454,8 @@ export class CalendarTimeline extends Component { } const nightStyle = { lineHeight: '30px', - backgroundColor: 'blue', - color: 'blue' + backgroundColor: 'grey', + color: 'grey' } const sunriseStyle = { lineHeight: '30px', @@ -463,27 +468,25 @@ export class CalendarTimeline extends Component { color: 'orange' } // Get the intervals UTC date format and time - const intervalDate = interval.startTime.clone().utc().format("YYYYMMDDT12:00:00"); + const intervalDate = interval.startTime.clone().utc().format("YYYY-MM-DD"); const intervalTime = interval.startTime.clone().utc(); // Get the suntime for the UTC date const intervalDateSunTime = sunTimeMap[intervalDate]; let intervalStyle = dayStyle; // If suntime is available display suntime blocks if (intervalDateSunTime) { - // Set 15 minutes duration for sunrise and sunset and create blocks accordingly - if (intervalTime.isBefore(intervalDateSunTime.sunrise) || - intervalTime.isAfter(intervalDateSunTime.sunset.clone().add(14, 'minutes'))) { - intervalStyle = nightStyle; - } else if (intervalTime.isSame(intervalDateSunTime.sunrise) || - intervalTime.isBefore(intervalDateSunTime.sunrise.clone().add(15, 'minutes'))) { + if (intervalTime.isBefore(intervalDateSunTime.sunrise.start) || + intervalTime.isAfter(intervalDateSunTime.sunset.end)) { + intervalStyle = nightStyle; + } else if (intervalTime.isSameOrAfter(intervalDateSunTime.sunrise.start) && + 
intervalTime.isSameOrBefore(intervalDateSunTime.sunrise.end)) { intervalStyle = sunriseStyle; - } else if (intervalTime.isSame(intervalDateSunTime.sunset) || - (intervalTime.isAfter(intervalDateSunTime.sunset) && - intervalTime.isBefore(intervalDateSunTime.sunset.clone().add(15, 'minutes')))) { + } else if (intervalTime.isSameOrAfter(intervalDateSunTime.sunset.start) && + intervalTime.isSameOrBefore(intervalDateSunTime.sunset.end)) { intervalStyle = sunsetStyle; } return ( - <div + <div className={`suntime-header ${intervalStyle}`} {...getIntervalProps({ interval, style: intervalStyle @@ -500,46 +503,133 @@ } /** - * Function to render sunrise timings on the timeline view in normal view. + * Function to render sunrise and before sunrise timings on the timeline view in normal view. * @param {Array} sunRiseTimings */ renderSunriseMarkers(sunRiseTimings) { + let endPoint = 0; return ( <> {sunRiseTimings && sunRiseTimings.length>0 && sunRiseTimings.map((item, index) => ( - <CustomMarker key={"sunrise-"+index} date={item}> - {({ styles, date }) => { - const customStyles = { - ...styles, - backgroundColor: 'yellow', - width: '3px' - } - return <div style={customStyles} /> - }} - </CustomMarker> + <> + {/* Marker to get the position of the sunrise end time */} + <CustomMarker key={"sunrise-end-"+index} date={item.end}> + {({ styles, date }) => { + endPoint = styles.left; + return "" + }} + </CustomMarker> + {/* Marker to represent the darkness before sunrise on the day */} + <CustomMarker key={"bef-sunrise-"+index} date={item.start.clone().hours(0).minutes(0).seconds(0)}> + {({ styles, date }) => { + const customStyles = { + ...styles, + backgroundColor: 'grey', + opacity:0.7, + zIndex: 10, + // width: '3px' + width: (endPoint-styles.left) + } + return <div style={customStyles} /> + }} + </CustomMarker> + {/* Marker to represent the duration of sunrise */} + <CustomMarker key={"sunrise-"+index} date={item.start}> + {({ styles, date }) => { + const customStyles = { + ...styles, + backgroundColor: 'yellow', + opacity:0.7, + zIndex: 10, + // width: '3px' + width: (endPoint-styles.left) + } + return <div style={customStyles} /> + }} + </CustomMarker> + </> ))} </> ); } /** - * Function to render sunrise timings on the timeline view in normal view. + * Function to render sunset & after sunset timings on the timeline view in normal view.
* @param {Array} sunSetTimings */ renderSunsetMarkers(sunSetTimings) { + let endPoint = 0; return ( <> {sunSetTimings && sunSetTimings.length>0 && sunSetTimings.map((item, index) => ( - <CustomMarker key={"sunset-"+index} date={item}> - {({ styles, date }) => { - const customStyles = { - ...styles, - backgroundColor: 'orange', - width: '3px' - } - return <div style={customStyles} /> - }} - </CustomMarker> + <> + {/* Marker to get the position of the sunset end time */} + <CustomMarker key={"sunset-end-"+index} date={item.end}> + {({ styles, date }) => { + endPoint = styles.left; + return "" + }} + </CustomMarker> + {/* Marker to represent the darkness after sunset */} + <CustomMarker key={"aft-sunset-"+index} date={item.start.clone().hours(23).minutes(59).seconds(59)}> + {({ styles, date }) => { + const customStyles = { + ...styles, + backgroundColor: 'grey', + opacity:0.7, + zIndex: 10, + left: endPoint, + width: styles.left-endPoint + } + return <div style={customStyles} /> + }} + </CustomMarker> + {/* Marker to represent the actual sunset duration */} + <CustomMarker key={"sunset-"+index} date={item.start}> + {({ styles, date }) => { + const customStyles = { + ...styles, + backgroundColor: 'orange', + opacity:0.7, + zIndex: 10, + width: endPoint - styles.left + } + return <div style={customStyles} /> + }} + </CustomMarker> + </> + ))} + </> + ); + } + + /** + * Function to render night timings on the timeline view in normal view. + * @param {Array} sunRiseTimings + * @param {Array} sunSetTimings + */ + renderNightMarkers(sunRiseTimings, sunSetTimings) { + let endPoint = 0; + return ( + <> + {sunSetTimings && sunSetTimings.length>0 && sunSetTimings.map((item, index) => ( + <> + <CustomMarker key={"night-end-"+index} date={item.end}> + {({ styles, date }) => { + endPoint = styles.left; + return "" + }} + </CustomMarker> + <CustomMarker key={"night-"+index} date={item.start}> + {({ styles, date }) => { + const customStyles = { + ...styles, + backgroundColor: 'orange', + width: endPoint - styles.left + } + return <div style={customStyles} /> + }} + </CustomMarker> + </> ))} </> ); @@ -577,10 +667,16 @@ export class CalendarTimeline extends Component { /** Custom function to pass to timeline component to render item */ renderItem({ item, timelineContext, itemContext, getItemProps, getResizeProps }) { + /* Reduce the item height so that the suntimings can be viewed above the item block. + Suntimes are rendered as items with a tiny height, shown as a horizontal bar above the actual items */ + if (item.type === "SUNTIME") { + itemContext.dimensions.height = 3; + } else { + itemContext.dimensions.height -= 3; + itemContext.dimensions.top += 3; + } const { left: leftResizeProps, right: rightResizeProps } = getResizeProps(); const backgroundColor = itemContext.selected?item.bgColor:item.bgColor; - // const backgroundColor = itemContext.selected ? (itemContext.dragging ? "red" : item.selectedBgColor) : item.bgColor; - // const borderColor = itemContext.resizing ? "red" : item.color; let itemContentStyle = {lineHeight: `${Math.floor(itemContext.dimensions.height)}px`, fontSize: "14px", overflow: "hidden", textOverflow: "ellipsis", whiteSpace: "nowrap", @@ -603,7 +699,8 @@ export class CalendarTimeline extends Component { borderRadius: 3, borderLeftWidth: itemContext.selected ? 3 : 1, borderRightWidth: itemContext.selected ?
3 : 1, - opacity: item.type==="SUNTIME"?0.6:1 + // opacity: item.type==="SUNTIME"?0.6:1 + zIndex: item.type==="SUNTIME"?79:80 }, onMouseDown: () => { if (item.type !== "SUNTIME") { @@ -623,7 +720,7 @@ export class CalendarTimeline extends Component { //whiteSpace: "nowrap" }} > - { this.state.viewType===UIConstants.timeline.types.WEEKVIEW && + { this.state.viewType===UIConstants.timeline.types.WEEKVIEW && item.type !== "SUNTIME" && <><div style={itemContentStyle}><i style={{fontSize:"12px"}} className="fa fa-user" title="Friend"></i><span>{item.project}</span></div> <div style={itemContentStyle}><span>{item.duration}</span></div> <div style={itemContentStyle}><span>{item.band}</span></div> </>} @@ -719,14 +816,18 @@ export class CalendarTimeline extends Component { const noOfDays = endTime.diff(startTime, 'days'); for (const number of _.range(noOfDays+1)) { const date = startTime.clone().add(number, 'days').hours(12).minutes(0).seconds(0); - const formattedDate = date.format("YYYYMMDDTHH:mm:ss"); - UtilService.getSunTimings(formattedDate+"Z").then(timings => { - const sunriseTime = moment.utc(timings.sun_rise.split('.')[0]); - const sunsetTime = moment.utc(timings.sun_set.split('.')[0]); - if (moment.utc(timings.sun_rise).isAfter(startTime)) { + const formattedDate = date.format("YYYY-MM-DD"); + UtilService.getSunTimings(formattedDate).then(timings => { + const sunriseStartTime = moment.utc(timings.sun_rise.start.split('.')[0]); + const sunriseEndTime = moment.utc(timings.sun_rise.end.split('.')[0]); + const sunsetStartTime = moment.utc(timings.sun_set.start.split('.')[0]); + const sunsetEndTime = moment.utc(timings.sun_set.end.split('.')[0]); + const sunriseTime = {start: sunriseStartTime, end: sunriseEndTime}; + const sunsetTime = {start: sunsetStartTime, end: sunsetEndTime}; + if (sunriseEndTime.isAfter(startTime)) { sunRiseTimings.push(sunriseTime); } - if (moment.utc(timings.sun_set).isBefore(endTime)) { + if (sunsetStartTime.isBefore(endTime)) { sunSetTimings.push(sunsetTime); } sunTimeMap[formattedDate] = {sunrise: sunriseTime, sunset: sunsetTime}; @@ -735,33 +836,62 @@ } } + /** + * Adds sunrise, sunset and night times per station as timeline items. + * @param {moment} startTime + * @param {moment} endTime + * @param {Array} stationGroup - Array of station group objects + * @param {Array} items - Array of Item objects + */ async addStationSunTimes(startTime, endTime, stationGroup, items) { const noOfDays = endTime.diff(startTime, 'days'); let sunItems = _.cloneDeep(items); for (const number of _.range(noOfDays+1)) { for (const station of stationGroup) { const date = startTime.clone().add(number, 'days').hours(12).minutes(0).seconds(0); - const timings = await UtilService.getSunTimings(date.format("YYYYMMDDTHH:mm:ss")+"Z", station.id); - let sunriseItem = { id: `sunrise-${number}-${station.id}`, - group: station.id, - title: timings.sun_rise, - project: "", - name: "", - duration: "", - start_time: moment.utc(timings.sun_rise), - end_time: moment.utc(timings.sun_rise).add(5, 'minutes'), - bgColor: "yellow", - selectedBgColor: "yellow", - type: "SUNTIME"}; - sunItems.push(sunriseItem); - let sunsetItem = _.cloneDeep(sunriseItem); - sunsetItem.id = `sunset-${number}-${station.id}`; - sunsetItem.start_time = moment.utc(timings.sun_set); - sunsetItem.end_time = moment.utc(timings.sun_set).add(5, 'minutes'); - sunsetItem.bgColor = "orange"; - sunsetItem.selectedBgColor = "0range"; - sunItems.push(sunsetItem); - + const timings = await
UtilService.getSunTimings(date.format("YYYY-MM-DD"), station.id); + if (timings) { + let sunriseItem = { id: `sunrise-${number}-${station.id}`, + group: station.id, + // title: `${timings.sun_rise.start} to ${timings.sun_rise.end}`, + title: "", + project: "", + name: "", + duration: "", + start_time: moment.utc(timings.sun_rise.start), + end_time: moment.utc(timings.sun_rise.end), + bgColor: "yellow", + selectedBgColor: "yellow", + type: "SUNTIME"}; + sunItems.push(sunriseItem); + let sunsetItem = _.cloneDeep(sunriseItem); + sunsetItem.id = `sunset-${number}-${station.id}`; + // sunsetItem.title = `${timings.sun_set.start} to ${timings.sun_set.end}`; + sunsetItem.title = ""; + sunsetItem.start_time = moment.utc(timings.sun_set.start); + sunsetItem.end_time = moment.utc(timings.sun_set.end); + sunsetItem.bgColor = "orange"; + sunsetItem.selectedBgColor = "orange"; + sunItems.push(sunsetItem); + let befSunriseItem = _.cloneDeep(sunriseItem); + befSunriseItem.id = `bef-sunrise-${number}-${station.id}`; + befSunriseItem.title = ""; + befSunriseItem.start_time = moment.utc(timings.sun_rise.start).hours(0).minutes(0).seconds(0); + befSunriseItem.end_time = moment.utc(timings.sun_rise.start); + befSunriseItem.bgColor = "grey"; + befSunriseItem.selectedBgColor = "grey"; + sunItems.push(befSunriseItem); + let afterSunsetItem = _.cloneDeep(sunriseItem); + afterSunsetItem.id = `aft-sunset-${number}-${station.id}`; + afterSunsetItem.title = ""; + afterSunsetItem.start_time = moment.utc(timings.sun_set.end); + afterSunsetItem.end_time = moment.utc(timings.sun_set.end).hours(23).minutes(59).seconds(59); + afterSunsetItem.bgColor = "grey"; + afterSunsetItem.selectedBgColor = "grey"; + sunItems.push(afterSunsetItem); + } } } if (!this.props.showSunTimings && this.state.viewType === UIConstants.timeline.types.NORMAL) { @@ -770,6 +900,73 @@ export class CalendarTimeline extends Component { return items; } + /** + * To render sunrise, sunset and night times as horizontal bars, new items are created and appended to the actual items.
+ * @param {moment} startTime + * @param {moment} endTime + * @param {Array} weekGroup + * @param {Array} items + */ + async addWeekSunTimes(startTime, endTime, weekGroup, items) { + const noOfDays = endTime.diff(startTime, 'days'); + let sunItems = _.cloneDeep(items); + for (const weekDay of weekGroup) { + if (weekDay.value) { + const timings = await UtilService.getSunTimings(weekDay.value.format("YYYY-MM-DD"), 'CS001'); + if (timings) { + const sunriseStart = moment.utc(timings.sun_rise.start); + const sunriseEnd = moment.utc(timings.sun_rise.end); + const sunsetStart = moment.utc(timings.sun_set.start); + const sunsetEnd = moment.utc(timings.sun_set.end); + let sunriseItem = { id: `sunrise-${weekDay.id}`, + group: weekDay.id, + title: "", + project: "", + name: "", + duration: "", + start_time: startTime.clone().hours(sunriseStart.hours()).minutes(sunriseStart.minutes()).seconds(sunriseStart.seconds()), + end_time: startTime.clone().hours(sunriseEnd.hours()).minutes(sunriseEnd.minutes()).seconds(sunriseEnd.seconds()), + bgColor: "yellow", + selectedBgColor: "yellow", + type: "SUNTIME"}; + sunItems.push(sunriseItem); + let sunsetItem = _.cloneDeep(sunriseItem); + sunsetItem.id = `sunset-${weekDay.id}`; + sunsetItem.title = ""; + sunsetItem.start_time = startTime.clone().hours(sunsetStart.hours()).minutes(sunsetStart.minutes()).seconds(sunsetStart.seconds()); + sunsetItem.end_time = startTime.clone().hours(sunsetEnd.hours()).minutes(sunsetEnd.minutes()).seconds(sunsetEnd.seconds()); + sunsetItem.bgColor = "orange"; + sunsetItem.selectedBgColor = "orange"; + sunItems.push(sunsetItem); + let befSunriseItem = _.cloneDeep(sunriseItem); + befSunriseItem.id = `bef-sunrise-${weekDay.id}`; + befSunriseItem.title = ""; + befSunriseItem.start_time = startTime.clone().hours(0).minutes(0).seconds(0); + befSunriseItem.end_time = startTime.clone().hours(sunriseStart.hours()).minutes(sunriseStart.minutes()).seconds(sunriseStart.seconds()); + befSunriseItem.bgColor = "grey"; + befSunriseItem.selectedBgColor = "grey"; + sunItems.push(befSunriseItem); + let afterSunsetItem = _.cloneDeep(sunriseItem); + afterSunsetItem.id = `aft-sunset-${weekDay.id}`; + afterSunsetItem.title = ""; + afterSunsetItem.start_time = startTime.clone().hours(sunsetEnd.hours()).minutes(sunsetEnd.minutes()).seconds(sunsetEnd.seconds()); + afterSunsetItem.end_time = startTime.clone().hours(23).minutes(59).seconds(59); + afterSunsetItem.bgColor = "grey"; + afterSunsetItem.selectedBgColor = "grey"; + sunItems.push(afterSunsetItem); + } + } + } + if (this.state.viewType === UIConstants.timeline.types.WEEKVIEW) { + items = sunItems; + } + return items; + } + /** * Resets the timeline view to default zoom and move to the current timeline */ @@ -964,11 +1161,13 @@ export class CalendarTimeline extends Component { dayHeaderVisible = rangeDays > 35?false: true; weekHeaderVisible = rangeDays > 35?true: false; lstDateHeaderUnit = rangeDays > 35?"day":"hour"; + const items = await this.addWeekSunTimes(timelineStart, timelineEnd, group, result.items); this.setState({defaultStartTime: timelineStart, defaultEndTime: timelineEnd, timelineStartDate: timelineStart, timelineEndDate: timelineEnd, zoomLevel:
this.ZOOM_LEVELS[this.ZOOM_LEVELS.length-1].name, isTimelineZoom: false, dayHeaderVisible: dayHeaderVisible, weekHeaderVisible: weekHeaderVisible, - lstDateHeaderUnit: lstDateHeaderUnit, group: group, items: result.items}); + lstDateHeaderUnit: lstDateHeaderUnit, group: group, items: items}); this.loadLSTDateHeaderMap(startDate, endDate, lstDateHeaderUnit); this.setState({isWeekLoading: false}); } diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_suSummary.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_suSummary.scss index c2fb7b46ab00ed3721db70e61b834f5c9d17b412..76b02736fb1959096ef3067f75b67f2ad1a72336 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_suSummary.scss +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_suSummary.scss @@ -1,5 +1,5 @@ .constraints-summary>div { - overflow: scroll; + overflow-y: scroll; max-height: 500px; margin-bottom: 10px; } @@ -24,4 +24,25 @@ .task-summary #block_container { margin-top: 0px; -} \ No newline at end of file +} + +/* + * Custom scrollbar styling for the json-to-table constraints view + */ + .json-to-table::-webkit-scrollbar-track + { + -webkit-box-shadow: inset 0 0 6px rgba(248, 245, 245, 0.3); + background-color: #F5F5F5; + } + + .json-to-table::-webkit-scrollbar + { + width: 6px; + background-color: #F5F5F5; + } + + .json-to-table::-webkit-scrollbar-thumb + { + background-color: #0000007c; + } \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss index 25e0ca50ba4e4546260f615e0ccb3150dc01d50a..51b2e71b3a46bf57fa21eb4e54b3de8aaa0152a1 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss @@ -1,7 +1,7 @@ .sticky { position: sticky; top:49px; - z-index:1000; + z-index:999; } .rct-sidebar-row { @@ -84,6 +84,66 @@ margin-left: 10px; } +.sidebar-header { + color: #ffffff; + text-align: right; + padding-right: 10px; + background-color: #8ba7d9; +} + +.sidebar-header-row { + height: 30px; +} + +.legend-row { + padding-top: 10px; + padding-left: 10px; + font-size: 14px; +} + +.legend-suntime { + margin-top: 2px; + padding-left: 3px !important; +} + +.legend-sunrise { + background-color: yellow; + color: #212529; +} + +.legend-sunset { + background-color: orange; + color: #212529; +} + +.legend-night { + background-color: grey; +} + +.suntime-header { + line-height: 30px; +} + +.suntime-header-day { + background-color: white; + color: white; +} + +.suntime-header-night { + background-color: grey; + color: grey; +} + +.suntime-header-sunrise { + background-color: yellow; + color: yellow; +} + +.suntime-header-sunset { + background-color: orange; + color: orange; +} + .resize-div, .resize-div-min, .resize-div-avg, diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js index 5de435d85b1f8e16a09f7e43c85ffaffd3da6227..1bcbcefcbd4a7cd6a5bbb2c2cf96eb7c4b7c2800 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js @@ -1,5 +1,6 @@ import React, { useState, useEffect } from 'react'; import moment from 'moment'; +import _ from 'lodash'; import Jeditor from '../../components/JSONEditor/JEditor'; import UnitConversion from '../../utils/unit.converter'; /* eslint-disable react-hooks/exhaustive-deps */ @@ -138,7 +139,7 @@ }; const
modifyInitiValue = () => { - const initValue = { ...props.initValue } + const initValue = _.cloneDeep(props.initValue); // For DateTime for (let key in initValue.time) { if (typeof initValue.time[key] === 'string') { diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Stations.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Stations.js index 841672888ac6cdbc75d850b1a39442f2752e1f59..7a7a354a7fd5872a6ee6256abba1b92749a22538 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Stations.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Stations.js @@ -122,7 +122,7 @@ export default (props) => { custom_stations_options = custom_stations_options.map(i => ({ value: i })); setCustomStationsOptions(custom_stations_options); if (props.onUpdateStations) { - updateSchedulingComp(stationState, [...selected_Stations], missing_StationFieldsErrors, customStations); + updateSchedulingComp(stationState, [...selected_Stations], missing_StationFieldsErrors, custom_stations); } }; diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js index 22914ed8871b0fe0ed58a456c2aa4baa4021b5bd..a76501c5c154cadef3782037eb877ff3db06c865 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js @@ -9,6 +9,7 @@ import PageHeader from '../../layout/components/PageHeader'; import ViewTable from './../../components/ViewTable'; import ScheduleService from '../../services/schedule.service'; import moment from 'moment'; +import _ from 'lodash'; import SchedulingConstraint from './Scheduling.Constraints'; import { Dialog } from 'primereact/dialog'; import TaskStatusLogs from '../Task/state_logs'; @@ -120,12 +121,12 @@ class ViewSchedulingUnit extends Component{ task.status_logs = task.tasktype === "Blueprint"?subtaskComponent(task):""; return task; }); - const targetObservation = tasks.find(task => task.name === 'Target Observation'); + const targetObservation = _.find(tasks, (task)=> {return task.template.type_value==='observation' && task.tasktype.toLowerCase()===schedule_type && task.specifications_doc.station_groups}); this.setState({ scheduleunit : schedulingUnit, schedule_unit_task : tasks, isLoading: false, - stationGroup: targetObservation.specifications_doc.station_groups + stationGroup: targetObservation?targetObservation.specifications_doc.station_groups:[] }, this.getAllStations); }); } else { @@ -139,9 +140,9 @@ class ViewSchedulingUnit extends Component{ getScheduleUnitTasks(type, scheduleunit){ if(type === 'draft') - return ScheduleService.getTasksBySchedulingUnit(scheduleunit.id); + return ScheduleService.getTasksBySchedulingUnit(scheduleunit.id, true); else - return ScheduleService.getTaskBlueprintsBySchedulingUnit(scheduleunit); + return ScheduleService.getTaskBlueprintsBySchedulingUnit(scheduleunit, true); } getScheduleUnit(type, id){ if(type === 'draft') diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js index 805787316649f94e265385af3e200bd9a8c73d20..4dba59e7cc582d9ff47663d3130e2dd9efdea3ab 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js @@ -121,8 +121,7 @@ export class SchedulingUnitCreate extends Component { */ async changeStrategy (strategyId) { const observStrategy = 
_.find(this.observStrategies, {'id': strategyId}); - const station_group = observStrategy.template.tasks['Target Observation'].specifications_doc.station_groups; - this.setState({ stationGroup: station_group }); + let station_group = []; const tasks = observStrategy.template.tasks; let paramsOutput = {}; let schema = { type: 'object', additionalProperties: false, @@ -138,6 +137,9 @@ export class SchedulingUnitCreate extends Component { const taskTemplate = _.find(this.taskTemplates, {'name': task['specifications_template']}); schema['$id'] = taskTemplate.schema['$id']; schema['$schema'] = taskTemplate.schema['$schema']; + if (taskTemplate.type_value==='observation' && task.specifications_doc.station_groups) { + station_group = task.specifications_doc.station_groups; + } let index = 0; for (const param of observStrategy.template.parameters) { if (param.refs[0].indexOf(`/tasks/${taskName}`) > 0) { @@ -174,7 +176,7 @@ export class SchedulingUnitCreate extends Component { } } - this.setState({observStrategy: observStrategy, paramsSchema: schema, paramsOutput: paramsOutput}); + this.setState({observStrategy: observStrategy, paramsSchema: schema, paramsOutput: paramsOutput, stationGroup: station_group}); // Function called to clear the JSON Editor fields and reload with new schema if (this.state.editorFunction) { @@ -345,6 +347,12 @@ export class SchedulingUnitCreate extends Component { observStrategy.template.parameters.forEach(async(param, index) => { $refs.set(observStrategy.template.parameters[index]['refs'][0], this.state.paramsOutput['param_' + index]); }); + for (const taskName in observStrategy.template.tasks) { + let task = observStrategy.template.tasks[taskName]; + if (task.specifications_doc.station_groups) { + task.specifications_doc.station_groups = station_groups; + } + } const const_strategy = {scheduling_constraints_doc: constStrategy, id: this.constraintTemplates[0].id, constraint: this.constraintTemplates[0]}; const schedulingUnit = await ScheduleService.saveSUDraftFromObservStrategy(observStrategy, this.state.schedulingUnit, const_strategy, station_groups); if (schedulingUnit) { @@ -388,7 +396,6 @@ export class SchedulingUnitCreate extends Component { }, projectDisabled: (this.props.match.params.project? 
true:false), observStrategy: {}, - selectedStations:{}, paramsOutput: null, validEditor: false, validFields: {}, diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/edit.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/edit.js index d3060f0f30d53126e4b4d95596323c40e99a879f..f2f56627824593d0f270148148f2e718fb2d41ff 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/edit.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/edit.js @@ -130,6 +130,9 @@ export class EditSchedulingUnit extends Component { } index++; } + if (taskTemplate.type_value==='observation' && task.specifications_doc.station_groups) { + tasksToUpdate[taskName] = taskName; + } } this.setState({observStrategy: observStrategy, paramsSchema: schema, paramsOutput: paramsOutput, tasksToUpdate: tasksToUpdate}); @@ -161,9 +164,9 @@ export class EditSchedulingUnit extends Component { observStrategyVisible: responses[4].observation_strategy_template_id?true:false }); if (responses[4].observation_strategy_template_id) { this.changeStrategy(responses[4].observation_strategy_template_id); - const targetObservation = responses[5].data.results.find(task => task.name === 'Target Observation'); + const targetObservation = responses[5].data.results.find(task => {return task.specifications_doc.station_groups?true:false}); this.setState({ - stationGroup: targetObservation.specifications_doc.station_groups + stationGroup: targetObservation?targetObservation.specifications_doc.station_groups:[] }); } if (this.state.schedulingUnit.project) { diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/summary.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/summary.js index 7263fc7b3441386d301f4bba6e5e1e82aedd368c..4b2ef70081a130dd75f9567e8c1ec2616b186c90 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/summary.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/summary.js @@ -3,7 +3,7 @@ import { Link } from 'react-router-dom/cjs/react-router-dom.min'; import moment from 'moment'; import _ from 'lodash'; import ViewTable from '../../components/ViewTable'; -import { JSONToHTMLTable } from '@kevincobain2000/json-to-html-table' +import { JsonToTable } from "react-json-to-table"; import SchedulingConstraints from './Scheduling.Constraints'; /** @@ -42,7 +42,7 @@ export class SchedulingUnitSummary extends Component { /* Format the object to remove empty values*/ const constraint = this.getFormattedConstraint(constraintsDoc[constraintKey]); if (constraint) { - orderedConstraints[constraintKey] = constraint; + orderedConstraints[constraintKey.replace(/_/g, ' ')] = constraint; } } return orderedConstraints; @@ -66,7 +66,7 @@ export class SchedulingUnitSummary extends Component { break; } case "boolean": { - constraint = constraint?constraint:null; + constraint = constraint?'Yes':null; break; } case "object": { @@ -88,7 +88,7 @@ export class SchedulingUnitSummary extends Component { for (const objectKey of _.keys(constraint)) { let object = this.getFormattedConstraint(constraint[objectKey]); if (object) { - newObject[objectKey] = object; + newObject[objectKey.replace(/_/g, ' ')] = object; } } constraint = (!_.isEmpty(newObject))?
newObject:null; @@ -143,10 +143,12 @@ export class SchedulingUnitSummary extends Component { </div> {/* Scheduling Constraint Display in table format */} {constraintsDoc && + <> <div className="col-12 constraints-summary"> <label>Constraints:</label> - <JSONToHTMLTable data={constraintsDoc} tableClassName="table table-sm"/> + <JsonToTable json={constraintsDoc} /> </div> + </> } </> } diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js index 5ab65e6a7b09886f107dc78943236999c1836156..2fd2e272fc587b7ed335de24b48ed3a6c31a4352 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js @@ -46,6 +46,7 @@ export class TimelineView extends Component { isSummaryLoading: false } this.STATUS_BEFORE_SCHEDULED = ['defining', 'defined', 'schedulable']; // Statuses before scheduled to get station_group + this.allStationsGroup = []; this.onItemClick = this.onItemClick.bind(this); this.closeSUDets = this.closeSUDets.bind(this); @@ -60,7 +61,8 @@ export class TimelineView extends Component { ScheduleService.getSchedulingUnitBlueprint(), ScheduleService.getSchedulingUnitDraft(), ScheduleService.getSchedulingSets(), - UtilService.getUTC()] ; + UtilService.getUTC(), + ScheduleService.getStations('All')] ; Promise.all(promises).then(async(responses) => { const projects = responses[0]; const suBlueprints = _.sortBy(responses[1].data.results, 'name'); @@ -105,7 +107,9 @@ export class TimelineView extends Component { } } } - + for (const station of responses[5]['stations']) { + this.allStationsGroup.push({id: station, title: station}); + } this.setState({suBlueprints: suBlueprints, suDrafts: suDrafts, group: group, suSets: suSets, projects: projects, suBlueprintList: suList, items: items, currentUTC: currentUTC, isLoading: false}); @@ -117,9 +121,6 @@ * @param {Object} suBlueprint */ getTimelineItem(suBlueprint) { - // Temporary for testing - const diffOfCurrAndStart = moment().diff(moment(suBlueprint.stop_time), 'seconds'); - // suBlueprint.status = diffOfCurrAndStart>=0?"FINISHED":"DEFINED"; let item = { id: suBlueprint.id, group: suBlueprint.suDraft.id, title: `${suBlueprint.project} - ${suBlueprint.suDraft.name} - ${(suBlueprint.durationInSec/3600).toFixed(2)}Hrs`, @@ -189,7 +190,7 @@ export class TimelineView extends Component { if (this.state.stationView) { const loadSubtasks = this.STATUS_BEFORE_SCHEDULED.indexOf(suBlueprint.status.toLowerCase()) < 0 ; suBlueprint.tasks = await ScheduleService.getTaskBlueprintsBySchedulingUnit(suBlueprint, true, loadSubtasks); - this.getStationItemGroups(suBlueprint, timelineItem, group, items); + this.getStationItemGroups(suBlueprint, timelineItem, this.allStationsGroup, items); } else { items.push(timelineItem); if (!_.find(group, {'id': suBlueprint.suDraft.id})) { @@ -207,7 +208,7 @@ export class TimelineView extends Component { this.setState({suBlueprintList: _.filter(suBlueprintList, (suBlueprint) => {return suBlueprint.start_time!=null})}); // On range change close the Details pane // this.closeSUDets(); - return {group: _.sortBy(group,'id'), items: items}; + return {group: this.state.stationView?this.allStationsGroup:_.sortBy(group,'id'), items: items}; } /** @@ -245,9 +246,9 @@ export class TimelineView extends Component { stationItem.id = `${stationItem.id}-${station}`; stationItem.group = station; items.push(stationItem); - if (!_.find(group, {'id': station})) { - 
group.push({'id': station, title: station}); - } + // if (!_.find(group, {'id': station})) { + // group.push({'id': station, title: station}); + // } } } @@ -281,7 +282,7 @@ export class TimelineView extends Component { const suBlueprint = _.find(suBlueprints, {actionpath: data.actionpath}); let timelineItem = this.getTimelineItem(suBlueprint); if (this.state.stationView) { - this.getStationItemGroups(suBlueprint, timelineItem, group, items); + this.getStationItemGroups(suBlueprint, timelineItem, this.allStationsGroup, items); } else { items.push(timelineItem); if (!_.find(group, {'id': suBlueprint.suDraft.id})) { @@ -290,7 +291,7 @@ export class TimelineView extends Component { } } if (this.timeline) { - this.timeline.updateTimeline({group: _.sortBy(group,"id"), items: items}); + this.timeline.updateTimeline({group: this.state.stationView?this.allStationsGroup:_.sortBy(group,"id"), items: items}); } } diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js index 6c7570bb5988cac32bb82e12a79eb8a48940ca3f..1ace4022edceedeffc7a6916b1749edaf81cc4fa 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js @@ -91,7 +91,7 @@ const ScheduleService = { } return taskblueprintsList; }, - getTasksBySchedulingUnit: async function(id){ + getTasksBySchedulingUnit: async function(id, loadTemplate){ let scheduletasklist=[]; // let taskblueprints = []; // Common keys for Task and Blueprint @@ -119,6 +119,9 @@ const ScheduleService = { scheduletask.duration = moment.utc((scheduletask.duration || 0)*1000).format('HH:mm:ss'); scheduletask.relative_start_time = moment.utc(scheduletask.relative_start_time*1000).format('HH:mm:ss'); scheduletask.relative_stop_time = moment.utc(scheduletask.relative_stop_time*1000).format('HH:mm:ss'); + if (loadTemplate) { + scheduletask.template = await TaskService.getTaskTemplate(task.specifications_template_id); + } //Fetch blueprint details for Task Draft const draftBlueprints = await TaskService.getDraftsTaskBlueprints(task.id); // let filteredblueprints = _.filter(taskblueprints, function(o) { @@ -140,7 +143,9 @@ const ScheduleService = { taskblueprint.duration = moment.utc((taskblueprint.duration || 0)*1000).format('HH:mm:ss'); taskblueprint.relative_start_time = moment.utc(taskblueprint.relative_start_time*1000).format('HH:mm:ss'); taskblueprint.relative_stop_time = moment.utc(taskblueprint.relative_stop_time*1000).format('HH:mm:ss'); - + if (loadTemplate) { + taskblueprint.template = scheduletask.template; + } //Add Blue print details to array scheduletasklist.push(taskblueprint); } @@ -237,7 +242,6 @@ const ScheduleService = { if (schedulingUnit && schedulingUnit.id) { // Update the newly created SU draft requirement_doc with captured parameter values schedulingUnit.requirements_doc = observStrategy.template; - schedulingUnit.requirements_doc.tasks['Target Observation'].specifications_doc.station_groups = station_groups; schedulingUnit.scheduling_constraints_doc = constraint.scheduling_constraints_doc; schedulingUnit.scheduling_constraints_template_id = constraint.id; schedulingUnit.scheduling_constraints_template = constraint.constraint.url; @@ -267,7 +271,7 @@ const ScheduleService = { for (const taskToUpdate in tasksToUpdate) { let task = tasks.find(task => { return task.name === taskToUpdate}); task.specifications_doc = observStrategy.template.tasks[taskToUpdate].specifications_doc; - if (task.name 
=== 'Target Observation') { + if (task.specifications_doc.station_groups) { task.specifications_doc.station_groups = station_groups; } delete task['duration']; diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/util.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/util.service.js index 5d629c58d0dbbc340bf084c0f57ff31f0868cee2..4b8d41ca2c1fd83d8476980d9ac4ef882a44af0c 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/services/util.service.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/services/util.service.js @@ -39,8 +39,10 @@ const UtilService = { console.error(error); } }, + /** Function to fetch sun timings from the backend for a single station. */ getSunTimings: async(timestamp, station) => { try { + station = station?station:"CS001"; let stationTimestamp = (station?`${station}-`:"") + timestamp; let localSunTimeMap = localStorage.getItem('SUN_TIME_MAP'); if (localSunTimeMap) { @@ -51,14 +53,9 @@ } else { localSunTimeMap = {}; } - // const url = `/api/sun_rise_and_set/${timestamp}`; - // const sunTimings = (await axios.get(url)).data; - let sunTimings = {sun_rise: moment.utc(moment(timestamp, "YYYYMMDDTHH:mm:ss")).format('YYYY-MM-DDT06:30:00.sssss')+"Z", - sun_set: moment.utc(moment(timestamp, "YYYYMMDDTHH:mm:ss")).format('YYYY-MM-DDT17:00:00.sssss')+"Z"}; - if (station==="CS001") { - sunTimings = {sun_rise: moment.utc(moment(timestamp, "YYYYMMDDTHH:mm:ss")).format('YYYY-MM-DDT05:30:00.sssss')+"Z", - sun_set: moment.utc(moment(timestamp, "YYYYMMDDTHH:mm:ss")).format('YYYY-MM-DDT16:00:00.sssss')+"Z"}; - } + const url = `/api/util/sun_rise_and_set?stations=${station?station:'CS001'}&timestamps=${timestamp}`; + const stationSunTimings = (await axios.get(url)).data; + let sunTimings = {sun_rise: stationSunTimings[station]['sunrise'][0], sun_set: stationSunTimings[station]['sunset'][0]}; localSunTimeMap[stationTimestamp] = sunTimings; localStorage.setItem('SUN_TIME_MAP', JSON.stringify(localSunTimeMap)); return sunTimings; diff --git a/SAS/TMSS/services/CMakeLists.txt b/SAS/TMSS/services/CMakeLists.txt index b1cdad1bc8906d3ba0302fe6c867a6eb8bff9df1..7ca90e1a5220ba1c278a45e986029e408c2506d6 100644 --- a/SAS/TMSS/services/CMakeLists.txt +++ b/SAS/TMSS/services/CMakeLists.txt @@ -1,4 +1,4 @@ -lofar_add_package(TMSSSubtaskSchedulingService subtask_scheduling) +lofar_add_package(TMSSSchedulingService scheduling) lofar_add_package(TMSSFeedbackHandlingService feedback_handling) lofar_add_package(TMSSPostgresListenerService tmss_postgres_listener) diff --git a/SAS/TMSS/services/scheduling/CMakeLists.txt b/SAS/TMSS/services/scheduling/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..34de269349de481543af911fa1ad28162fb07b2f --- /dev/null +++ b/SAS/TMSS/services/scheduling/CMakeLists.txt @@ -0,0 +1,11 @@ +lofar_package(TMSSSchedulingService 0.1 DEPENDS TMSSClient PyCommon pyparameterset PyMessaging) + +lofar_find_package(PythonInterp 3.4 REQUIRED) + +include(FindPythonModule) +find_python_module(astroplan REQUIRED) # pip3 install astroplan + +add_subdirectory(lib) +add_subdirectory(bin) +add_subdirectory(test) + diff --git a/SAS/TMSS/services/scheduling/bin/CMakeLists.txt b/SAS/TMSS/services/scheduling/bin/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..a84d2f43814392f07e0b938b47c91e386e95fe4f --- /dev/null +++ b/SAS/TMSS/services/scheduling/bin/CMakeLists.txt @@ -0,0 +1,4 @@ +lofar_add_bin_scripts(tmss_scheduling_service) + +# supervisord config files +lofar_add_sysconf_files(tmss_scheduling_service.ini
DESTINATION supervisord.d) diff --git a/SAS/TMSS/services/scheduling/bin/tmss_scheduling_service b/SAS/TMSS/services/scheduling/bin/tmss_scheduling_service new file mode 100755 index 0000000000000000000000000000000000000000..5f4d206b4a453635cb8f5ffcab9234b5b468da30 --- /dev/null +++ b/SAS/TMSS/services/scheduling/bin/tmss_scheduling_service @@ -0,0 +1,57 @@ +#!/usr/bin/python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + + +import os +from optparse import OptionParser +import logging +logger = logging.getLogger(__name__) + +from lofar.messaging.config import DEFAULT_BROKER, DEFAULT_BUSNAME + +def main(): + # make sure we run in UTC timezone + os.environ['TZ'] = 'UTC' + + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + + # Check the invocation arguments + parser = OptionParser('%prog [options]', + description='run the tmss_scheduling_service which automatically schedules the defined successor tasks for finished subtasks, and dynamically schedules the best next schedulable scheduling unit') + parser.add_option('-q', '--broker', dest='broker', type='string', default=DEFAULT_BROKER, help='Address of the messaging broker, default: %default') + parser.add_option('--exchange', dest='exchange', type='string', default=DEFAULT_BUSNAME, help='Name of the exchange on the messaging broker, default: %default') + parser.add_option('-t', '--tmss_client_credentials_id', dest='tmss_client_credentials_id', type='string', + default=os.environ.get("TMSS_CLIENT_DBCREDENTIALS", "TMSSClient"), + help='the credentials id for the file in ~/.lofar/dbcredentials which holds the TMSS http REST api url and credentials, default: %default') + (options, args) = parser.parse_args() + + os.environ["DJANGO_SETTINGS_MODULE"] = "lofar.sas.tmss.tmss.settings" + import django + django.setup() + + from lofar.common.util import waitForInterrupt + from lofar.sas.tmss.services.scheduling.subtask_scheduling import create_subtask_scheduling_service + from lofar.sas.tmss.services.scheduling.dynamic_scheduling import create_dynamic_scheduling_service + + with create_subtask_scheduling_service(options.exchange, options.broker, options.tmss_client_credentials_id): + with create_dynamic_scheduling_service(options.exchange, options.broker): + waitForInterrupt() + +if __name__ == '__main__': + main() diff --git a/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service.ini b/SAS/TMSS/services/scheduling/bin/tmss_scheduling_service.ini similarity index 100% rename from SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service.ini rename to SAS/TMSS/services/scheduling/bin/tmss_scheduling_service.ini diff --git a/SAS/TMSS/services/scheduling/lib/CMakeLists.txt b/SAS/TMSS/services/scheduling/lib/CMakeLists.txt new file mode
100644 index 0000000000000000000000000000000000000000..f4808987873979c7d600174fca802f167d1689a6 --- /dev/null +++ b/SAS/TMSS/services/scheduling/lib/CMakeLists.txt @@ -0,0 +1,13 @@ +lofar_find_package(PythonInterp 3.4 REQUIRED) +include(PythonInstall) + +set(_py_files + dynamic_scheduling.py + subtask_scheduling.py + constraints/__init__.py + constraints/template_constraints_v1.py + ) + +python_install(${_py_files} + DESTINATION lofar/sas/tmss/services/scheduling) + diff --git a/SAS/TMSS/services/scheduling/lib/constraints/__init__.py b/SAS/TMSS/services/scheduling/lib/constraints/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..49f9857f8f2630dee58271dd8b59596fe168f702 --- /dev/null +++ b/SAS/TMSS/services/scheduling/lib/constraints/__init__.py @@ -0,0 +1,238 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2020 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it +# and/or modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +# + +""" +This __init__ module for this constraints python package defines the 'API' to: + - filter a list of schedulable scheduling_units by checking their constraints: see method filter_scheduling_units_using_constraints + - sort a (possibly filtered) list of schedulable scheduling_units evaluating their constraints and computing a 'fitness' score: see method get_sorted_scheduling_units_scored_by_constraints +These main methods are used in the dynamic_scheduler to pick the next best scheduling unit, and compute the midterm schedule. + +Currently we have only one SchedulingConstraintsTemplate in TMSS, named 'constraints', version 1. +But, it is envisioned that we get more templates. +So, based on the template the actual filter and score methods are selected from a specific module. +By convention we use one module per template. Currently, we have and use only module template_constraints_v1.py + +If/When we add a new SchedulingConstraintsTemplate, then we should add a new module with the specific filter and score methods, +and add an extra 'if' in the strategy pattern used here.
(see below for the version 1 implementation) """ + +import logging +logger = logging.getLogger(__name__) +from datetime import datetime +from typing import NamedTuple + +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.sas.tmss.tmss.exceptions import * + +################## main data struct and methods ################## + +class ScoredSchedulingUnit(NamedTuple): + '''struct for collecting scores per constraint and a weighted_score for a scheduling_unit at the given start_time + ''' + scheduling_unit: models.SchedulingUnitBlueprint + scores: dict + start_time: datetime + weighted_score: float + +
+def filter_scheduling_units_using_constraints(scheduling_units: [models.SchedulingUnitBlueprint], lower_bound: datetime, upper_bound: datetime) -> [models.SchedulingUnitBlueprint]: + """ + Filter the given scheduling_units by whether their constraints are met within the given time window. + If one or more scheduling units can run only within this time window and not after it, then only these exclusively runnable scheduling units are returned. + :param lower_bound: evaluate and score the constraints at and after this lower bound. Returned units can start at or after lower_bound. + :param upper_bound: evaluate and score the constraints before this upper bound. Returned units can stop before upper_bound. + :param scheduling_units: evaluate/filter these scheduling_units. + Returns a list of scheduling_units for which the constraints are met within the given time window. + """ + runnable_scheduling_units = [] + runnable_exclusive_in_this_window_scheduling_units = [] + + for scheduling_unit in scheduling_units: + try: + if can_run_within_timewindow(scheduling_unit, lower_bound, upper_bound): + runnable_scheduling_units.append(scheduling_unit) + + # if a scheduling unit cannot run after this window, then apparently it is limited to run exclusively in this time window. + earliest_possible_start_time = get_earliest_possible_start_time(scheduling_unit, lower_bound) + if not can_run_after(scheduling_unit, earliest_possible_start_time+scheduling_unit.duration): + runnable_exclusive_in_this_window_scheduling_units.append(scheduling_unit) + except UnknownTemplateException as e: + # TODO: how do we notify the user that we cannot dynamically schedule this sub due to an unknown template? + # current pragmatic solution: log warning, and set sub state to error via its schedulable subtasks. + # This ensures that the unit is not schedulable anymore, and forces the user to take action. + # For example, the user can choose a different template, + # or submit a feature request to implement constraint solvers for this new template. + logger.warning(e) + for subtask in models.Subtask.independent_subtasks().filter(task_blueprint__scheduling_unit_blueprint_id=scheduling_unit.id).all(): + subtask.status = models.SubtaskState.objects.get(value=models.SubtaskState.Choices.ERROR.value) + subtask.save() + + # if we have scheduling unit(s) that can run exclusively in this time window (and not afterwards), then return only these.
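+ # e.g. if unit A has a 'before' deadline that falls within this window while units B and C + # can also run later, only [A] is returned, so a deadline-bound unit is not starved by + # otherwise higher-scoring but less constrained units.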
+ if runnable_exclusive_in_this_window_scheduling_units: + return runnable_exclusive_in_this_window_scheduling_units + + # there are no exclusive units, so return all runnable_scheduling_units + return runnable_scheduling_units + +
+def get_best_scored_scheduling_unit_scored_by_constraints(scheduling_units: [models.SchedulingUnitBlueprint], lower_bound_start_time:datetime, upper_bound_stop_time:datetime) -> ScoredSchedulingUnit: + """ + get the best scored schedulable scheduling_unit which can run within the given time window from the given scheduling_units. + :param lower_bound_start_time: evaluate and score the constraints at and after lower_bound_start_time. The returned unit has a start_time guaranteed at or after lower_bound_start_time. + :param upper_bound_stop_time: evaluate and score the constraints before upper_bound_stop_time. The returned unit has a stop_time guaranteed before upper_bound_stop_time. + :param scheduling_units: evaluate these scheduling_units. + Returns a ScoredSchedulingUnit struct with the best next schedulable scheduling unit and its proposed start_time where it best fits its constraints. + """ + sorted_scored_scheduling_units = sort_scheduling_units_scored_by_constraints(scheduling_units, lower_bound_start_time, upper_bound_stop_time) + + if sorted_scored_scheduling_units: + # they are sorted best to worst, so return/use the first. + best_scored_scheduling_unit = sorted_scored_scheduling_units[0] + return best_scored_scheduling_unit + + return None + +
+def sort_scheduling_units_scored_by_constraints(scheduling_units: [models.SchedulingUnitBlueprint], lower_bound_start_time: datetime, upper_bound_stop_time: datetime) -> [ScoredSchedulingUnit]: + """ + Compute the score and proposed start_time for all given scheduling_units. Return them sorted by their weighted_score, best first. + :param lower_bound_start_time: evaluate and score the constraints at and after lower_bound_start_time. Each returned unit has a start_time guaranteed at or after lower_bound_start_time. + :param upper_bound_stop_time: evaluate and score the constraints before upper_bound_stop_time. Each returned unit has a stop_time guaranteed before upper_bound_stop_time. + :param scheduling_units: evaluate these scheduling_units. + Returns a list of ScoredSchedulingUnit structs with the score details, a weighted_score and a proposed start_time where each unit best fits its constraints. + """ + + scored_scheduling_units = [] + for scheduling_unit in scheduling_units: + try: + scored_scheduling_unit = compute_scores(scheduling_unit, lower_bound_start_time, upper_bound_stop_time) + + # check and ensure that the proposed start_time is within the required [lower_bound_start_time, upper_bound_stop_time] window.
+ schedulable_unit = scored_scheduling_unit.scheduling_unit + proposed_start_time = scored_scheduling_unit.start_time + proposed_stop_time = proposed_start_time + schedulable_unit.duration + + if proposed_start_time < lower_bound_start_time: + raise DynamicSchedulingException("The best next schedulable scheduling_unit id=%s has a proposed start_time '%s' before the given lower bound '%s'" % ( + schedulable_unit.id, proposed_start_time, lower_bound_start_time)) + + if proposed_stop_time > upper_bound_stop_time: + raise DynamicSchedulingException("The best next schedulable scheduling_unit id=%s has a proposed stop_time '%s' after the given upper bound '%s'" % ( + schedulable_unit.id, proposed_stop_time, upper_bound_stop_time)) + + scored_scheduling_units.append(scored_scheduling_unit) + except (UnknownTemplateException, DynamicSchedulingException) as e: + # TODO: how do we notify the user that we cannot dynamically schedule this sub due to an unknown template? + # current pragmatic solution: log warning, and set sub state to error via its schedulable subtasks. + # This ensures that the unit is not schedulable anymore, and forces the user to take action. + # For example, the user can choose a different template, + # or submit a feature request to implement constraint solvers for this new template. + logger.warning(e) + for subtask in models.Subtask.independent_subtasks().filter(task_blueprint__scheduling_unit_blueprint_id=scheduling_unit.id).all(): + subtask.status = models.SubtaskState.objects.get(value=models.SubtaskState.Choices.ERROR.value) + subtask.save() + + return sorted(scored_scheduling_units, key=lambda x: x.weighted_score, reverse=True) + +
+################## helper methods ################################################################# +# # +# these helper methods are selected by a strategy pattern based on the template name and version # +# The actual implementation can be found in the other module(s) in this package # +# Currently we only have one template with one implementation in template_constraints_v1.py # +# # +################################################################################################### + +def can_run_within_timewindow(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool: + '''Check if the given scheduling_unit can run somewhere within the given time window depending on the sub's constraints template/doc.''' + constraints_template = scheduling_unit.draft.scheduling_constraints_template + + # choose appropriate method based on template (strategy pattern), or raise + if constraints_template.name == 'constraints' and constraints_template.version == 1: + # import here to prevent circular imports. Do not worry about performance loss, because Python only imports once and then uses a cache. + from . import template_constraints_v1 + return template_constraints_v1.can_run_within_timewindow(scheduling_unit, lower_bound, upper_bound) + + # TODO: if we get more constraint templates or versions, then add a check here and import and use the new module with the constraint methods for that specific template.
(strategy pattern) + + raise UnknownTemplateException("Cannot check if scheduling_unit id=%s can run between '%s' and '%s', because we have no constraint checker for scheduling constraints template '%s' version=%s" % ( + scheduling_unit.id, lower_bound, upper_bound, constraints_template.name, constraints_template.version)) + +
+def can_run_after(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime) -> bool: + '''Check if the given scheduling_unit can run somewhere after the given lower bound timestamp depending on the sub's constraints template/doc.''' + constraints_template = scheduling_unit.draft.scheduling_constraints_template + + # choose appropriate method based on template (strategy pattern), or raise + if constraints_template.name == 'constraints' and constraints_template.version == 1: + # import here to prevent circular imports. Do not worry about performance loss, because Python only imports once and then uses a cache. + from . import template_constraints_v1 + return template_constraints_v1.can_run_after(scheduling_unit, lower_bound) + + # TODO: if we get more constraint templates or versions, then add a check here and import and use the new module with the constraint methods for that specific template. (strategy pattern) + + raise UnknownTemplateException("Cannot check if scheduling_unit id=%s can run after '%s', because we have no constraint checker for scheduling constraints template '%s' version=%s" % ( + scheduling_unit.id, lower_bound, constraints_template.name, constraints_template.version)) + + +
+def compute_scores(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound:datetime, upper_bound:datetime) -> ScoredSchedulingUnit: + '''Compute the "fitness" scores per constraint for the given scheduling_unit at the given start time depending on the sub's constraints template/doc.''' + constraints_template = scheduling_unit.draft.scheduling_constraints_template + + # choose appropriate method based on template (strategy pattern), or raise + if constraints_template.name == 'constraints' and constraints_template.version == 1: + # import here to prevent circular imports. Do not worry about performance loss, because Python only imports once and then uses a cache. + from . import template_constraints_v1 + return template_constraints_v1.compute_scores(scheduling_unit, lower_bound, upper_bound) + + # TODO: if we get more constraint templates or versions, then add a check here and import and use the new module with the constraint methods for that specific template. (strategy pattern) + + raise UnknownTemplateException("Cannot compute scores for scheduling_unit id=%s, because we have no score computation method for scheduling constraints template '%s' version=%s" % ( + scheduling_unit.id, constraints_template.name, constraints_template.version)) + +
+def get_earliest_possible_start_time(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime) -> datetime: + '''determine the earliest possible start_time for the given scheduling unit, taking into account all its constraints''' + constraints_template = scheduling_unit.draft.scheduling_constraints_template + + # choose appropriate method based on template (strategy pattern), or raise + if constraints_template.name == 'constraints' and constraints_template.version == 1: + # import here to prevent circular imports. Do not worry about performance loss, because Python only imports once and then uses a cache.
from . import template_constraints_v1 + return template_constraints_v1.get_earliest_possible_start_time(scheduling_unit, lower_bound) + + # TODO: if we get more constraint templates or versions, then add a check here and import and use the new module with the constraint methods for that specific template. (strategy pattern) + + raise UnknownTemplateException("Cannot compute earliest possible start_time for scheduling_unit id=%s, because we have no constraint checker for scheduling constraints template '%s' version=%s" % ( + scheduling_unit.id, constraints_template.name, constraints_template.version)) + +
+def get_min_earliest_possible_start_time(scheduling_units: [models.SchedulingUnitBlueprint], lower_bound: datetime) -> datetime: + '''determine the minimum earliest possible start time over all given scheduling units, taking into account all their constraints''' + try: + return min(get_earliest_possible_start_time(scheduling_unit, lower_bound) for scheduling_unit in scheduling_units) + except ValueError: + # an empty sequence of scheduling_units makes min() raise; fall back to the given lower bound + return lower_bound
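+ +# A minimal usage sketch of this package's 'API' (illustration only; 'units' is assumed to be +# a list of schedulable SchedulingUnitBlueprints, and the one-day window is just an example): +# +# from datetime import datetime, timedelta +# lower = get_min_earliest_possible_start_time(units, datetime.utcnow()) +# upper = lower + timedelta(days=1) +# runnable = filter_scheduling_units_using_constraints(units, lower, upper) +# best = get_best_scored_scheduling_unit_scored_by_constraints(runnable, lower, upper)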
+ + + diff --git a/SAS/TMSS/services/scheduling/lib/constraints/template_constraints_v1.py b/SAS/TMSS/services/scheduling/lib/constraints/template_constraints_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..6eb1f5084d741164d127812a55da7729e379ad7b --- /dev/null +++ b/SAS/TMSS/services/scheduling/lib/constraints/template_constraints_v1.py @@ -0,0 +1,237 @@ +#!/usr/bin/env python3 + +# template_constraints_v1.py +# +# Copyright (C) 2020 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it +# and/or modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +# +# $Id: $ + +""" +Constraint checkers, scorers and earliest-possible-start-time estimators for the +SchedulingConstraintsTemplate 'constraints' version 1. +""" + +import logging +logger = logging.getLogger(__name__) +from datetime import datetime, timedelta +from dateutil import parser + +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.sas.tmss.tmss.tmssapp.conversions import create_astroplan_observer_for_station, Time, timestamps_and_stations_to_sun_rise_and_set, coordinates_and_timestamps_to_separation_from_bodies + +from . import ScoredSchedulingUnit + +def can_run_within_timewindow(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool: + '''determine if the given scheduling_unit can run within the given time window, evaluating all constraints from the "constraints" version 1 template''' + if has_manual_scheduler_constraint(scheduling_unit): + return False + + if not can_run_within_timewindow_with_time_constraints(scheduling_unit, lower_bound, upper_bound): + return False + + if not can_run_within_timewindow_with_sky_constraints(scheduling_unit, lower_bound, upper_bound): + return False + + if not can_run_within_timewindow_with_daily_constraints(scheduling_unit, lower_bound, upper_bound): + return False + + return True + +
+def can_run_after(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime) -> bool: + '''Check if the given scheduling_unit can run somewhere after the given lower bound timestamp depending on the sub's constraints doc.''' + constraints = scheduling_unit.draft.scheduling_constraints_doc + if 'before' in constraints['time']: + before = parser.parse(constraints['time']['before'], ignoretz=True) + # the unit must still be able to complete before its 'before' deadline + return before >= lower_bound + scheduling_unit.duration + + return True + +# expose the template API which the strategy pattern in __init__.py dispatches to, and keep the other helpers hidden for this module's importers who do not need these implementation details +__all__ = ['can_run_within_timewindow', 'can_run_after', 'compute_scores', 'get_earliest_possible_start_time'] + +
+def has_manual_scheduler_constraint(scheduling_unit: models.SchedulingUnitBlueprint) -> bool: + '''evaluate the scheduler constraint. Should this unit be manually scheduled?''' + constraints = scheduling_unit.draft.scheduling_constraints_doc + return constraints.get('scheduler', '') == 'manual' + +
+def can_run_within_timewindow_with_daily_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool: + '''evaluate the daily constraint''' + constraints = scheduling_unit.draft.scheduling_constraints_doc + if not (constraints['daily']['require_day'] or constraints['daily']['require_night']): + # no day/night restrictions, can run any time + return True + + if constraints['daily']['require_day'] or constraints['daily']['require_night']: + # TODO: TMSS-254 and TMSS-255 + # TODO: take avoid_twilight into account + # Please note that this first crude proof of concept treats sunset/sunrise as 'events', + # whereas in our definition they are transition periods. See: TMSS-435 + + # Ugly code. Should be improved. Works for demo. + # create a series of timestamps in the window of opportunity, and evaluate whether they all fall during day or night
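+ # e.g. with the 8-hour sampling below, a 20h window starting at t0 is checked at + # t0, t0+8h, t0+16h and t0+20h (the end point is always included).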
+ possible_start_time = get_earliest_possible_start_time(scheduling_unit, lower_bound) + + # TODO: use the specified total observation duration, and ignore pipelines which don't care about day/night + possible_stop_time = possible_start_time + scheduling_unit.duration + timestamps = [possible_start_time] + while timestamps[-1] < possible_stop_time - timedelta(hours=8): + timestamps.append(timestamps[-1] + timedelta(hours=8)) + timestamps.append(possible_stop_time) + + LOFAR_CENTER_OBSERVER = create_astroplan_observer_for_station('CS002') + if constraints['daily']['require_night'] and all(LOFAR_CENTER_OBSERVER.is_night(timestamp) for timestamp in timestamps): + return True + + if constraints['daily']['require_day'] and all(not LOFAR_CENTER_OBSERVER.is_night(timestamp) for timestamp in timestamps): + return True + + return False + +
+def can_run_within_timewindow_with_time_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool: + '''evaluate the time constraint(s)''' + constraints = scheduling_unit.draft.scheduling_constraints_doc + # TODO: TMSS-244 (and more?), evaluate the constraints in constraints['time'] + if has_manual_scheduler_constraint(scheduling_unit) and 'at' in constraints['time']: + at = parser.parse(constraints['time']['at'], ignoretz=True) + return at >= lower_bound and at+scheduling_unit.duration <= upper_bound + + if 'before' in constraints['time']: + before = parser.parse(constraints['time']['before'], ignoretz=True) + # the unit must be able to start at the window's lower bound and still complete before its 'before' deadline + return before >= lower_bound + scheduling_unit.duration + + if 'after' in constraints['time']: + after = parser.parse(constraints['time']['after'], ignoretz=True) + # the unit can start at 'after' (or at the window's lower bound if that is later) and must still complete within the window + return max(lower_bound, after) + scheduling_unit.duration <= upper_bound + + # TODO: implement the 'between' constraint, see the sketch below. + + return True # for now, ignore the remaining time constraints.
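+ +# A possible checker for the not-yet-implemented 'between' constraint; sketch only, assuming +# (hypothetically) that it holds a list of {'from': <isotime>, 'to': <isotime>} windows: +# +# if 'between' in constraints['time']: +# for between in constraints['time']['between']: +# window_start = parser.parse(between['from'], ignoretz=True) +# window_stop = parser.parse(between['to'], ignoretz=True) +# if window_start >= lower_bound and window_stop <= upper_bound and \ +# window_stop - window_start >= scheduling_unit.duration: +# return True +# return False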
+ + +def can_run_within_timewindow_with_sky_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool: + '''evaluate the sky constraint(s)''' + constraints = scheduling_unit.draft.scheduling_constraints_doc + # TODO: TMSS-245 TMSS-250 (and more?), evaluate the constraints in constraints['sky'] + # maybe even split this method into sub methods for the very distinct sky constraints: min_calibrator_elevation, min_target_elevation, transit_offset & min_distance + + beam = scheduling_unit.requirements_doc['tasks']['Observation']['specifications_doc']['tile_beam'] + angle1 = beam['angle1'] + angle2 = beam['angle2'] + direction_type = beam['direction_type'] + if "sky" in constraints and 'min_distance' in constraints['sky']: + distances = coordinates_and_timestamps_to_separation_from_bodies(angle1=angle1, angle2=angle2, direction_type=direction_type, timestamps=(lower_bound, upper_bound), bodies=tuple(constraints['sky']['min_distance'].keys())) + for body, angles_at_timestamps in distances.items(): + for timestamp, angle in angles_at_timestamps.items(): + min_distance = constraints['sky']['min_distance'][body] + if angle.rad < min_distance: + logger.info('Distance=%s from body=%s does not meet min_distance=%s constraint at timestamp=%s' % (angle.rad, body, min_distance, timestamp)) + return False + + return True + +
+def get_earliest_possible_start_time(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime) -> datetime: + '''determine the earliest possible start_time at or after lower_bound for the given scheduling unit, evaluating the version 1 constraints''' + constraints = scheduling_unit.draft.scheduling_constraints_doc + + try: + if has_manual_scheduler_constraint(scheduling_unit) and 'at' in constraints['time']: + at = parser.parse(constraints['time']['at'], ignoretz=True) + return at + + if 'after' in constraints['time']: + return parser.parse(constraints['time']['after'], ignoretz=True) + + if constraints['daily']['require_day'] or constraints['daily']['require_night'] or constraints['daily']['avoid_twilight']: + + # TODO: TMSS-254 and TMSS-255 + # TODO: make sure constraints are met by all stations of this observation, not just CS002. + sun_events = timestamps_and_stations_to_sun_rise_and_set(timestamps=(lower_bound,lower_bound+timedelta(days=1)), stations=('CS002',))['CS002'] + day = sun_events['day'][0] + night = sun_events['night'][0] + next_day = sun_events['day'][1] + next_night = sun_events['night'][1] + if constraints['daily']['require_day']: + # TODO: Do we need to check for observations that are too long and can e.g. only be run in summer? + if lower_bound+scheduling_unit.duration > day['end']: + return next_day['start'] + if lower_bound >= day['start']: + return lower_bound + return day['start'] + + if constraints['daily']['require_night']: + if lower_bound + scheduling_unit.duration > night['end']: + return next_night['start'] + if lower_bound >= night['start']: + return lower_bound + return night['start'] + + if constraints['daily']['avoid_twilight']: + if lower_bound + scheduling_unit.duration < day['end']: + if lower_bound >= day['start']: + return lower_bound + return day['start'] + if lower_bound + scheduling_unit.duration < night['end']: + if lower_bound >= night['start']: + return lower_bound + return night['start'] + return next_day['start'] + + except Exception as e: + logger.exception(str(e)) + + # no constraints dictating the start time? make a guesstimate. + return lower_bound
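+ +# For reference: timestamps_and_stations_to_sun_rise_and_set(...) returns, per station, a dict +# of event-interval lists (this is also the shape mocked in t_dynamic_scheduling.py), e.g.: +# {'CS002': {'sunrise': [{'start': ..., 'end': ...}, ...], 'day': [...], 'sunset': [...], 'night': [...]}}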
+ + +def compute_scores(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound:datetime, upper_bound:datetime) -> ScoredSchedulingUnit: + '''Compute the "fitness" scores per constraint for the given scheduling_unit at the given start time depending on the sub's constraints doc.''' + constraints = scheduling_unit.draft.scheduling_constraints_doc + + # TODO: add compute_scores methods for each type of constraint + # TODO: take start_time into account. For example, an LST constraint yields a better score when the start time is such that the center of the obs is at the constrained LST. + # TODO: TMSS-??? (and more?), compute score using the constraints in constraints['daily'] + # TODO: TMSS-244 (and more?), compute score using the constraints in constraints['time'] + # TODO: TMSS-245 TMSS-250 (and more?), compute score using the constraints in constraints['sky'] + + # for now (as a proof of concept and sort of example), just return 1's + scores = {'daily': 1.0, + 'time': 1.0, + 'sky': 1.0 } + + # add "common" scores which do not depend on constraints, such as project rank and creation date + # TODO: should be normalized! + scores['project_rank'] = scheduling_unit.draft.scheduling_set.project.priority_rank + #scores['age'] = (datetime.utcnow() - scheduling_unit.created_at).total_seconds() + + try: + # TODO: apply weights. Needs some new weight model in django, probably linked to constraints_template. + # for now, just average the scores + weighted_score = sum(scores.values())/len(scores) + except Exception: + weighted_score = 1 + + return ScoredSchedulingUnit(scheduling_unit=scheduling_unit, + scores=scores, + weighted_score=weighted_score, + start_time=get_earliest_possible_start_time(scheduling_unit, lower_bound)) + diff --git a/SAS/TMSS/services/scheduling/lib/dynamic_scheduling.py b/SAS/TMSS/services/scheduling/lib/dynamic_scheduling.py new file mode 100644 index 0000000000000000000000000000000000000000..a15475960a3e94e18d3dbe0afbf2bd7c93dc3fc5 --- /dev/null +++ b/SAS/TMSS/services/scheduling/lib/dynamic_scheduling.py @@ -0,0 +1,328 @@ +#!/usr/bin/env python3 + +# dynamic_scheduling.py +# +# Copyright (C) 2020 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it +# and/or modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+# +# $Id: $ + +""" +The TMSS dynamic scheduling service: it finds the best next schedulable scheduling unit, +schedules it, and assigns estimated start/stop times to the remaining schedulable units (the mid-term schedule). +""" + +import os +import logging +logger = logging.getLogger(__name__) +from datetime import datetime, timedelta, time + +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.sas.tmss.tmss.tmssapp.tasks import schedule_independent_subtasks_in_scheduling_unit_blueprint, unschedule_subtasks_in_scheduling_unit_blueprint +from lofar.sas.tmss.tmss.tmssapp.subtasks import update_subtasks_start_times_for_scheduling_unit, clear_defined_subtasks_start_stop_times_for_scheduling_unit +from lofar.sas.tmss.client.tmssbuslistener import * +from lofar.common.datetimeutils import round_to_second_precision +from threading import Thread, Event + +from lofar.sas.tmss.services.scheduling.constraints import * + +# LOFAR needs to have a gap in between observations to (re)initialize hardware. +DEFAULT_INTER_OBSERVATION_GAP = timedelta(seconds=60) + +################## core dynamic scheduling methods ################################################ +# # +# This module starts with the core dynamic scheduling methods which are used in the dynamic # +# scheduling service. These high level methods only filter/score/sort in a generic way. # +# The detailed concrete filter/score/sort methods are picked by a strategy pattern in the # +# constraints package based on each scheduling unit's scheduling_constraints template. # +# # +################################################################################################### + +def find_best_next_schedulable_unit(scheduling_units:[models.SchedulingUnitBlueprint], lower_bound_start_time: datetime, upper_bound_stop_time: datetime) -> ScoredSchedulingUnit: + """ + find the best schedulable scheduling_unit which can run within the given time window from the given scheduling_units. + :param lower_bound_start_time: evaluate the constraints at and after lower_bound_start_time. The returned unit has a start_time guaranteed at or after lower_bound_start_time. + :param upper_bound_stop_time: evaluate the constraints before upper_bound_stop_time. The returned unit has a stop_time guaranteed before upper_bound_stop_time. + :param scheduling_units: evaluate these scheduling_units. + Returns a ScoredSchedulingUnit struct with the best next schedulable scheduling unit and its proposed start_time where it best fits its constraints. + """ + # ensure upper is greater than or equal to lower + upper_bound_stop_time = max(lower_bound_start_time, upper_bound_stop_time) + + filtered_scheduling_units = filter_scheduling_units_using_constraints(scheduling_units, lower_bound_start_time, upper_bound_stop_time) + + if filtered_scheduling_units: + best_scored_scheduling_unit = get_best_scored_scheduling_unit_scored_by_constraints(filtered_scheduling_units, lower_bound_start_time, upper_bound_stop_time) + return best_scored_scheduling_unit + + # no filtered scheduling units found... + logger.debug("No schedulable scheduling units found which meet the requirements between '%s' and '%s'", lower_bound_start_time, upper_bound_stop_time) + return None
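+ +# A minimal usage sketch (names as defined in this module; the one-day window is just an example): +# +# candidates = get_schedulable_scheduling_units() +# now = datetime.utcnow() +# best = find_best_next_schedulable_unit(candidates, now, now + timedelta(days=1)) +# if best: +# schedule_independent_subtasks_in_scheduling_unit_blueprint(best.scheduling_unit, start_time=best.start_time)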
+ + +def schedule_next_scheduling_unit() -> models.SchedulingUnitBlueprint: + '''find the best next schedulable scheduling unit and try to schedule it. + Overlapping existing scheduled units are unscheduled if their score is lower. + :return: the scheduled scheduling unit.''' + + # --- setup of needed variables --- + schedulable_units = get_schedulable_scheduling_units() + + # estimate the lower_bound_start_time + lower_bound_start_time = get_min_earliest_possible_start_time(schedulable_units, datetime.utcnow()) + + # estimate the upper_bound_stop_time, which may give us a small time window before any next scheduled unit, or a default window of a day + try: + upper_bound_stop_time = max(su.start_time for su in get_scheduled_scheduling_units(lower=lower_bound_start_time, upper=lower_bound_start_time + timedelta(days=1))) + except ValueError: + upper_bound_stop_time = lower_bound_start_time + timedelta(days=1) + + # no need to irritate the user in log files with subsecond scheduling precision + lower_bound_start_time = round_to_second_precision(lower_bound_start_time) + upper_bound_stop_time = max(round_to_second_precision(upper_bound_stop_time), lower_bound_start_time) + + # --- core routine --- + while lower_bound_start_time < upper_bound_stop_time: + try: + # try to find the best next scheduling_unit + logger.info("schedule_next_scheduling_unit: searching for best scheduling unit to schedule between '%s' and '%s'", lower_bound_start_time, upper_bound_stop_time) + best_scored_scheduling_unit = find_best_next_schedulable_unit(schedulable_units, lower_bound_start_time, upper_bound_stop_time) + if best_scored_scheduling_unit: + best_scheduling_unit = best_scored_scheduling_unit.scheduling_unit + best_scheduling_unit_score = best_scored_scheduling_unit.weighted_score + best_start_time = best_scored_scheduling_unit.start_time + + # make start_time "look nice" for us humans + best_start_time = round_to_second_precision(best_start_time) + + logger.info("schedule_next_scheduling_unit: found best candidate id=%s '%s' weighted_score=%s start_time=%s", + best_scheduling_unit.id, best_scheduling_unit.name, best_scheduling_unit_score, best_start_time) + + if unschedule_blocking_scheduled_units_if_needed_and_possible(best_scored_scheduling_unit): + # no (old) scheduled scheduling_units in the way, so schedule our candidate! + scheduled_scheduling_unit = schedule_independent_subtasks_in_scheduling_unit_blueprint(best_scheduling_unit, start_time=best_start_time) + + logger.info("schedule_next_scheduling_unit: scheduled best candidate id=%s '%s' score=%s start_time=%s", + best_scheduling_unit.id, best_scheduling_unit.name, best_scheduling_unit_score, best_start_time) + return scheduled_scheduling_unit + + except SubtaskSchedulingException as e: + logger.error("Could not schedule scheduling_unit id=%s name='%s'. Error: %s", best_scheduling_unit.id, best_scheduling_unit.name, e) + + # nothing was found, or an error occurred. + # search again (loop) with the remaining schedulable_units and a new lower_bound_start_time + # (advancing the lower bound by an hour each iteration guarantees the while loop terminates) + schedulable_units = get_schedulable_scheduling_units() + lower_bound_start_time = get_min_earliest_possible_start_time(schedulable_units, lower_bound_start_time + timedelta(hours=1))
+ + +def assign_start_stop_times_to_schedulable_scheduling_units(lower_bound_start_time: datetime): + '''try to assign estimated start/stop times to all remaining schedulable scheduling units, forming a mid-term schedule''' + logger.info("Estimating mid-term schedule...") + + scheduling_units = get_schedulable_scheduling_units() + + upper_bound_stop_time = lower_bound_start_time + timedelta(days=365) + + # update the start_times of the remaining ones (so they form a queue, and can be visualized in a timeline) + while scheduling_units and lower_bound_start_time < upper_bound_stop_time: + best_scored_scheduling_unit = find_best_next_schedulable_unit(scheduling_units, lower_bound_start_time, upper_bound_stop_time) + + if best_scored_scheduling_unit: + scheduling_unit = best_scored_scheduling_unit.scheduling_unit + start_time = round_to_second_precision(best_scored_scheduling_unit.start_time) + logger.info("mid-term schedule: next scheduling unit id=%s '%s' start_time=%s", scheduling_unit.id, scheduling_unit.name, start_time) + update_subtasks_start_times_for_scheduling_unit(scheduling_unit, start_time) + + # keep track of the lower_bound_start_time based on the last unit's stop_time plus the inter-observation gap + lower_bound_start_time = scheduling_unit.stop_time + DEFAULT_INTER_OBSERVATION_GAP + + scheduling_units.remove(scheduling_unit) + else: + # search again in a later timeslot + min_earliest_possible_start_time = get_min_earliest_possible_start_time(scheduling_units, lower_bound_start_time+timedelta(minutes=10)) + if min_earliest_possible_start_time > lower_bound_start_time: + lower_bound_start_time = min_earliest_possible_start_time + else: + # we cannot advance the lower bound anymore, so searching further is pointless + logger.warning("Cannot assign start/stop times to remaining scheduling units for mid-term schedule...") + for su in scheduling_units: + logger.warning("Remaining scheduling unit: id=%s '%s'", su.id, su.name) + + # clear start/stop times, so they don't show up in the timeline, + # and we can filter/show them in a separate list, so the user can tinker with the constraints + clear_defined_subtasks_start_stop_times_for_scheduling_unit(su) + break + + logger.info("Estimating mid-term schedule... finished")
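+ +# Worked example of the queueing above: if the best unit is placed at 12:00:00 and runs for +# 10 minutes, the next search window starts at its stop_time plus DEFAULT_INTER_OBSERVATION_GAP, +# i.e. at 12:11:00.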
finished") + + +def do_dynamic_schedule() -> models.SchedulingUnitBlueprint: + '''do a full update of the schedule: schedule next scheduling unit and assign start stop times to remaining schedulable scheduling units''' + logger.info("Updating dynamic schedule....") + scheduled_unit = schedule_next_scheduling_unit() + + # determine next possible start time for remaining scheduling_units + if scheduled_unit: + lower_bound_start_time = scheduled_unit.stop_time + DEFAULT_INTER_OBSERVATION_GAP + else: + try: + scheduled_units = get_scheduled_scheduling_units(datetime.utcnow(), datetime.utcnow()) + lower_bound_start_time = max([s.stop_time for s in scheduled_units if s.stop_time is not None]) + DEFAULT_INTER_OBSERVATION_GAP + except: + lower_bound_start_time = datetime.utcnow() + + # round up to next nearest second + lower_bound_start_time += timedelta(microseconds=1000000-lower_bound_start_time.microsecond) + + # determine mid-term schedule by assigning start/stop times to remaining schedulable units using the same search strategy + assign_start_stop_times_to_schedulable_scheduling_units(lower_bound_start_time) + logger.info("Finished updating dynamic schedule") + + return scheduled_unit + + +################## service/messagebug handler class ############################################### + +class TMSSDynamicSchedulingMessageHandler(TMSSEventMessageHandler): + ''' + The TMSSDynamicSchedulingMessageHandler reacts to TMSS EventMessages by triggering a new full update of the dynamic + schedule. + The actual schedule-update method runs on a backround thread, and can take some time to complete ranging from a + few seconds to several minutes. In the mean time new EventMessages may be received. These are handled by raising a flag + that signals the schedule-update-thread that a new full update is needed. This way, a burst of Events results in + a single update, and it also ensures that we always compute the schedule with the latest data. + ''' + + def __init__(self): + super().__init__(log_event_messages=True) + self._scheduling_thread = None + self._scheduling_thread_running = False + self._do_schedule_event = Event() + + def start_handling(self): + # start the background thread which waits until the _do_schedule_event event is set upon receiving to the correct TMSS EVentMessages. + self._scheduling_thread = Thread(target=TMSSDynamicSchedulingMessageHandler._scheduling_loop, kwargs={'self':self}) + self._scheduling_thread.daemon = True + self._scheduling_thread_running = True + self._scheduling_thread.start() + super().start_handling() + + def stop_handling(self): + self._scheduling_thread_running = False + self._scheduling_thread.join() + self._scheduling_thread = None + super().stop_handling() + + def onSchedulingUnitBlueprintStatusChanged(self, id: int, status: str): + if status in ["schedulable", "observed", "finished", "cancelled"]: + logger.info("onSchedulingUnitBlueprintStatusChanged(id=%s, status=%s): triggering update of dynamic schedule...", id, status) + # scheduling takes a long time, longer then creating many scheduling units in bulk + # so, we do not create a complete new schedule for each new unit, + # but we only trigger a new schedule update. + # This way we are sure that the latest units are always taken into account while scheduling, but we do not waste cpu cylces. 
+ self._do_schedule_event.set() + + def onSchedulingUnitDraftConstraintsUpdated(self, id: int, scheduling_constraints_doc: dict): + affected_scheduling_units = models.SchedulingUnitBlueprint.objects.filter(draft__id=id).all() + for scheduling_unit in affected_scheduling_units: + if scheduling_unit.status == 'scheduled': + unschedule_subtasks_in_scheduling_unit_blueprint(scheduling_unit) + + self._do_schedule_event.set() + + def onSettingUpdated(self, name: str, value: bool): + if name == models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value and value: + logger.info("%s was set to %s: triggering update of dynamic schedule...", name, value) + self._do_schedule_event.set() + + def _scheduling_loop(self): + while self._scheduling_thread_running: + if self._do_schedule_event.wait(timeout=10): + self._do_schedule_event.clear() + try: + if models.Setting.objects.get(name=models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value).value: + do_dynamic_schedule() + else: + logger.warning("Skipping update of dynamic schedule because the setting %s=%s", models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value, models.Setting.objects.get(name=models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value).value) + except Exception as e: + logger.exception(str(e)) + # just continue processing events. better luck next time... + + +def create_dynamic_scheduling_service(exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER): + return TMSSBusListener(handler_type=TMSSDynamicSchedulingMessageHandler, + handler_kwargs=None, + exchange=exchange, + broker=broker) + + + + + +################## helper methods ################################################################# + +def get_schedulable_scheduling_units() -> [models.SchedulingUnitBlueprint]: + '''get a list of all schedulable scheduling_units''' + defined_independent_subtasks = models.Subtask.independent_subtasks().filter(state__value='defined') + defined_independent_subtask_ids = defined_independent_subtasks.values('task_blueprint__scheduling_unit_blueprint_id').distinct().all() + scheduling_units = models.SchedulingUnitBlueprint.objects.filter(id__in=defined_independent_subtask_ids).select_related('draft', 'draft__scheduling_constraints_template').all() + return [su for su in scheduling_units if su.status == 'schedulable'] + +
+def get_scheduled_scheduling_units(lower:datetime=None, upper:datetime=None) -> [models.SchedulingUnitBlueprint]: + '''get a list of all scheduled scheduling_units with a scheduled subtask overlapping the given [lower, upper] window''' + scheduled_subtasks = models.Subtask.objects.filter(state__value='scheduled') + if lower is not None: + scheduled_subtasks = scheduled_subtasks.filter(stop_time__gte=lower) + if upper is not None: + scheduled_subtasks = scheduled_subtasks.filter(start_time__lte=upper) + return list(models.SchedulingUnitBlueprint.objects.filter(id__in=scheduled_subtasks.values('task_blueprint__scheduling_unit_blueprint_id').distinct()).all())
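+ +# Note on the overlap filters above: a subtask with interval [start_time, stop_time] overlaps +# the window [lower, upper] exactly when stop_time >= lower and start_time <= upper, which is +# what the two QuerySet filters implement.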
+ +def unschedule_blocking_scheduled_units_if_needed_and_possible(candidate: ScoredSchedulingUnit) -> bool: + '''check if there are any already scheduled units in the way, and unschedule them if allowed. + Return True if nothing is blocking anymore.''' + # check any previously scheduled units, and unschedule if needed/allowed + scheduled_scheduling_units = get_scheduled_scheduling_units(lower=candidate.start_time, + upper=candidate.start_time + candidate.scheduling_unit.duration) + + # check if we can and need to unschedule the blocking units + for scheduled_scheduling_unit in scheduled_scheduling_units: + scheduled_score = compute_scores(scheduled_scheduling_unit, candidate.start_time, candidate.start_time + candidate.scheduling_unit.duration) + + if candidate.weighted_score > scheduled_score.weighted_score: + # TODO: also check if the scheduled_scheduling_unit is manually/dynamically scheduled + logger.info("unscheduling id=%s '%s' because it is in the way and has a lower score than the best candidate id=%s '%s' score=%s start_time=%s", + scheduled_scheduling_unit.id, scheduled_scheduling_unit.name, + candidate.scheduling_unit.id, candidate.scheduling_unit.name, candidate.weighted_score, candidate.scheduling_unit.start_time) + + unschedule_subtasks_in_scheduling_unit_blueprint(scheduled_scheduling_unit) + + # check again... are there still any scheduled_scheduling_units in the way? + scheduled_scheduling_units = get_scheduled_scheduling_units(lower=candidate.start_time, + upper=candidate.start_time + candidate.scheduling_unit.duration) + if scheduled_scheduling_units: + # accept the current solution with the current scheduled_scheduling_units + logger.info("keeping current scheduled unit(s) which have a better (or equal) score: %s", "; ".join( + "id=%s '%s' start_time='%s'" % (su.id, su.name, su.start_time) for su in scheduled_scheduling_units)) + + # indicate there are still blocking units + return False + + # all clear, nothing is blocking anymore + return True + + + diff --git a/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py b/SAS/TMSS/services/scheduling/lib/subtask_scheduling.py similarity index 95% rename from SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py rename to SAS/TMSS/services/scheduling/lib/subtask_scheduling.py index 524a616a86fa35fca2351278a1d69b1df46d882f..af80ff8c94b1576407ede4b51df456d52cb0a495 100644 --- a/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py +++ b/SAS/TMSS/services/scheduling/lib/subtask_scheduling.py @@ -77,7 +77,7 @@ class TMSSSubTaskSchedulingEventMessageHandler(TMSSEventMessageHandler): except Exception as e: logger.error(e) -def create_service(exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER, tmss_client_credentials_id: str=None): +def create_subtask_scheduling_service(exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER, tmss_client_credentials_id: str=None): return TMSSBusListener(handler_type=TMSSSubTaskSchedulingEventMessageHandler, handler_kwargs={'tmss_client_credentials_id': tmss_client_credentials_id}, exchange=exchange, @@ -99,7 +99,7 @@ def main(): help='the credentials id for the file in ~/.lofar/dbcredentials which holds the TMSS http REST api url and credentials, default: %default') (options, args) = parser.parse_args() - with create_service(options.exchange, options.broker, options.tmss_client_credentials_id): + with create_subtask_scheduling_service(options.exchange, options.broker, options.tmss_client_credentials_id): waitForInterrupt() if __name__ == '__main__': diff --git a/SAS/TMSS/services/scheduling/test/CMakeLists.txt b/SAS/TMSS/services/scheduling/test/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..e3547f4fe0a484e2a395b50411e3e7d8b7486879 ---
/dev/null +++ b/SAS/TMSS/services/scheduling/test/CMakeLists.txt @@ -0,0 +1,11 @@ +# $Id: CMakeLists.txt 32679 2015-10-26 09:31:56Z schaap $ + +if(BUILD_TESTING) + include(LofarCTest) + + lofar_add_test(t_subtask_scheduling_service) + lofar_add_test(t_dynamic_scheduling) + + set_tests_properties(t_subtask_scheduling_service PROPERTIES TIMEOUT 300) + set_tests_properties(t_dynamic_scheduling PROPERTIES TIMEOUT 300) +endif() diff --git a/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.py b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.py new file mode 100755 index 0000000000000000000000000000000000000000..5d95558568f61159c5975fcb073b7fd0a12ca3c0 --- /dev/null +++ b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.py @@ -0,0 +1,348 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +import unittest +import uuid +from unittest import mock + +from astropy.coordinates import Angle + +import logging +logger = logging.getLogger(__name__) + +from lofar.common.test_utils import skip_integration_tests +if skip_integration_tests(): + exit(3) + +TEST_UUID = uuid.uuid1() + +from datetime import datetime, timedelta +from lofar.common.json_utils import get_default_json_object_for_schema, add_defaults_to_json_object_for_schema +from lofar.messaging.messagebus import TemporaryExchange, BusListenerJanitor + +tmp_exchange = TemporaryExchange("t_dynamic_scheduling_%s" % (TEST_UUID,)) +tmp_exchange.open() + +# override DEFAULT_BUSNAME +import lofar +lofar.messaging.config.DEFAULT_BUSNAME = tmp_exchange.address + +from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment +tmss_test_env = TMSSTestEnvironment(exchange=tmp_exchange.address, + populate_schemas=True, populate_test_data=False, + start_postgres_listener=True, start_subtask_scheduler=False, + start_ra_test_environment=True, enable_viewflow=False, + start_dynamic_scheduler=False) # do not start the dynamic scheduler in the testenv, because it is the object-under-test. +tmss_test_env.start() + +def tearDownModule(): + tmss_test_env.stop() + tmp_exchange.close() + +from lofar.sas.tmss.test.tmss_test_data_django_models import * +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprints_and_subtasks_from_scheduling_unit_draft +from lofar.sas.tmss.tmss.tmssapp.subtasks import unschedule_subtask +from lofar.common.postgres import PostgresDatabaseConnection + +# the module under test +from lofar.sas.tmss.services.scheduling.dynamic_scheduling import * + +@unittest.skip('Disabled until scheduler can deal with failing constraints. 
(Currently causes an infinite loop.)') +class TestDynamicScheduling(unittest.TestCase): + ''' + Tests for the Dynamic Scheduling + ''' + @classmethod + def setUpClass(cls) -> None: + # make some re-usable projects with high/low priority + cls.project_low = models.Project.objects.create(**Project_test_data("dynamic scheduling test project %s"% (uuid.uuid4(),), priority_rank=1)) + cls.project_medium = models.Project.objects.create(**Project_test_data("dynamic scheduling test project %s"% (uuid.uuid4(),), priority_rank=2)) + cls.project_high = models.Project.objects.create(**Project_test_data("dynamic scheduling test project %s"% (uuid.uuid4(),), priority_rank=3)) + cls.scheduling_set_low = models.SchedulingSet.objects.create(**SchedulingSet_test_data(project=cls.project_low)) + cls.scheduling_set_medium = models.SchedulingSet.objects.create(**SchedulingSet_test_data(project=cls.project_medium)) + cls.scheduling_set_high = models.SchedulingSet.objects.create(**SchedulingSet_test_data(project=cls.project_high)) + + def setUp(self) -> None: + # wipe all radb entries (via cascading deletes) in between tests, so the tests don't influence each other + with PostgresDatabaseConnection(tmss_test_env.ra_test_environment.radb_test_instance.dbcreds) as radb: + radb.executeQuery('DELETE FROM resource_allocation.specification;') + radb.executeQuery('TRUNCATE resource_allocation.resource_usage;') + radb.commit() + + # wipe all scheduling_unit_drafts in between tests, so the tests don't influence each other + for scheduling_set in [self.scheduling_set_low, self.scheduling_set_medium, self.scheduling_set_high]: + for scheduling_unit_draft in scheduling_set.scheduling_unit_drafts.all(): + for scheduling_unit_blueprint in scheduling_unit_draft.scheduling_unit_blueprints.all(): + for task_blueprint in scheduling_unit_blueprint.task_blueprints.all(): + for subtask in task_blueprint.subtasks.all(): + try: + if subtask.state.value == models.SubtaskState.Choices.SCHEDULED.value: + unschedule_subtask(subtask) + except Exception as e: + logger.exception(e) + for output in subtask.outputs.all(): + for dataproduct in output.dataproducts.all(): + dataproduct.delete() + for consumer in output.consumers.all(): + consumer.delete() + output.delete() + for input in subtask.inputs.all(): + input.delete() + subtask.delete() + task_blueprint.draft.delete() + task_blueprint.delete() + scheduling_unit_blueprint.delete() + scheduling_unit_draft.delete() + + @staticmethod + def create_simple_observation_scheduling_unit(name:str=None, scheduling_set=None, + obs_duration:int=60, # in seconds + constraints=None): + constraints_template = models.SchedulingConstraintsTemplate.objects.get(name="constraints") + constraints = add_defaults_to_json_object_for_schema(constraints or {}, constraints_template.schema) + + strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="Simple Observation") + scheduling_unit_spec = add_defaults_to_json_object_for_schema(strategy_template.template, + strategy_template.scheduling_unit_template.schema) + scheduling_unit_spec['tasks']['Observation']['specifications_doc']['duration'] = obs_duration + + # add the scheduling_unit_doc to a new SchedulingUnitDraft instance, and we're ready to use it!
+ return models.SchedulingUnitDraft.objects.create(name=name, + scheduling_set=scheduling_set, + requirements_template=strategy_template.scheduling_unit_template, + requirements_doc=scheduling_unit_spec, + observation_strategy_template=strategy_template, + scheduling_constraints_doc=constraints, + scheduling_constraints_template=constraints_template) + +
+ def test_three_simple_observations_no_constraints_different_project_priority(self): + scheduling_unit_draft_low = self.create_simple_observation_scheduling_unit("scheduling unit low", scheduling_set=self.scheduling_set_low) + scheduling_unit_blueprint_low = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_low) + + scheduling_unit_draft_medium = self.create_simple_observation_scheduling_unit("scheduling unit medium", scheduling_set=self.scheduling_set_medium) + scheduling_unit_blueprint_medium = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_medium) + + scheduling_unit_draft_high = self.create_simple_observation_scheduling_unit("scheduling unit high", scheduling_set=self.scheduling_set_high) + scheduling_unit_blueprint_high = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_high) + + # call the method-under-test. + scheduled_scheduling_unit = do_dynamic_schedule() + + # we expect the scheduling_unit with the highest project rank to be scheduled first + self.assertIsNotNone(scheduled_scheduling_unit) + self.assertEqual(scheduling_unit_blueprint_high.id, scheduled_scheduling_unit.id) + + # check the results + # we expect the sub_high to be scheduled + scheduling_unit_blueprint_low.refresh_from_db() + scheduling_unit_blueprint_medium.refresh_from_db() + scheduling_unit_blueprint_high.refresh_from_db() + self.assertEqual(scheduling_unit_blueprint_low.status, 'schedulable') + self.assertEqual(scheduling_unit_blueprint_medium.status, 'schedulable') + self.assertEqual(scheduling_unit_blueprint_high.status, 'scheduled') + + # check the scheduled subtask + upcoming_scheduled_subtasks = models.Subtask.objects.filter(state__value='scheduled', + task_blueprint__scheduling_unit_blueprint__in=(scheduling_unit_blueprint_low, + scheduling_unit_blueprint_medium, + scheduling_unit_blueprint_high)).all() + self.assertEqual(1, upcoming_scheduled_subtasks.count()) + self.assertEqual(scheduling_unit_blueprint_high.id, upcoming_scheduled_subtasks[0].task_blueprint.scheduling_unit_blueprint.id) + + # check that the lower-ranked units are queued after the scheduled higher-ranked ones + self.assertGreater(scheduling_unit_blueprint_low.start_time, scheduling_unit_blueprint_medium.start_time) + self.assertGreater(scheduling_unit_blueprint_medium.start_time, scheduling_unit_blueprint_high.start_time) + + # ensure DEFAULT_INTER_OBSERVATION_GAP between them + self.assertGreaterEqual(scheduling_unit_blueprint_medium.start_time - scheduling_unit_blueprint_high.stop_time, DEFAULT_INTER_OBSERVATION_GAP) + self.assertGreaterEqual(scheduling_unit_blueprint_low.start_time - scheduling_unit_blueprint_medium.stop_time, DEFAULT_INTER_OBSERVATION_GAP) + +
+ def test_time_bound_unit_wins_even_at_lower_priority(self): + # create two scheduling units, one with high and one with low prio. + # first create them without any further constraints, and check if high prio wins.
+ scheduling_unit_draft_low = self.create_simple_observation_scheduling_unit("scheduling unit low", scheduling_set=self.scheduling_set_low) + scheduling_unit_blueprint_low = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_low) + + scheduling_unit_draft_high = self.create_simple_observation_scheduling_unit("scheduling unit high", scheduling_set=self.scheduling_set_high) + scheduling_unit_blueprint_high = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_high) + + now = datetime.utcnow() + tomorrow = now+timedelta(days=1) + + # call the method-under-test. + best_scored_scheduling_unit = find_best_next_schedulable_unit([scheduling_unit_blueprint_low, scheduling_unit_blueprint_high], now, tomorrow) + + # we expect the scheduling_unit with the highest project rank to be scheduled first + self.assertEqual(scheduling_unit_blueprint_high.id, best_scored_scheduling_unit.scheduling_unit.id) + + # now update the low prio unit with a time constraint, "forcing" it to be run in a very tight upcoming time window. + scheduling_unit_draft_low.scheduling_constraints_doc['time'] = { 'before': (now+scheduling_unit_draft_low.duration).isoformat()+'Z' } + scheduling_unit_draft_low.save() + scheduling_unit_blueprint_low.refresh_from_db() + + # call the method-under-test. + best_scored_scheduling_unit = find_best_next_schedulable_unit([scheduling_unit_blueprint_low, scheduling_unit_blueprint_high], now, tomorrow) + + # now we expect the scheduling_unit with the lowest project rank to be scheduled first because it can only run within this limited time window + self.assertEqual(scheduling_unit_draft_low.id, best_scored_scheduling_unit.scheduling_unit.id) + +
+ # update the low prio unit: enlarge the time window constraint a bit, so both low and high prio units can fit. + # this should result in the high prio unit going first, and the low prio unit (which now fits as well) going second + scheduling_unit_draft_low.scheduling_constraints_doc['time'] = { 'before': (now+scheduling_unit_draft_low.duration+scheduling_unit_draft_high.duration).isoformat()+'Z' } + scheduling_unit_draft_low.save() + scheduling_unit_blueprint_low.refresh_from_db() + + # call the method-under-test.
+ best_scored_scheduling_unit = find_best_next_schedulable_unit([scheduling_unit_blueprint_low, scheduling_unit_blueprint_high], now, tomorrow) + + # now we expect the high prio unit to go first again, because both units fit within the enlarged time window + self.assertEqual(scheduling_unit_blueprint_high.id, best_scored_scheduling_unit.scheduling_unit.id) + + # call the method-under-test again but search after the first unit (should return the low prio unit) + stop_time_of_first = best_scored_scheduling_unit.start_time + best_scored_scheduling_unit.scheduling_unit.duration + best_scored_scheduling_unit = find_best_next_schedulable_unit([scheduling_unit_blueprint_low, scheduling_unit_blueprint_high], stop_time_of_first, tomorrow) + self.assertEqual(scheduling_unit_blueprint_low.id, best_scored_scheduling_unit.scheduling_unit.id) + +
+ def test_manual_constraint_is_preventing_scheduling_unit_from_being_scheduled_dynamically(self): + scheduling_unit_draft_manual = self.create_simple_observation_scheduling_unit("scheduling unit manual low", scheduling_set=self.scheduling_set_low, + constraints={'scheduler': 'manual'}) + scheduling_unit_blueprint_manual = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_manual) + self.assertEqual(scheduling_unit_blueprint_manual.status, "schedulable") + + # call the method-under-test. + scheduled_scheduling_unit = do_dynamic_schedule() + + # we expect no scheduling_unit to be scheduled, because the only unit has the 'manual' scheduler constraint + self.assertIsNone(scheduled_scheduling_unit) + + # check the results + scheduling_unit_blueprint_manual.refresh_from_db() + self.assertEqual(scheduling_unit_blueprint_manual.status, 'schedulable') + +
+ def test_manually_scheduled_blocking_dynamically_scheduled(self): + scheduling_unit_draft_manual = self.create_simple_observation_scheduling_unit("scheduling unit manual low", scheduling_set=self.scheduling_set_low, + constraints={'scheduler': 'manual'}) + scheduling_unit_blueprint_manual = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_manual) + self.assertEqual(scheduling_unit_blueprint_manual.status, "schedulable") + + schedule_independent_subtasks_in_scheduling_unit_blueprint(scheduling_unit_blueprint_manual, datetime.utcnow()) + self.assertEqual(scheduling_unit_blueprint_manual.status, "scheduled") + + scheduling_unit_draft_high = self.create_simple_observation_scheduling_unit("scheduling unit online high", scheduling_set=self.scheduling_set_high) + scheduling_unit_blueprint_high = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_high) + + # call the method-under-test.
+        scheduled_scheduling_unit = do_dynamic_schedule()
+
+        # we expect no scheduling_unit to be scheduled, because the manually scheduled unit is in the way
+        self.assertIsNone(scheduled_scheduling_unit)
+
+        # check the results
+        # we expect scheduling_unit_blueprint_high to still be schedulable (it was not scheduled)
+        scheduling_unit_blueprint_high.refresh_from_db()
+        self.assertEqual(scheduling_unit_blueprint_high.status, 'schedulable')
+
+        # check that scheduling_unit_blueprint_high was moved to start after the manually scheduled unit
+        self.assertGreater(scheduling_unit_blueprint_high.start_time, scheduling_unit_blueprint_manual.start_time)
+
+        # ensure there is at least DEFAULT_INTER_OBSERVATION_GAP between them
+        self.assertGreaterEqual(scheduling_unit_blueprint_high.start_time - scheduling_unit_blueprint_manual.stop_time, DEFAULT_INTER_OBSERVATION_GAP)
+
+
+class TestSchedulingConstraints(unittest.TestCase):
+    '''
+    Tests for the constraint checkers used in dynamic scheduling
+    '''
+
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.obs_duration = 120 * 60
+        scheduling_set = models.SchedulingSet.objects.create(**SchedulingSet_test_data())
+        scheduling_unit_draft = TestDynamicScheduling.create_simple_observation_scheduling_unit("scheduling unit for constraints tests",
+                                                                                                scheduling_set=scheduling_set,
+                                                                                                obs_duration=cls.obs_duration)
+        cls.scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
+
+    def setUp(self) -> None:
+        self.sunrise_data = {
+            'CS002': {"sunrise": [{"start": datetime(2020, 1, 1, 7, 30, 0), "end": datetime(2020, 1, 1, 9, 30, 0)}, {"start": datetime(2020, 1, 2, 7, 30, 0), "end": datetime(2020, 1, 2, 9, 30, 0)}],
+                      "day": [{"start": datetime(2020, 1, 1, 9, 30, 0), "end": datetime(2020, 1, 1, 15, 30, 0)}, {"start": datetime(2020, 1, 2, 9, 30, 0), "end": datetime(2020, 1, 2, 15, 30, 0)}],
+                      "sunset": [{"start": datetime(2020, 1, 1, 15, 30, 0), "end": datetime(2020, 1, 1, 17, 30, 0)}, {"start": datetime(2020, 1, 2, 15, 30, 0), "end": datetime(2020, 1, 2, 17, 30, 0)}],
+                      "night": [{"start": datetime(2020, 1, 1, 17, 30, 0), "end": datetime(2020, 1, 2, 7, 30, 0)}, {"start": datetime(2020, 1, 2, 17, 30, 0), "end": datetime(2020, 1, 3, 7, 30, 0)}]}}
+        self.sunrise_patcher = mock.patch('lofar.sas.tmss.services.scheduling.constraints.template_constraints_v1.timestamps_and_stations_to_sun_rise_and_set')
+        self.sunrise_mock = self.sunrise_patcher.start()
+        self.sunrise_mock.return_value = self.sunrise_data
+        self.addCleanup(self.sunrise_patcher.stop)
+
+        self.distance_data = {
+            "sun": {datetime(2020, 1, 1, 10, 0, 0): Angle("0.3rad"), datetime(2020, 1, 1, 12, 0, 0): Angle("0.35rad")},
+            "moon": {datetime(2020, 1, 1, 10, 0, 0): Angle("0.2rad"), datetime(2020, 1, 1, 12, 0, 0): Angle("0.25rad")},
+            "jupiter": {datetime(2020, 1, 1, 10, 0, 0): Angle("0.1rad"), datetime(2020, 1, 1, 12, 0, 0): Angle("0.15rad")}
+        }
+        self.distance_patcher = mock.patch('lofar.sas.tmss.services.scheduling.constraints.template_constraints_v1.coordinates_and_timestamps_to_separation_from_bodies')
+        self.distance_mock = self.distance_patcher.start()
+        self.distance_mock.return_value = self.distance_data
+        self.addCleanup(self.distance_patcher.stop)
+
+    def test_get_earliest_possible_start_time_with_daytime_constraint_timestamp_returns_day_start(self):
+        self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_day'] = True
+        self.scheduling_unit_blueprint.save()
+        timestamp = datetime(2020, 1, 1, 4, 0, 0)
+        returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp)
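+        # (with the mocked sunrise_data above, the 'day' interval on 2020-01-01 runs from
+        #  09:30 to 15:30, so the 'require_day' check essentially reduces to:
+        #      day = sunrise_data['CS002']['day'][0]
+        #      earliest = timestamp if timestamp >= day['start'] else day['start']
+        #  and asking at 04:00 should therefore yield the 09:30 day start asserted next.)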
self.assertEqual(returned_time, self.sunrise_data['CS002']['day'][0]['start']) + + def test_get_earliest_possible_start_time_with_daytime_constraint_timestamp_returns_timestamp(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['daily']['require_day'] = True + self.scheduling_unit_blueprint.save() + timestamp = datetime(2020, 1, 1, 10, 0, 0) + returned_time = get_earliest_possible_start_time(self.scheduling_unit_blueprint, timestamp) + self.assertEqual(returned_time, timestamp) + + # todo: add more daytime checks with 255 + + # todo: add nighttime checks with 254 + + # todo: add twilight checks with 256 + + def test_can_run_within_timewindow_with_min_distance_constraint_returns_true_when_met(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky']['min_distance'] = {'sun': 0.1, 'moon': 0.1, 'jupiter': 0.1} + self.scheduling_unit_blueprint.save() + timestamp = datetime(2020, 1, 1, 10, 0, 0) + returned_value = can_run_within_timewindow(self.scheduling_unit_blueprint, timestamp, timestamp + timedelta(seconds=self.obs_duration)) + self.assertTrue(returned_value) + + def test_can_run_within_timewindow_with_min_distance_constraint_returns_false_when_not_met(self): + self.scheduling_unit_blueprint.draft.scheduling_constraints_doc['sky']['min_distance'] = {'sun': 0.2, 'moon': 0.2, 'jupiter': 0.2} + self.scheduling_unit_blueprint.save() + timestamp = datetime(2020, 1, 1, 10, 0, 0) + returned_value = can_run_within_timewindow(self.scheduling_unit_blueprint, timestamp, timestamp + timedelta(seconds=self.obs_duration)) + self.assertFalse(returned_value) + + +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + +if __name__ == '__main__': + #run the unit tests + unittest.main() diff --git a/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.run b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.run new file mode 100755 index 0000000000000000000000000000000000000000..d0831a318c2949b8a6990c0cef62fa6ea3bac68b --- /dev/null +++ b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.run @@ -0,0 +1,4 @@ +#!/bin/bash + +python3 t_dynamic_scheduling.py + diff --git a/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.sh b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.sh new file mode 100755 index 0000000000000000000000000000000000000000..ee5a97caed28fae29660df70d067fd9170658d70 --- /dev/null +++ b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_dynamic_scheduling \ No newline at end of file diff --git a/SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.py b/SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.py new file mode 100755 index 0000000000000000000000000000000000000000..57d3ca6f86bbc6ab3b9e5d5a7de7c051e75e2650 --- /dev/null +++ b/SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.py @@ -0,0 +1,253 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+import unittest
+import uuid
+
+import logging
+logger = logging.getLogger('lofar.'+__name__)
+
+from lofar.common.test_utils import skip_integration_tests
+if skip_integration_tests():
+    exit(3)
+
+from lofar.messaging.messagebus import TemporaryExchange, BusListenerJanitor
+from lofar.sas.tmss.services.scheduling.subtask_scheduling import create_subtask_scheduling_service
+from lofar.common.json_utils import get_default_json_object_for_schema, add_defaults_to_json_object_for_schema
+
+from time import sleep
+from datetime import datetime, timedelta
+
+class TestSubtaskSchedulingService(unittest.TestCase):
+    '''
+    Tests for the SubtaskSchedulingService
+    '''
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.TEST_UUID = uuid.uuid1()
+
+        cls.tmp_exchange = TemporaryExchange("%s_%s" % (cls.__name__, cls.TEST_UUID))
+        cls.tmp_exchange.open()
+
+        # override DEFAULT_BUSNAME
+        import lofar
+        lofar.messaging.config.DEFAULT_BUSNAME = cls.tmp_exchange.address
+
+        # import here, and not at top of module, because DEFAULT_BUSNAME needs to be set before importing
+        from lofar.sas.resourceassignment.resourceassigner.test.ra_test_environment import RATestEnvironment
+        from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment
+        from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
+
+        cls.ra_test_env = RATestEnvironment(exchange=cls.tmp_exchange.address)
+        cls.ra_test_env.start()
+
+        cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address, populate_schemas=True, populate_test_data=False,
+                                                start_subtask_scheduler=True, start_postgres_listener=True, start_ra_test_environment=True,
+                                                start_dynamic_scheduler=False, enable_viewflow=False)
+        cls.tmss_test_env.start()
+
+        cls.test_data_creator = TMSSRESTTestDataCreator(cls.tmss_test_env.django_server.url,
+                                                        (cls.tmss_test_env.ldap_server.dbcreds.user, cls.tmss_test_env.ldap_server.dbcreds.password))
+
+    @classmethod
+    def tearDownClass(cls) -> None:
+        cls.tmss_test_env.stop()
+        cls.ra_test_env.stop()
+        cls.tmp_exchange.close()
+
+    @staticmethod
+    def wait_for_subtask_to_get_status(tmss_client, subtask_id, expected_status, timeout=30):
+        '''helper method to poll for a subtask's status.
+        raises TimeoutError if expected_status is not met within timeout seconds.
+        returns the subtask when expected_status is met.'''
+        start = datetime.utcnow()
+        subtask = tmss_client.get_subtask(subtask_id)
+        while subtask['state_value'] != expected_status:
+            sleep(0.5)
+            logger.info("Waiting for subtask id=%s to get status '%s'. Current status='%s'. Polling...", subtask_id, expected_status, subtask['state_value'])
+            subtask = tmss_client.get_subtask(subtask_id)
+            if datetime.utcnow() - start > timedelta(seconds=timeout):
+                raise TimeoutError("timeout while waiting for subtask id=%s to get status '%s'. It currently has status '%s'" % (
+                    subtask_id, expected_status, subtask['state_value']))
+        return subtask
+
+    def test_01_for_expected_behaviour_of_two_connected_subtasks(self):
+        '''
+        This test starts a scheduling service and tmss, creates a chain of two connected subtasks, finishes the first, and checks that the successor is then scheduled.
+        '''
+        return  # note: this test is currently short-circuited here, and thus effectively disabled
+
+        logger.info(' -- test_01_for_expected_behaviour -- ')
+
+        # create and start the service (the object under test)
+        service = create_subtask_scheduling_service(exchange=self.tmp_exchange.address, tmss_client_credentials_id=self.tmss_test_env.client_credentials.dbcreds_id)
+        with BusListenerJanitor(service):
+            # -------------------------
+            # long setup of objects....
+
+            # setup proper template
+            subtask_template_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskTemplate(subtask_type_url=self.test_data_creator.django_api_url + '/subtask_type/qa_files/'), '/subtask_template/')
+
+            # create two subtasks
+            subtask1 = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url), '/subtask/')
+            subtask2 = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url, task_blueprint_url=subtask1['task_blueprint']), '/subtask/')
+
+            # connect them
+            output_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskOutput(subtask1['url']), '/subtask_output/')
+            input_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskInput(subtask_url=subtask2['url'], subtask_output_url=output_url), '/subtask_input/')
+
+            # ... end of long setup of objects
+            # --------------------------------
+
+            # now for the real test: set subtask1's status to finished, and check that subtask2 is then properly scheduled
+            with self.tmss_test_env.create_tmss_client() as tmss_client:
+                subtask1 = tmss_client.get_subtask(subtask1['id'])
+                subtask2 = tmss_client.get_subtask(subtask2['id'])
+
+                self.assertEqual(subtask1['state_value'], 'defined')
+                self.assertEqual(subtask2['state_value'], 'defined')
+
+                # the first subtask ran, and is now finished... set its status. This should trigger the scheduling service to schedule the second subtask.
+                tmss_client.set_subtask_status(subtask1['id'], 'finished')
+
+                subtask2 = self.wait_for_subtask_to_get_status(tmss_client, subtask2['id'], 'scheduled')
+
+                # subtask2 should now be scheduled
+                self.assertEqual(subtask2['state_value'], 'scheduled')
+
+    def test_02_for_expected_behaviour_of_UC1_scheduling_unit(self):
+        '''
+        This test starts a scheduling service and tmss, creates the UC1 scheduling unit with its chain of observation, QA and pipeline subtasks, and checks that each subtask gets scheduled once its predecessors are finished.
+ ''' + + logger.info(' -- test_02_for_expected_behaviour_of_UC1_scheduling_unit -- ') + + # import here, and not at top of module, because the tmsstestenv needs to be running before importing + from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprints_and_subtasks_from_scheduling_unit_draft + from lofar.sas.tmss.tmss.tmssapp.subtasks import update_subtasks_start_times_for_scheduling_unit + from lofar.sas.tmss.tmss.tmssapp import models + from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingSet_test_data + + # create and start the service (the object under test) + service = create_subtask_scheduling_service(exchange=self.tmp_exchange.address, tmss_client_credentials_id=self.tmss_test_env.client_credentials.dbcreds_id) + with BusListenerJanitor(service): + # ------------------------- + # setup of objects: create the UC1 scheduling unit, and then select the first runnable subtasks + strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines") + spec = add_defaults_to_json_object_for_schema(strategy_template.template, strategy_template.scheduling_unit_template.schema) + scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(name="UC1 CTC+pipelines", + scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data()), + requirements_template=strategy_template.scheduling_unit_template, + requirements_doc=spec, + observation_strategy_template=strategy_template, + scheduling_constraints_doc=get_default_json_object_for_schema(models.SchedulingConstraintsTemplate.objects.get(name="constraints").schema), + scheduling_constraints_template=models.SchedulingConstraintsTemplate.objects.get(name="constraints")) + + scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + + # assign some non-overlapping starttimes, so the tasks can be scheduled + update_subtasks_start_times_for_scheduling_unit(scheduling_unit_blueprint, datetime.utcnow()) + + # scheduling_unit_blueprint now has task_blueprints and subtasks + # "unpack" the whole graph, so we can "walk" it and see if the correct subtasks are scheduled once its predecessors are finished + obs_cal1 = scheduling_unit_blueprint.task_blueprints.get(name="Calibrator Observation 1") + obs_cal1_st_obs = obs_cal1.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.OBSERVATION.value))) + obs_cal1_st_qa1 = obs_cal1.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_FILES.value))) + obs_cal1_st_qa2 = obs_cal1.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_PLOTS.value))) + + pl_cal1 = scheduling_unit_blueprint.task_blueprints.get(name="Pipeline 1") + pl_cal1_st = pl_cal1.subtasks.first() + + obs_tgt = scheduling_unit_blueprint.task_blueprints.get(name="Target Observation") + obs_tgt_st_obs = obs_tgt.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.OBSERVATION.value))) + obs_tgt_st_qa1 = obs_tgt.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_FILES.value))) + obs_tgt_st_qa2 = 
obs_tgt.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_PLOTS.value))) + + pl_tgt1 = scheduling_unit_blueprint.task_blueprints.get(name="Pipeline target1") + pl_tgt1_st = pl_tgt1.subtasks.first() + + pl_tgt2 = scheduling_unit_blueprint.task_blueprints.get(name="Pipeline target2") + pl_tgt2_st = pl_tgt2.subtasks.first() + + obs_cal2 = scheduling_unit_blueprint.task_blueprints.get(name="Calibrator Observation 2") + obs_cal2_st_obs = obs_cal2.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.OBSERVATION.value))) + obs_cal2_st_qa1 = obs_cal2.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_FILES.value))) + obs_cal2_st_qa2 = obs_cal2.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_PLOTS.value))) + + pl_cal2 = scheduling_unit_blueprint.task_blueprints.get(name="Pipeline 2") + pl_cal2_st = pl_cal2.subtasks.first() + + # define the graph in an iterable way: as tuples of a subtask-successors-pair + # the graph is also ordered in a 'data-flow' direction + graph = (# calibrator1 obs, qa and pipeline + (obs_cal1_st_obs, (obs_cal1_st_qa1, pl_cal1_st)), + (obs_cal1_st_qa1, (obs_cal1_st_qa2,)), + (obs_cal1_st_qa2, tuple()), + (pl_cal1_st, tuple()), + #target obs, qa and pipelines + (obs_tgt_st_obs, (obs_tgt_st_qa1, pl_tgt1_st, pl_tgt2_st)), + (obs_tgt_st_qa1, (obs_tgt_st_qa2,)), + (obs_tgt_st_qa2, tuple()), + (pl_tgt1_st, tuple()), + (pl_tgt2_st, tuple()), + # calibrator2 obs, qa and pipeline + (obs_cal2_st_obs, (obs_cal2_st_qa1, pl_cal2_st)), + (obs_cal2_st_qa1, (obs_cal2_st_qa2,)), + (obs_cal2_st_qa2, tuple()), + (pl_cal2_st, tuple()) ) + + logger.info(" --- test_02_for_expected_behaviour_of_UC1_scheduling_unit setup finished. starting actual test ---") + # ... end of long setup of objects + # -------------------------------- + + # now for the real test: use only the http rest api to check statuses and call schedule methods + with self.tmss_test_env.create_tmss_client() as tmss_client: + # walk the graph in a "data-flow" direction + for subtask, successors in graph: + # get up-to-date subtask via the rest client + subtask1 = tmss_client.get_subtask(subtask.id) + logger.info("subtask id=%s status=%s successors: %s", subtask1['id'], subtask1['state_value'], ','.join(str(s.id) for s in successors)) + + if subtask1['state_value'] == 'defined': + for successor in successors: + # get up-to-date subtask via the rest client + subtask2 = tmss_client.get_subtask(successor.id) + self.assertEqual(subtask2['state_value'], 'defined') + + # simulate that some scheduler schedules the first subtask (which does not depend on predecessors)... + if len(tmss_client.get_subtask_predecessors(subtask1['id'])) == 0: + subtask1 = tmss_client.schedule_subtask(subtask1['id']) + self.assertEqual(subtask1['state_value'], 'scheduled') + + if subtask1['state_value'] == 'scheduled': + # simulate that the first subtask ran, and is now finished... + # cycle over the 'run time' statuses, concluding with status to finished. + # The finished status should trigger the scheduling service to schedule the successor subtask(s). 
+ for status in ['queueing', 'queued', 'starting', 'started', 'finishing', 'finished']: + tmss_client.set_subtask_status(subtask1['id'], status) + + for successor in successors: + # get up-to-date subtask via the rest client + subtask2 = self.wait_for_subtask_to_get_status(tmss_client, successor.id, 'scheduled') + self.assertEqual(subtask2['state_value'], 'scheduled') + + +if __name__ == '__main__': + #run the unit tests + unittest.main() diff --git a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.run b/SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.run similarity index 100% rename from SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.run rename to SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.run diff --git a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.sh b/SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.sh similarity index 100% rename from SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.sh rename to SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.sh diff --git a/SAS/TMSS/services/subtask_scheduling/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/CMakeLists.txt deleted file mode 100644 index 460e356bc2c99121eb41a48fc27fad7d20a51fac..0000000000000000000000000000000000000000 --- a/SAS/TMSS/services/subtask_scheduling/CMakeLists.txt +++ /dev/null @@ -1,8 +0,0 @@ -lofar_package(TMSSSubtaskSchedulingService 0.1 DEPENDS TMSSClient PyCommon pyparameterset PyMessaging) - -lofar_find_package(PythonInterp 3.4 REQUIRED) - -add_subdirectory(lib) -add_subdirectory(bin) -add_subdirectory(test) - diff --git a/SAS/TMSS/services/subtask_scheduling/bin/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/bin/CMakeLists.txt deleted file mode 100644 index 07e30a532f710dd1242ba026ad12e9ce014f1125..0000000000000000000000000000000000000000 --- a/SAS/TMSS/services/subtask_scheduling/bin/CMakeLists.txt +++ /dev/null @@ -1,4 +0,0 @@ -lofar_add_bin_scripts(tmss_subtask_scheduling_service) - -# supervisord config files -lofar_add_sysconf_files(tmss_subtask_scheduling_service.ini DESTINATION supervisord.d) diff --git a/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service b/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service deleted file mode 100755 index 2ecd686a25fd88e45094bf4cda143e41de1fb61d..0000000000000000000000000000000000000000 --- a/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/python3 - -# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) -# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands -# -# This file is part of the LOFAR software suite. -# The LOFAR software suite is free software: you can redistribute it and/or -# modify it under the terms of the GNU General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# The LOFAR software suite is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
- - -from lofar.sas.tmss.services.subtask_scheduling import main - -if __name__ == "__main__": - main() diff --git a/SAS/TMSS/services/subtask_scheduling/lib/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/lib/CMakeLists.txt deleted file mode 100644 index 7cf0b591612ccb75bc2a73c1a6f9d1d8a2c2d9da..0000000000000000000000000000000000000000 --- a/SAS/TMSS/services/subtask_scheduling/lib/CMakeLists.txt +++ /dev/null @@ -1,10 +0,0 @@ -lofar_find_package(PythonInterp 3.4 REQUIRED) -include(PythonInstall) - -set(_py_files - subtask_scheduling.py - ) - -python_install(${_py_files} - DESTINATION lofar/sas/tmss/services) - diff --git a/SAS/TMSS/services/subtask_scheduling/test/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/test/CMakeLists.txt deleted file mode 100644 index b9da06a5dc6b27fde81e26c6cc5ba027cae2d821..0000000000000000000000000000000000000000 --- a/SAS/TMSS/services/subtask_scheduling/test/CMakeLists.txt +++ /dev/null @@ -1,7 +0,0 @@ -# $Id: CMakeLists.txt 32679 2015-10-26 09:31:56Z schaap $ - -if(BUILD_TESTING) - include(LofarCTest) - - lofar_add_test(t_subtask_scheduling_service) -endif() diff --git a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py b/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py deleted file mode 100755 index 84d85d879019b0a5d09832d7cf5815f53ef12a2b..0000000000000000000000000000000000000000 --- a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) -# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands -# -# This file is part of the LOFAR software suite. -# The LOFAR software suite is free software: you can redistribute it and/or -# modify it under the terms of the GNU General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# The LOFAR software suite is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
- -import unittest -import uuid - -import logging -logger = logging.getLogger(__name__) -logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - -from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment -from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator - -from lofar.messaging.messagebus import TemporaryExchange, BusListenerJanitor -from lofar.sas.tmss.services.subtask_scheduling import create_service -from lofar.common.test_utils import integration_test -from time import sleep -from datetime import datetime, timedelta - -@integration_test -class TestSubtaskSchedulingService(unittest.TestCase): - ''' - Tests for the SubtaskSchedulingService - ''' - @classmethod - def setUpClass(cls) -> None: - cls.TEST_UUID = uuid.uuid1() - - cls.tmp_exchange = TemporaryExchange("%s_%s" % (cls.__name__, cls.TEST_UUID)) - cls.tmp_exchange.open() - - cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address) - cls.tmss_test_env.start() - - cls.test_data_creator = TMSSRESTTestDataCreator(cls.tmss_test_env.django_server.url, - (cls.tmss_test_env.ldap_server.dbcreds.user, cls.tmss_test_env.ldap_server.dbcreds.password)) - - @classmethod - def tearDownClass(cls) -> None: - cls.tmss_test_env.stop() - cls.tmp_exchange.close() - - def test_01_for_expected_behaviour(self): - ''' - This test starts a scheduling service and tmss, creates a chain of subtasks, finishes the first, and checks if the successors are then scheduled. - ''' - - logger.info(' -- test_01_for_expected_behaviour -- ') - - # create and start the service (the object under test) - service = create_service(exchange=self.tmp_exchange.address, tmss_client_credentials_id=self.tmss_test_env.client_credentials.dbcreds_id) - with BusListenerJanitor(service): - # ------------------------- - # long setup of objects.... - - # setup proper template - subtask_template_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskTemplate(subtask_type_url=self.test_data_creator.django_api_url + '/subtask_type/qa_files/'), '/subtask_template/') - - # create two subtasks - subtask1_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url), '/subtask/') - subtask2_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url), '/subtask/') - - # ugly - subtask1_id = subtask1_url.split('/')[subtask1_url.split('/').index('subtask') + 1] - subtask2_id = subtask2_url.split('/')[subtask2_url.split('/').index('subtask') + 1] - - # connect them - output_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskOutput(subtask1_url), '/subtask_output/') - input_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskInput(subtask_url=subtask2_url, subtask_output_url=output_url), '/subtask_input/') - - # ... end of long setup of objects - # -------------------------------- - - # now for the real test: set subtask1_id status to finished, and check that subtask2 is then properly scheduled - with self.tmss_test_env.create_tmss_client() as tmss_client: - subtask1 = tmss_client.get_subtask(subtask1_id) - subtask2 = tmss_client.get_subtask(subtask2_id) - - self.assertEqual(subtask1['state_value'], 'defined') - self.assertEqual(subtask2['state_value'], 'defined') - - # the first subtask ran, and is now finished... set it's status. 
This should trigger the scheduling service to schedule the second subtask.
-                tmss_client.set_subtask_status(subtask1_id, 'finished')
-
-                # allow some time for the scheduling service to do its thing...
-                start = datetime.utcnow()
-                while subtask2['state_value'] != 'scheduled':
-                    subtask2 = tmss_client.get_subtask(subtask2_id)
-                    sleep(0.5)
-                    if datetime.utcnow() - start > timedelta(seconds=2):
-                        raise TimeoutError()
-
-                # subtask2 should now be scheduled
-                self.assertEqual(subtask2['state_value'], 'scheduled')
-
-if __name__ == '__main__':
-    #run the unit tests
-    unittest.main()
diff --git a/SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py b/SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py
index 3cf20c24ec7ed26321f2c8acc85e09a14961b6eb..51532b9390cc3e2b54a2f637f4bc26faf992b4e7 100644
--- a/SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py
+++ b/SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py
@@ -54,7 +54,7 @@ class TMSSPGListener(PostgresListener):
         self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_subtask', 'delete'))
         self.subscribe('tmssapp_subtask_delete', self.onSubTaskDeleted)
 
-        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_subtask', 'update', 'state_id'))
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_subtask', 'update', column_name='state_id', quote_column_value=True))
         self.subscribe('tmssapp_subtask_update_column_state_id', self.onSubTaskStateUpdated)
 
@@ -107,6 +107,13 @@ class TMSSPGListener(PostgresListener):
         self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_schedulingunitdraft', 'delete'))
         self.subscribe('tmssapp_schedulingunitdraft_delete', self.onSchedulingUnitDraftDeleted)
 
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_schedulingunitdraft', 'update', column_name='scheduling_constraints_doc', quote_column_value=False))
+        self.subscribe('tmssapp_schedulingunitdraft_update_column_scheduling_constraints_doc'[:63], self.onSchedulingUnitDraftConstraintsUpdated)
+
+        # Settings
+        self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_setting', 'update', id_column_name='name_id', quote_id_value=True, column_name='value', quote_column_value=True))
+        self.subscribe('tmssapp_setting_update_column_value', self.onSettingUpdated)
+
         return super().start()
 
     def __exit__(self, exc_type, exc_val, exc_tb):
@@ -191,6 +198,17 @@ class TMSSPGListener(PostgresListener):
     def onSchedulingUnitDraftDeleted(self, payload = None):
         self._sendNotification(TMSS_SCHEDULINGUNITDRAFT_OBJECT_EVENT_PREFIX+'.Deleted', payload)
 
+    def onSchedulingUnitDraftConstraintsUpdated(self, payload = None):
+        # note: the payload is forwarded as-is; the constraints doc string is not (yet) parsed into a nested json doc here
+        self._sendNotification(TMSS_SCHEDULINGUNITDRAFT_OBJECT_EVENT_PREFIX+'.Constraints.Updated', payload)
+
+    def onSettingUpdated(self, payload = None):
+        payload = json.loads(payload)
+        payload['name'] = payload['name_id']
+        del payload['name_id']
+        payload['value'] = payload['value'] in ('true', 'True', 't')
+        self._sendNotification(TMSS_SETTING_OBJECT_EVENT_PREFIX+'.Updated', payload)
+
 
 def create_service(dbcreds, exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER):
     '''create a TMSSPGListener instance'''
@@ -216,7 +234,7 @@ def main():
     parser.add_option_group(group)
     parser.add_option_group(dbcredentials.options_group(parser))
 
-    parser.set_defaults(dbcredentials=os.environ.get('TMSS_CLIENT_DBCREDENTIALS', 'TMSS'))
+    parser.set_defaults(dbcredentials=os.environ.get('TMSS_DBCREDENTIALS', 'TMSS'))
 
    (options, args) = 
parser.parse_args() dbcreds = dbcredentials.parse_options(options) diff --git a/SAS/TMSS/src/CMakeCache.txt b/SAS/TMSS/src/CMakeCache.txt deleted file mode 100644 index 0b2dc14cb11f159cf34cbf5f5ad840ce0aaab7d0..0000000000000000000000000000000000000000 --- a/SAS/TMSS/src/CMakeCache.txt +++ /dev/null @@ -1,326 +0,0 @@ -# This is the CMakeCache file. -# For build in directory: /lofar/SAS/TMSS/src -# It was generated by CMake: /usr/bin/cmake -# You can edit this file to change values found and used by cmake. -# If you do not want to change any of the values, simply exit the editor. -# If you do want to change a value, simply edit, save, and exit the editor. -# The syntax for the file is as follows: -# KEY:TYPE=VALUE -# KEY is the name of a variable in the cache. -# TYPE is a hint to GUIs for the type of VALUE, DO NOT EDIT TYPE!. -# VALUE is the current value for the KEY. - -######################## -# EXTERNAL cache entries -######################## - -//No help, variable specified on the command line. -BUILD_PACKAGES:UNINITIALIZED=TMSS - -//Path to a program. -CMAKE_AR:FILEPATH=/usr/bin/ar - -//For backwards compatibility, what version of CMake commands and -// syntax should this version of CMake try to support. -CMAKE_BACKWARDS_COMPATIBILITY:STRING=2.4 - -//Choose the type of build, options are: None(CMAKE_CXX_FLAGS or -// CMAKE_C_FLAGS used) Debug Release RelWithDebInfo MinSizeRel. -CMAKE_BUILD_TYPE:STRING= - -//Enable/Disable color output during build. -CMAKE_COLOR_MAKEFILE:BOOL=ON - -//CXX compiler. -CMAKE_CXX_COMPILER:FILEPATH=/usr/bin/c++ - -//Flags used by the compiler during all build types. -CMAKE_CXX_FLAGS:STRING= - -//Flags used by the compiler during debug builds. -CMAKE_CXX_FLAGS_DEBUG:STRING=-g - -//Flags used by the compiler during release minsize builds. -CMAKE_CXX_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG - -//Flags used by the compiler during release builds (/MD /Ob1 /Oi -// /Ot /Oy /Gs will produce slightly less optimized but smaller -// files). -CMAKE_CXX_FLAGS_RELEASE:STRING=-O3 -DNDEBUG - -//Flags used by the compiler during Release with Debug Info builds. -CMAKE_CXX_FLAGS_RELWITHDEBINFO:STRING=-O2 -g -DNDEBUG - -//C compiler. -CMAKE_C_COMPILER:FILEPATH=/usr/bin/cc - -//Flags used by the compiler during all build types. -CMAKE_C_FLAGS:STRING= - -//Flags used by the compiler during debug builds. -CMAKE_C_FLAGS_DEBUG:STRING=-g - -//Flags used by the compiler during release minsize builds. -CMAKE_C_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG - -//Flags used by the compiler during release builds (/MD /Ob1 /Oi -// /Ot /Oy /Gs will produce slightly less optimized but smaller -// files). -CMAKE_C_FLAGS_RELEASE:STRING=-O3 -DNDEBUG - -//Flags used by the compiler during Release with Debug Info builds. -CMAKE_C_FLAGS_RELWITHDEBINFO:STRING=-O2 -g -DNDEBUG - -//Flags used by the linker. -CMAKE_EXE_LINKER_FLAGS:STRING=' ' - -//Flags used by the linker during debug builds. -CMAKE_EXE_LINKER_FLAGS_DEBUG:STRING= - -//Flags used by the linker during release minsize builds. -CMAKE_EXE_LINKER_FLAGS_MINSIZEREL:STRING= - -//Flags used by the linker during release builds. -CMAKE_EXE_LINKER_FLAGS_RELEASE:STRING= - -//Flags used by the linker during Release with Debug Info builds. -CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO:STRING= - -//Enable/Disable output of compile commands during generation. -CMAKE_EXPORT_COMPILE_COMMANDS:BOOL=OFF - -//Install path prefix, prepended onto install directories. -CMAKE_INSTALL_PREFIX:PATH=/usr/local - -//Path to a program. 
-CMAKE_LINKER:FILEPATH=/usr/bin/ld - -//Path to a program. -CMAKE_MAKE_PROGRAM:FILEPATH=/usr/bin/gmake - -//Flags used by the linker during the creation of modules. -CMAKE_MODULE_LINKER_FLAGS:STRING=' ' - -//Flags used by the linker during debug builds. -CMAKE_MODULE_LINKER_FLAGS_DEBUG:STRING= - -//Flags used by the linker during release minsize builds. -CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL:STRING= - -//Flags used by the linker during release builds. -CMAKE_MODULE_LINKER_FLAGS_RELEASE:STRING= - -//Flags used by the linker during Release with Debug Info builds. -CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO:STRING= - -//Path to a program. -CMAKE_NM:FILEPATH=/usr/bin/nm - -//Path to a program. -CMAKE_OBJCOPY:FILEPATH=/usr/bin/objcopy - -//Path to a program. -CMAKE_OBJDUMP:FILEPATH=/usr/bin/objdump - -//Value Computed by CMake -CMAKE_PROJECT_NAME:STATIC=Project - -//Path to a program. -CMAKE_RANLIB:FILEPATH=/usr/bin/ranlib - -//Flags used by the linker during the creation of dll's. -CMAKE_SHARED_LINKER_FLAGS:STRING=' ' - -//Flags used by the linker during debug builds. -CMAKE_SHARED_LINKER_FLAGS_DEBUG:STRING= - -//Flags used by the linker during release minsize builds. -CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL:STRING= - -//Flags used by the linker during release builds. -CMAKE_SHARED_LINKER_FLAGS_RELEASE:STRING= - -//Flags used by the linker during Release with Debug Info builds. -CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO:STRING= - -//If set, runtime paths are not added when installing shared libraries, -// but are added when building. -CMAKE_SKIP_INSTALL_RPATH:BOOL=NO - -//If set, runtime paths are not added when using shared libraries. -CMAKE_SKIP_RPATH:BOOL=NO - -//Flags used by the linker during the creation of static libraries. -CMAKE_STATIC_LINKER_FLAGS:STRING= - -//Flags used by the linker during debug builds. -CMAKE_STATIC_LINKER_FLAGS_DEBUG:STRING= - -//Flags used by the linker during release minsize builds. -CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL:STRING= - -//Flags used by the linker during release builds. -CMAKE_STATIC_LINKER_FLAGS_RELEASE:STRING= - -//Flags used by the linker during Release with Debug Info builds. -CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO:STRING= - -//Path to a program. -CMAKE_STRIP:FILEPATH=/usr/bin/strip - -//If true, cmake will use relative paths in makefiles and projects. -CMAKE_USE_RELATIVE_PATHS:BOOL=OFF - -//If this value is on, makefiles will be generated without the -// .SILENT directive, and all commands will be echoed to the console -// during the make. This is useful for debugging only. With Visual -// Studio IDE projects all commands are done without /nologo. -CMAKE_VERBOSE_MAKEFILE:BOOL=FALSE - -//Single output directory for building all executables. -EXECUTABLE_OUTPUT_PATH:PATH= - -//Single output directory for building all libraries. -LIBRARY_OUTPUT_PATH:PATH= - -//Value Computed by CMake -Project_BINARY_DIR:STATIC=/lofar/SAS/TMSS/src - -//Value Computed by CMake -Project_SOURCE_DIR:STATIC=/lofar/SAS - - -######################## -# INTERNAL cache entries -######################## - -//ADVANCED property for variable: CMAKE_AR -CMAKE_AR-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_BUILD_TOOL -CMAKE_BUILD_TOOL-ADVANCED:INTERNAL=1 -//What is the target build tool cmake is generating for. 
-CMAKE_BUILD_TOOL:INTERNAL=/usr/bin/gmake -//This is the directory where this CMakeCache.txt was created -CMAKE_CACHEFILE_DIR:INTERNAL=/lofar/SAS/TMSS/src -//Major version of cmake used to create the current loaded cache -CMAKE_CACHE_MAJOR_VERSION:INTERNAL=2 -//Minor version of cmake used to create the current loaded cache -CMAKE_CACHE_MINOR_VERSION:INTERNAL=8 -//Patch version of cmake used to create the current loaded cache -CMAKE_CACHE_PATCH_VERSION:INTERNAL=12 -//ADVANCED property for variable: CMAKE_COLOR_MAKEFILE -CMAKE_COLOR_MAKEFILE-ADVANCED:INTERNAL=1 -//Path to CMake executable. -CMAKE_COMMAND:INTERNAL=/usr/bin/cmake -//Path to cpack program executable. -CMAKE_CPACK_COMMAND:INTERNAL=/usr/bin/cpack -//Path to ctest program executable. -CMAKE_CTEST_COMMAND:INTERNAL=/usr/bin/ctest -//ADVANCED property for variable: CMAKE_CXX_COMPILER -CMAKE_CXX_COMPILER-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_CXX_FLAGS -CMAKE_CXX_FLAGS-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_CXX_FLAGS_DEBUG -CMAKE_CXX_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_CXX_FLAGS_MINSIZEREL -CMAKE_CXX_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELEASE -CMAKE_CXX_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELWITHDEBINFO -CMAKE_CXX_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_C_COMPILER -CMAKE_C_COMPILER-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_C_FLAGS -CMAKE_C_FLAGS-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_C_FLAGS_DEBUG -CMAKE_C_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_C_FLAGS_MINSIZEREL -CMAKE_C_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_C_FLAGS_RELEASE -CMAKE_C_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_C_FLAGS_RELWITHDEBINFO -CMAKE_C_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//Path to cache edit program executable. -CMAKE_EDIT_COMMAND:INTERNAL=/usr/bin/ccmake -//Executable file format -CMAKE_EXECUTABLE_FORMAT:INTERNAL=ELF -//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS -CMAKE_EXE_LINKER_FLAGS-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_DEBUG -CMAKE_EXE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_MINSIZEREL -CMAKE_EXE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELEASE -CMAKE_EXE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO -CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_EXPORT_COMPILE_COMMANDS -CMAKE_EXPORT_COMPILE_COMMANDS-ADVANCED:INTERNAL=1 -//Name of generator. -CMAKE_GENERATOR:INTERNAL=Unix Makefiles -//Name of generator toolset. -CMAKE_GENERATOR_TOOLSET:INTERNAL= -//Start directory with the top level CMakeLists.txt file for this -// project -CMAKE_HOME_DIRECTORY:INTERNAL=/lofar/SAS -//Install .so files without execute permission. 
-CMAKE_INSTALL_SO_NO_EXE:INTERNAL=0 -//ADVANCED property for variable: CMAKE_LINKER -CMAKE_LINKER-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_MAKE_PROGRAM -CMAKE_MAKE_PROGRAM-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS -CMAKE_MODULE_LINKER_FLAGS-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_DEBUG -CMAKE_MODULE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL -CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELEASE -CMAKE_MODULE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO -CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_NM -CMAKE_NM-ADVANCED:INTERNAL=1 -//number of local generators -CMAKE_NUMBER_OF_LOCAL_GENERATORS:INTERNAL=1 -//ADVANCED property for variable: CMAKE_OBJCOPY -CMAKE_OBJCOPY-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_OBJDUMP -CMAKE_OBJDUMP-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_RANLIB -CMAKE_RANLIB-ADVANCED:INTERNAL=1 -//Path to CMake installation. -CMAKE_ROOT:INTERNAL=/usr/share/cmake -//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS -CMAKE_SHARED_LINKER_FLAGS-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_DEBUG -CMAKE_SHARED_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL -CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELEASE -CMAKE_SHARED_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO -CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_SKIP_INSTALL_RPATH -CMAKE_SKIP_INSTALL_RPATH-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_SKIP_RPATH -CMAKE_SKIP_RPATH-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS -CMAKE_STATIC_LINKER_FLAGS-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_DEBUG -CMAKE_STATIC_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL -CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_RELEASE -CMAKE_STATIC_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO -CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_STRIP -CMAKE_STRIP-ADVANCED:INTERNAL=1 -//uname command -CMAKE_UNAME:INTERNAL=/usr/bin/uname -//ADVANCED property for variable: CMAKE_USE_RELATIVE_PATHS -CMAKE_USE_RELATIVE_PATHS-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_VERBOSE_MAKEFILE -CMAKE_VERBOSE_MAKEFILE-ADVANCED:INTERNAL=1 - diff --git a/SAS/TMSS/src/CMakeLists.txt b/SAS/TMSS/src/CMakeLists.txt index fd5a8389a74c27f43c3def1fadb5a87813d9212f..1b99aca609835d03bc2d0a7714fac842de58eb63 100644 --- a/SAS/TMSS/src/CMakeLists.txt +++ b/SAS/TMSS/src/CMakeLists.txt @@ -1,6 +1,6 @@ set(USE_PYTHON_COMPILATION Off) -lofar_find_package(PythonInterp 3.4 REQUIRED) +lofar_find_package(PythonInterp 3.5 REQUIRED) #lofar_find_package(PostgreSQL 9.4) include(PythonInstall) @@ -17,6 +17,7 @@ 
 find_python_module(django_jsonforms REQUIRED)        # pip3 install django-jsonforms
 find_python_module(django_json_widget REQUIRED)      # pip3 install django-json-widget
 find_python_module(jsoneditor REQUIRED)              # pip3 install django-jsoneditor
 find_python_module(jsonschema REQUIRED)              # pip3 install jsonschema
+find_python_module(astropy REQUIRED)                 # pip3 install astropy
 
 # modules for swagger API export
 find_python_module(drf_yasg REQUIRED)                # pip install drf-yasg
diff --git a/SAS/TMSS/src/migrate_momdb_to_tmss.py b/SAS/TMSS/src/migrate_momdb_to_tmss.py
index 13efa43bbc7759f453875c51cdbfb3f9b5734fb9..e2d0c8102979755204db98ddc326c00a62a44230 100755
--- a/SAS/TMSS/src/migrate_momdb_to_tmss.py
+++ b/SAS/TMSS/src/migrate_momdb_to_tmss.py
@@ -512,8 +512,7 @@ def create_subtask_trees_for_project_in_momdb(project_mom2id, project):
                  "priority": project.priority_rank,  # todo: correct to derive from project?
                  # optional:
                  "start_time": start_time,
-                 "stop_time": stop_time,
-                 "schedule_method": models.ScheduleMethod.objects.get(value="manual"),  # todo: correct? Or leave None?
+                 "stop_time": stop_time
                  # "created_or_updated_by_user" = None,
                  # "raw_feedback" = None,
                  # "do_cancel": None,
diff --git a/SAS/TMSS/src/tmss/exceptions.py b/SAS/TMSS/src/tmss/exceptions.py
index a320dbd527a5a58a0d7274836beb66f9f5387c1c..e45ba40745dbfac84a842d9334b3fd687ad2cc23 100644
--- a/SAS/TMSS/src/tmss/exceptions.py
+++ b/SAS/TMSS/src/tmss/exceptions.py
@@ -22,3 +22,11 @@ class SubtaskSchedulingException(SchedulingException):
 
 class TaskSchedulingException(SchedulingException):
     pass
+
+class DynamicSchedulingException(SchedulingException):
+    pass
+
+class UnknownTemplateException(TMSSException):
+    '''raised when TMSS tries to base its processing routines on the chosen template, but this specific template is unknown.'''
+    pass
+
diff --git a/SAS/TMSS/src/tmss/settings.py b/SAS/TMSS/src/tmss/settings.py
index 46acbc47faf5d67a2606fe9b5d365308abdaeeb6..7177ac4ae27cbbe4c020910eafa1846bece0d872 100644
--- a/SAS/TMSS/src/tmss/settings.py
+++ b/SAS/TMSS/src/tmss/settings.py
@@ -24,52 +24,21 @@ logger = logging.getLogger(__name__)
 LOGGING = {
     'version': 1,
     'disable_existing_loggers': False,
-    'filters': {
-        'require_debug_false': {
-            '()': 'django.utils.log.RequireDebugFalse',
-        },
-        'require_debug_true': {
-            '()': 'django.utils.log.RequireDebugTrue',
-        },
-    },
     'formatters': {
-        'django.server': {
-            '()': 'django.utils.log.ServerFormatter',
-            'format': '%(asctime)s %(levelname)s %(message)s',
-        },
-        'lofar': {
+        'lofar_formatter': {
             'format': '%(asctime)s %(levelname)s %(message)s',
         },
     },
     'handlers': {
         'console': {
             'level': 'DEBUG',
-            'filters': ['require_debug_true'],
             'class': 'logging.StreamHandler',
-        },
-        'django.server': {
-            'level': 'DEBUG',
-            'class': 'logging.StreamHandler',
-            'formatter': 'django.server',
-        },
-        'mail_admins': {
-            'level': 'ERROR',
-            'filters': ['require_debug_false'],
-            'class': 'django.utils.log.AdminEmailHandler'
-        },
-        'lofar': {
-            'level': 'DEBUG',
-            'class': 'logging.StreamHandler',
-            'formatter': 'lofar',
+            'formatter': 'lofar_formatter',
         },
     },
     'loggers': {
-        'django': {
-            'handlers': ['console', 'mail_admins'],
-            'level': 'INFO',
-        },
         'django.server': {
-            'handlers': ['django.server'],
+            'handlers': ['console'],
             'level': 'INFO',
             'propagate': False,
         },
@@ -82,13 +51,15 @@ LOGGING = {
             'level': 'DEBUG',  # change debug level as appropiate
             'propagate': False,
         },
-        'django.db.backends': {
-            'level': 'INFO',
-            'handlers': ['console'],
-        },
+        # 'django.db.backends': {  # uncomment to enable logging of each db query. Very spammy and slow, but also useful for performance improvement. Gives even more detail/insight than the django debug toolbar.
+        #     'level': 'DEBUG',
+        #     'handlers': ['console'],
+        #     'propagate': False,
+        # },
         'lofar': {
-            'handlers': ['lofar'],
+            'handlers': ['console'],
             'level': 'INFO',
+            'propagate': False,
         },
     }
 }
@@ -121,20 +92,9 @@ INSTALLED_APPS = [
     'drf_yasg',
     'django_filters',
     'material',
-    'material.frontend',
-    'viewflow',
-    'viewflow.frontend',
-    'lofar.sas.tmss.tmss.workflowapp',
+    'material.frontend'
 ]
-
-def show_debug_toolbar(*args, **kwargs):
-    return os.environ.get('SHOW_DJANGO_DEBUG_TOOLBAR', False)
-
-DEBUG_TOOLBAR_CONFIG = {
-    'SHOW_TOOLBAR_CALLBACK': show_debug_toolbar
-}
-
 MIDDLEWARE = [
     'django.middleware.gzip.GZipMiddleware',
     'django.middleware.security.SecurityMiddleware',
@@ -146,10 +106,19 @@ MIDDLEWARE = [
     'django.middleware.clickjacking.XFrameOptionsMiddleware'
 ]
 
+def show_debug_toolbar(*args, **kwargs):
+    return os.environ.get('SHOW_DJANGO_DEBUG_TOOLBAR', 'False').lower() in ('true', '1', 'on')  # note: a plain bool() on the env var would treat any non-empty string, even "False", as True
+
 if show_debug_toolbar():
+    DEBUG_TOOLBAR_CONFIG = { 'SHOW_TOOLBAR_CALLBACK': show_debug_toolbar }
     INSTALLED_APPS.append('debug_toolbar')
     MIDDLEWARE.insert(MIDDLEWARE.index('django.middleware.gzip.GZipMiddleware')+1, 'debug_toolbar.middleware.DebugToolbarMiddleware')
+
+if os.environ.get('TMSS_ENABLE_VIEWFLOW', 'False').lower() in ('true', '1', 'on'):  # same boolean-string handling as above
+    INSTALLED_APPS.extend(['viewflow', 'viewflow.frontend', 'lofar.sas.tmss.tmss.workflowapp'])
+
+
 ROOT_URLCONF = 'lofar.sas.tmss.tmss.urls'
 
 TEMPLATES = [
diff --git a/SAS/TMSS/src/tmss/tmssapp/conversions.py b/SAS/TMSS/src/tmss/tmssapp/conversions.py
index ce112f7b30b8f697baf91d4da9202899703715ba..af5d004637c17f20118bd660e4e761b22fef288a 100644
--- a/SAS/TMSS/src/tmss/tmssapp/conversions.py
+++ b/SAS/TMSS/src/tmss/tmssapp/conversions.py
@@ -1,11 +1,14 @@
 from astropy.time import Time
 import astropy.units
-from lofar.lta.sip import station_coordinates
-from datetime import datetime
+from datetime import datetime, timedelta, time as dtime
 from astropy.coordinates.earth import EarthLocation
-from astropy.coordinates import Angle
+from astropy.coordinates import Angle, get_body
 from astroplan.observer import Observer
+import astropy.time
+from functools import lru_cache
 
+import logging
+logger = logging.getLogger(__name__)
 
 def create_astroplan_observer_for_station(station: str) -> Observer:
     '''
@@ -13,31 +16,34 @@ def create_astroplan_observer_for_station(station: str) -> Observer:
     :param station: a station name, e.g. "CS002"
     :return: astroplan.observer.Observer object
     '''
-
+    from lofar.lta.sip import station_coordinates
     coords = station_coordinates.parse_station_coordinates()["%s_LBA" % station.upper()]
     location = EarthLocation.from_geocentric(x=coords['x'], y=coords['y'], z=coords['z'], unit=astropy.units.m)
     observer = Observer(location, name="LOFAR", timezone="UTC")
     return observer
 
+
+# default angle to the horizon at which the sunset/sunrise starts and ends, as per LOFAR definition.
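+# (for reference: with this definition, "sunrise" below is the interval in which the sun
+#  climbs from 10 degrees below to 10 degrees above the horizon, "sunset" the reverse,
+#  "day" runs from sunrise end to sunset start, and "night" from sunset end to the next
+#  sunrise start; that is how timestamps_and_stations_to_sun_rise_and_set carves up the
+#  intervals it returns)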
 SUN_SET_RISE_ANGLE_TO_HORIZON = Angle(10, unit=astropy.units.deg)
+SUN_SET_RISE_PRECISION = 30  # n_grid_points: higher is more precise but very costly; astropy defaults to 150. Errors can now be in the minutes; increase this if that is not good enough.
 
-def timestamps_and_stations_to_sun_rise_and_set(timestamps: [datetime], stations: [str], angle_to_horizon: Angle=SUN_SET_RISE_ANGLE_TO_HORIZON) -> dict:
+@lru_cache(maxsize=256, typed=False)  # lru_cache does not like lists, so use tuples to allow caching
+def timestamps_and_stations_to_sun_rise_and_set(timestamps: tuple, stations: tuple, angle_to_horizon: Angle=SUN_SET_RISE_ANGLE_TO_HORIZON) -> dict:
     """
     compute sunrise, sunset, day and night of the given stations at the given timestamps
-    :param timestamps: list of datetimes, e.g. [datetime(2020, 1, 1), datetime(2020, 1, 2)]
-    :param stations: list of station names, e.g. ["CS001"]
+    :param timestamps: tuple of datetimes, e.g. (datetime(2020, 1, 1), datetime(2020, 1, 2))
+    :param stations: tuple of station names, e.g. ("CS002",)
     :return A dict that maps station names to a nested dict that contains lists of start and end times for sunrise, sunset, etc, on each requested date.
     E.g.
-    {"CS001":
-        { "sunrise": [{"start": (2020, 1, 1, 6, 0, 0)), "end": (2020, 1, 1, 6, 30, 0)},
-                      {"start": (2020, 1, 2, 6, 0, 0)), "end": (2020, 1, 2, 6, 30, 0)}],
-          "sunset": [{"start": (2020, 1, 1, 18, 0, 0)), "end": (2020, 1, 1, 18, 30, 0)},
-                     {"start": (2020, 1, 2, 18, 0, 0)), "end": (2020, 1, 2, 18, 30, 0)}],
-          "day": [{"start": (2020, 1, 1, 6, 30, 0)), "end": (2020, 1, 1, 18, 00, 0)},
-                  {"start": (2020, 1, 2, 6, 30, 0)), "end": (2020, 1, 2, 18, 00, 0)}],
-          "night": [{"start": (2020, 1, 1, 18, 30, 0)), "end": (2020, 1, 2, 6, 0, 0)},
-                    {"start": (2020, 1, 2, 18,3 0, 0)), "end": (2020, 1, 3, 6, 0, 0)}],
+    {"CS002":
+        { "sunrise": [{"start": datetime(2020, 1, 1, 6, 0, 0), "end": datetime(2020, 1, 1, 6, 30, 0)},
+                      {"start": datetime(2020, 1, 2, 6, 0, 0), "end": datetime(2020, 1, 2, 6, 30, 0)}],
+          "sunset": [{"start": datetime(2020, 1, 1, 18, 0, 0), "end": datetime(2020, 1, 1, 18, 30, 0)},
+                     {"start": datetime(2020, 1, 2, 18, 0, 0), "end": datetime(2020, 1, 2, 18, 30, 0)}],
+          "day": [{"start": datetime(2020, 1, 1, 6, 30, 0), "end": datetime(2020, 1, 1, 18, 0, 0)},
+                  {"start": datetime(2020, 1, 2, 6, 30, 0), "end": datetime(2020, 1, 2, 18, 0, 0)}],
+          "night": [{"start": datetime(2020, 1, 1, 18, 30, 0), "end": datetime(2020, 1, 2, 6, 0, 0)},
+                    {"start": datetime(2020, 1, 2, 18, 30, 0), "end": datetime(2020, 1, 3, 6, 0, 0)}],
     }
     }
     """
@@ -45,13 +51,15 @@ def timestamps_and_stations_to_sun_rise_and_set(timestamps: [datetime], stations
     for station in stations:
         for timestamp in timestamps:
             observer = create_astroplan_observer_for_station(station)
-            sunrise_start = observer.sun_rise_time(time=Time(timestamp), which='previous')
+            sunrise_start = observer.sun_rise_time(time=Time(timestamp), which='previous', n_grid_points=SUN_SET_RISE_PRECISION)
             if sunrise_start.to_datetime().date() < timestamp.date():
-                sunrise_start = observer.sun_rise_time(time=Time(timestamp), horizon=-angle_to_horizon, which='next')
-            sunrise_end = observer.sun_rise_time(time=Time(timestamp), horizon=angle_to_horizon, which='next')
-            sunset_start = observer.sun_set_time(time=sunrise_end, horizon=angle_to_horizon, which='next')
-            sunset_end = observer.sun_set_time(time=sunrise_end, horizon=-angle_to_horizon, which='next')
-            sunrise_next_start = observer.sun_rise_time(time=sunset_end, horizon=-angle_to_horizon, which='next')
+                sunrise_start = observer.sun_rise_time(time=Time(timestamp), horizon=-angle_to_horizon, which='nearest', n_grid_points=SUN_SET_RISE_PRECISION)
+                if sunrise_start.to_datetime().date() < timestamp.date():
+                    sunrise_start = observer.sun_rise_time(time=Time(timestamp), horizon=-angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION)
+            sunrise_end = observer.sun_rise_time(time=Time(timestamp), horizon=angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION)
+            sunset_start = observer.sun_set_time(time=sunrise_end, horizon=angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION)
+            sunset_end = observer.sun_set_time(time=sunrise_end, horizon=-angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION)
+            sunrise_next_start = observer.sun_rise_time(time=sunset_end, horizon=-angle_to_horizon, which='next', n_grid_points=SUN_SET_RISE_PRECISION)
             return_dict.setdefault(station, {}).setdefault("sunrise", []).append({"start": sunrise_start.to_datetime(), "end": sunrise_end.to_datetime()})
             return_dict[station].setdefault("sunset", []).append({"start": sunset_start.to_datetime(), "end": sunset_end.to_datetime()})
             return_dict[station].setdefault("day", []).append({"start": sunrise_end.to_datetime(), "end": sunset_start.to_datetime()})
@@ -59,6 +67,39 @@ def timestamps_and_stations_to_sun_rise_and_set(timestamps: [datetime], stations
     return return_dict
 
 
+# Depending on usage patterns, we should consider refactoring this a little so that we cache on a function with a single timestamp as input. Requests with similar (but not identical) timestamps or bodies currently make no use of cached results for the subset computed in previous requests.
+@lru_cache(maxsize=256, typed=False)  # lru_cache does not like lists, so use tuples to allow caching
+def coordinates_and_timestamps_to_separation_from_bodies(angle1: float, angle2: float, direction_type: str, timestamps: tuple, bodies: tuple) -> dict:
+    """
+    compute angular distances of the given sky coordinates from the given solar system bodies at the given timestamps (seen from the LOFAR core)
+    :param angle1: first angle of celestial coordinates, e.g. RA
+    :param angle2: second angle of celestial coordinates, e.g. Dec
+    :param direction_type: direction_type of celestial coordinates, e.g. 'J2000'
+    :param timestamps: tuple of datetimes, e.g. (datetime(2020, 1, 1, 15, 0, 0), datetime(2020, 1, 1, 16, 0, 0))
+    :param bodies: tuple of solar system bodies, e.g. ('sun', 'moon', 'jupiter')
+    :return A dict that maps each body to a dict that maps the given timestamp to a separation angle from the given coordinate.
+    E.g.
+ { + "sun": {datetime(2020, 1, 1, 6, 0, 0): Angle("0.7rad"), datetime(2020, 1, 1, 7, 0, 0): Angle("0.7rad")}, + "moon": {datetime(2020, 1, 1, 6, 0, 0): Angle("0.4rad"), datetime(2020, 1, 1, 7, 0, 0): Angle("0.4rad")}, + "jupiter": {datetime(2020, 1, 1, 6, 0, 0): Angle("2.7rad"), datetime(2020, 1, 1, 7, 0, 0): Angle("2.7rad")} + } + """ + if direction_type == "J2000": + coord = astropy.coordinates.SkyCoord(ra=angle1, dec=angle2, unit=astropy.units.deg) + else: + raise ValueError("Do not know how to convert direction_type=%s to SkyCoord" % direction_type) + return_dict = {} + for body in bodies: + location = create_astroplan_observer_for_station("CS002").location + for timestamp in timestamps: + # get body coords at timestamp + body_coord = get_body(body=body, time=astropy.time.Time(timestamp), location=location) + angle = coord.separation(body_coord) + return_dict.setdefault(body, {})[timestamp] = angle + return return_dict + + def local_sidereal_time_for_utc_and_station(timestamp: datetime = None, station: str = 'CS002', field: str = 'LBA', @@ -71,6 +112,8 @@ def local_sidereal_time_for_utc_and_station(timestamp: datetime = None, :param kind: 'mean' or 'apparent' :return: """ + from lofar.lta.sip import station_coordinates + if timestamp is None: timestamp = datetime.utcnow() station_coords = station_coordinates.parse_station_coordinates() diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py index 7faae82d9b3a694ab79ebddd5b3d0c5676d8ffac..4ae9725ed8dbaa5c4897e7d309adfdb2fe9ff126 100644 --- a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py +++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 3.0.7 on 2020-10-29 16:37 +# Generated by Django 3.0.9 on 2020-11-24 11:24 from django.conf import settings import django.contrib.postgres.fields @@ -17,34 +17,6 @@ class Migration(migrations.Migration): ] operations = [ - migrations.CreateModel( - name='SchedulingUnitBlueprintSummary', - fields=[ - ('id', models.IntegerField(primary_key=True, serialize=False)), - ('sub_id', models.IntegerField()), - ('taskblueprint_id', models.IntegerField()), - ('task_type', models.CharField(max_length=128)), - ('derived_task_status', models.CharField(max_length=128)), - ], - options={ - 'db_table': 'tmssapp_schedulingunitblueprintsummary', - 'managed': False, - }, - ), - migrations.CreateModel( - name='TaskBlueprintSummary', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('taskblueprint_id', models.IntegerField()), - ('subtask_id', models.IntegerField()), - ('substate', models.CharField(max_length=128)), - ('subtask_type', models.CharField(max_length=128)), - ], - options={ - 'db_table': 'tmssapp_taskblueprintsummary', - 'managed': False, - }, - ), migrations.CreateModel( name='Algorithm', fields=[ @@ -397,6 +369,7 @@ class Migration(migrations.Migration): ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)), ('priority_rank', models.FloatField(help_text='Priority of this project w.r.t. other projects. Projects can interrupt observations of lower-priority projects.')), ('trigger_priority', models.IntegerField(default=1000, help_text='Priority of this project w.r.t. triggers.')), + ('auto_ingest', models.BooleanField(default=False, help_text='True if The data is ingested when the other scheduling unit tasks are finished. 
False if The data is ingested after approval in the QA validation workflow. At the end of this a flag is set that the data can be ingested.')), ('can_trigger', models.BooleanField(default=False, help_text='True if this project is allowed to supply observation requests on the fly, possibly interrupting currently running observations (responsive telescope).')), ('private_data', models.BooleanField(default=True, help_text='True if data of this project is sensitive. Sensitive data is not made public.')), ('expert', models.BooleanField(default=False, help_text='Expert projects put more responsibility on the PI.')), @@ -483,15 +456,6 @@ class Migration(migrations.Migration): 'abstract': False, }, ), - migrations.CreateModel( - name='ScheduleMethod', - fields=[ - ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)), - ], - options={ - 'abstract': False, - }, - ), migrations.CreateModel( name='SchedulingConstraintsTemplate', fields=[ @@ -543,6 +507,8 @@ class Migration(migrations.Migration): ('description', models.CharField(blank=True, default='', help_text='A longer description of this object.', max_length=255)), ('requirements_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Scheduling and/or quality requirements for this scheduling unit (IMMUTABLE).')), ('do_cancel', models.BooleanField()), + ('ingest_permission_required', models.BooleanField(default=False, help_text='Explicit permission is needed before the task.')), + ('ingest_permission_granted_since', models.DateTimeField(help_text='Moment of object creation.', null=True)), ], options={ 'abstract': False, @@ -560,6 +526,7 @@ class Migration(migrations.Migration): ('requirements_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Scheduling and/or quality requirements for this run.')), ('generator_instance_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Parameter value that generated this run draft (NULLable).', null=True)), ('scheduling_constraints_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Scheduling Constraints for this run.', null=True)), + ('ingest_permission_required', models.BooleanField(default=False, help_text='Explicit permission is needed before the task.')), ], options={ 'abstract': False, @@ -630,7 +597,6 @@ class Migration(migrations.Migration): ('stop_time', models.DateTimeField(help_text='Stop this subtask at the specified time (NULLable).', null=True)), ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Final specifications, as input for the controller.')), ('do_cancel', models.DateTimeField(help_text='Timestamp when the subtask has been ordered to cancel (NULLable).', null=True)), - ('priority', models.IntegerField(help_text='Absolute priority of this subtask (higher value means more important).')), ('raw_feedback', models.CharField(help_text='The raw feedback for this Subtask', max_length=1048576, null=True)), ], options={ @@ -852,9 +818,9 @@ class Migration(migrations.Migration): ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), ('time_offset', models.IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.')), - ('first', models.ForeignKey(help_text='First Task Draft to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='first_to_connect', to='tmssapp.TaskDraft')), + ('first', 
models.ForeignKey(help_text='First Task Draft to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='first_scheduling_relation', to='tmssapp.TaskDraft')), ('placement', models.ForeignKey(help_text='Task scheduling relation placement.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingRelationPlacement')), - ('second', models.ForeignKey(help_text='Second Task Draft to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_to_connect', to='tmssapp.TaskDraft')), + ('second', models.ForeignKey(help_text='Second Task Draft to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_scheduling_relation', to='tmssapp.TaskDraft')), ], options={ 'abstract': False, @@ -868,9 +834,9 @@ class Migration(migrations.Migration): ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), ('time_offset', models.IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.')), - ('first', models.ForeignKey(help_text='First Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='first_to_connect', to='tmssapp.TaskBlueprint')), + ('first', models.ForeignKey(help_text='First Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='first_scheduling_relation', to='tmssapp.TaskBlueprint')), ('placement', models.ForeignKey(default='after', help_text='Task scheduling relation placement.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingRelationPlacement')), - ('second', models.ForeignKey(help_text='Second Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_to_connect', to='tmssapp.TaskBlueprint')), + ('second', models.ForeignKey(help_text='Second Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_scheduling_relation', to='tmssapp.TaskBlueprint')), ], options={ 'abstract': False, @@ -1075,11 +1041,6 @@ class Migration(migrations.Migration): name='global_identifier', field=models.ForeignKey(editable=False, help_text='The global unique identifier for LTA SIP.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SIPidentifier'), ), - migrations.AddField( - model_name='subtask', - name='schedule_method', - field=models.ForeignKey(help_text='Which method to use for scheduling this Subtask. One of (MANUAL, BATCH, DYNAMIC).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ScheduleMethod'), - ), migrations.AddField( model_name='subtask', name='specifications_template', diff --git a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py index 6e58f28a9dcd373dc38be715dd609274e2e6deb1..188b5c3086547549a8f527febaf37f6749044238 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py @@ -47,6 +47,7 @@ class SubtaskState(AbstractChoice): DEFINED = "defined" SCHEDULING = "scheduling" SCHEDULED = "scheduled" + UNSCHEDULING = "unscheduling" QUEUEING = "queueing" QUEUED = "queued" STARTING = "starting" @@ -92,16 +93,6 @@ class Algorithm(AbstractChoice): AES256 = 'aes256' -class ScheduleMethod(AbstractChoice): - """Defines the model and predefined list of possible Algorithm's for DataproductHash. 
- The items in the Choices class below are automagically populated into the database via a data migration.""" - - class Choices(Enum): - MANUAL = 'manual' - BATCH = 'batch' - DYNAMIC = 'dynamic' - - # # Templates # @@ -152,8 +143,6 @@ class Subtask(BasicCommon): task_blueprint = ForeignKey('TaskBlueprint', related_name='subtasks', null=True, on_delete=SET_NULL, help_text='Task Blueprint to which this Subtask belongs.') specifications_template = ForeignKey('SubtaskTemplate', null=False, on_delete=PROTECT, help_text='Schema used for specifications_doc.') do_cancel = DateTimeField(null=True, help_text='Timestamp when the subtask has been ordered to cancel (NULLable).') - priority = IntegerField(help_text='Absolute priority of this subtask (higher value means more important).') - schedule_method = ForeignKey('ScheduleMethod', null=False, on_delete=PROTECT, help_text='Which method to use for scheduling this Subtask. One of (MANUAL, BATCH, DYNAMIC).') cluster = ForeignKey('Cluster', null=True, on_delete=PROTECT, help_text='Where the Subtask is scheduled to run (NULLable).') # resource_claim = ForeignKey("ResourceClaim", null=False, on_delete=PROTECT) # todo <-- how is this external reference supposed to work? created_or_updated_by_user = ForeignKey(User, null=True, editable=False, on_delete=PROTECT, help_text='The user who created / updated the subtask.') @@ -166,6 +155,34 @@ class Subtask(BasicCommon): # keep original state for logging self.__original_state_id = self.state_id + @property + def duration(self) -> timedelta: + '''the duration of this subtask (stop-start), or 0 if start/stop are None''' + if self.start_time is None or self.stop_time is None: + return timedelta(seconds=0) + return self.stop_time - self.start_time + + @property + def specified_duration(self) -> timedelta: + '''get the specified (or estimated) duration of this subtask based on the specified task duration and the subtask type''' + if self.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value: + # observations have a specified duration, so grab it from the spec. + return timedelta(seconds=self.task_blueprint.specifications_doc.get('duration', 0)) + + if self.specifications_template.type.value == SubtaskType.Choices.PIPELINE.value: + # pipelines usually do not have a specified duration, so make a guess (half the obs duration?). + return timedelta(seconds=self.task_blueprint.specifications_doc.get('duration', max(p.specified_duration.total_seconds() for p in self.predecessors)/2)) + + # other subtasktypes usually depend on cpu/data/network etc. So, make a guess (for now) + return timedelta(minutes=5) + + @staticmethod + def independent_subtasks() -> QuerySet: + '''return a QuerySet of all subtasks with no input (i.e. 
which are "independent" because they have no predecessors) + If you want the result, add .all() like so: Subtask.independent_subtasks().all() + ''' + return Subtask.objects.filter(inputs=None) + @property def successors(self) -> QuerySet: '''return the connect successor subtask(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets) @@ -188,6 +205,20 @@ class Subtask(BasicCommon): "INNER JOIN tmssapp_subtaskinput as st_input on st_input.producer_id = st_output.id\n" "WHERE st_input.subtask_id = %s", params=[self.id])) + @property + def input_dataproducts(self) -> QuerySet: + '''return the input dataproducts(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets) + If you want the result, add .all() like so: my_subtask.input_dataproducts.all() + ''' + return Dataproduct.objects.filter(subtaskinput__subtask_id=self.id) + + @property + def output_dataproducts(self) -> QuerySet: + '''return the output dataproducts(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets) + If you want the result, add .all() like so: my_subtask.input_dataproducts.all() + ''' + return Dataproduct.objects.filter(producer__subtask_id=self.id) + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): creating = self._state.adding # True on create, False on update @@ -204,16 +235,10 @@ class Subtask(BasicCommon): if duplicate_names: raise ValidationError("Pointings defined in the same Subtask must have unique names. Duplicate names %s in subtask id=%s." % (duplicate_names, self.pk)) - # check if we have a start time or there were predecessors + # check if we have a start time when scheduling if self.state.value == SubtaskState.Choices.SCHEDULED.value and self.__original_state_id == SubtaskState.Choices.SCHEDULING.value: if self.start_time is None: - if self.predecessors.all().count() == 0: raise SubtaskSchedulingException("Cannot schedule subtask id=%s when start time is 'None'." % (self.pk, )) - else: - self.start_time = datetime.utcnow() - - if self.state.value == SubtaskState.Choices.FINISHING.value: - self.stop_time = datetime.utcnow() super().save(force_insert, force_update, using, update_fields) diff --git a/SAS/TMSS/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/src/tmss/tmssapp/models/specification.py index 7ec6f980a09fb9dd3d765efb164611a5d898b8a6..503bb9dbe020d3e462d936e5dc2f47052a1453d5 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/specification.py @@ -21,6 +21,7 @@ from django.urls import reverse as revese_url from collections import Counter from django.utils.functional import cached_property + # # Common # @@ -150,12 +151,11 @@ class SchedulingRelationPlacement(AbstractChoice): BEFORE = "before" PARALLEL = "parallel" - class Flag(AbstractChoice): """Defines the model and predefined list of possible Flags to be used in Setting. 
The items in the Choices class below are automagically populated into the database via a data migration.""" class Choices(Enum): - AUTOSCHEDULE = "allow_scheduling_observations" + DYNAMIC_SCHEDULING_ENABLED = "dynamic_scheduling_enabled" @@ -375,35 +375,6 @@ class DefaultTaskRelationSelectionTemplate(BasicCommon): name = CharField(max_length=128, unique=True) template = ForeignKey("TaskRelationSelectionTemplate", on_delete=PROTECT) - -# -# DatabaseView objects -# -class TaskBlueprintSummary(Model): - taskblueprint_id = IntegerField() - subtask_id = IntegerField() - substate = CharField(max_length=128) - subtask_type = CharField(max_length=128) - - class Meta: - managed = False - db_table = 'tmssapp_taskblueprintsummary' - - -class SchedulingUnitBlueprintSummary(Model): - # Using in an id and ForeignKey is not common for a view BUT the id is a 'dummy' to be able to use in Django - # https://resources.rescale.com/using-database-views-in-django-orm/ - # otherwise an exception will be thrown - id = IntegerField(primary_key=True) - sub_id = IntegerField() - taskblueprint_id = IntegerField() - task_type = CharField(max_length=128) - derived_task_status = CharField(max_length=128) - - class Meta: - managed = False - db_table = 'tmssapp_schedulingunitblueprintsummary' - # # Instance Objects # @@ -440,6 +411,7 @@ class Project(NamedCommonPK): cycles = ManyToManyField('Cycle', related_name='projects', null=True, help_text='Cycles to which this project belongs (NULLable).') priority_rank = FloatField(null=False, help_text='Priority of this project w.r.t. other projects. Projects can interrupt observations of lower-priority projects.') # todo: add if needed: validators=[MinValueValidator(0.0), MaxValueValidator(1.0)] trigger_priority = IntegerField(default=1000, help_text='Priority of this project w.r.t. triggers.') # todo: verify meaning and add to help_text: "Triggers with higher priority than this threshold can interrupt observations of projects." + auto_ingest = BooleanField(default=False, help_text='True if the data is ingested automatically when the other scheduling unit tasks are finished. False if the data is ingested only after approval in the QA validation workflow, at the end of which a flag is set that the data can be ingested.') can_trigger = BooleanField(default=False, help_text='True if this project is allowed to supply observation requests on the fly, possibly interrupting currently running observations (responsive telescope).') private_data = BooleanField(default=True, help_text='True if data of this project is sensitive.
Sensitive data is not made public.') expert = BooleanField(default=False, help_text='Expert projects put more responsibility on the PI.') @@ -545,6 +517,7 @@ class SchedulingUnitDraft(NamedCommon): observation_strategy_template = ForeignKey('SchedulingUnitObservingStrategyTemplate', on_delete=PROTECT, null=True, help_text='Observation Strategy Template used to create the requirements_doc.') scheduling_constraints_doc = JSONField(help_text='Scheduling Constraints for this run.', null=True) scheduling_constraints_template = ForeignKey('SchedulingConstraintsTemplate', on_delete=CASCADE, null=True, help_text='Schema used for scheduling_constraints_doc.') + ingest_permission_required = BooleanField(default=False, help_text='Explicit permission is needed before the ingest task can run.') def save(self, force_insert=False, force_update=False, using=None, update_fields=None): if self.requirements_doc is not None and self.requirements_template_id and self.requirements_template.schema is not None: @@ -556,6 +529,11 @@ class SchedulingUnitDraft(NamedCommon): if self.scheduling_constraints_doc is not None and self.scheduling_constraints_template_id and self.scheduling_constraints_template.schema is not None: validate_json_against_schema(self.scheduling_constraints_doc, self.scheduling_constraints_template.schema)
+ # This code only runs when the object is not in the database yet, i.e. when self._state.adding is True (creating) + if self._state.adding and hasattr(self, 'scheduling_set') and self.scheduling_set.project.auto_ingest is False: + # When auto_ingest=False, the scheduling units will be created with ingest_permission_required = True + self.ingest_permission_required=True + annotate_validate_add_defaults_to_doc_using_template(self, 'requirements_doc', 'requirements_template') annotate_validate_add_defaults_to_doc_using_template(self, 'scheduling_constraints_doc', 'scheduling_constraints_template') super().save(force_insert, force_update, using, update_fields) @@ -603,11 +581,19 @@ class SchedulingUnitBlueprint(NamedCommon): requirements_doc = JSONField(help_text='Scheduling and/or quality requirements for this scheduling unit (IMMUTABLE).') do_cancel = BooleanField() + ingest_permission_required = BooleanField(default=False, help_text='Explicit permission is needed before the ingest task can run.') + ingest_permission_granted_since = DateTimeField(auto_now_add=False, null=True, help_text='Moment when ingest permission was granted (NULLable).') requirements_template = ForeignKey('SchedulingUnitTemplate', on_delete=CASCADE, help_text='Schema used for requirements_doc (IMMUTABLE).') draft = ForeignKey('SchedulingUnitDraft', related_name='scheduling_unit_blueprints', on_delete=CASCADE, help_text='Scheduling Unit Draft which this run instantiates.') def save(self, force_insert=False, force_update=False, using=None, update_fields=None): annotate_validate_add_defaults_to_doc_using_template(self, 'requirements_doc', 'requirements_template') + + # This code only runs when the object is not in the database yet, i.e. when self._state.adding is True (creating) + if self._state.adding and hasattr(self, 'draft') and self.draft.scheduling_set.project.auto_ingest is False: + # When auto_ingest=False, the scheduling units will be created with ingest_permission_required = True + self.ingest_permission_required=True + super().save(force_insert, force_update, using, update_fields) @@ -813,7 +799,7 @@ class TaskDraft(NamedCommon): def relative_start_time(self) -> datetime.timedelta: '''return the earliest relative start time of all subtasks of this task ''' - scheduling_relations = list(self.first_to_connect.all()) + list(self.second_to_connect.all()) + scheduling_relations = list(self.first_scheduling_relation.all()) + list(self.second_scheduling_relation.all()) for scheduling_relation in scheduling_relations: # sometimes self._id does not exist so use self.id instead to avoid Exception if hasattr(self, '_id'): @@ -934,9 +920,11 @@ class TaskBlueprint(NamedCommon): @cached_property def relative_start_time(self) -> datetime.timedelta: - '''return the earliest relative start time of all subtasks of this task + '''The relative start time is relative to the start time of the 'start' of the parent scheduling unit. + It's based on the scheduling_relations from the scheduling_unit's specification, + and hence it determines the order in which the tasks within the unit should be executed. ''' - scheduling_relations = list(self.first_to_connect.all()) + list(self.second_to_connect.all()) + scheduling_relations = list(self.first_scheduling_relation.all()) + list(self.second_scheduling_relation.all()) for scheduling_relation in scheduling_relations: # sometimes self._id does not exist so use self.id instead to avoid Exception if hasattr(self, '_id'): @@ -960,7 +948,8 @@ class TaskBlueprint(NamedCommon): @cached_property def relative_stop_time(self) -> datetime.timedelta: - '''return the latest relative stop time of all subtasks of this task + '''The relative_stop_time is the relative_start_time+duration. + See relative_start_time for an explanation of its intended usage.
''' # todo: when it was added, check if subtask.specifications_template.type.value == TaskType.Choices.OBSERVATION.value: try: @@ -1010,7 +999,7 @@ class TaskBlueprint(NamedCommon): return "defined" if len([s for s in subtasks if s['state'] == 'finished']) == nr_of_subtasks: - return "finished" + return "finished" if any(s for s in subtasks if s['state'] in ('cancelling', 'cancelled')): return "cancelled" @@ -1069,8 +1058,8 @@ class TaskRelationBlueprint(BasicCommon): class TaskSchedulingRelationBlueprint(BasicCommon): - first = ForeignKey('TaskBlueprint', related_name='first_to_connect', on_delete=CASCADE, help_text='First Task Blueprint to connect.') - second = ForeignKey('TaskBlueprint', related_name='second_to_connect', on_delete=CASCADE, help_text='Second Task Blueprint to connect.') + first = ForeignKey('TaskBlueprint', related_name='first_scheduling_relation', on_delete=CASCADE, help_text='First Task Blueprint to connect.') + second = ForeignKey('TaskBlueprint', related_name='second_scheduling_relation', on_delete=CASCADE, help_text='Second Task Blueprint to connect.') placement = ForeignKey('SchedulingRelationPlacement', null=False, default="after", on_delete=PROTECT, help_text='Task scheduling relation placement.') time_offset = IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.') @@ -1083,8 +1072,8 @@ class TaskSchedulingRelationBlueprint(BasicCommon): class TaskSchedulingRelationDraft(BasicCommon): - first = ForeignKey('TaskDraft', related_name='first_to_connect', on_delete=CASCADE, help_text='First Task Draft to connect.') - second = ForeignKey('TaskDraft', related_name='second_to_connect', on_delete=CASCADE, help_text='Second Task Draft to connect.') + first = ForeignKey('TaskDraft', related_name='first_scheduling_relation', on_delete=CASCADE, help_text='First Task Draft to connect.') + second = ForeignKey('TaskDraft', related_name='second_scheduling_relation', on_delete=CASCADE, help_text='Second Task Draft to connect.') placement = ForeignKey('SchedulingRelationPlacement', null=False, on_delete=PROTECT, help_text='Task scheduling relation placement.') time_offset = IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.') diff --git a/SAS/TMSS/src/tmss/tmssapp/populate.py b/SAS/TMSS/src/tmss/tmssapp/populate.py index b786248f34773046434364d3ddc887ecd6d59e3a..05ec07e83f2f102caa1f65d1bcadf8ffb3447935 100644 --- a/SAS/TMSS/src/tmss/tmssapp/populate.py +++ b/SAS/TMSS/src/tmss/tmssapp/populate.py @@ -35,12 +35,12 @@ def populate_choices(apps, schema_editor): :return: None ''' for choice_class in [Role, Datatype, Dataformat, CopyReason, - SubtaskState, SubtaskType, StationType, Algorithm, ScheduleMethod, SchedulingRelationPlacement, + SubtaskState, SubtaskType, StationType, Algorithm, SchedulingRelationPlacement, Flag, ProjectCategory, PeriodCategory, Quantity, TaskType]: choice_class.objects.bulk_create([choice_class(value=x.value) for x in choice_class.Choices]) def populate_settings(apps, schema_editor): - Setting.objects.create(name=Flag.objects.get(value='allow_scheduling_observations'), value=True) + Setting.objects.create(name=Flag.objects.get(value='dynamic_scheduling_enabled'), value=False) def populate_test_data(): """ @@ -52,7 +52,7 @@ def populate_test_data(): # only add (with expensive setup time) example data when developing/testing and we're not unittesting if isTestEnvironment() or isDevelopmentEnvironment(): from lofar.sas.tmss.tmss.exceptions import 
TMSSException - from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingSet_test_data, SchedulingUnitDraft_test_data + from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingUnitDraft_test_data from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprints_and_subtasks_from_scheduling_unit_draft, create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_draft from lofar.sas.tmss.tmss.tmssapp.subtasks import schedule_subtask from lofar.common.json_utils import get_default_json_object_for_schema @@ -60,48 +60,51 @@ def populate_test_data(): constraints_template = models.SchedulingConstraintsTemplate.objects.get(name="constraints") constraints_spec = get_default_json_object_for_schema(constraints_template.schema) - strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines") - - # create a Test Scheduling Set UC1 under project TMSS-Commissioning - tmss_project = models.Project.objects.get(name="TMSS-Commissioning") - for set_nr in range(2): - scheduling_set_data = SchedulingSet_test_data(name="Test Scheduling Set UC1 example %s" % (set_nr,), project=tmss_project) - scheduling_set = models.SchedulingSet.objects.create(**scheduling_set_data) - scheduling_set.tags = ["TEST", "UC1"] - scheduling_set.save() - - logger.info('created test scheduling_set: %s', scheduling_set.name) - - for unit_nr in range(5): - - # the 'template' in the strategy_template is a predefined json-data blob which validates against the given scheduling_unit_template - # a user might 'upload' a partial json-data blob, so add all the known defaults - scheduling_unit_spec = add_defaults_to_json_object_for_schema(strategy_template.template, strategy_template.scheduling_unit_template.schema) - - # add the scheduling_unit_doc to a new SchedulingUnitDraft instance, and were ready to use it! 
- scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(name="UC1 test scheduling unit %s.%s" % (set_nr+1, unit_nr+1), - scheduling_set=scheduling_set, - requirements_template=strategy_template.scheduling_unit_template, - requirements_doc=scheduling_unit_spec, - observation_strategy_template=strategy_template, - scheduling_constraints_doc=constraints_spec, - scheduling_constraints_template=constraints_template) - scheduling_unit_draft.tags = ["TEST", "UC1"] - scheduling_unit_draft.save() - - logger.info('created test scheduling_unit_draft: %s', scheduling_unit_draft.name) - - try: - if set_nr==0 and unit_nr==0: - scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) - scheduled_subtasks = models.Subtask.objects.filter(task_blueprint__scheduling_unit_blueprint=scheduling_unit_blueprint, task_blueprint__name='Calibrator Observation 1', specifications_template__type='observation').all() - for subtask in scheduled_subtasks: - schedule_subtask(subtask) - else: - create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) - - except TMSSException as e: - logger.exception(e) + uc1_strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines") + simple_strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="Simple Observation") + + projects = models.Project.objects.order_by('-priority_rank').all() + for tmss_project in projects: + if 'Commissioning' not in tmss_project.tags: + continue + + for scheduling_set in tmss_project.scheduling_sets.all(): + for unit_nr in range(2): + for strategy_template in [uc1_strategy_template, simple_strategy_template]: + # the 'template' in the strategy_template is a predefined json-data blob which validates against the given scheduling_unit_template + # a user might 'upload' a partial json-data blob, so add all the known defaults + scheduling_unit_spec = add_defaults_to_json_object_for_schema(strategy_template.template, strategy_template.scheduling_unit_template.schema) + + # limit target obs duration for demo data + if strategy_template == uc1_strategy_template: + scheduling_unit_spec['tasks']['Calibrator Observation 1']['specifications_doc']['duration'] = 2*60 + scheduling_unit_spec['tasks']['Target Observation']['specifications_doc']['duration'] = 2*3600 + scheduling_unit_spec['tasks']['Calibrator Observation 2']['specifications_doc']['duration'] = 2*60 + elif strategy_template == simple_strategy_template: + scheduling_unit_spec['tasks']['Observation']['specifications_doc']['duration'] = 5*60 + + # set some constraints, so the dynamic scheduler has something to chew on. + # DISABLED for now, because the 'daily' constraint solver is not ready yet. + # constraints_spec['daily']['require_day'] = unit_nr%2==0 + # constraints_spec['daily']['require_night'] = unit_nr%2==1 + # constraints_spec['daily']['avoid_twilight'] = unit_nr%4>1 + + # add the scheduling_unit_doc to a new SchedulingUnitDraft instance, and we're ready to use it!
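For context on `add_defaults_to_json_object_for_schema`, used in the hunk above: conceptually it fills a partial, user-supplied spec with the `default` values declared in the template's JSON schema. The sketch below illustrates that idea only; the helper name `add_schema_defaults` is hypothetical, and the real implementation lives in `lofar.common.json_utils` and handles more cases (such as `$ref` resolution).

```python
# Minimal, self-contained sketch of schema-default filling; illustration only.
def add_schema_defaults(doc: dict, schema: dict) -> dict:
    """Recursively fill missing properties in doc with the schema's defaults."""
    for prop, subschema in schema.get("properties", {}).items():
        if prop not in doc and "default" in subschema:
            doc[prop] = subschema["default"]
        if isinstance(doc.get(prop), dict):
            add_schema_defaults(doc[prop], subschema)
    return doc

partial_spec = {"scheduler": "manual"}
schema = {"properties": {"scheduler": {"type": "string", "default": "dynamic"},
                         "time": {"type": "object", "default": {}}}}
print(add_schema_defaults(partial_spec, schema))  # {'scheduler': 'manual', 'time': {}}
```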
+ scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(name="%s %s %0d" % ('UC1' if strategy_template==uc1_strategy_template else 'Obs', tmss_project.name, unit_nr+1), + scheduling_set=scheduling_set, + description="Test scheduling unit", + requirements_template=strategy_template.scheduling_unit_template, + requirements_doc=scheduling_unit_spec, + observation_strategy_template=strategy_template, + scheduling_constraints_doc=constraints_spec, + scheduling_constraints_template=constraints_template) + + logger.info('created test scheduling_unit_draft: %s', scheduling_unit_draft.name) + + try: + create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + except TMSSException as e: + logger.exception(e) except ImportError: pass @@ -136,14 +139,22 @@ def populate_cycles(apps, schema_editor): def populate_projects(apps, schema_editor): - tmss_project = models.Project.objects.create(name="TMSS-Commissioning", - description="Project for all TMSS tests and commissioning", - priority_rank=1.0, - can_trigger=False, - private_data=True, - expert=True, - filler=False) - tmss_project.cycles.set([models.Cycle.objects.get(name="Cycle 14")]) + from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingSet_test_data + + for name, rank in (("high", 3), ("normal", 2), ("low", 1)): + tmss_project = models.Project.objects.create(name=name, + description="Project for all TMSS tests and commissioning (%s priority)" % (name,), + priority_rank=rank, + can_trigger=False, + private_data=True, + expert=True, + filler=False) + tmss_project.tags = ["Commissioning"] + tmss_project.cycles.set([models.Cycle.objects.get(name="Cycle 14")]) + tmss_project.save() + + # for convenience, create a schedulingset for each project + models.SchedulingSet.objects.create(**SchedulingSet_test_data(name="Test Scheduling Set", project=tmss_project)) def populate_resources(apps, schema_editor): diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/common_schema_template-datetime-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/common_schema_template-datetime-1.json new file mode 100644 index 0000000000000000000000000000000000000000..d12982a89ed3b79fc306a4a26c2c667f60662e6a --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/schemas/common_schema_template-datetime-1.json @@ -0,0 +1,33 @@ +{ + "$id": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1#", + "$schema": "http://json-schema.org/draft-06/schema#", + "title": "datetime", + "description": "This schema defines datetime objects like timestamp and timewindow.", + "version": 1, + "type": "object", + "definitions": { + "timestamp": { + "description": "A timestamp defined in UTC", + "type": "string", + "pattern": "\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d(\\.\\d+)?Z?", + "format": "date-time" + }, + "timewindow": { + "type": "object", + "description": "A timewindow interval: [from, to)", + "properties": { + "from": { + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timestamp" + }, + "to": { + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timestamp" + } + }, + "additionalProperties": false, + "required": [ + "from", + "to" + ] + } + } +} \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json index 77a916705c8df50c069f5929e11fc03d5586acf7..9caf086d923d583720925e44d47dfbc255f95885 100644 --- 
a/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json +++ b/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json @@ -5,29 +5,6 @@ "description": "This schema defines the scheduling constraints for a scheduling unit", "version": 1, "definitions": { - "timestamp": { - "description": "A timestamp defined in UTC", - "type": "string", - "pattern": "\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d(\\.\\d+)?Z", - "format": "date-time" - }, - "timewindow": { - "type": "object", - "description": "A timewindow interval: [from, to)", - "properties": { - "from": { - "$ref": "#/definitions/timestamp" - }, - "to": { - "$ref": "#/definitions/timestamp" - } - }, - "additionalProperties": false, - "required": [ - "from", - "to" - ] - }, "distance_on_sky": { "type": "number", "minimum": 0, @@ -40,38 +17,39 @@ } }, "type": "object", + "default": {}, "properties": { "scheduler": { "name": "Scheduler", - "description": "Which scheduling system will schedule this", + "description": "Schedule manually at the 'time.at' moment, of dynamically taking all time constraints into consideration.", "type": "string", "enum": [ "manual", - "online" + "dynamic" ], - "default": "online" + "default": "dynamic" }, "time": { "type": "object", "default": {}, "properties": { "at": { - "description": "Start at this moment", - "$ref": "#/definitions/timestamp" + "description": "Start at this moment. Requires 'scheduler' to be set to 'manual'.", + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timestamp" }, "after": { "description": "Start after this moment", - "$ref": "#/definitions/timestamp" + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timestamp" }, "before": { "description": "End before this moment", - "$ref": "#/definitions/timestamp" + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timestamp" }, "between": { "description": "Run within one of these time windows", "type": "array", "items": { - "$ref": "#/definitions/timewindow" + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timewindow" }, "minItems":0, "uniqueItems":true, @@ -81,7 +59,7 @@ "description": "Do NOT run within any of these time windows", "type": "array", "items": { - "$ref": "#/definitions/timewindow" + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timewindow" }, "minItems":0, "uniqueItems":true, @@ -129,6 +107,7 @@ "transit_offset": { "description": "Offset window to LST centering", "type": "object", + "default": {}, "properties": { "from": { "type": "number", @@ -145,6 +124,7 @@ }, "min_distance": { "type": "object", + "default": {}, "properties": { "sun": { "$ref": "#/definitions/distance_on_sky", diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/simple-observation-scheduling-unit-observation-strategy.json b/SAS/TMSS/src/tmss/tmssapp/schemas/simple-observation-scheduling-unit-observation-strategy.json new file mode 100644 index 0000000000000000000000000000000000000000..cfa908a68e642538b03c398a65e6d752f2e81db2 --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/schemas/simple-observation-scheduling-unit-observation-strategy.json @@ -0,0 +1,74 @@ +{ + "tasks": { + "Observation": { + "description": "A simple short test observation", + "tags": [], + "specifications_doc": { + "QA": { + "plots": { + "enabled": true, + "autocorrelation": true, + "crosscorrelation": true + }, + "file_conversion": { + 
"enabled": true, + "nr_of_subbands": -1, + "nr_of_timestamps": 256 + } + }, + "duration": 600, + "correlator": { + "storage_cluster": "CEP4", + "integration_time": 1, + "channels_per_subband": 64 + }, + "antenna_set": "HBA_DUAL_INNER", + "filter": "HBA_110_190", + "stations": ["CS001"], + "tile_beam": { + "direction_type": "J2000", + "angle1": 0.42, + "angle2": 0.43, + "angle3": 0.44 + }, + "SAPs": [ + { + "name": "target0", + "digital_pointing": { + "direction_type": "J2000", + "angle1": 0.42, + "angle2": 0.43, + "angle3": 0.44 + }, + "subbands": [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15] + } + ] + }, + "specifications_template": "target observation" + } + }, + "task_relations": [ + ], + "task_scheduling_relations": [ + ], + "parameters": [ + { + "refs": [ + "#/tasks/Observation/specifications_doc/duration" + ], + "name": "Duration" + }, + { + "refs": [ + "#/tasks/Observation/specifications_doc/SAPs/0/digital_pointing" + ], + "name": "Target Pointing" + }, + { + "refs": [ + "#/tasks/Observation/specifications_doc/tile_beam" + ], + "name": "Tile Beam" + } + ] +} \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json b/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json index 27f52ee1913374218f741e80cd33a3ac96a84e06..6e1d2c710101efe1a396935340fcdee899fe3ded 100644 --- a/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json +++ b/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json @@ -1,4 +1,8 @@ [ + { + "file_name": "common_schema_template-datetime-1.json", + "template": "common_schema_template" + }, { "file_name": "common_schema_template-pointing-1.json", "template": "common_schema_template" @@ -96,6 +100,10 @@ "realtime": true, "queue": false }, + { + "file_name": "scheduling_constraints_template-constraints-1.json", + "template": "scheduling_constraints_template" + }, { "file_name": "UC1-scheduling-unit-observation-strategy.json", "template": "scheduling_unit_observing_strategy_template", @@ -106,8 +114,13 @@ "version": 1 }, { - "file_name": "scheduling_constraints_template-constraints-1.json", - "template": "scheduling_constraints_template" + "file_name": "simple-observation-scheduling-unit-observation-strategy.json", + "template": "scheduling_unit_observing_strategy_template", + "scheduling_unit_template_name": "scheduling unit", + "scheduling_unit_template_version": "1", + "name": "Simple Observation", + "description": "This observation strategy template defines a single simple Target observation.", + "version": 1 }, { "file_name": "sap_template-1.json", diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py index 85d7bd21c54ca2ad78badd911131847c11fb3375..e70f7585074cf5c87edce6ae0c8d10f7475d712e 100644 --- a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py @@ -40,12 +40,6 @@ class AlgorithmSerializer(RelationalHyperlinkedModelSerializer): fields = '__all__' -class ScheduleMethodSerializer(RelationalHyperlinkedModelSerializer): - class Meta: - model = models.ScheduleMethod - fields = '__all__' - - class SubtaskTemplateSerializer(AbstractTemplateSerializer): class Meta: model = models.SubtaskTemplate diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py index 279d0ae76212c863bdbb69146dc0cebe9c375612..0c215aa57d1915e0660bd31572775bd3992d00d9 100644 --- a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py @@ 
-259,7 +259,7 @@ class FlagSerializer(RelationalHyperlinkedModelSerializer): fields = '__all__' -class SettingSerializer(RelationalHyperlinkedModelSerializer): +class SettingSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = models.Setting fields = '__all__' @@ -334,7 +334,7 @@ class TaskDraftSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.TaskDraft fields = '__all__' - extra_fields = ['task_blueprints', 'produced_by', 'consumed_by', 'first_to_connect', 'second_to_connect', 'duration', 'relative_start_time', 'relative_stop_time'] + extra_fields = ['task_blueprints', 'produced_by', 'consumed_by', 'first_scheduling_relation', 'second_scheduling_relation', 'duration', 'relative_start_time', 'relative_stop_time'] class TaskBlueprintSerializer(RelationalHyperlinkedModelSerializer): @@ -347,7 +347,7 @@ class TaskBlueprintSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.TaskBlueprint fields = '__all__' - extra_fields = ['subtasks', 'produced_by', 'consumed_by', 'first_to_connect', 'second_to_connect', 'duration', + extra_fields = ['subtasks', 'produced_by', 'consumed_by', 'first_scheduling_relation', 'second_scheduling_relation', 'duration', 'start_time', 'stop_time', 'relative_start_time', 'relative_stop_time', 'status'] diff --git a/SAS/TMSS/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/src/tmss/tmssapp/subtasks.py index d200d964073c2f1786dba6e8dadb53bfdc2be3e2..2c0a5a50dbce25b18ba327c0872c7fa31cc7ea56 100644 --- a/SAS/TMSS/src/tmss/tmssapp/subtasks.py +++ b/SAS/TMSS/src/tmss/tmssapp/subtasks.py @@ -249,8 +249,6 @@ def create_observation_control_subtask_from_task_blueprint(task_blueprint: TaskB "task_blueprint": task_blueprint, "specifications_template": subtask_template, "tags": [], - "priority": 1, - "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), "cluster": Cluster.objects.get(name=cluster_name) } subtask = Subtask.objects.create(**subtask_data) @@ -312,8 +310,6 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask) "task_blueprint": observation_subtask.task_blueprint, "specifications_template": qafile_subtask_template, "specifications_doc": qafile_subtask_spec, - "priority": 1, - "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), "cluster": observation_subtask.cluster} qafile_subtask = Subtask.objects.create(**qafile_subtask_data) @@ -335,7 +331,6 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask) def create_qaplots_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> Subtask: - qafile_subtasks = [st for st in task_blueprint.subtasks.all() if st.specifications_template.type.value == SubtaskType.Choices.QA_FILES.value] if qafile_subtasks: qafile_subtask = qafile_subtasks[0] # TODO: decide what to do when there are multiple qafile subtasks? 
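For context on the removals above: with the per-subtask `priority` and `schedule_method` fields gone, the manual-versus-dynamic choice is now expressed once per scheduling unit in its constraints doc, following the updated `scheduling_constraints_template` shown earlier. The docs below are illustrative examples composed from that template (not taken from the patch); actual docs are validated against the template's JSON schema.

```python
# Illustrative constraints docs; field names follow the updated template.
manual_constraints = {
    "scheduler": "manual",                   # replaces the old subtask.schedule_method
    "time": {"at": "2020-01-01T12:00:00Z"},  # 'at' requires scheduler == 'manual'
}
dynamic_constraints = {
    "scheduler": "dynamic",                  # the dynamic scheduler picks a start time
    "time": {"between": [{"from": "2020-01-01T00:00:00Z",
                          "to": "2020-01-02T00:00:00Z"}]},
}
```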
@@ -377,8 +372,6 @@ def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subta "task_blueprint": qafile_subtask.task_blueprint, "specifications_template": qaplots_subtask_template, "specifications_doc": qaplots_subtask_spec_doc, - "priority": 1, - "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), "cluster": qafile_subtask.cluster} qaplots_subtask = Subtask.objects.create(**qaplots_subtask_data) @@ -424,8 +417,6 @@ def create_preprocessing_subtask_from_task_blueprint(task_blueprint: TaskBluepri "task_blueprint": task_blueprint, "specifications_template": subtask_template, "specifications_doc": subtask_specs, - "priority": 1, - "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), "cluster": Cluster.objects.get(name=cluster_name) } subtask = Subtask.objects.create(**subtask_data) @@ -470,8 +461,6 @@ def create_ingest_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> "task_blueprint": task_blueprint, "specifications_template": subtask_template, "specifications_doc": subtask_specs, - "priority": 1, - "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), "cluster": Cluster.objects.get(name=cluster_name)} subtask = Subtask.objects.create(**subtask_data) @@ -519,6 +508,35 @@ def schedule_subtask(subtask: Subtask) -> Subtask: raise SubtaskSchedulingException("Cannot schedule subtask id=%d because there is no schedule-method known for this subtasktype=%s." % (subtask.pk, subtask.specifications_template.type.value)) + except Exception as e: + try: + # set the subtask to state 'ERROR'... + subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.ERROR.value) + subtask.save() + except Exception as e2: + logger.error(e2) + finally: + # ... and re-raise the original exception (wrapped) + raise SubtaskSchedulingException("Error while scheduling subtask id=%d: %s" % (subtask.pk, str(e))) + + +def unschedule_subtask(subtask: Subtask) -> Subtask: + '''unschedule the given subtask, removing all output dataproducts, and setting its state back to 'defined'.''' + if subtask.state.value != SubtaskState.Choices.SCHEDULED.value: + raise SubtaskSchedulingException("Cannot unschedule subtask id=%d because it is not SCHEDULED. Current state=%s" % (subtask.pk, subtask.state.value)) + + try: + subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.UNSCHEDULING.value) + subtask.save() + + for output in subtask.outputs.all(): + output.dataproducts.all().delete() + #TODO: delete dataproduct transforms + + _assign_or_unassign_resources(subtask) + + subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) + subtask.save() except Exception as e: try: # set the subtask to state 'ERROR'... @@ -530,6 +548,62 @@ def schedule_subtask(subtask: Subtask) -> Subtask: # ... 
and re-raise the original exception raise +def unschedule_subtasks_in_task_blueprint(task_blueprint: TaskBlueprint): + '''Convenience method: unschedule all scheduled subtasks in the task_blueprint''' + scheduled_subtasks = list(task_blueprint.subtasks.filter(state__value=SubtaskState.Choices.SCHEDULED.value).all()) + for subtask in scheduled_subtasks: + unschedule_subtask(subtask) + + +def schedule_subtask_and_update_successor_start_times(subtask: Subtask) -> Subtask: + scheduled_subtask = schedule_subtask(subtask) + shift_successors_until_after_stop_time(scheduled_subtask) + return scheduled_subtask + + +def update_subtasks_start_times_for_scheduling_unit(scheduling_unit: SchedulingUnitBlueprint, start_time: datetime): + for task_blueprint in scheduling_unit.task_blueprints.all(): + defined_independent_subtasks = task_blueprint.subtasks.filter(state__value='defined').filter(inputs=None).all() + for subtask in defined_independent_subtasks: + update_start_time_and_shift_successors_until_after_stop_time(subtask, start_time + subtask.task_blueprint.relative_start_time) + + +def update_start_time_and_shift_successors_until_after_stop_time(subtask: Subtask, start_time: datetime): + subtask.start_time = start_time + subtask.stop_time = subtask.start_time + subtask.specified_duration + subtask.save() + + shift_successors_until_after_stop_time(subtask) + + +def shift_successors_until_after_stop_time(subtask: Subtask): + for successor in subtask.successors: + # by default, let the successor directly follow this task... + successor_start_time = subtask.stop_time + + # ... but adjust it if there is a scheduling_relation with an offset. + # so, check if these successive subtasks have different task_blueprint parents + if subtask.task_blueprint.id != successor.task_blueprint.id: + relations = (TaskSchedulingRelationBlueprint.objects.filter(first=subtask.task_blueprint, second=successor.task_blueprint) | + TaskSchedulingRelationBlueprint.objects.filter(first=successor.task_blueprint, second=subtask.task_blueprint)).all() + if relations: + # there should be only one scheduling relation between the tasks + relation = relations[0] + successor_start_time += timedelta(seconds=relation.time_offset) + + # update the start time and recurse to shift the successor's successors as well + update_start_time_and_shift_successors_until_after_stop_time(successor, successor_start_time) + + +def clear_defined_subtasks_start_stop_times_for_scheduling_unit(scheduling_unit: SchedulingUnitBlueprint): + '''set start/stop times of all the subtasks in the scheduling unit to None''' + for task_blueprint in scheduling_unit.task_blueprints.all(): + defined_subtasks = task_blueprint.subtasks.filter(state__value='defined').all() + for subtask in defined_subtasks: + subtask.start_time = None + subtask.stop_time = None + subtask.save() + def check_prerequities_for_scheduling(subtask: Subtask) -> bool: if subtask.state.value != SubtaskState.Choices.DEFINED.value: @@ -540,17 +614,11 @@ def check_prerequities_for_scheduling(subtask: Subtask) -> bool: raise SubtaskSchedulingException("Cannot schedule subtask id=%d because its predecessor id=%s is not FINISHED but state=%s" % (subtask.pk, predecessor.pk, predecessor.state.value)) - # check if settings allow scheduling observations - setting = Setting.objects.get(name='allow_scheduling_observations') - if not setting.value: - raise SubtaskSchedulingException("Cannot schedule subtask id=%d because setting %s=%s does not allow that."
% - (subtask.pk, setting.name, setting.value)) - return True -def _assign_resources(subtask: Subtask): - if subtask.state.value != SubtaskState.Choices.SCHEDULING.value: - raise SubtaskSchedulingException("Cannot assign resources for subtask id=%d because it is not in SCHEDULING state. " +def _assign_or_unassign_resources(subtask: Subtask): + if subtask.state.value not in [SubtaskState.Choices.SCHEDULING.value, SubtaskState.Choices.UNSCHEDULING.value]: + raise SubtaskSchedulingException("Cannot assign resources for subtask id=%d because it is not in (UN)SCHEDULING state. " "Current state=%s" % (subtask.pk, subtask.state.value)) def create_ra_specification(_subtask): @@ -558,7 +626,7 @@ def _assign_resources(subtask: Subtask): return { 'tmss_id': _subtask.id, 'task_type': _subtask.specifications_template.type.value.lower(), 'task_subtype': parset_dict.get("Observation.processSubtype","").lower(), - 'status': 'prescheduled', + 'status': 'prescheduled' if subtask.state.value == SubtaskState.Choices.SCHEDULING.value else 'approved', 'starttime': _subtask.start_time, 'endtime': _subtask.stop_time, 'cluster': _subtask.cluster.name, @@ -696,7 +764,7 @@ def get_previous_related_task_blueprint_with_time_offset(task_blueprint): previous_related_task_blueprint = None time_offset = 0 - scheduling_relations = list(task_blueprint.first_to_connect.all()) + list(task_blueprint.second_to_connect.all()) + scheduling_relations = list(task_blueprint.first_scheduling_relation.all()) + list(task_blueprint.second_scheduling_relation.all()) for scheduling_relation in scheduling_relations: if scheduling_relation.first.id == task_blueprint.id and scheduling_relation.placement.value == "after": previous_related_task_blueprint = TaskBlueprint.objects.get(id=scheduling_relation.second.id) @@ -708,38 +776,6 @@ def get_previous_related_task_blueprint_with_time_offset(task_blueprint): return previous_related_task_blueprint, time_offset -# todo: maybe this can now be replaced by subtask.relative_start_time -def calculate_start_time(observation_subtask: Subtask): - """ - Calculate the start time of an observation subtask. It should calculate the starttime in case of 'C-T-C train' - The start time of an observation depends on the start_time+duration and offset time of the previous observation - and so its scheduling relations should be known. - If there is no previous observation the 'default' start time is in two minutes from now - For demo purposes, will be changed into dynamic scheduled in the future - Note that the method is not robust now when previous start time is unknown. 
Also parallel observations are - not supported yet - :param observation_subtask: - :return: start_time (utc time) - """ - previous_related_task_blueprint, time_offset = get_previous_related_task_blueprint_with_time_offset(observation_subtask.task_blueprint) - if previous_related_task_blueprint is None: - # This is the first observation so take start time 2 minutes from now - now = datetime.utcnow() - next_start_time = now + timedelta(minutes=+2, seconds=-now.second, microseconds=-now.microsecond) - else: - # Get the duration of last/previous observation - duration_in_sec = previous_related_task_blueprint.specifications_doc["duration"] - logger.info("Duration of previous observation '%s' (id=%s) is %d seconds", - previous_related_task_blueprint.pk, previous_related_task_blueprint.pk, duration_in_sec) - # Get the previous observation subtask, should actually be one - lst_previous_subtasks_obs = [st for st in previous_related_task_blueprint.subtasks.all() if st.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value] - previous_subtask_obs = lst_previous_subtasks_obs[0] - logger.info("The previous observation subtask is id=%s", previous_subtask_obs.pk) - if previous_subtask_obs.start_time is None: - raise SubtaskSchedulingException("Cannot compute start_time for subtask id=%s because the its predecessor id=%s has not start_time" %(observation_subtask.id, previous_subtask_obs.id)) - next_start_time = previous_subtask_obs.start_time + timedelta(seconds=duration_in_sec+time_offset) - return next_start_time - def schedule_observation_subtask(observation_subtask: Subtask): ''' Schedule the given observation_subtask @@ -761,17 +797,18 @@ def schedule_observation_subtask(observation_subtask: Subtask): observation_subtask.save() # step 1a: check start/stop times + # start time should be known. If not raise. Then the user and/or scheduling service should supply a properly calculated/estimated start_time first. if observation_subtask.start_time is None: - next_start_time = calculate_start_time(observation_subtask) - logger.info("observation id=%s has no starttime. assigned default: %s", observation_subtask.pk, formatDatetime(next_start_time)) - observation_subtask.start_time = next_start_time + raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s because it has no start_time" % (observation_subtask.pk, + observation_subtask.specifications_template.type)) - if observation_subtask.stop_time is None: - duration_in_sec = observation_subtask.task_blueprint.specifications_doc["duration"] - logger.info("Duration of observation id=%s is %d seconds", observation_subtask.pk, duration_in_sec) - stop_time = observation_subtask.start_time + timedelta(seconds=duration_in_sec) - logger.info("observation id=%s has no stop_time. assigned default: %s", observation_subtask.pk, formatDatetime(stop_time)) - observation_subtask.stop_time = stop_time + if observation_subtask.specified_duration < timedelta(seconds=1): + raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s because its specified duration is too short: %s" % (observation_subtask.pk, + observation_subtask.specifications_template.type, + observation_subtask.specified_duration)) + + # always update the stop_time according to the spec + observation_subtask.stop_time = observation_subtask.start_time + observation_subtask.specified_duration # step 2: define input dataproducts # TODO: are there any observations that take input dataproducts? 
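With `calculate_start_time` removed, the scheduling code no longer guesses: an observation subtask without a `start_time` now raises, and `stop_time` is always recomputed from `specified_duration`. A hedged sketch of how a caller (e.g. a scheduling service) might drive this with the helpers introduced earlier in this patch; `scheduling_unit` is assumed to be an existing `SchedulingUnitBlueprint`, and the two-minute offset is an arbitrary example:

```python
from datetime import datetime, timedelta
from lofar.sas.tmss.tmss.tmssapp.models import Subtask
from lofar.sas.tmss.tmss.tmssapp.subtasks import (
    update_subtasks_start_times_for_scheduling_unit,
    schedule_subtask_and_update_successor_start_times)

# give all defined, independent subtasks a start time; their successors are
# shifted along via shift_successors_until_after_stop_time (see above)
start_time = datetime.utcnow() + timedelta(minutes=2)
update_subtasks_start_times_for_scheduling_unit(scheduling_unit, start_time)

# only now can the observation subtasks be scheduled without raising
for subtask in Subtask.independent_subtasks().filter(task_blueprint__scheduling_unit_blueprint=scheduling_unit):
    schedule_subtask_and_update_successor_start_times(subtask)
```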
@@ -802,23 +839,22 @@ def schedule_observation_subtask(observation_subtask: Subtask):
                                     }
                                 },
                                 specifications_template=SAPTemplate.objects.get(name="SAP"))
-        sap.save()
-        for sb_nr in pointing['subbands']:
-            dp = Dataproduct.objects.create(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr),
-                                            directory=directory,
-                                            dataformat=Dataformat.objects.get(value="MeasurementSet"),
-                                            datatype=Datatype.objects.get(value="visibilities"),  # todo: is this correct?
-                                            producer=subtask_output,
-                                            specifications_doc={"sap": [str(sap_nr)]},  # todo: set correct value. This will be provided by the RA somehow
-                                            specifications_template=dataproduct_specifications_template,
-                                            feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
-                                            feedback_template=dataproduct_feedback_template,
-                                            size=0 if sb_nr%10==0 else 1024*1024*1024*sb_nr,
-                                            expected_size=1024*1024*1024*sb_nr,
-                                            sap=sap)
-            dp.save()
+
+        Dataproduct.objects.bulk_create([Dataproduct(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr),
+                                                     directory=directory,
+                                                     dataformat=Dataformat.objects.get(value="MeasurementSet"),
+                                                     datatype=Datatype.objects.get(value="visibilities"),
+                                                     producer=subtask_output,
+                                                     specifications_doc={"sap": [str(sap_nr)]},
+                                                     specifications_template=dataproduct_specifications_template,
+                                                     feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
+                                                     feedback_template=dataproduct_feedback_template,
+                                                     size=0 if sb_nr%10==0 else 1024*1024*1024*sb_nr,
+                                                     expected_size=1024*1024*1024*sb_nr,
+                                                     sap=sap) for sb_nr in pointing['subbands']])
+
     # step 4: resource assigner (if possible)
-    _assign_resources(observation_subtask)
+    _assign_or_unassign_resources(observation_subtask)
 
     # TODO: TMSS-382: evaluate the scheduled stations and see if the requirements given in the subtask.task_blueprint.specifications_doc are met for the station_groups and max_nr_missing.
@@ -854,10 +890,13 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask):
         logger.info("pipeline id=%s has no starttime. assigned default: %s", pipeline_subtask.pk, formatDatetime(now))
         pipeline_subtask.start_time = now
 
-    if pipeline_subtask.stop_time is None:
-        stop_time = pipeline_subtask.start_time + timedelta(hours=+1)
-        logger.info("pipeline id=%s has no stop_time.
assigned default: %s", pipeline_subtask.pk, formatDatetime(stop_time)) - pipeline_subtask.stop_time = stop_time + if pipeline_subtask.specified_duration < timedelta(seconds=1): + raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s because its specified duration is too short: %s" % (pipeline_subtask.pk, + pipeline_subtask.specifications_template.type, + pipeline_subtask.specified_duration)) + + # always update the stop_time according to the spec + pipeline_subtask.stop_time = pipeline_subtask.start_time + pipeline_subtask.specified_duration # step 2: link input dataproducts if pipeline_subtask.inputs.count() == 0: @@ -881,31 +920,35 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask): # step 3: create output dataproducts, and link these to the output # TODO: create them from the spec, instead of "copying" the input filename - output_dps = [] + dataformat = Dataformat.objects.get(value="MeasurementSet") + input_dps = list(pipeline_subtask_input.dataproducts.all()) + output_dp_objects = [] for input_dp in pipeline_subtask_input.dataproducts.all(): if '_' in input_dp.filename and input_dp.filename.startswith('L'): filename = "L%s_%s" % (pipeline_subtask.pk, input_dp.filename.split('_', 1)[1]) else: filename = "L%s_%s" % (pipeline_subtask.pk, input_dp.filename) - output_dp = Dataproduct.objects.create(filename=filename, - directory=input_dp.directory.replace(str(pipeline_subtask_input.producer.subtask.pk), str(pipeline_subtask.pk)), - dataformat=Dataformat.objects.get(value="MeasurementSet"), - datatype=Datatype.objects.get(value="visibilities"), # todo: is this correct? - producer=pipeline_subtask_output, - specifications_doc={}, - specifications_template=dataproduct_specifications_template, - feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema), - feedback_template=dataproduct_feedback_template, - sap=input_dp.sap) - output_dp.save() - DataproductTransform.objects.create(input=input_dp, output=output_dp, identity=False) - output_dps.append(output_dp) - + output_dp = Dataproduct(filename=filename, + directory=input_dp.directory.replace(str(pipeline_subtask_input.producer.subtask.pk), str(pipeline_subtask.pk)), + dataformat=dataformat, + datatype=Datatype.objects.get(value="visibilities"), # todo: is this correct? 
+                                specifications_doc=get_default_json_object_for_schema(dataproduct_specifications_template.schema),
+                                specifications_template=dataproduct_specifications_template,
+                                feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema),
+                                feedback_template=dataproduct_feedback_template,
+                                sap=input_dp.sap)
+        output_dp_objects.append(output_dp)
+
+    output_dps = Dataproduct.objects.bulk_create(output_dp_objects)
     pipeline_subtask_output.dataproducts.set(output_dps)
+    transforms = [DataproductTransform(input=input_dp, output=output_dp, identity=False) for input_dp,output_dp in zip(input_dps, output_dps)]
+    DataproductTransform.objects.bulk_create(transforms)
+
     # step 4: resource assigner (if possible)
-    _assign_resources(pipeline_subtask)
+    _assign_or_unassign_resources(pipeline_subtask)
 
     # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it)
     pipeline_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value)
@@ -973,14 +1016,17 @@ def create_and_schedule_subtasks_from_task_blueprint(task_blueprint: TaskBluepri
     create_subtasks_from_task_blueprint(task_blueprint)
     return schedule_independent_subtasks_in_task_blueprint(task_blueprint)
 
-def schedule_independent_subtasks_in_task_blueprint(task_blueprint: TaskBlueprint) -> [Subtask]:
-    '''Convenience method: Schedule the subtasks in the task_blueprint that are not dependend on predecessors'''
-    subtasks = list(task_blueprint.subtasks.all())
-    for subtask in subtasks:
-        if len(subtask.predecessors.all()) == len(subtask.predecessors.filter(state__value='finished').all()):
-            schedule_subtask(subtask)
-    return subtasks
+def schedule_independent_subtasks_in_task_blueprint(task_blueprint: TaskBlueprint, start_time: datetime=None) -> [Subtask]:
+    '''Convenience method: Schedule (and return) the subtasks in the task_blueprint that are not dependent on any predecessors'''
+    independent_subtasks = list(Subtask.independent_subtasks().filter(task_blueprint_id=task_blueprint.id, state__value=SubtaskState.Choices.DEFINED.value).all())
+
+    for subtask in independent_subtasks:
+        if start_time is not None:
+            subtask.start_time = start_time
+        schedule_subtask_and_update_successor_start_times(subtask)
+
+    return independent_subtasks
 
 def _generate_subtask_specs_from_preprocessing_task_specs(preprocessing_task_specs, default_subtask_specs):
diff --git a/SAS/TMSS/src/tmss/tmssapp/tasks.py b/SAS/TMSS/src/tmss/tmssapp/tasks.py
index 7f7ad51bd577466945b176d334aaeda4feffa880..987f89153e14aa5f91c90993cb00ee70b780dd79 100644
--- a/SAS/TMSS/src/tmss/tmssapp/tasks.py
+++ b/SAS/TMSS/src/tmss/tmssapp/tasks.py
@@ -1,12 +1,14 @@
 from lofar.sas.tmss.tmss.exceptions import *
 from lofar.sas.tmss.tmss.tmssapp import models
-from lofar.sas.tmss.tmss.tmssapp.subtasks import create_and_schedule_subtasks_from_task_blueprint, create_subtasks_from_task_blueprint, schedule_independent_subtasks_in_task_blueprint
-from lofar.sas.tmss.tmss.tmssapp.models.specification import TaskBlueprint, SchedulingUnitBlueprint, SchedulingUnitDraft, TaskDraft, SchedulingRelationPlacement
+from lofar.sas.tmss.tmss.tmssapp.subtasks import unschedule_subtasks_in_task_blueprint
+from lofar.sas.tmss.tmss.tmssapp.models.specification import TaskBlueprint, SchedulingUnitBlueprint
 from lofar.sas.tmss.tmss.tmssapp.subtasks import create_and_schedule_subtasks_from_task_blueprint, create_subtasks_from_task_blueprint, schedule_independent_subtasks_in_task_blueprint
 from functools import cmp_to_key
+import
os from copy import deepcopy from lofar.common.json_utils import add_defaults_to_json_object_for_schema import logging +from datetime import datetime logger = logging.getLogger(__name__) @@ -18,8 +20,8 @@ def create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_ logger.debug("create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft.id=%s name='%s')", scheduling_unit_draft.pk, scheduling_unit_draft.name) scheduling_unit_blueprint = SchedulingUnitBlueprint.objects.create( - name="%s (SchedulingUnitBlueprint)" % (scheduling_unit_draft.name,), - description="%s (SchedulingUnitBlueprint)" % (scheduling_unit_draft.description or "<no description>",), + name=scheduling_unit_draft.name, + description=scheduling_unit_draft.description, requirements_doc=scheduling_unit_draft.requirements_doc, do_cancel=False, draft=scheduling_unit_draft, @@ -90,7 +92,7 @@ def create_scheduling_unit_draft_from_scheduling_unit_blueprint(scheduling_unit_ def copy_task_draft(task_draft: models.TaskDraft, copy_reason: str) -> models.TaskDraft: - + task_template_name = task_draft.specifications_template task_template = models.TaskTemplate.objects.get(name=task_template_name) @@ -114,7 +116,7 @@ def copy_task_blueprint_to_task_draft(task_blueprint:models.TaskBlueprint ) -> m :raises Exception if instantiate fails. """ logger.debug("Create Task Draft from Task Blueprint (id=%s)", task_blueprint.pk) - + original_task_draft = task_blueprint.draft task_template_name = original_task_draft.specifications_template task_template = models.TaskTemplate.objects.get(name=task_template_name) @@ -276,7 +278,7 @@ def create_task_blueprint_from_task_draft(task_draft: models.TaskDraft) -> model task_relation_blueprint.pk, producing_task_blueprint.pk, consuming_task_blueprint.pk,) # Do the same 'trick' for Task Scheduling Relation Draft to Blueprint - task_draft_scheduling_relations = list(task_draft.first_to_connect.all()) + list(task_draft.second_to_connect.all()) + task_draft_scheduling_relations = list(task_draft.first_scheduling_relation.all()) + list(task_draft.second_scheduling_relation.all()) for task_scheduling_relation_draft in task_draft_scheduling_relations: for first_task_blueprint in task_scheduling_relation_draft.first.task_blueprints.all(): for second_task_blueprint in task_scheduling_relation_draft.second.task_blueprints.all(): @@ -360,21 +362,35 @@ def create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(schedulin # refresh so all related fields are updated. 
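+    # (this instance was fetched before the task_blueprints and subtasks above were created;
+    # refresh_from_db() re-reads it, so the reverse relations reflect the database state)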
     scheduling_unit_blueprint.refresh_from_db()
+    return scheduling_unit_blueprint
 
 def create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint:
     '''Convenience method: Create the scheduling_unit_blueprint's task_blueprint(s), then create the task_blueprint's subtasks, and schedule the ones that are not dependend on predecessors'''
     scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint)
+    scheduling_unit_blueprint = schedule_independent_subtasks_in_scheduling_unit_blueprint(scheduling_unit_blueprint)
+    return scheduling_unit_blueprint
+
+
+def schedule_independent_subtasks_in_scheduling_unit_blueprint(scheduling_unit_blueprint: SchedulingUnitBlueprint, start_time: datetime=None) -> models.SchedulingUnitBlueprint:
+    '''Convenience method: Schedule the subtasks in the scheduling_unit_blueprint that are not dependent on predecessors'''
     task_blueprints = list(scheduling_unit_blueprint.task_blueprints.all())
 
-    # sort task_blueprint(s) in 'data-flow'-order,
-    # because successors can depend on predecessors, so the first tbp's need to be subtask'd first.
-    task_blueprints.sort(key=cmp_to_key(lambda tbp_a, tbp_b: -1 if tbp_a in tbp_b.predecessors else 1 if tbp_b in tbp_a.predecessors else 0))
+    for task_blueprint in task_blueprints:
+        # guard against a None start_time; the relative offset only applies when an absolute start is given
+        schedule_independent_subtasks_in_task_blueprint(task_blueprint, start_time=None if start_time is None else start_time+task_blueprint.relative_start_time)
+
+    scheduling_unit_blueprint.refresh_from_db()
+    return scheduling_unit_blueprint
+
+
+def unschedule_subtasks_in_scheduling_unit_blueprint(scheduling_unit_blueprint: SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint:
+    '''Convenience method: Unschedule all scheduled subtasks in the scheduling_unit_blueprint'''
+    task_blueprints = list(scheduling_unit_blueprint.task_blueprints.all())
     for task_blueprint in task_blueprints:
-        schedule_independent_subtasks_in_task_blueprint(task_blueprint)
+        unschedule_subtasks_in_task_blueprint(task_blueprint)
 
-    # refresh so all related fields are updated.
scheduling_unit_blueprint.refresh_from_db() return scheduling_unit_blueprint + diff --git a/SAS/TMSS/src/tmss/tmssapp/views.py b/SAS/TMSS/src/tmss/tmssapp/views.py index a7e0997c65548acac5fc1be42a0f024ec969b142..038d861cecf0276a2c8facc9d3c6449e1b8ac620 100644 --- a/SAS/TMSS/src/tmss/tmssapp/views.py +++ b/SAS/TMSS/src/tmss/tmssapp/views.py @@ -12,9 +12,12 @@ from rest_framework.permissions import AllowAny from rest_framework.decorators import authentication_classes, permission_classes from django.apps import apps +from rest_framework.decorators import api_view from datetime import datetime import dateutil.parser -from lofar.sas.tmss.tmss.tmssapp.conversions import local_sidereal_time_for_utc_and_station, local_sidereal_time_for_utc_and_longitude, timestamps_and_stations_to_sun_rise_and_set +from lofar.sas.tmss.tmss.tmssapp.conversions import local_sidereal_time_for_utc_and_station, local_sidereal_time_for_utc_and_longitude, timestamps_and_stations_to_sun_rise_and_set, coordinates_and_timestamps_to_separation_from_bodies + +# Note: Decorate with @api_view to get this picked up by Swagger def subtask_template_default_specification(request, subtask_template_pk:int): subtask_template = get_object_or_404(models.SubtaskTemplate, pk=subtask_template_pk) @@ -46,9 +49,11 @@ def task_specify_observation(request, pk=None): # Allow everybody to GET our publicly available template-json-schema's @permission_classes([AllowAny]) @authentication_classes([AllowAny]) -@swagger_auto_schema(responses={200: 'Get the JSON schema from the template with the requested <template>, <name> and <version>', +@swagger_auto_schema(#method='GET', + responses={200: 'Get the JSON schema from the template with the requested <template>, <name> and <version>', 404: 'the schema with requested <template>, <name> and <version> is not available'}, operation_description="Get the JSON schema for the given <template> with the given <name> and <version> as application/json content response.") +#@api_view(['GET']) # todo: !! decorating this as api_view somehow breaks json ref resolution !! 
fix this and double url issue in urls.py, then use decorator here to include in Swagger def get_template_json_schema(request, template:str, name:str, version:str): template_model = apps.get_model("tmssapp", template) template_instance = get_object_or_404(template_model, name=name, version=version) @@ -65,9 +70,11 @@ def get_template_json_schema(request, template:str, name:str, version:str): # Allow everybody to GET our publicly available station group lookups @permission_classes([AllowAny]) @authentication_classes([AllowAny]) -@swagger_auto_schema(responses={200: 'A JSON object with two properties: group:<the_group_name>, stations:<the_list_of_stations>', +@swagger_auto_schema(#method='GET', + responses={200: 'A JSON object with two properties: group:<the_group_name>, stations:<the_list_of_stations>', 404: 'No such group or template available'}, operation_description="Get a JSON list of stations for the given <station_group> name the the group definitions in the common_schema_template given by <template_name> and <template_version>") +#@api_view(['GET']) # todo: fix double url issue in urls.py, then use decorator here to include in Swagger def get_stations_in_group(request, template_name:str, template_version:str, station_group:str): station_schema_template = get_object_or_404(models.CommonSchemaTemplate, name=template_name, version=template_version) station_schema = station_schema_template.schema @@ -88,22 +95,26 @@ def get_stations_in_group(request, template_name:str, template_version:str, stat @permission_classes([AllowAny]) @authentication_classes([AllowAny]) -@swagger_auto_schema(responses={200: 'An isoformat timestamp of the current UTC clock of the system'}, +@swagger_auto_schema(method='GET', + responses={200: 'An isoformat timestamp of the current UTC clock of the system'}, operation_description="Get the current system time in UTC") +@api_view(['GET']) def utc(request): return HttpResponse(datetime.utcnow().isoformat(), content_type='text/plain') @permission_classes([AllowAny]) @authentication_classes([AllowAny]) -@swagger_auto_schema(responses={200: 'The LST time in hms format at the given UTC time and station or longitude'}, +@swagger_auto_schema(method='GET', + responses={200: 'The LST time in hms format at the given UTC time and station or longitude'}, operation_description="Get LST time for UTC time and station or longitude", manual_parameters=[Parameter(name='station', required=False, type='string', in_='query', description="A station names (defaults to CS002)"), Parameter(name='timestamp', required=False, type='string', in_='query', description="A timestamp in isoformat (defaults to utcnow)"), - Parameter(name='longitude', required=False, type='float', in_='query', - description="A longitude") + Parameter(name='longitude', required=False, type='string', in_='query', + description="A longitude as float") ]) +@api_view(['GET']) def lst(request): # Handling optional parameters via django paths in urls.py is a pain, we access them on the request directly instead. 
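+    # example: GET /api/util/lst?timestamp=2020-01-01T12:00:00&station=CS002
+    # (or pass longitude=<float> instead of a station name)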
     timestamp = request.GET.get('timestamp', None)
@@ -130,12 +141,15 @@
 @permission_classes([AllowAny])
 @authentication_classes([AllowAny])
-@swagger_auto_schema(responses={200: 'A JSON object with sunrise, sunset, day and night of the given stations at the given timestamps'},
-                     operation_description="Get sunrise, sunset, day and night for stations and timestamps",
+@swagger_auto_schema(method='GET',
+                     responses={200: 'A JSON object with sunrise, sunset, day and night of the given stations at the given timestamps'},
+                     operation_description="Get sunrise, sunset, day and night for stations and timestamps.\n\n"
+                                           "Example request: /api/util/sun_rise_and_set?stations=CS002,CS005&timestamps=2020-05-01,2020-09-09T11-11-00",
                      manual_parameters=[Parameter(name='stations', required=False, type='string', in_='query',
                                                   description="comma-separated list of station names"),
                                         Parameter(name='timestamps', required=False, type='string', in_='query',
                                                   description="comma-separated list of isoformat timestamps")])
+@api_view(['GET'])
 def get_sun_rise_and_set(request):
     """
     returns sunrise and sunset at the given stations and timestamps, or today at LOFAR core if none specified.
@@ -144,14 +158,62 @@ def get_sun_rise_and_set(request):
     timestamps = request.GET.get('timestamps', None)
     stations = request.GET.get('stations', None)
     if timestamps is None:
-        timestamps = [datetime.utcnow()]
+        timestamps = (datetime.utcnow(),)
     else:
         timestamps = timestamps.split(',')
-        timestamps = [dateutil.parser.parse(timestamp) for timestamp in timestamps]  # isot to datetime
+        timestamps = tuple([dateutil.parser.parse(timestamp, ignoretz=True) for timestamp in timestamps])  # isot to datetime
     if stations is None:
-        stations = ['CS002']
+        stations = ("CS002",)
     else:
-        stations = stations.split(',')
+        stations = tuple(stations.split(','))
+
+    # todo: to improve speed for the frontend, we should probably precompute/cache these and return those (where available), to revisit after constraint table / TMSS-190 is done
     return JsonResponse(timestamps_and_stations_to_sun_rise_and_set(timestamps, stations))
+
+@permission_classes([AllowAny])
+@authentication_classes([AllowAny])
+@swagger_auto_schema(method='GET',
+                     responses={200: 'A JSON object with angular distances of the given sky coordinates from the given solar system bodies at the given timestamps (seen from LOFAR core)'},
+                     operation_description="Get angular distances of the given sky coordinates from the given solar system bodies at all given timestamps. \n\n"
+                                           "Example request: /api/util/angular_separation_from_bodies?angle1=1&angle2=1&timestamps=2020-01-01T15,2020-01-01T16",
+                     manual_parameters=[Parameter(name='angle1', required=True, type='string', in_='query',
+                                                  description="first angle of celestial coordinates as float, e.g. RA"),
+                                        Parameter(name='angle2', required=True, type='string', in_='query',
+                                                  description="second angle of celestial coordinates as float, e.g. Dec"),
+                                        Parameter(name='direction_type', required=False, type='string', in_='query',
+                                                  description="direction_type of celestial coordinates as string, e.g.
J2000"), + Parameter(name='timestamps', required=False, type='string', in_='query', + description="comma-separated list of isoformat timestamps"), + Parameter(name='bodies', required=False, type='string', in_='query', + description="comma-separated list of solar system bodies")]) +@api_view(['GET']) +def get_angular_separation_from_bodies(request): + ''' + returns angular distances of the given sky coordinates from the given astronomical objects at the given timestamps and stations + ''' + timestamps = request.GET.get('timestamps', None) + angle1 = request.GET.get('angle1') + angle2 = request.GET.get('angle2') + direction_type = request.GET.get("direction_type", "J2000") + bodies = tuple(request.GET.get('bodies', "sun,moon,jupiter").split(',')) + + if angle1 is None or angle2 is None: + raise ValueError("Please provide celestial coordinates via 'angle1', 'angle2' (and optionally 'direction_type') properties.") + + if timestamps is None: + timestamps = (datetime.utcnow(),) + else: + timestamps = timestamps.split(',') + timestamps = tuple([dateutil.parser.parse(timestamp, ignoretz=True) for timestamp in timestamps]) # isot to datetime + + # calculate + sep_dict = coordinates_and_timestamps_to_separation_from_bodies(angle1=angle1, angle2=angle2, direction_type=direction_type, bodies=bodies, timestamps=timestamps) + new_sep_dict = {} + + # serialize angles and datetimes for json response + for body, timestamps in sep_dict.items(): + for timestamp, angle in timestamps.items(): + new_sep_dict.setdefault(body, {})[timestamp.isoformat()] = angle.rad + + return JsonResponse(new_sep_dict) diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py index 2bc7b1814e5c667bcdd9fae7bea322e7696cdf82..bcd3eaf22671451c5d005e36c178c56f66b1c0f3 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py @@ -35,8 +35,6 @@ from lofar.sas.tmss.tmss.tmssapp.renderers import PlainTextRenderer from rest_framework.views import APIView from rest_framework.decorators import api_view, renderer_classes -from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduct -from lofar.lta.sip import visualizer class TextPlainAutoSchema(SwaggerAutoSchema): def get_produces(self): @@ -79,10 +77,6 @@ class AlgorithmViewSet(LOFARViewSet): queryset = models.Algorithm.objects.all() serializer_class = serializers.AlgorithmSerializer -class ScheduleMethodViewSet(LOFARViewSet): - queryset = models.ScheduleMethod.objects.all() - serializer_class = serializers.ScheduleMethodSerializer - class SubtaskTemplateFilter(filters.FilterSet): class Meta: model = models.SubtaskTemplate @@ -145,7 +139,8 @@ class SubtaskViewSet(LOFARViewSet): filter_class = SubTaskFilter ordering = ('start_time',) - queryset = queryset.prefetch_related('state') + # performance boost: select the related models in a single db call. + queryset = queryset.select_related('state', 'specifications_template', 'specifications_template__type', 'cluster', 'created_or_updated_by_user') @swagger_auto_schema(auto_schema=TextPlainAutoSchema, responses={200: 'A LOFAR parset for this subtask (as plain text)', @@ -292,6 +287,9 @@ class SubtaskNestedViewSet(LOFARNestedViewSet): filter_class = SubTaskFilter ordering = ('start_time',) + # performance boost: select the related models in a single db call. 
+ queryset = queryset.select_related('state', 'specifications_template', 'specifications_template__type', 'cluster', 'created_or_updated_by_user') + def get_queryset(self): if 'task_blueprint_id' in self.kwargs: task_blueprint = get_object_or_404(models.TaskBlueprint, pk=self.kwargs['task_blueprint_id']) @@ -318,6 +316,7 @@ class DataproductViewSet(LOFARViewSet): @action(methods=['get'], detail=True, url_name="sip") def sip(self, request, pk=None): dataproduct = get_object_or_404(models.Dataproduct, pk=pk) + from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduct return HttpResponse(generate_sip_for_dataproduct(dataproduct).get_prettyxml(), content_type='application/xml') @swagger_auto_schema(responses={200: 'The SIP graph for this dataproduct', @@ -326,6 +325,8 @@ class DataproductViewSet(LOFARViewSet): @action(methods=['get'], detail=True, url_name="sip_graph") def sip_graph(self, request, pk=None): dataproduct = get_object_or_404(models.Dataproduct, pk=pk) + from lofar.lta.sip import visualizer + from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduct return HttpResponse(visualizer.visualize_sip(generate_sip_for_dataproduct(dataproduct)), content_type='image/svg+xml') diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py index 6e292b61afa714df6356cf528da69ebc18a555f3..f4f1e95ddbe38152855429597c6360be6448e4dc 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py @@ -318,6 +318,10 @@ class SchedulingUnitDraftViewSet(LOFARViewSet): .select_related('copy_reason') \ .select_related('scheduling_set') + # use select_related for forward related references + queryset = queryset.select_related('copy_reason', 'scheduling_set', 'requirements_template', 'observation_strategy_template', 'scheduling_constraints_template') + + @swagger_auto_schema(responses={201: 'The Created SchedulingUnitBlueprint, see Location in Response header', 403: 'forbidden'}, operation_description="Carve SchedulingUnitDraft in stone, and make an (uneditable) blueprint out of it.") @@ -594,6 +598,9 @@ class SchedulingUnitBlueprintViewSet(LOFARViewSet): # prefetch all reverse related references from other models on their related_name to avoid a ton of duplicate queries queryset = queryset.prefetch_related('task_blueprints') + # use select_related for forward related references + queryset = queryset.select_related('requirements_template', 'draft') + @swagger_auto_schema(responses={201: "This SchedulingUnitBlueprint, with references to its created TaskBlueprints and (scheduled) Subtasks.", 403: 'forbidden'}, operation_description="Create TaskBlueprint(s) for this scheduling unit, create subtasks, and schedule the ones that are not dependend on predecessors.") @@ -662,16 +669,16 @@ class TaskDraftViewSet(LOFARViewSet): serializer_class = serializers.TaskDraftSerializer # prefetch all reverse related references from other models on their related_name to avoid a ton of duplicate queries - queryset = queryset.prefetch_related('first_to_connect') \ - .prefetch_related('second_to_connect')\ + queryset = queryset.prefetch_related('first_scheduling_relation') \ + .prefetch_related('second_scheduling_relation')\ .prefetch_related('produced_by')\ .prefetch_related('consumed_by')\ .prefetch_related('task_blueprints')\ .prefetch_related('copied_from') # prefetch nested references in reverse models to avoid duplicate lookup queries - queryset = 
queryset.prefetch_related('first_to_connect__placement') \ - .prefetch_related('second_to_connect__placement') + queryset = queryset.prefetch_related('first_scheduling_relation__placement') \ + .prefetch_related('second_scheduling_relation__placement') # select all references to other models to avoid even more duplicate queries queryset = queryset.select_related('copies') \ @@ -771,15 +778,19 @@ class TaskBlueprintViewSet(LOFARViewSet): serializer_class = serializers.TaskBlueprintSerializer # prefetch all reverse related references from other models on their related_name to avoid a ton of duplicate queries - queryset = queryset.prefetch_related('first_to_connect')\ - .prefetch_related('second_to_connect')\ + queryset = queryset.prefetch_related('first_scheduling_relation')\ + .prefetch_related('second_scheduling_relation')\ .prefetch_related('produced_by')\ .prefetch_related('consumed_by')\ .prefetch_related('subtasks') # prefetch nested references in reverse models to avoid duplicate lookup queries - queryset = queryset.prefetch_related('first_to_connect__placement') \ - .prefetch_related('second_to_connect__placement') + queryset = queryset.prefetch_related('first_scheduling_relation__placement') \ + .prefetch_related('second_scheduling_relation__placement') \ + .prefetch_related('subtasks__specifications_template') + + # use select_related for forward related references + queryset = queryset.select_related('draft', 'specifications_template', 'specifications_template__type', 'scheduling_unit_blueprint') @swagger_auto_schema(responses={201: "This TaskBlueprint, with it is created subtasks", 403: 'forbidden'}, diff --git a/SAS/TMSS/src/tmss/urls.py b/SAS/TMSS/src/tmss/urls.py index 781e6af696a5bc3f0827c84b8c60286fa898112f..623d43642732d4a11463f252adffb0938259d9c9 100644 --- a/SAS/TMSS/src/tmss/urls.py +++ b/SAS/TMSS/src/tmss/urls.py @@ -31,7 +31,6 @@ from drf_yasg import openapi from datetime import datetime import os from material.frontend import urls as frontend_urls -from viewflow.flow.viewset import FlowViewSet # @@ -54,6 +53,7 @@ swagger_schema_view = get_schema_view( # permission_classes=(permissions.AllowAny,), ) +# use re_path(r'<...>/?') to make trailing slash optional (double entries confuse Swagger) urlpatterns = [ path('admin/', admin.site.urls), path('logout/', LogoutView.as_view(), name='logout'), @@ -61,13 +61,16 @@ urlpatterns = [ re_path(r'^swagger(?P<format>\.json|\.yaml)$', swagger_schema_view.without_ui(cache_timeout=0), name='schema-json'), path('swagger/', swagger_schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'), path('redoc/', swagger_schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'), - path('schemas/<str:template>/<str:name>/<str:version>', views.get_template_json_schema, name='get_template_json_schema'), #TODO: how to make trailing slash optional? - path('schemas/<str:template>/<str:name>/<str:version>/', views.get_template_json_schema, name='get_template_json_schema'), - path('station_groups/<str:template_name>/<str:template_version>/<str:station_group>', views.get_stations_in_group, name='get_stations_in_group'), #TODO: how to make trailing slash optional? + #re_path('schemas/<str:template>/<str:name>/<str:version>', views.get_template_json_schema, name='get_template_json_schema'), # !! use of regex here breaks reverse url lookup + path('schemas/<str:template>/<str:name>/<str:version>', views.get_template_json_schema, name='get_template_json_schema'), # !! 
two urls for same view break Swagger, one url break json ref resolution !! + path('schemas/<str:template>/<str:name>/<str:version>/', views.get_template_json_schema, name='get_template_json_schema'), # !! two urls for same view break Swagger, one url break json ref resolution !! + #re_path('station_groups/<str:template_name>/<str:template_version>/<str:station_group>/?', views.get_stations_in_group, name='get_stations_in_group'), # !! use of regex here somehow breaks functionality (because parameters?) -> index page + path('station_groups/<str:template_name>/<str:template_version>/<str:station_group>', views.get_stations_in_group, name='get_stations_in_group'), path('station_groups/<str:template_name>/<str:template_version>/<str:station_group>/', views.get_stations_in_group, name='get_stations_in_group'), - path('util/sun_rise_and_set', views.get_sun_rise_and_set, name='get_sun_rise_and_set'), - path(r'util/utc', views.utc, name="system-utc"), - path(r'util/lst', views.lst, name="conversion-lst"), + re_path('util/sun_rise_and_set/?', views.get_sun_rise_and_set, name='get_sun_rise_and_set'), + re_path('util/utc/?', views.utc, name="system-utc"), + re_path('util/lst/?', views.lst, name="conversion-lst"), + re_path('util/angular_separation_from_bodies/?', views.get_angular_separation_from_bodies, name='get_angular_separation_from_bodies'), ] if os.environ.get('SHOW_DJANGO_DEBUG_TOOLBAR', False): @@ -175,7 +178,6 @@ router.register(r'subtask_state', viewsets.SubtaskStateViewSet) router.register(r'subtask_type', viewsets.SubtaskTypeViewSet) router.register(r'station_type', viewsets.StationTypeViewSet) router.register(r'algorithm', viewsets.AlgorithmViewSet) -router.register(r'schedule_method', viewsets.ScheduleMethodViewSet) router.register(r'scheduling_relation_placement', viewsets.SchedulingRelationPlacement) # templates @@ -215,7 +217,26 @@ urlpatterns = [url(r'^api$', RedirectView.as_view(url='/api/')), url(r'^api/', include(urlpatterns)), url(r'^oidc$', RedirectView.as_view(url='/oidc/')), url(r'^oidc/', include('mozilla_django_oidc.urls')), - url(r'^workflow$', RedirectView.as_view(url='/workflow/', permanent=False)), url(r'', include(frontend_urls)), url(r'^.*', include(frontend_urlpatterns)), ] + + + +# --- +# QA Workflow steps +if bool(os.environ.get('TMSS_ENABLE_VIEWFLOW', False)): + from .workflowapp import viewsets as workflow_viewsets + + viewflow_router = OptionalSlashRouter() + viewflow_router.APIRootView = TMSSAPIRootView + + viewflow_router.register('scheduling_unit_flow/su', workflow_viewsets.SchedulingUnitFlowViewSet, basename='su') + viewflow_router.register('scheduling_unit_flow/qa_reporting_to', workflow_viewsets.QAReportingTOViewSet, basename='qa_reporting_to') + viewflow_router.register('scheduling_unit_flow/qa_reporting_sos', workflow_viewsets.QAReportingSOSViewSet, basename='qa_reporting_sos') + viewflow_router.register('scheduling_unit_flow/qa_pi_verification', workflow_viewsets.PIVerificationViewSet, basename='qa_pi_verification') + viewflow_router.register('scheduling_unit_flow/qa_decide_acceptance', workflow_viewsets.DecideAcceptanceViewSet, basename='qa_decide_acceptance') + viewflow_router.register('scheduling_unit_flow/qa_scheduling_unit_process', workflow_viewsets.SchedulingUnitProcessViewSet, basename='qa_scheduling_unit_process') + + urlpatterns.extend([url(r'^workflow$', RedirectView.as_view(url='/workflow/', permanent=False)), + url(r'^workflow_api/', include(viewflow_router.urls))]) diff --git a/SAS/TMSS/src/tmss/workflowapp/CMakeLists.txt 
b/SAS/TMSS/src/tmss/workflowapp/CMakeLists.txt index 94b72e83e35a77ab9b16f84b7647f8ab0c8af94a..e7c3171661a6fd3927e6b4214251c21f0240d0b1 100644 --- a/SAS/TMSS/src/tmss/workflowapp/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/workflowapp/CMakeLists.txt @@ -15,3 +15,6 @@ add_subdirectory(migrations) add_subdirectory(models) add_subdirectory(flows) add_subdirectory(viewsets) +add_subdirectory(forms) +add_subdirectory(templates) +add_subdirectory(serializers) diff --git a/SAS/TMSS/src/tmss/workflowapp/flows/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/flows/CMakeLists.txt index 769f922e4781a912f1c0488c3655f6ab61363d3a..ba35dcf6abd1341333f5da54b43b2977805ef628 100644 --- a/SAS/TMSS/src/tmss/workflowapp/flows/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/workflowapp/flows/CMakeLists.txt @@ -4,7 +4,7 @@ include(PythonInstall) set(_py_files __init__.py helloworldflow.py - schedulingunitdemoflow.py + schedulingunitflow.py ) python_install(${_py_files} diff --git a/SAS/TMSS/src/tmss/workflowapp/flows/__init__.py b/SAS/TMSS/src/tmss/workflowapp/flows/__init__.py index 45516795a25730483ebfa40c1fbdb5f533df8ebe..a0ae3713747c0b28c5595736d06f4bcb800da5b5 100644 --- a/SAS/TMSS/src/tmss/workflowapp/flows/__init__.py +++ b/SAS/TMSS/src/tmss/workflowapp/flows/__init__.py @@ -1,2 +1,2 @@ from .helloworldflow import * -from .schedulingunitdemoflow import * \ No newline at end of file +from .schedulingunitflow import * \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitdemoflow.py b/SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitflow.py similarity index 57% rename from SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitdemoflow.py rename to SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitflow.py index 0a2882d7a4550ef3ff8e60b190c4074f60356795..8d01c51a15bc840bdb775acce1297938234a1611 100644 --- a/SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitdemoflow.py +++ b/SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitflow.py @@ -9,6 +9,7 @@ from viewflow.flow.nodes import Signal from viewflow import mixins from .. import models +from .. import viewsets from viewflow import frontend, ThisObject from viewflow.activation import STATUS @@ -26,7 +27,7 @@ class ConditionActivation(FuncActivation): return activation class Condition(Signal): - #task_type = "HUMAN" # makes it show up in the unassigned task lists + task_type = "HUMAN" # makes it show up in the unassigned task lists activation_class = ConditionActivation def __init__(self, condition_check, signal, sender=None, task_loader=None, **kwargs): @@ -65,55 +66,14 @@ class Condition(Signal): super(Condition, self).ready() @frontend.register -class SchedulingUnitDemoFlow(Flow): - process_class = models.SchedulingUnitDemoProcess - - # 0. Start on SU instantiation - # 1. To be Manually scheduled? -> Go to 1a - # 1a. Present view to manually schedule. - # 2. Wait on signal SU got finished/error/cancelled (might have already!!) -> - # - Wait for assignment to RO user - # View: - Present any quality plots - # - Present any error info - # - Present fixing options - # - Present choice to fix & redo, discard, or continue. - # Continue: - # View: - Present any quality plots - # - Present any error info - # - Submit quality report/score - # - Submit recommendation - # 3. - Assign ticket to Contact Author - # - Present quality plots to user - # - Present quality report/score, and recommendation - # - Submit acceptance & report - # 4. - Assign ticket to owner in step 2. 
- # - Present quality report/score, and recommendation - # - Present acceptance & report - # - Present choice to ingest or discard. - # Ingest: - # Set ingestable flag on SU. - # Discard: - Cancel SU (triggering garbage collection - # - # Fix & Redo: - # - Wait for user to confirm SU is fixed - # - Go to 2 - # - - # Consider adding to any/all views: - # - Present any opened JIRA tickets - # - Present opportunity to open JIRA ticket - # Note that previously submitted info can be found by clicking through the task. So - # we only need to show whats nominally needed. - # Note that orthogonally to the above flow: - # - Users need to be informed tasks are assigned to them (e-mail?) - # - Users already have an overview in viewflow of tickets assigned to them - # - We likely want to control what e-mails are sent. +class SchedulingUnitFlow(Flow): + process_class = models.SchedulingUnitProcess start = ( flow.StartSignal( post_save, this.on_save_can_start, - sender=models.SchedulingUnitDemo + sender=models.SchedulingUnit ).Next(this.wait_schedulable) ) @@ -121,39 +81,77 @@ class SchedulingUnitDemoFlow(Flow): Condition( this.check_condition, post_save, - sender=models.SchedulingUnitDemo, + sender=models.SchedulingUnit, task_loader=this.get_scheduling_unit_task ) - .Next(this.form) + .Next(this.qa_reporting_to) ) - form = ( + #QA Reporting (TO) + qa_reporting_to = ( flow.View( - UpdateProcessView, - fields=["text"] + viewsets.QAReportingTOView, + task_description='QA Reporting (TO)' ).Permission( auto_create=True - ).Next(this.approve) + ).Next(this.check_operator_accept) + ) + + #Quality Acceptable + check_operator_accept = ( + flow.If(lambda activation: activation.process.qa_reporting_to.operator_accept) + .Then(this.qa_reporting_sos) + .Else(this.mark_sub) ) - approve = ( + #QA Reporting (SOS) + qa_reporting_sos = ( flow.View( - UpdateProcessView, - fields=["approved"] + viewsets.QAReportingSOSView, + task_description='QA Reporting (SOS)' ).Permission( auto_create=True - ).Next(this.check_approve) + ).Next(this.check_sos_accept_show_pi) + ) + + #Quality Acceptable + check_sos_accept_show_pi = ( + flow.If(lambda activation: activation.process.qa_reporting_sos.sos_accept_show_pi) + .Then(this.pi_verification) + .Else(this.mark_sub) ) - check_approve = ( - flow.If(lambda activation: activation.process.approved) - .Then(this.send) - .Else(this.end) + #PI Verification + pi_verification = ( + flow.View( + viewsets.PIVerificationView, + task_description='PI Verification' + ).Permission( + auto_create=True + ).Next(this.decide_acceptance) ) - send = ( + #Decide Acceptance + decide_acceptance = ( + flow.View( + viewsets.DecideAcceptanceView, + task_description='Decide Acceptance' + ).Permission( + auto_create=True + ).Next(this.check_sos_accept_after_pi) + ) + + #Quality Acceptable + check_sos_accept_after_pi = ( + flow.If(lambda activation: activation.process.decide_acceptance.sos_accept_after_pi) + .Then(this.mark_sub) + .Else(this.mark_sub) + ) + + #Mark SUB Successful/failed + mark_sub = ( flow.Handler( - this.send_hello_world_request + this.do_mark_sub ).Next(this.end) ) @@ -164,14 +162,29 @@ class SchedulingUnitDemoFlow(Flow): if created: activation.prepare() activation.process.su = instance + activation.done() print("workflow started") else: print("no workflow started") return activation - def send_hello_world_request(self, activation): - print(activation.process.text) + + def do_mark_sub(self, activation): + + activation.process.can_delete = True + activation.process.results_accepted = 
((activation.process.qa_reporting_to is not None and activation.process.qa_reporting_to.operator_accept) + and (activation.process.qa_reporting_sos is not None and activation.process.qa_reporting_sos.sos_accept_show_pi) + and (activation.process.decide_acceptance is not None and activation.process.decide_acceptance.sos_accept_after_pi)) + + print("!!!!!!!!!!!END FLOW!!!!!!!!!!!") + print ("can_delete:") + print (activation.process.can_delete) + print ("results_accepted:") + print (activation.process.results_accepted) + + return activation + def check_condition(self, activation, instance): if instance is None: @@ -183,5 +196,5 @@ class SchedulingUnitDemoFlow(Flow): def get_scheduling_unit_task(self, flow_task, sender, instance, **kwargs): print(kwargs) - process = models.SchedulingUnitDemoProcess.objects.get(su=instance) + process = models.SchedulingUnitProcess.objects.get(su=instance) return Task.objects.get(process=process,flow_task=flow_task) diff --git a/SAS/TMSS/src/tmss/workflowapp/forms/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/forms/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..42b8a34fd53a59e7a7a15885e307a27d4874296a --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/forms/CMakeLists.txt @@ -0,0 +1,10 @@ + +include(PythonInstall) + +set(_py_files + __init__.py + schedulingunitflow.py + ) + +python_install(${_py_files} + DESTINATION lofar/sas/tmss/tmss/workflowapp/forms) diff --git a/SAS/TMSS/src/tmss/workflowapp/forms/__init__.py b/SAS/TMSS/src/tmss/workflowapp/forms/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bfdfbc84e07beb363937412fd7fb6d5788c684d0 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/forms/__init__.py @@ -0,0 +1 @@ +from .schedulingunitflow import * \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/forms/schedulingunitflow.py b/SAS/TMSS/src/tmss/workflowapp/forms/schedulingunitflow.py new file mode 100644 index 0000000000000000000000000000000000000000..a967367b38ff77d43ffdf08fb3b30e0f824907ab --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/forms/schedulingunitflow.py @@ -0,0 +1,16 @@ +from django import forms +from material import Layout, Row, Span2 + +from .. 
import models + + +class QAReportingTO(forms.ModelForm): + layout = Layout( + Row('operator_report'), + Row('operator_accept'), + ) + + class Meta: + model = models.QAReportingTO + fields = ['operator_report','operator_accept'] + #fields = '__all__' \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/workflowapp/migrations/0001_initial.py index 2e95b97379265e5eb14cfd44e85357218eb63948..cdea4f733fe87cb65a93715ce9fe5f4ebf25f750 100644 --- a/SAS/TMSS/src/tmss/workflowapp/migrations/0001_initial.py +++ b/SAS/TMSS/src/tmss/workflowapp/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 3.0.9 on 2020-10-01 12:30 +# Generated by Django 3.0.9 on 2020-11-24 11:24 from django.db import migrations, models import django.db.models.deletion @@ -14,7 +14,39 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='SchedulingUnitDemo', + name='DecideAcceptance', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('sos_accept_after_pi', models.BooleanField(default=False)), + ], + ), + migrations.CreateModel( + name='PIVerification', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('pi_report', models.CharField(max_length=150)), + ('pi_accept', models.BooleanField(default=False)), + ], + ), + migrations.CreateModel( + name='QAReportingSOS', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('sos_report', models.CharField(max_length=150)), + ('quality_within_policy', models.CharField(max_length=150)), + ('sos_accept_show_pi', models.BooleanField(default=False)), + ], + ), + migrations.CreateModel( + name='QAReportingTO', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('operator_report', models.CharField(max_length=150)), + ('operator_accept', models.BooleanField(default=False)), + ], + ), + migrations.CreateModel( + name='SchedulingUnit', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50)), @@ -35,12 +67,16 @@ class Migration(migrations.Migration): bases=('viewflow.process',), ), migrations.CreateModel( - name='SchedulingUnitDemoProcess', + name='SchedulingUnitProcess', fields=[ ('process_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='viewflow.Process')), - ('text', models.CharField(max_length=150)), - ('approved', models.BooleanField(default=False)), - ('su', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflowapp.SchedulingUnitDemo')), + ('can_delete', models.BooleanField(default=False)), + ('results_accepted', models.BooleanField(default=False)), + ('decide_acceptance', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflowapp.DecideAcceptance')), + ('pi_verification', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflowapp.PIVerification')), + ('qa_reporting_sos', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflowapp.QAReportingSOS')), + ('qa_reporting_to', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, 
to='workflowapp.QAReportingTO')), + ('su', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflowapp.SchedulingUnit')), ], options={ 'abstract': False, diff --git a/SAS/TMSS/src/tmss/workflowapp/models/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/models/CMakeLists.txt index 1c94f0a15d5ade684111945ce5bb79dfe25f7a91..57e7e39aac465b8acf7b209fa3dc901ae4c2076f 100644 --- a/SAS/TMSS/src/tmss/workflowapp/models/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/workflowapp/models/CMakeLists.txt @@ -4,7 +4,7 @@ include(PythonInstall) set(_py_files __init__.py helloworldflow.py - schedulingunitdemoflow.py + schedulingunitflow.py ) python_install(${_py_files} diff --git a/SAS/TMSS/src/tmss/workflowapp/models/__init__.py b/SAS/TMSS/src/tmss/workflowapp/models/__init__.py index 45516795a25730483ebfa40c1fbdb5f533df8ebe..a0ae3713747c0b28c5595736d06f4bcb800da5b5 100644 --- a/SAS/TMSS/src/tmss/workflowapp/models/__init__.py +++ b/SAS/TMSS/src/tmss/workflowapp/models/__init__.py @@ -1,2 +1,2 @@ from .helloworldflow import * -from .schedulingunitdemoflow import * \ No newline at end of file +from .schedulingunitflow import * \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitdemoflow.py b/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitdemoflow.py deleted file mode 100644 index b9797a0b12e56ffb6f284da503f43263561522c4..0000000000000000000000000000000000000000 --- a/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitdemoflow.py +++ /dev/null @@ -1,13 +0,0 @@ -# Create your models here. - -from django.db.models import CharField, IntegerField,BooleanField, ForeignKey, CASCADE, Model -from viewflow.models import Process - -class SchedulingUnitDemo(Model): - name = CharField(max_length=50) - state = IntegerField() - -class SchedulingUnitDemoProcess(Process): - text = CharField(max_length=150) - approved = BooleanField(default=False) - su = ForeignKey(SchedulingUnitDemo, blank=True, null=True, on_delete=CASCADE) diff --git a/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitflow.py b/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitflow.py new file mode 100644 index 0000000000000000000000000000000000000000..3e340fbf8c9713fbd37daec0dc977e3d453eb69f --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitflow.py @@ -0,0 +1,38 @@ +# Create your models here. 
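+# Models backing the QA workflow: one table per human step in the flow
+# (TO report, SOS report, PI verification, acceptance decision), each referenced
+# from SchedulingUnitProcess as the flow advances.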
+ +from django.db.models import CharField, IntegerField,BooleanField, ForeignKey, CASCADE, Model,NullBooleanField +from viewflow.models import Process + +class QAReportingTO(Model): + operator_report = CharField(max_length=150) + operator_accept = BooleanField(default=False) + + +class QAReportingSOS(Model): + sos_report = CharField(max_length=150) + quality_within_policy = CharField(max_length=150) + sos_accept_show_pi = BooleanField(default=False) + + +class PIVerification(Model): + pi_report = CharField(max_length=150) + pi_accept = BooleanField(default=False) + + +class DecideAcceptance(Model): + sos_accept_after_pi = BooleanField(default=False) + + +class SchedulingUnit(Model): + name = CharField(max_length=50) + state = IntegerField() + + +class SchedulingUnitProcess(Process): + su = ForeignKey(SchedulingUnit, blank=True, null=True, on_delete=CASCADE) + qa_reporting_to=ForeignKey(QAReportingTO, blank=True, null=True, on_delete=CASCADE) + qa_reporting_sos=ForeignKey(QAReportingSOS, blank=True, null=True, on_delete=CASCADE) + pi_verification=ForeignKey(PIVerification, blank=True, null=True, on_delete=CASCADE) + decide_acceptance=ForeignKey(DecideAcceptance, blank=True, null=True, on_delete=CASCADE) + can_delete = BooleanField(default=False) + results_accepted = BooleanField(default=False) \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/serializers/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/serializers/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..93e5e84e9ee2e14a5b311ad8f204c7d62920dae0 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/serializers/CMakeLists.txt @@ -0,0 +1,11 @@ + +include(PythonInstall) + +set(_py_files + __init__.py + schedulingunitflow.py + + ) + +python_install(${_py_files} + DESTINATION lofar/sas/tmss/tmss/workflowapp/serializers) diff --git a/SAS/TMSS/src/tmss/workflowapp/serializers/__init__.py b/SAS/TMSS/src/tmss/workflowapp/serializers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bfdfbc84e07beb363937412fd7fb6d5788c684d0 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/serializers/__init__.py @@ -0,0 +1 @@ +from .schedulingunitflow import * \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/serializers/schedulingunitflow.py b/SAS/TMSS/src/tmss/workflowapp/serializers/schedulingunitflow.py new file mode 100644 index 0000000000000000000000000000000000000000..e29cf3cb9796afcce95e94e63636fe300791f5b0 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/serializers/schedulingunitflow.py @@ -0,0 +1,41 @@ +from rest_framework.serializers import ModelSerializer +from lofar.sas.tmss.tmss.workflowapp import models + +from django.views import generic +from django.forms.models import modelform_factory + + +from .. 
import forms + +#View to add a fake Scheduling Unit for the QA Workflow +class SchedulingUnitSerializer(ModelSerializer): + class Meta: + model = models.SchedulingUnit + fields = '__all__' + +#Viewsets and serializers to access intermediate steps of the QA Workflow +#through DRF +class QAReportingTOSerializer(ModelSerializer): + class Meta: + model = models.QAReportingTO + fields = '__all__' + +class QAReportingSOSSerializer(ModelSerializer): + class Meta: + model = models.QAReportingSOS + fields = '__all__' + +class PIVerificationSerializer(ModelSerializer): + class Meta: + model = models.PIVerification + fields = '__all__' + +class DecideAcceptanceSerializer(ModelSerializer): + class Meta: + model = models.DecideAcceptance + fields = '__all__' + +class SchedulingUnitProcessSerializer(ModelSerializer): + class Meta: + model = models.SchedulingUnitProcess + fields = '__all__' \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/templates/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/templates/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..3047f7e1561039003dec0c8630b75de4ce5f3037 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/templates/CMakeLists.txt @@ -0,0 +1,10 @@ + +include(PythonInstall) + +set(_py_files + __init__.py + qa_reporting.html + ) + +python_install(${_py_files} + DESTINATION lofar/sas/tmss/tmss/workflowapp/templates) diff --git a/SAS/TMSS/src/tmss/workflowapp/templates/__init__.py b/SAS/TMSS/src/tmss/workflowapp/templates/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/SAS/TMSS/src/tmss/workflowapp/templates/qa_reporting.html b/SAS/TMSS/src/tmss/workflowapp/templates/qa_reporting.html new file mode 100644 index 0000000000000000000000000000000000000000..822e7eb45e1677261b67fda229a2848d49963cfc --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/templates/qa_reporting.html @@ -0,0 +1 @@ +{% extends 'viewflow/flow/task.html' %} diff --git a/SAS/TMSS/src/tmss/workflowapp/viewsets/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/viewsets/CMakeLists.txt index 7adc12fcf7a85912784409d17f37177986c94298..eaf3c5ab4b9afa9063deda344de3644dcfbc388d 100644 --- a/SAS/TMSS/src/tmss/workflowapp/viewsets/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/workflowapp/viewsets/CMakeLists.txt @@ -3,7 +3,7 @@ include(PythonInstall) set(_py_files __init__.py - schedulingunitdemoflow.py + schedulingunitflow.py ) python_install(${_py_files} diff --git a/SAS/TMSS/src/tmss/workflowapp/viewsets/__init__.py b/SAS/TMSS/src/tmss/workflowapp/viewsets/__init__.py index b77c70aeb959e9d4f63c395fd1079cfbbe3bc078..bfdfbc84e07beb363937412fd7fb6d5788c684d0 100644 --- a/SAS/TMSS/src/tmss/workflowapp/viewsets/__init__.py +++ b/SAS/TMSS/src/tmss/workflowapp/viewsets/__init__.py @@ -1 +1 @@ -from .schedulingunitdemoflow import * \ No newline at end of file +from .schedulingunitflow import * \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitdemoflow.py b/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitdemoflow.py deleted file mode 100644 index da3dc24e15ff6f3bd93da9037101a718f4ebed66..0000000000000000000000000000000000000000 --- a/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitdemoflow.py +++ /dev/null @@ -1,22 +0,0 @@ -from django.shortcuts import render -from rest_framework import viewsets -from rest_framework.response import Response -from rest_framework.decorators import action -from rest_framework.serializers import ModelSerializer 
-from lofar.sas.tmss.tmss.workflowapp import models - -# Create your views here. - -class SchedulingUnitDemoSerializer(ModelSerializer): - class Meta: - model = models.SchedulingUnitDemo - fields = '__all__' - -class SchedulingUnitFlowViewSet(viewsets.ModelViewSet): - queryset = models.SchedulingUnitDemo.objects.all() - serializer_class = SchedulingUnitDemoSerializer - - @action(methods=['get'], detail=True) - def trigger(self, request, pk=None): - SchedulingUnitDemoFlow - return Response("ok") \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitflow.py b/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitflow.py new file mode 100644 index 0000000000000000000000000000000000000000..1c70e87e110fd31d5f2533712165f973d0701733 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitflow.py @@ -0,0 +1,117 @@ +from django.shortcuts import render, redirect +from rest_framework import viewsets +from rest_framework.response import Response +from rest_framework.decorators import action +from lofar.sas.tmss.tmss.workflowapp import models + +from django.views import generic +from viewflow.flow.views import StartFlowMixin, FlowMixin +from viewflow.decorators import flow_start_view, flow_view +from viewflow.flow.views.utils import get_next_task_url +from django.forms import CharField, CheckboxInput +from django.forms.models import modelform_factory + + +from .. import forms, models, serializers + +class SchedulingUnitFlowViewSet(viewsets.ModelViewSet): + queryset = models.SchedulingUnit.objects.all() + serializer_class = serializers.SchedulingUnitSerializer + + @action(methods=['get'], detail=True) + def trigger(self, request, pk=None): + SchedulingUnitFlow + return Response("ok") + +#Viewsets and serializers to access intermediate steps of the QA Workflow +#through DRF +class QAReportingTOViewSet(viewsets.ModelViewSet): + queryset = models.QAReportingTO.objects.all() + serializer_class = serializers.QAReportingTOSerializer + +class QAReportingSOSViewSet(viewsets.ModelViewSet): + queryset = models.QAReportingSOS.objects.all() + serializer_class = serializers.QAReportingSOSSerializer + +class PIVerificationViewSet(viewsets.ModelViewSet): + queryset = models.PIVerification.objects.all() + serializer_class = serializers.PIVerificationSerializer + +class DecideAcceptanceViewSet(viewsets.ModelViewSet): + queryset = models.DecideAcceptance.objects.all() + serializer_class = serializers.DecideAcceptanceSerializer + +class SchedulingUnitProcessViewSet(viewsets.ModelViewSet): + queryset = models.SchedulingUnitProcess.objects.all() + serializer_class = serializers.SchedulingUnitProcessSerializer + +class QAReportingTOView(FlowMixin, generic.CreateView): + template_name = 'qa_reporting.html' + model = models.QAReportingTO + fields = [ + 'operator_report', 'operator_accept' + ] + + def form_valid(self, form): + report_data = form.save(commit=False) + report_data.save() + + self.activation.process.qa_reporting_to = report_data + self.activation.process.save() + + self.activation_done() + return redirect(self.get_success_url()) + + +class QAReportingSOSView(FlowMixin, generic.CreateView): + template_name = 'qa_reporting.html' + model = models.QAReportingSOS + fields = [ + 'sos_report', 'quality_within_policy','sos_accept_show_pi' + ] + + def form_valid(self, form): + report_data = form.save(commit=False) + report_data.save() + + self.activation.process.qa_reporting_sos = report_data + self.activation.process.save() + + self.activation_done() + return 
redirect(self.get_success_url()) + + +class PIVerificationView(FlowMixin, generic.CreateView): + template_name = 'qa_reporting.html' + model = models.PIVerification + fields = [ + 'pi_report', 'pi_accept' + ] + + def form_valid(self, form): + report_data = form.save(commit=False) + report_data.save() + + self.activation.process.pi_verification = report_data + self.activation.process.save() + + self.activation_done() + return redirect(self.get_success_url()) + + +class DecideAcceptanceView(FlowMixin, generic.CreateView): + template_name = 'qa_reporting.html' + model = models.DecideAcceptance + fields = [ + 'sos_accept_after_pi' + ] + + def form_valid(self, form): + report_data = form.save(commit=False) + report_data.save() + + self.activation.process.decide_acceptance = report_data + self.activation.process.save() + + self.activation_done() + return redirect(self.get_success_url()) \ No newline at end of file diff --git a/SAS/TMSS/test/CMakeLists.txt b/SAS/TMSS/test/CMakeLists.txt index 716469c7ca9294350badc60448fc92870eb6be8e..eb2ee7306ad4ba2583c4e45374724b75b97b9b71 100644 --- a/SAS/TMSS/test/CMakeLists.txt +++ b/SAS/TMSS/test/CMakeLists.txt @@ -37,6 +37,7 @@ if(BUILD_TESTING) # To get ctest running file(COPY testdata DESTINATION ${CMAKE_CURRENT_BINARY_DIR}) + set_tests_properties(t_scheduling PROPERTIES TIMEOUT 300) set_tests_properties(t_tmssapp_scheduling_REST_API PROPERTIES TIMEOUT 300) set_tests_properties(t_tmssapp_specification_REST_API PROPERTIES TIMEOUT 360) endif() diff --git a/SAS/TMSS/test/t_conversions.py b/SAS/TMSS/test/t_conversions.py index 14231c4f091c04b1f3c53b971bbf069555e6000f..f153900312eac5e6ebab6a268c80386892983c26 100755 --- a/SAS/TMSS/test/t_conversions.py +++ b/SAS/TMSS/test/t_conversions.py @@ -165,6 +165,73 @@ class UtilREST(unittest.TestCase): response_date = dateutil.parser.parse(r_dict['CS002']['sunrise'][i]['start']).date() self.assertEqual(expected_date, response_date) + def test_util_angular_separation_from_bodies_yields_error_when_no_pointing_is_given(self): + r = requests.get(BASE_URL + '/util/angular_separation_from_bodies', auth=AUTH) + + # assert error + self.assertEqual(r.status_code, 500) + self.assertIn("celestial coordinates", r.content.decode('utf-8')) + + def test_util_angular_separation_from_bodies_returns_json_structure_with_defaults(self): + r = requests.get(BASE_URL + '/util/angular_separation_from_bodies?angle1=1&angle2=1', auth=AUTH) + self.assertEqual(r.status_code, 200) + r_dict = json.loads(r.content.decode('utf-8')) + + # assert default bodies + for key in ['sun', 'jupiter', 'moon']: + self.assertIn(key, r_dict.keys()) + + # assert the returned timestamp is close to now and the angle is a float + returned_datetime = dateutil.parser.parse(list(r_dict['jupiter'].keys())[0]) + current_datetime = datetime.datetime.utcnow() + delta = abs((returned_datetime - current_datetime).total_seconds()) + self.assertTrue(delta < 60.0) + self.assertEqual(type(list(r_dict['jupiter'].values())[0]), float) + + def test_util_angular_separation_from_bodies_considers_bodies(self): + bodies = ['sun', 'neptune', 'mercury'] + r = requests.get(BASE_URL + '/util/angular_separation_from_bodies?angle1=1&angle2=1&bodies=%s' % ','.join(bodies), auth=AUTH) + self.assertEqual(r.status_code, 200) + r_dict = json.loads(r.content.decode('utf-8')) + + # assert each requested body is included in the response and the angles differ + angle_last = None + for body in bodies: + self.assertIn(body, r_dict.keys()) + angle = list(r_dict[body].values())[0] + if angle_last is not None: + self.assertNotEqual(angle, angle_last) + angle_last = 
angle + + def test_util_angular_separation_from_bodies_considers_timestamps(self): + timestamps = ['2020-01-01', '2020-02-22T16-00-00', '2020-3-11', '2020-01-01'] + r = requests.get(BASE_URL + '/util/angular_separation_from_bodies?angle1=1&angle2=1&timestamps=%s' % ','.join(timestamps), auth=AUTH) + self.assertEqual(r.status_code, 200) + r_dict = json.loads(r.content.decode('utf-8')) + + # assert all requested timestamps yield a response and angles differ + angle_last = None + for timestamp in timestamps: + expected_timestamp = dateutil.parser.parse(timestamp, ignoretz=True).isoformat() + self.assertIn(expected_timestamp, list(r_dict['jupiter'].keys())) + angle = r_dict['jupiter'][expected_timestamp] + if angle_last is not None: + self.assertNotEqual(angle, angle_last) + angle_last = angle + + def test_util_angular_separation_from_bodies_considers_coordinates(self): + test_coords = [(1, 1,"J2000"), (1.1, 1, "J2000"), (1.1, 1.1, "J2000")] + angle_last = None + for coords in test_coords: + r = requests.get(BASE_URL + '/util/angular_separation_from_bodies?angle1=%s&angle2=%s&direction_type=%s' % coords, auth=AUTH) + self.assertEqual(r.status_code, 200) + r_dict = json.loads(r.content.decode('utf-8')) + + # assert each set of coordinates yields a response and the angles differ + angle = list(r_dict['jupiter'].values())[0] + if angle_last is not None: + self.assertNotEqual(angle, angle_last) + angle_last = angle if __name__ == "__main__": os.environ['TZ'] = 'UTC' diff --git a/SAS/TMSS/test/t_scheduling.py b/SAS/TMSS/test/t_scheduling.py index ec9de7be402f90143072687153d34c53c6bac89a..ef00fc0a9956c05a7ce6425db34220e3777165ff 100755 --- a/SAS/TMSS/test/t_scheduling.py +++ b/SAS/TMSS/test/t_scheduling.py @@ -34,25 +34,21 @@ if skip_integration_tests(): # before we import any django modules the DJANGO_SETTINGS_MODULE, TMSS_LDAPCREDENTIALS and TMSS_DBCREDENTIALS need to be known/set. # import and start an isolated RATestEnvironment and TMSSTestEnvironment (with fresh database and attached django and ldap server on free ports) # this automagically sets the required DJANGO_SETTINGS_MODULE, TMSS_LDAPCREDENTIALS and TMSS_DBCREDENTIALS envvars. -from lofar.sas.resourceassignment.resourceassigner.test.ra_test_environment import RATestEnvironment from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment -ra_test_env = RATestEnvironment() -tmss_test_env = TMSSTestEnvironment() +tmss_test_env = TMSSTestEnvironment(populate_schemas=True, populate_test_data=False, start_ra_test_environment=True, + start_postgres_listener=False, start_subtask_scheduler=False, start_dynamic_scheduler=False, + enable_viewflow=False) try: - ra_test_env.start() tmss_test_env.start() - tmss_test_env.populate_schemas() except: - ra_test_env.stop() tmss_test_env.stop() exit(1) # tell unittest to stop (and automagically cleanup) the test database once all testing is done. 
def tearDownModule(): tmss_test_env.stop() - ra_test_env.stop() from lofar.sas.tmss.test.tmss_test_data_django_models import * @@ -73,20 +69,23 @@ def create_subtask_object_for_testing(subtask_type_value, subtask_state_value): as string (no object) For these testcases 'pipeline control' and 'observation control' is relevant """ + task_blueprint = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data(specifications_template=models.TaskTemplate.objects.get(name='target observation' if subtask_type_value=='observation' else 'preprocessing pipeline'))) subtask_template_obj = models.SubtaskTemplate.objects.get(name="%s control" % subtask_type_value) subtask_state_obj = models.SubtaskState.objects.get(value=subtask_state_value) - subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj) + subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj, task_blueprint=task_blueprint) return models.Subtask.objects.create(**subtask_data) class SchedulingTest(unittest.TestCase): def setUp(self): # clean all specs/tasks/claims in RADB (cascading delete) - for spec in ra_test_env.radb.getSpecifications(): - ra_test_env.radb.deleteSpecification(spec['id']) + for spec in tmss_test_env.ra_test_environment.radb.getSpecifications(): + tmss_test_env.ra_test_environment.radb.deleteSpecification(spec['id']) def test_schedule_observation_subtask_with_enough_resources_available(self): with tmss_test_env.create_tmss_client() as client: + task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url']) + task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(task_blueprint_data, '/task_blueprint/') subtask_template = client.get_subtask_template("observation control") spec = get_default_json_object_for_schema(subtask_template['schema']) spec['stations']['digital_pointings'][0]['subbands'] = [0] @@ -96,7 +95,7 @@ class SchedulingTest(unittest.TestCase): specifications_doc=spec, cluster_url=cluster_url, start_time=datetime.utcnow()+timedelta(minutes=5), - stop_time=datetime.utcnow()+timedelta(minutes=15)) + task_blueprint_url=task_blueprint['url']) subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/') subtask_id = subtask['id'] test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/') @@ -105,7 +104,7 @@ class SchedulingTest(unittest.TestCase): subtask = client.schedule_subtask(subtask_id) self.assertEqual('scheduled', subtask['state_value']) - self.assertEqual('scheduled', ra_test_env.radb.getTask(tmss_id=subtask_id)['status']) + self.assertEqual('scheduled', tmss_test_env.ra_test_environment.radb.getTask(tmss_id=subtask_id)['status']) def test_schedule_observation_subtask_with_blocking_reservations(self): @@ -126,6 +125,9 @@ class SchedulingTest(unittest.TestCase): self.assertTrue(assigned) with tmss_test_env.create_tmss_client() as client: + task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url']) + task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(task_blueprint_data, '/task_blueprint/') + subtask_template = client.get_subtask_template("observation control") spec = get_default_json_object_for_schema(subtask_template['schema']) spec['stations']['digital_pointings'][0]['subbands'] = [0] @@ -134,7 +136,7 @@ class SchedulingTest(unittest.TestCase): 
subtask_data = test_data_creator.Subtask(specifications_template_url=subtask_template['url'], specifications_doc=spec, cluster_url=cluster_url, - task_blueprint_url=test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/')) + task_blueprint_url=task_blueprint['url']) subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/') subtask_id = subtask['id'] test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/') @@ -146,13 +148,17 @@ class SchedulingTest(unittest.TestCase): subtask = client.get_subtask(subtask_id) self.assertEqual('error', subtask['state_value']) - self.assertEqual('conflict', ra_test_env.radb.getTask(tmss_id=subtask_id)['status']) + ra_task = tmss_test_env.ra_test_environment.radb.getTask(tmss_id=subtask_id) + self.assertIsNotNone(ra_task) + self.assertEqual('conflict', ra_task['status']) def test_schedule_pipeline_subtask_with_enough_resources_available(self): with tmss_test_env.create_tmss_client() as client: cluster_url = client.get_path_as_json_object('/cluster/1')['url'] # setup: first create an observation, so the pipeline can have input. + obs_task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url']) + obs_task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(obs_task_blueprint_data, '/task_blueprint/') obs_subtask_template = client.get_subtask_template("observation control") obs_spec = get_default_json_object_for_schema(obs_subtask_template['schema']) obs_spec['stations']['digital_pointings'][0]['subbands'] = [0] @@ -160,19 +166,22 @@ class SchedulingTest(unittest.TestCase): obs_subtask_data = test_data_creator.Subtask(specifications_template_url=obs_subtask_template['url'], specifications_doc=obs_spec, cluster_url=cluster_url, - task_blueprint_url=test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/')) + task_blueprint_url=obs_task_blueprint['url']) obs_subtask = test_data_creator.post_data_and_get_response_as_json_object(obs_subtask_data, '/subtask/') obs_subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url']), '/subtask_output/') test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(filename="L%s_SB000.MS"%obs_subtask['id'], subtask_output_url=obs_subtask_output_url), '/dataproduct/') # now create the pipeline... 
+ pipe_task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="preprocessing pipeline")['url']) + pipe_task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(pipe_task_blueprint_data, '/task_blueprint/') + pipe_subtask_template = client.get_subtask_template("pipeline control") pipe_spec = get_default_json_object_for_schema(pipe_subtask_template['schema']) pipe_subtask_data = test_data_creator.Subtask(specifications_template_url=pipe_subtask_template['url'], specifications_doc=pipe_spec, - task_blueprint_url=obs_subtask['task_blueprint'], + task_blueprint_url=pipe_task_blueprint['url'], cluster_url=cluster_url) pipe_subtask = test_data_creator.post_data_and_get_response_as_json_object(pipe_subtask_data, '/subtask/') @@ -187,7 +196,7 @@ class SchedulingTest(unittest.TestCase): subtask = client.schedule_subtask(pipe_subtask['id']) self.assertEqual('scheduled', subtask['state_value']) - self.assertEqual('scheduled', ra_test_env.radb.getTask(tmss_id=pipe_subtask['id'])['status']) + self.assertEqual('scheduled', tmss_test_env.ra_test_environment.radb.getTask(tmss_id=pipe_subtask['id'])['status']) def test_schedule_ingest_subtask(self): with tmss_test_env.create_tmss_client() as client: @@ -279,11 +288,13 @@ class SchedulingTest(unittest.TestCase): self.assertEqual(1, len(task_blueprint['subtasks'])) subtask = client.get_url_as_json_object(task_blueprint['subtasks'][0]) + client.session.patch(subtask['url'], {'start_time': datetime.utcnow() + timedelta(minutes=5)}) client.set_subtask_status(subtask['id'], 'defined') + subtask = client.schedule_subtask(subtask['id']) self.assertEqual('scheduled', subtask['state_value']) - self.assertEqual('scheduled', ra_test_env.radb.getTask(tmss_id=subtask['id'])['status']) + self.assertEqual('scheduled', tmss_test_env.ra_test_environment.radb.getTask(tmss_id=subtask['id'])['status']) client.set_subtask_status(subtask['id'], 'finished') @@ -297,11 +308,11 @@ class SubtaskInputOutputTest(unittest.TestCase): def setUp(self) -> None: # make sure we're allowed to schedule - setting = Setting.objects.get(name='allow_scheduling_observations') + setting = Setting.objects.get(name='dynamic_scheduling_enabled') setting.value = True setting.save() - @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks._assign_resources") + @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks._assign_or_unassign_resources") def test_schedule_pipeline_subtask_filters_predecessor_output_dataproducts_for_input(self, assign_resources_mock): # setup: # create observation subtask and outputs and dataproducts @@ -341,12 +352,14 @@ class SAPTest(unittest.TestCase): def setUp(self) -> None: # make sure we're allowed to schedule - setting = Setting.objects.get(name='allow_scheduling_observations') + setting = Setting.objects.get(name='dynamic_scheduling_enabled') setting.value = True setting.save() def test_schedule_observation_subtask_creates_sap_with_correct_pointing(self): with tmss_test_env.create_tmss_client() as client: + task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url']) + task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(task_blueprint_data, '/task_blueprint/') subtask_template = client.get_subtask_template("observation control") spec = get_default_json_object_for_schema(subtask_template['schema']) spec['stations']['digital_pointings'][0]['subbands'] = [0] @@ -357,6 +370,7 @@ class SAPTest(unittest.TestCase): subtask_data = 
test_data_creator.Subtask(specifications_template_url=subtask_template['url'], specifications_doc=spec, cluster_url = cluster_url, + task_blueprint_url=task_blueprint['url'], start_time=datetime.utcnow() + timedelta(minutes=5), stop_time=datetime.utcnow() + timedelta(minutes=15)) subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/') @@ -364,15 +378,17 @@ class SAPTest(unittest.TestCase): test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/') - sap_count_before_scheduling = models.SAP.objects.count() + subtask_model = models.Subtask.objects.get(id=subtask_id) + self.assertEqual(0, subtask_model.output_dataproducts.values('sap').count()) + client.set_subtask_status(subtask_id, 'defined') subtask = client.schedule_subtask(subtask_id) - self.assertGreater(models.SAP.objects.count(), sap_count_before_scheduling) + self.assertEqual(1, subtask_model.output_dataproducts.values('sap').count()) self.assertEqual(models.SAP.objects.first().specifications_doc['pointing']['angle1'], pointing['angle1']) self.assertEqual(models.SAP.objects.first().specifications_doc['pointing']['angle2'], pointing['angle2']) - @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks._assign_resources") + @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks._assign_or_unassign_resources") def test_schedule_pipeline_subtask_copies_sap_from_input_to_output(self, assign_resources_mock): # setup: # create observation subtask and outputs and dataproducts diff --git a/SAS/TMSS/test/t_subtasks.py b/SAS/TMSS/test/t_subtasks.py index b9021a86f94d25f5fcccd620daf7705c07c8d88e..0cdb95de14d749d73d32ff03728e0daacb5ce79f 100755 --- a/SAS/TMSS/test/t_subtasks.py +++ b/SAS/TMSS/test/t_subtasks.py @@ -301,7 +301,7 @@ class SubtaskInputSelectionFilteringTest(unittest.TestCase): def setUp(self) -> None: # make sure we're allowed to schedule - setting = Setting.objects.get(name='allow_scheduling_observations') + setting = Setting.objects.get(name='dynamic_scheduling_enabled') setting.value = True setting.save() @@ -371,7 +371,7 @@ class SubtaskInputSelectionFilteringTest(unittest.TestCase): class SettingTest(unittest.TestCase): def test_schedule_observation_subtask_raises_when_flag_is_false(self): - setting = Setting.objects.get(name='allow_scheduling_observations') + setting = Setting.objects.get(name='dynamic_scheduling_enabled') setting.value = False setting.save() obs_st = create_subtask_object_for_testing('observation', 'defined') diff --git a/SAS/TMSS/test/t_tmssapp_specification_django_API.py b/SAS/TMSS/test/t_tmssapp_specification_django_API.py index c5c917319778ad16ef17e94331674b10af68309b..98bb7aad6b8dc43efef99e435ee2ebcb27cbb38b 100755 --- a/SAS/TMSS/test/t_tmssapp_specification_django_API.py +++ b/SAS/TMSS/test/t_tmssapp_specification_django_API.py @@ -446,6 +446,15 @@ class SchedulingUnitDraftTest(unittest.TestCase): with self.assertRaises(IntegrityError): models.SchedulingUnitDraft.objects.create(**test_data) + def test_SchedulingUnitDraft_gets_created_with_correct_default_ingest_permission_required(self): + + # setup + entry = models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data()) + #check the auto_ingest on project + self.assertEqual(False, entry.scheduling_set.project.auto_ingest) + #When auto_ingest=False (in project), the scheduling units should be created with ingest_permission_required = True + self.assertEqual(True, entry.ingest_permission_required) + class TaskDraftTest(unittest.TestCase): @@ -650,6 
+659,16 @@ class SchedulingUnitBlueprintTest(unittest.TestCase): # assert with self.assertRaises(IntegrityError): models.SchedulingUnitBlueprint.objects.create(**test_data) + + + def test_SchedulingUnitBlueprint_gets_created_with_correct_default_ingest_permission_required(self): + + # setup + entry = models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data()) + #check the auto_ingest on project + self.assertEqual(False, entry.draft.scheduling_set.project.auto_ingest) + #When auto_ingest=False (in project), the scheduling units should be created with ingest_permission_required = True + self.assertEqual(True, entry.ingest_permission_required) class TaskBlueprintTest(unittest.TestCase): diff --git a/SAS/TMSS/test/test_utils.py b/SAS/TMSS/test/test_utils.py index 7d559bb9800d4ad3112d49df59d3aa3094fec86a..1029deb3474ce830e83f3d8d0a26f07c9bf3620f 100644 --- a/SAS/TMSS/test/test_utils.py +++ b/SAS/TMSS/test/test_utils.py @@ -270,7 +270,9 @@ class TMSSTestEnvironment: def __init__(self, host: str='127.0.0.1', preferred_django_port: int=8000, public_host: str=None, exchange: str=os.environ.get("TMSS_EXCHANGE", DEFAULT_BUSNAME), broker: str=os.environ.get("TMSS_BROKER", DEFAULT_BROKER), populate_schemas:bool=False, populate_test_data:bool=False, - start_postgres_listener: bool=True): + start_ra_test_environment: bool=False, start_postgres_listener: bool=False, + start_subtask_scheduler: bool=False, start_dynamic_scheduler: bool=False, + start_pipeline_control: bool=False, enable_viewflow: bool=False): self._exchange = exchange self._broker = broker self._populate_schemas = populate_schemas @@ -284,9 +286,25 @@ class TMSSTestEnvironment: public_host=public_host) self.client_credentials = TemporaryCredentials(user=self.ldap_server.dbcreds.user, password=self.ldap_server.dbcreds.password) + + self._start_ra_test_environment = start_ra_test_environment + self.ra_test_environment = None + self._start_postgres_listener = start_postgres_listener self.postgres_listener = None + self._start_subtask_scheduler = start_subtask_scheduler + self.subtask_scheduler = None + + self._start_dynamic_scheduler = start_dynamic_scheduler + self.dynamic_scheduler = None + + self._start_pipeline_control = start_pipeline_control + self.pipeline_control = None + + if enable_viewflow: + os.environ['TMSS_ENABLE_VIEWFLOW'] = 'True' + # Check for correct Django version, should be at least 3.0 if django.VERSION[0] < 3: print("\nWARNING: YOU ARE USING DJANGO VERSION '%s', WHICH WILL NOT SUPPORT ALL FEATURES IN TMSS!\n" % @@ -318,12 +336,36 @@ class TMSSTestEnvironment: user.is_superuser = True user.save() + if self._start_ra_test_environment: + self.ra_test_environment = RATestEnvironment(exchange=self._exchange, broker=self._broker) + self.ra_test_environment.start() + if self._start_postgres_listener: # start the TMSSPGListener, so the changes in the database are posted as EventMessages on the bus from lofar.sas.tmss.services.tmss_postgres_listener import TMSSPGListener self.postgres_listener = TMSSPGListener(exchange=self._exchange, broker=self._broker, dbcreds=self.database.dbcreds) self.postgres_listener.start() + if self._start_subtask_scheduler: + from lofar.sas.tmss.services.scheduling.subtask_scheduling import create_subtask_scheduling_service + self.subtask_scheduler = create_subtask_scheduling_service(exchange=self._exchange, broker=self._broker) + self.subtask_scheduler.start_listening() + + if self._start_dynamic_scheduler: + from lofar.sas.tmss.services.scheduling.dynamic_scheduling import 
create_dynamic_scheduling_service, models + # by default, dynamic scheduling is disabled in TMSS. + # In this test environment, we do want to have it enabled. Why else would we want to start this service? + setting = models.Setting.objects.get(name=models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value) + setting.value = True + setting.save() + self.dynamic_scheduler = create_dynamic_scheduling_service(exchange=self._exchange, broker=self._broker) + self.dynamic_scheduler.start_listening() + + if self._start_pipeline_control: + from lofar.mac.PipelineControl import PipelineControlTMSS + self.pipeline_control = PipelineControlTMSS(exchange=self._exchange, broker=self._broker) + self.pipeline_control.start_listening() + if self._populate_schemas or self._populate_test_data: self.populate_schemas() @@ -336,6 +378,22 @@ self.postgres_listener.stop() self.postgres_listener = None + if self.subtask_scheduler is not None: + self.subtask_scheduler.stop_listening() + self.subtask_scheduler = None + + if self.dynamic_scheduler is not None: + self.dynamic_scheduler.stop_listening() + self.dynamic_scheduler = None + + if self.pipeline_control is not None: + self.pipeline_control.stop_listening() + self.pipeline_control = None + + if self.ra_test_environment is not None: + self.ra_test_environment.stop() + self.ra_test_environment = None + self.django_server.stop() self.ldap_server.stop() self.database.destroy() @@ -406,9 +464,15 @@ def main_test_environment(): group.add_option("-P", "--public_host", dest="public_host", type="string", default='127.0.0.1', help="expose the TMSS Django REST API via this host. [default=%default]") - group = OptionGroup(parser, 'Example/Test data') + group = OptionGroup(parser, 'Example/Test data, schemas and services', + description='Options to enable/create example/test data, schemas and services. ' \ + 'Without these options you get a lean and mean TMSS test environment, but then you need to run the background services and create test data yourself. 
' \ + 'For standalone commissioning/testing/playing around you need all these options.') parser.add_option_group(group) group.add_option('-d', '--data', dest='data', action='store_true', help='populate the test-database with test/example data') + group.add_option('-s', '--schemas', dest='schemas', action='store_true', help='populate the test-database with the TMSS JSON schemas') + group.add_option('-S', '--services', dest='services', action='store_true', help='start the TMSS background services.') + group.add_option('-v', '--viewflow', dest='viewflow', action='store_true', help='Enable the viewflow app for workflows on top of TMSS') group = OptionGroup(parser, 'Messaging options') parser.add_option_group(group) @@ -419,10 +483,12 @@ def main_test_environment(): logging.basicConfig(format = '%(asctime)s %(levelname)s %(message)s', level = logging.INFO) - with RATestEnvironment(exchange=options.exchange, broker=options.broker): - with TMSSTestEnvironment(host=options.host, preferred_django_port=options.port, public_host=options.public_host, - exchange=options.exchange, broker=options.broker, - populate_schemas=True, populate_test_data=options.data) as instance: + with TMSSTestEnvironment(host=options.host, preferred_django_port=options.port, public_host=options.public_host, + exchange=options.exchange, broker=options.broker, + populate_schemas=options.schemas, populate_test_data=options.data, + start_ra_test_environment=options.services, start_postgres_listener=options.services, + start_subtask_scheduler=options.services, start_dynamic_scheduler=options.services, + start_pipeline_control=options.services, enable_viewflow=options.viewflow) as tmss_test_env: # print some nice info for the user to use the test servers... # use print instead of log for clean lines. 
@@ -433,19 +499,20 @@ def main_test_environment(): print("*****************************************************") print("Test-TMSS database, LDAP and Django up and running...") print("*****************************************************") - print("DB Credentials ID: %s" % (instance.database.dbcreds_id, )) - print("LDAP Credentials ID: %s" % (instance.django_server.ldap_dbcreds_id, )) - print("TMSS Client Credentials ID: %s" % (instance.client_credentials.dbcreds_id, )) - print("Django URL: %s" % (instance.django_server.url)) + print("DB Credentials ID: %s" % (tmss_test_env.database.dbcreds_id, )) + print("LDAP Credentials ID: %s" % (tmss_test_env.django_server.ldap_dbcreds_id, )) + print("TMSS Client Credentials ID: %s" % (tmss_test_env.client_credentials.dbcreds_id, )) + print("Django URL: %s" % (tmss_test_env.django_server.url)) print() print("Example cmdlines to run tmss or tmss_manage_django:") - print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss" % (instance.database.dbcreds_id, instance.django_server.ldap_dbcreds_id)) - print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss_manage_django" % (instance.database.dbcreds_id, instance.django_server.ldap_dbcreds_id)) + print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss" % (tmss_test_env.database.dbcreds_id, tmss_test_env.django_server.ldap_dbcreds_id)) + print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss_manage_django" % (tmss_test_env.database.dbcreds_id, tmss_test_env.django_server.ldap_dbcreds_id)) print() print("Example cmdline to run tmss client call:") - print("TMSS_CLIENT_DBCREDENTIALS=%s tmss_set_subtask_state <id> <state>" % (instance.client_credentials.dbcreds_id, )) + print("TMSS_CLIENT_DBCREDENTIALS=%s tmss_set_subtask_state <id> <state>" % (tmss_test_env.client_credentials.dbcreds_id, )) print() print("Press Ctrl-C to exit (and remove the test database and django server automatically)") + waitForInterrupt() diff --git a/SAS/TMSS/test/testdata/subtasks.json b/SAS/TMSS/test/testdata/subtasks.json index 70f8b97d95e9c2c830bcc42092bcf0144a506f9e..2596021102cda14054c339f651d9b7c0c0eb7a55 100644 --- a/SAS/TMSS/test/testdata/subtasks.json +++ b/SAS/TMSS/test/testdata/subtasks.json @@ -31,11 +31,9 @@ "stop_time": "2020-01-02T12:00:00", "specifications_doc": 1, "do_cancel": null, - "priority": 1, "state": "defined", "task_blueprint": null, "specifications_template": 1, - "schedule_method": "manual", "cluster": 2, "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ], "created_at": "2020-02-24T13:19:57", @@ -50,11 +48,9 @@ "stop_time": "2020-01-03T12:00:00", "specifications_doc": 1, "do_cancel": null, - "priority": 1, "state": "defined", "task_blueprint": null, "specifications_template": 1, - "schedule_method": "manual", "cluster": 3, "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ], "created_at": "2020-02-24T13:19:57", @@ -69,11 +65,9 @@ "stop_time": "2020-01-04T12:00:00", "specifications_doc": 1, "do_cancel": null, - "priority": 1, "state": "defined", "task_blueprint": null, "specifications_template": 1, - "schedule_method": "manual", "cluster": 1, "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ], "created_at": "2020-02-24T13:19:57", diff --git a/SAS/TMSS/test/tmss_test_data_django_models.py b/SAS/TMSS/test/tmss_test_data_django_models.py index f58583962a1887ddc6e3e6e136351ede386ba255..7f5f266be4ac768d006f7860abd9d8f85351c723 100644 --- a/SAS/TMSS/test/tmss_test_data_django_models.py +++ b/SAS/TMSS/test/tmss_test_data_django_models.py @@ -118,12 +118,16 @@ def Cycle_test_data() 
-> dict: "start": datetime.utcnow().isoformat(), "stop": datetime.utcnow().isoformat()} -def Project_test_data(archive_subdirectory="my_project/") -> dict: +def Project_test_data(name: str=None, priority_rank: int = 1, archive_subdirectory="my_project/") -> dict: + if name is None: + name = 'my_project_' + str(uuid.uuid4()) + return { #"cycles": [models.Cycle.objects.create(**Cycle_test_data())], # ManyToMany, use set() - "name": 'my_project_' + str(uuid.uuid4()), + "name": name, "description": 'my description ' + str(uuid.uuid4()), "tags": [], - "priority_rank": 1.0, + "auto_ingest": False, + "priority_rank": priority_rank, "trigger_priority": 1000, "can_trigger": False, "private_data": True, @@ -234,20 +238,26 @@ def SchedulingUnitBlueprint_test_data(name='my_scheduling_unit_blueprint', requi "do_cancel": False, "draft": models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data()) } -def TaskBlueprint_test_data(name='my_task_blueprint', task_draft: models.TaskDraft = None, scheduling_unit_blueprint: models.SchedulingUnitBlueprint = None) -> dict: +def TaskBlueprint_test_data(name='my_task_blueprint', task_draft: models.TaskDraft = None, scheduling_unit_blueprint: models.SchedulingUnitBlueprint = None, specifications_template: models.TaskTemplate=None, specifications_doc: dict=None) -> dict: if task_draft is None: task_draft = models.TaskDraft.objects.create(**TaskDraft_test_data()) + if specifications_template is None: + specifications_template = task_draft.specifications_template + + if specifications_doc is None: + specifications_doc = get_default_json_object_for_schema(specifications_template.schema) + if scheduling_unit_blueprint is None: scheduling_unit_blueprint = models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data()) return {"name": name, "description": "", "tags": [], - "specifications_doc": task_draft.specifications_doc, + "specifications_doc": specifications_doc, "do_cancel": False, "draft": task_draft, - "specifications_template": task_draft.specifications_template, + "specifications_template": specifications_template, "scheduling_unit_blueprint": scheduling_unit_blueprint} def TaskRelationBlueprint_test_data(producer: models.TaskBlueprint = None, consumer: models.TaskBlueprint = None) -> dict: @@ -375,8 +385,6 @@ def Subtask_test_data(task_blueprint: models.TaskBlueprint=None, subtask_templat "specifications_template": subtask_template, "tags": ["TMSS", "TESTING"], "do_cancel": datetime.utcnow(), - "priority": 1, - "schedule_method": models.ScheduleMethod.objects.get(value='manual'), "cluster": cluster, "raw_feedback": raw_feedback} diff --git a/SAS/TMSS/test/tmss_test_data_rest.py b/SAS/TMSS/test/tmss_test_data_rest.py index 82f35cf01ae41d98230365c02cc85fbdc0ec8908..1a16d480f10c74cd783b3ea88d39fd363b1c2cfc 100644 --- a/SAS/TMSS/test/tmss_test_data_rest.py +++ b/SAS/TMSS/test/tmss_test_data_rest.py @@ -298,8 +298,8 @@ class TMSSRESTTestDataCreator(): 'task_blueprints': [], 'produced_by': [], 'consumed_by': [], - 'first_to_connect': [], - 'second_to_connect': []} + 'first_scheduling_relation': [], + 'second_scheduling_relation': []} def TaskRelationDraft(self, producer_url=None, consumer_url=None, template_url=None, input_role_url=None, output_role_url=None, selection_doc=None): @@ -380,8 +380,8 @@ class TMSSRESTTestDataCreator(): "subtasks": [], "produced_by": [], "consumed_by": [], - 'first_to_connect': [], - 'second_to_connect': []} + 'first_scheduling_relation': [], + 'second_scheduling_relation': []} def 
TaskRelationBlueprint(self, draft_url=None, template_url=None, input_role_url=None, output_role_url=None, consumer_url=None, producer_url=None, selection_doc=None): if draft_url is None: @@ -517,8 +517,6 @@ class TMSSRESTTestDataCreator(): "specifications_template": specifications_template_url, "tags": ["TMSS", "TESTING"], "do_cancel": datetime.utcnow().isoformat(), - "priority": 1, - "schedule_method": self.django_api_url + '/schedule_method/manual', "cluster": cluster_url, "raw_feedback": raw_feedack} diff --git a/SubSystems/RAServices/CMakeLists.txt b/SubSystems/RAServices/CMakeLists.txt index fba2f3ff4837f061ce7251daaeae624ee7bddee6..43896bd6785b73d1aa7f65bb64aa004ad5f6abb8 100644 --- a/SubSystems/RAServices/CMakeLists.txt +++ b/SubSystems/RAServices/CMakeLists.txt @@ -27,7 +27,7 @@ lofar_package(RAServices ltastorageoverview QA_Service MessageLogger - TMSSSubtaskSchedulingService) + TMSSSchedulingService) # supervisord config files lofar_add_sysconf_files(RAServices.ini