diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000000000000000000000000000000000000..cad7657dfa543e02eca53f1ecc7545c92bc0a550 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "cmake.configureOnOpen": false +} \ No newline at end of file diff --git a/CMake/LofarPackageList.cmake b/CMake/LofarPackageList.cmake index db28a087be704c2b09c85cd66fea45146d617029..d4f6966e12814caac01dda87311fdbea2535433f 100644 --- a/CMake/LofarPackageList.cmake +++ b/CMake/LofarPackageList.cmake @@ -1,7 +1,7 @@ # - Create for each LOFAR package a variable containing the absolute path to # its source directory. # -# Generated by gen_LofarPackageList_cmake.sh at do 29 okt 2020 7:42:34 CET +# Generated by gen_LofarPackageList_cmake.sh at do 28 mei 2020 11:22:44 CEST # # ---- DO NOT EDIT ---- # @@ -207,7 +207,7 @@ if(NOT DEFINED LOFAR_PACKAGE_LIST_INCLUDED) set(TaskPrescheduler_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/ResourceAssignment/TaskPrescheduler) set(RACommon_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/ResourceAssignment/Common) set(TMSSClient_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/client) - set(TMSSSubtaskSchedulingService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/subtask_scheduling) + set(TMSSSchedulingService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/scheduling) set(TMSSFeedbackHandlingService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/feedback_handling) set(TMSSPostgresListenerService_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TMSS/services/tmss_postgres_listener) set(TriggerEmailServiceCommon_SOURCE_DIR ${CMAKE_SOURCE_DIR}/SAS/TriggerEmailService/Common) diff --git a/CMake/variants/variants.lcs157 b/CMake/variants/variants.lcs157 index 1e70de71e0b9d86b4c0e0f32fbdba10342bd8c1d..1c7d26eaa4309cb4a3444e443326ae5b4c03aca1 100644 --- a/CMake/variants/variants.lcs157 +++ b/CMake/variants/variants.lcs157 @@ -2,6 +2,7 @@ # AS: put under comment as LOFAR general rule is to use shared libs now. #option(BUILD_SHARED_LIBS "Build shared libraries" OFF) +set(PYTHON_EXECUTABLE "/usr/bin/python3.6" CACHE FILEPATH "") set(WINCC_ROOT_DIR /opt/WinCC_OA/3.16) set(CASACORE_ROOT_DIR "/opt/casacore") set(CASAREST_ROOT_DIR "/opt/casarest") diff --git a/Docker/lofar-base/Dockerfile.tmpl b/Docker/lofar-base/Dockerfile.tmpl index 9ba56ab2373f0094e6ee0194f91790e608f64403..b32d6d4e736eb922c7bf65b62b0b007ef992bed5 100644 --- a/Docker/lofar-base/Dockerfile.tmpl +++ b/Docker/lofar-base/Dockerfile.tmpl @@ -202,6 +202,16 @@ RUN aptitude install -y libqpid-proton8 libqpid-proton-cpp8 python3-qpid-proton # ******************* RUN aptitude install -y python3-kombu +# ******************* +# Unconsumed build arguments +# ******************* + +# Apply a fingerprint to force a rebuild if the source code changes. Supply a unique ID here to force a rebuild. +ARG LOFAR_FINGERPRINT=whatever + +# we do not use this, but the build system is configured to supply it +ARG LOFAR_BASE_IMAGE_VERSION=latest + # # entry # diff --git a/Docker/lofar-ci/Dockerfile_ci_mac b/Docker/lofar-ci/Dockerfile_ci_mac index b23bda761611ba93ddb5fa6d9bba1cc05cc40078..ceb9978ef6d2516e6f1a16d8d6ade53a08b381df 100644 --- a/Docker/lofar-ci/Dockerfile_ci_mac +++ b/Docker/lofar-ci/Dockerfile_ci_mac @@ -11,7 +11,7 @@ ARG BASE_VERSION=latest FROM ci_base:$BASE_VERSION RUN echo "Installing packages for MAC..." 
&& \ - yum -y install readline-devel boost-python36-devel hdf5-devel blas-devel lapack-devel cfitsio-devel wcslib-devel autogen postgresql-devel cmake3 libpqxx-devel qpid-cpp-server qpid-cpp-client-devel qpid-tools unittest-cpp-devel jsoncpp-devel jsoncpp libcurl-devel libcurl && \ + yum -y install readline-devel boost-python36-devel hdf5-devel blas-devel lapack-devel cfitsio-devel wcslib-devel autogen postgresql-devel cmake3 libpqxx-devel qpid-cpp-server qpid-cpp-client-devel unittest-cpp-devel jsoncpp-devel jsoncpp libcurl-devel libcurl && \ pip3 install psycopg2 testing.postgresql lxml mock numpy kombu requests python-dateutil fabric RUN echo "Installing WinCC3.14 build and Demo App from Nexus repo..." && \ diff --git a/Docker/lofar-outputproc/Dockerfile.tmpl b/Docker/lofar-outputproc/Dockerfile.tmpl index 6c5ffa7b4a9707b9b2478e47615dacc58efc3b99..e0969b5d78a69578232c18fa9861bcb60ef4f9e5 100644 --- a/Docker/lofar-outputproc/Dockerfile.tmpl +++ b/Docker/lofar-outputproc/Dockerfile.tmpl @@ -5,6 +5,10 @@ FROM lofar-base:${LOFAR_TAG} RUN apt-get update && apt-get install -y git python python3 g++ make +# Consume superfluous build variables +ARG DOCKER_IMAGE_BUILD_DATE=now +ENV DOCKER_IMAGE_BUILD_DATE=${DOCKER_IMAGE_BUILD_DATE} + # # ******************* # Blitz @@ -24,13 +28,15 @@ RUN apt-get update && apt-get install -y git python && \ # ******************* # -ENV DAL_VERSION=v3.3.1 +ENV DAL_VERSION=v3.3.2 # Run-time dependencies RUN aptitude install -y libhdf5-${LIBHDF5_VERSION} python3 && \ aptitude clean && \ aptitude autoclean +ARG J=6 + RUN export BUILD_PACKAGES="git cmake g++ swig3.0 python3-setuptools python3-dev libhdf5-dev" && \ aptitude install -y ${BUILD_PACKAGES} && \ mkdir -p ${INSTALLDIR}/DAL/build && \ @@ -54,8 +60,11 @@ RUN export BUILD_PACKAGES="git cmake g++ swig3.0 python3-setuptools python3-dev RUN apt-get update && apt-get install -y binutils liblog4cplus-1.1-9 libxml2 libboost-thread${BOOST_VERSION}.1 libboost-filesystem${BOOST_VERSION}.1 libboost-date-time${BOOST_VERSION}.1 libpng16-16 libsigc++-2.0-dev libxml++2.6-2v5 libboost-regex${BOOST_VERSION}.1 libreadline${READLINE_VERSION} # Tell image build information -ENV LOFAR_BRANCH=${LOFAR_VERSION} \ - LOFAR_BUILDVARIANT=gnucxx11_opt +ARG LOFAR_VERSION=master +ENV LOFAR_BRANCH=${LOFAR_VERSION} + +ARG LOFAR_BUILDVARIANT=gnucxx11_opt +ENV LOFAR_BUILDVARIANT=${LOFAR_BUILDVARIANT} # Install RUN apt-get update && apt-get install -y git cmake g++ gfortran bison flex autogen liblog4cplus-dev libhdf5-dev libboost-dev boost-python${BOOST_VERSION}-dev libxml2-dev pkg-config libpng-dev libfftw3-dev libunittest++-dev libxml++2.6-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev libboost-regex${BOOST_VERSION}-dev binutils-dev libopenblas-dev libcfitsio-dev wcslib-dev libcap2-bin libreadline-dev && \ diff --git a/LCS/PyCommon/datetimeutils.py b/LCS/PyCommon/datetimeutils.py index 93841f255b159cd4d967e208f4eb8b93a6ff2967..fb1df8788220bf9af22515b5903c63c8c6a1147f 100644 --- a/LCS/PyCommon/datetimeutils.py +++ b/LCS/PyCommon/datetimeutils.py @@ -140,11 +140,33 @@ def from_milliseconds_since_unix_epoch(nr_of_milliseconds_since_epoch): ''' return from_seconds_since_unix_epoch(nr_of_milliseconds_since_epoch/1000.0) -def round_to_millisecond_precision(timestamp): +def round_to_millisecond_precision(timestamp: datetime) -> datetime: """ returns the given timestamp rounded to the nearest millisecond :param timestamp: datetime a python datetime timestamp :return: the given 
timestamp rounded to the nearest millisecond """ diff_to_rounded_millisecond = timestamp.microsecond - 1000*round(timestamp.microsecond/1000) - return timestamp - timedelta(microseconds=diff_to_rounded_millisecond) \ No newline at end of file + return timestamp - timedelta(microseconds=diff_to_rounded_millisecond) + +def round_to_second_precision(timestamp: datetime) -> datetime: + """ + returns the given timestamp rounded to the nearest second + :param timestamp: datetime a python datetime timestamp + :return: the given timestamp rounded to the nearest second + """ + if timestamp.microsecond < 500000: + return timestamp + timedelta(microseconds=-timestamp.microsecond) + else: + return timestamp + timedelta(microseconds=-timestamp.microsecond, seconds=1) + +def round_to_minute_precision(timestamp: datetime) -> datetime: + """ + returns the given timestamp rounded to the nearest minute + :param timestamp: datetime a python datetime timestamp + :return: the given timestamp rounded to the nearest minute + """ + if timestamp.second < 30: + return timestamp + timedelta(seconds=-timestamp.second, microseconds=-timestamp.microsecond) + else: + return timestamp + timedelta(minutes=1, seconds=-timestamp.second, microseconds=-timestamp.microsecond) diff --git a/LCS/PyCommon/json_utils.py b/LCS/PyCommon/json_utils.py index 956fd3b0a29c34bc25bc3e204ff877943e266ca1..232ba7841c4d588378ab01fb192d8d25b59577bc 100644 --- a/LCS/PyCommon/json_utils.py +++ b/LCS/PyCommon/json_utils.py @@ -72,17 +72,38 @@ def _extend_with_required(validator_class): _DefaultValidatingDraft6Validator = _extend_with_default(jsonschema.Draft6Validator) _DefaultValidatingDraft6Validator = _extend_with_required(_DefaultValidatingDraft6Validator) +# storage for validators, for fast caching of ref-resolved urls. +_schema_validators = {} +_schema_defaults_adding_validators = {} + +def get_validator_for_schema(schema: dict, add_defaults: bool=False): + '''get a json validator for the given schema. + If the schema is already known in the cache by its $id, then the validator from the cache is returned. + This saves many repeated lookups and ref resolutions. + the 'add_defaults' parameter indicates if we want the validator to add defaults while validating or not.''' + if isinstance(schema, str): + schema = json.loads(schema) + + validators_cache = _schema_defaults_adding_validators if add_defaults else _schema_validators + + if '$id' in schema: + if schema['$id'] not in validators_cache: + validators_cache[schema['$id']] = _DefaultValidatingDraft6Validator(schema) if add_defaults else jsonschema.Draft6Validator(schema=schema) + validator = validators_cache[schema['$id']] + else: + validator = _DefaultValidatingDraft6Validator(schema) if add_defaults else jsonschema.Draft6Validator(schema=schema) + + validator.schema = schema + return validator def get_default_json_object_for_schema(schema: str) -> dict: '''return a valid json object for the given schema with all properties with their default values''' return add_defaults_to_json_object_for_schema({}, schema) - def add_defaults_to_json_object_for_schema(json_object: dict, schema: str) -> dict: '''return a copy of the json object with defaults filled in according to the schema for all the missing properties''' copy_of_json_object = deepcopy(json_object) - #TODO: investigate if we want to use a 'common'/singleton validator and use (remote) schema caching for faster validation - _DefaultValidatingDraft6Validator(schema).validate(copy_of_json_object) + get_validator_for_schema(schema, add_defaults=True).validate(copy_of_json_object) return copy_of_json_object def replace_host_in_urls(schema, new_base_url: str, keys=['$id', '$ref', '$schema']): @@ -202,7 +223,7 @@ def validate_json_object_with_schema(json_object, schema): """ Validate the given json_object with schema """ - jsonschema.Draft6Validator(schema=schema).validate(json_object) + get_validator_for_schema(schema, add_defaults=False).validate(json_object)
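
A minimal usage sketch of the validator cache introduced above (the schema, its $id and the assertion are illustrative, not part of the change): validators for schemas that carry an '$id' are constructed once and then reused, so repeated validations skip validator construction and $ref resolution.

from lofar.common.json_utils import get_validator_for_schema

schema = {"$id": "http://example.org/schemas/observation.json",  # illustrative $id
          "type": "object",
          "properties": {"duration": {"type": "number", "default": 600}}}

validator_a = get_validator_for_schema(schema, add_defaults=False)
validator_b = get_validator_for_schema(schema, add_defaults=False)
assert validator_a is validator_b  # second call is a cache hit, keyed on the schema's $id

validator_a.validate({"duration": 300})  # raises jsonschema.ValidationError for invalid objects

With add_defaults=True the cached validator is the default-adding variant, which is what add_defaults_to_json_object_for_schema() below relies on.
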
diff --git a/LCS/PyCommon/postgres.py b/LCS/PyCommon/postgres.py index 9c6d36e6e4369f722c807b198ae07b34b0924d06..b04e99e4cadcea254e8fb4925edfc1aea508798f 100644 --- a/LCS/PyCommon/postgres.py +++ b/LCS/PyCommon/postgres.py @@ -40,28 +40,29 @@ from lofar.common.dbcredentials import DBCredentials logger = logging.getLogger(__name__) -def makePostgresNotificationQueries(schema, table, action, column_name='id'): +def makePostgresNotificationQueries(schema, table, action, column_name=None, quote_column_value:bool=True, id_column_name='id', quote_id_value:bool=False): action = action.upper() if action not in ('INSERT', 'UPDATE', 'DELETE'): raise ValueError('''trigger_type '%s' not in ('INSERT', 'UPDATE', 'DELETE')''' % action) change_name = '''{table}_{action}'''.format(table=table, action=action) - if column_name != 'id': + if column_name is not None and column_name != id_column_name: change_name += '_column_' + column_name function_name = '''NOTIFY_{change_name}'''.format(change_name=change_name) - if action == 'UPDATE': - if column_name == 'id': - select_payload = '''SELECT '{"id": ' || CAST(NEW.id AS text) || '}' INTO payload;''' - else: - select_payload = '''SELECT '{"id": ' || CAST(NEW.id AS text) || ', "''' + column_name + '''": "' || CAST(NEW.''' + column_name + ''' AS text) || '"}' INTO payload;''' - elif action == 'INSERT': - select_payload = '''SELECT '{"id": ' || CAST(NEW.id AS text) || '}' INTO payload;''' - elif action == 'DELETE': - select_payload = '''SELECT '{"id": ' || CAST(OLD.id AS text) || '}' INTO payload;''' + # build query string selecting the id:value (and col:col_value) into a JSON-formatted object string + select_payload = '''SELECT '{po}"{id_column_name}": 
{id_value_quote}' || CAST({new_or_old}.{id_column_name} AS text) || '{id_value_quote}{column_key_value}{pc}' INTO payload;'''.format( + po="{", + id_column_name=id_column_name, + id_value_quote='"' if quote_id_value else '', + new_or_old='OLD' if action=='DELETE' else 'NEW', + column_key_value=''', "{column_name}": {column_value_quote}' || CAST(NEW.{column_name} AS text) || '{column_value_quote}'''.format( + column_name=column_name, + column_value_quote='"' if quote_column_value else '') if column_name else '', + pc = "}") if action == 'UPDATE': - begin_update_check = 'IF ROW(NEW.{what}) IS DISTINCT FROM ROW(OLD.{what}) THEN'.format(what='*' if column_name == 'id' else column_name) + begin_update_check = 'IF ROW(NEW.{what}) IS DISTINCT FROM ROW(OLD.{what}) THEN'.format(what='*' if column_name is None or column_name == id_column_name else column_name) end_update_check = 'END IF;' else: begin_update_check = '' @@ -83,9 +84,8 @@ def makePostgresNotificationQueries(schema, table, action, column_name='id'): function_name=function_name, table=table, action=action, - old_or_new=('OLD' if action == 'DELETE' else 'NEW') + '.' + column_name, value='OLD' if action == 'DELETE' else 'NEW', - change_name=change_name.lower(), + change_name=change_name[:63].lower(), # postgres limits channel names to 63 chars begin_update_check=begin_update_check, select_payload=select_payload, end_update_check=end_update_check) @@ -441,7 +441,7 @@ class PostgresListener(PostgresDatabaseConnection): self.connect() - logger.info("Started listening to %s" % ', '.join([str(x) for x in list(self.__callbacks.keys())])) + logger.info("Started listening to %s on database %s", ', '.join([str(x) for x in list(self.__callbacks.keys())]), self.dbcreds.stringWithHiddenPassword()) def eventLoop(): while self.isListening(): @@ -477,7 +477,7 @@ class PostgresListener(PostgresDatabaseConnection): self.__thread.join() self.__thread = None - logger.info("Stopped listening") + logger.info("Stopped listening for notifications on database %s", self.dbcreds.stringWithHiddenPassword()) self.stopWaiting() self.disconnect() diff --git a/LCS/PyCommon/test/postgres.py b/LCS/PyCommon/test/postgres.py index 51e3be001e05424dea7358c5aa4f239e02140faf..104a43a7508372829b25ddce531534b2cf3fce90 100755 --- a/LCS/PyCommon/test/postgres.py +++ b/LCS/PyCommon/test/postgres.py @@ -70,7 +70,7 @@ class PostgresTestDatabaseInstance(): def create(self): '''instantiate the isolated postgres server''' - logger.info('creating test-database instance...') + logger.info('%s creating test-database instance...', self.__class__.__name__) with self._named_lock: start_time = datetime.utcnow() @@ -90,9 +90,9 @@ class PostgresTestDatabaseInstance(): # make the user known in the new test database self._create_superuser(dsn) - logger.info('Created test-database instance. It is available at: %s', self.dbcreds.stringWithHiddenPassword()) + logger.info('%s created test-database instance. 
It is available at: %s', self.__class__.__name__, self.dbcreds.stringWithHiddenPassword()) - logger.info('Applying test-database schema...') + logger.info('%s applying test-database schema...', self.__class__.__name__) self.apply_database_schema() return except Exception as e: @@ -117,9 +117,9 @@ class PostgresTestDatabaseInstance(): '''destroy the running postgres server''' try: if self._postgresql: - logger.info('removing test-database instance at %s', self.dbcreds.stringWithHiddenPassword()) + logger.info('%s removing test-database instance at %s', self.__class__.__name__, self.dbcreds.stringWithHiddenPassword()) self._postgresql.stop() - logger.info('test-database instance removed') + logger.info('%s test-database instance removed', self.__class__.__name__) except Exception as e: logger.info('error while removing test-database instance at %s: %s', self.dbcreds.stringWithHiddenPassword(), e) diff --git a/LCU/StationTest/rspctlprobe.py b/LCU/StationTest/rspctlprobe.py old mode 100755 new mode 100644 index 1d254e6ad388452474771e25d72e34e82f82573f..96dab5d98679c2736d0312ea9e344db579fdf07f --- a/LCU/StationTest/rspctlprobe.py +++ b/LCU/StationTest/rspctlprobe.py @@ -19,6 +19,31 @@ from functools import reduce name = __name__ if __name__ != '__main__' else 'rspctlprobe' logger = logging.getLogger(name) +_NUM_HBA_ELEMENTS = 16 + +# Optimum element calculation done by M.Brentjes (Dec 2015) +_OptimumElements_Int = [0, 5, 3, 1, 8, 3, 12, 15, 10, 13, 11, 5, 12, 12, 5, 2, 10, 8, 0, 3, 5, 1, 4, 0, 11, 6, 2, 4, 9, + 14, 15, 3, 7, 5, 13, 15, 5, 6, 5, 12, 15, 7, 1, 1, 14, 9, 4, 9, 3, 9, 3, 13, 7, 14, 7, 14, 2, 8, + 8, 0, 1, 4, 2, 2, 12, 15, 5, 7, 6, 10, 12, 3, 3, 12, 7, 4, 6, 0, 5, 9, 1, 10, 10, 11, 5, 11, 7, + 9, 7, 6, 4, 4, 15, 4, 1, 15] +_OptimumElements_Core = [0, 10, 4, 3, 14, 0, 5, 5, 3, 13, 10, 3, 12, 2, 7, 15, 6, 14, 7, 5, 7, 9, 0, 15, 0, 10, 4, 3, + 14, 0, 5, 5, 3, 13, 10, 3, 12, 2, 7, 15, 6, 14, 7, 5, 7, 9, 0, 15] +_OptimumElements_Remote = [0, 13, 12, 4, 11, 11, 7, 8, 2, 7, 11, 2, 10, 2, 6, 3, 8, 3, 1, 7, 1, 15, 13, 1, 11, 1, 12, 7, + 10, 15, 8, 2, 12, 13, 9, 13, 4, 5, 5, 12, 5, 5, 9, 11, 15, 12, 2, 15] + +_NUM_TILES = {'core': 48, + 'remote': 48, + 'international': 96} +_OptimumElements = {'core': _OptimumElements_Core, + 'remote': _OptimumElements_Remote, + 'international': _OptimumElements_Int} +_SLEEP_TIME_SINGLE_ELEMENT_SELECTION = 2. 
# in units of s +STATION_TYPE = 'Unknown' +_HBA_MODES = (5, 6, 7) +_ELEMENT_OFF_CODE = '2' +_ELEMENT_ON_ZERO_DELAY = '128' + + # --------------------------------NICE PRINTOUT def table_maxlength_per_column(column): """ @@ -28,7 +53,8 @@ def table_maxlength_per_column(column): """ return reduce(max, list(map(len, column))) -def compute_table_width(data, margin = 1): + +def compute_table_width(data, margin=1): """ Compute the column width in characters :param data: table made of a list of columns @@ -39,6 +65,7 @@ def compute_table_width(data, margin = 1): """ return [x + 2 * margin for x in list(map(table_maxlength_per_column, data))] + def table_fix_string_length(string, length): """ Reformat each string to have the same character width @@ -48,7 +75,8 @@ def table_fix_string_length(string, length): :type length: str :return: a formatted string with the request character size """ - return '{:^{width}}'.format(string, width = length) + return '{:^{width}}'.format(string, width=length) + def table_format_column(column, length): """ @@ -60,6 +88,7 @@ def table_format_column(column, length): """ return [table_fix_string_length(x, length) for x in column] + def table_transpose(table): """ Transpose a list of rows in a list of columns and viceversa @@ -69,7 +98,8 @@ def table_transpose(table): """ return list(zip(*table)) -def table_format(table, separator = "|", margin_size = 1): + +def table_format(table, separator="|", margin_size=1): """ Format a table of values :param table: table of values @@ -84,6 +114,7 @@ def table_format(table, separator = "|", margin_size = 1): # transpose the list of columns in list of rows and concatenate the values to obtain rows using the separator return [separator.join(row) for row in table_transpose(formatted_columns)] + def table_print_out_table(write_function, table): """ Calls the write function for each row in the new formatted table @@ -97,6 +128,7 @@ def table_print_out_table(write_function, table): except Exception as e: logger.error("Error formatting table: %s", e) + # ---------------------------------UTILITIES def issue_rspctl_command(cmd): """ @@ -109,7 +141,8 @@ def issue_rspctl_command(cmd): cmd = ["rspctl"] + cmd try: - proc = subprocess.Popen(cmd, stdout = subprocess.PIPE, stderr = subprocess.PIPE) + logging.debug('executing command: %s', cmd) + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = proc.communicate() if proc.returncode == 0: @@ -122,13 +155,15 @@ def issue_rspctl_command(cmd): except OSError as e: raise Exception("Error executing " + " ".join(cmd) + ":" + e.strerror) + def list_mode(l): """ Return the most frequent element in the list :param l: input list :return: the most frequent element """ - return max(set(l), key = l.count) + return max(set(l), key=l.count) + # ----------------------------------COMMANDS # -------Clock @@ -152,6 +187,7 @@ def parse_clock_output(out, err): "STDOUT: %s\n" % out + "STDERR: %s\n" % err) + def query_clock(): """ Execute the command rspctl --clock and and parses the result @@ -161,19 +197,20 @@ def query_clock(): out, err = issue_rspctl_command(['--clock']) return parse_clock_output(out, err) + class RCUBoard: """ This class describes the properties of a RCUBoard """ - def __init__(self, - identifier = -1, - status = None, - mode = None, - delay = None, - attenuation = None, - sub_bands = None, - xcsub_bands = None): + def __init__(self, + identifier=-1, + status=None, + mode=None, + delay=None, + attenuation=None, + sub_bands=None, + xcsub_bands=None): self.id = 
identifier self.status = status self.mode = mode @@ -195,6 +232,7 @@ class RCUBoard: def __getitem__(self, item): return getattr(self, item) + # -------RCU mode def parse_rcu_output(out, err): """ @@ -211,21 +249,21 @@ def parse_rcu_output(out, err): :rtype: dict """ rcu_values = out[1:] - rcu_by_id = {} # list of RCUs listed by ID + rcu_by_id = {} # list of RCUs listed by ID for rcu_value in rcu_values: - match = re.search("RCU\[\s*(?P<RCU_id>\d+)\].control=" + # parsing id - "\d+x\w+\s=>\s*(?P<status>\w+)," + # parsing status - "\smode:(?P<mode>\-?\d)," + # parsing mode - "\sdelay=(?P<delay>\d+)," + # parsing delay - "\satt=(?P<attenuation>\d+)", rcu_value) # parsing attenuation + match = re.search("RCU\[\s*(?P<RCU_id>\d+)\].control=" + # parsing id + "\d+x\w+\s=>\s*(?P<status>\w+)," + # parsing status + "\smode:(?P<mode>-?\d)," + # parsing mode + "\sdelay=(?P<delay>\d+)," + # parsing delay + "\satt=(?P<attenuation>\d+)", rcu_value) # parsing attenuation if match: rcu_id = int(match.group('RCU_id')) - rcu_board = RCUBoard(identifier = rcu_id, - status = match.group('status'), - mode = match.group('mode'), - delay = match.group('delay'), - attenuation = match.group('attenuation') + rcu_board = RCUBoard(identifier=rcu_id, + status=match.group('status'), + mode=match.group('mode'), + delay=match.group('delay'), + attenuation=match.group('attenuation') ) rcu_by_id[rcu_id] = rcu_board @@ -235,6 +273,7 @@ def parse_rcu_output(out, err): "STDERR: %s\n" % err) return rcu_by_id + def query_rcu_mode(): """ Execute the command rspctl --rcu and parses the result @@ -244,6 +283,7 @@ def query_rcu_mode(): out, err = issue_rspctl_command(['--rcu']) return parse_rcu_output(out, err) + # -------Subbands def parse_subbands_output(out, err): """ @@ -270,9 +310,9 @@ def parse_subbands_output(out, err): i_row = 0 while i_row < len(rcu_values): value = rcu_values[i_row] - match = re.search("RCU\[\s*(?P<RCU_id>\d+)\]" + # parsing RCU id - ".subbands=\(\d+,(?P<n_rows>\d)\)\s+x\s+\(0," + # parsing the number of rows - "(?P<n_elements>\d+)\)\s*", # parsing the number of elements + match = re.search("RCU\[\s*(?P<RCU_id>\d+)\]" + # parsing RCU id + ".subbands=\(\d+,(?P<n_rows>\d)\)\s+x\s+\(0," + # parsing the number of rows + "(?P<n_elements>\d+)\)\s*", # parsing the number of elements value) if match: rcu_id = int(match.group('RCU_id')) @@ -287,15 +327,17 @@ def parse_subbands_output(out, err): sub_band_list = [] for i in range(n_rows): # Parsing the string [ 143 145 ... or ... 
122 123] into a list of integers - row = list(map(int, [_f for _f in rcu_values[i_row + i + 1].strip().lstrip('[').rstrip(']').split(' ') if _f])) + row = list( + map(int, [_f for _f in rcu_values[i_row + i + 1].strip().lstrip('[').rstrip(']').split(' ') if _f])) sub_band_list.append(row) - i_row = i_row + n_rows + 1 # ADVANCE + i_row = i_row + n_rows + 1 # ADVANCE rcu_by_id[rcu_id] = sub_band_list return rcu_by_id + def query_sub_bands_mode(): """ Execute the command rspctl --subbands and parses the result @@ -305,6 +347,7 @@ def query_sub_bands_mode(): out, err = issue_rspctl_command(['--subbands']) return parse_subbands_output(out, err) + # -------XCSub bands def parse_xcsub_bands_output(out, err): """ @@ -342,7 +385,7 @@ def parse_xcsub_bands_output(out, err): :return: a dict indexed by the rcu board id containing the list of xcsub bands used :rtype: dict """ - rcu_values= out[1:] + rcu_values = out[1:] rcu_by_id = {} i_row = 0 @@ -362,10 +405,11 @@ def parse_xcsub_bands_output(out, err): xcsub_bands_list = [] for i in range(n_rows): # Parsing the string [ 143 145 ... or ... 122 123] into a list of integers - row = list(map(int, [_f for _f in rcu_values[i_row + i + 1].strip().lstrip('[').rstrip(']').split(' ') if _f])) + row = list( + map(int, [_f for _f in rcu_values[i_row + i + 1].strip().lstrip('[').rstrip(']').split(' ') if _f])) xcsub_bands_list.append(row) - i_row = i_row + n_rows + 1 # ADVANCE + i_row = i_row + n_rows + 1 # ADVANCE # concatenates the two rows -> computes the max xcsub_band and returns the value # [NOTE max accepts only a couple of values] val = reduce(lambda x, a: max(x, a), reduce(lambda x, a: x + a, xcsub_bands_list)) @@ -377,6 +421,7 @@ def parse_xcsub_bands_output(out, err): rcu_by_id[rcu_id] = val return rcu_by_id + def query_xcsub_bands_mode(): """ Execute the command rspctl --subbands and parses the result @@ -386,6 +431,7 @@ def query_xcsub_bands_mode(): out, err = issue_rspctl_command(['--xcsubband']) return parse_xcsub_bands_output(out, err) + # -------Spectral inversion def parse_spinv_output(out, err): """ @@ -450,6 +496,7 @@ def parse_spinv_output(out, err): return rcu_by_id + def query_spinv_mode(): """ Execute the command rspctl --spinv and parses the result @@ -459,6 +506,7 @@ def query_spinv_mode(): out, err = issue_rspctl_command(['--specinv']) return parse_spinv_output(out, err) + def execute_xcstatistics_mode(parameters): """ Execute the command rspclt --xcstatistics from a dict of parameters @@ -481,11 +529,12 @@ def execute_xcstatistics_mode(parameters): cmd_list.append('--integration=%d' % parameters['integration']) if 'directory' in parameters: cmd_list.append('--directory=%s' % parameters['directory']) - if 'select'in parameters: + if 'select' in parameters: cmd_list.append('--select=%s' % parameters['select']) issue_rspctl_command(cmd_list) + # ----------------------------------Merging information def query_status(): @@ -542,6 +591,7 @@ def query_status(): return res + def dump_info_file(path, res): """ Dump the information collected in json format into the directory specified in path @@ -553,7 +603,8 @@ def dump_info_file(path, res): file_path = os.path.join(path, "infos") with open(file_path, 'w') as fout: - fout.write(json.dumps(res, indent = 4, separators = (',', ': '))) + fout.write(json.dumps(res, indent=4, separators=(',', ': '))) + def query_xcstatistics(options): """ @@ -576,7 +627,7 @@ def query_xcstatistics(options): filename = "_mode_%s_xst_sb%0.3d.dat" % (mode, subband) - temporary_output_directory = tempfile.mkdtemp(prefix 
= "rspctlprobe_tmp") + temporary_output_directory = tempfile.mkdtemp(prefix="rspctlprobe_tmp") options['directory'] = temporary_output_directory integration = options['integration'] @@ -600,7 +651,7 @@ def query_xcstatistics(options): rcus = res["rcus"] header = ["RCUID", "delay", "attenuation", "mode", "status", "xcsub_bands"] - ids = [[header[0]] + list(map(str, list(rcus.keys())))] # Create the id column of the file + ids = [[header[0]] + list(map(str, list(rcus.keys())))] # Create the id column of the file table = [[key] + [str(rcus[i][key]) for i in rcus] for key in header[1:]] table = ids + table @@ -618,6 +669,7 @@ def query_xcstatistics(options): return res + def query_most_common_mode(): """ Return the most frequent mode that the RCUs have @@ -627,9 +679,10 @@ def query_most_common_mode(): rcus_mode = [rcus_mode[rcu] for rcu in rcus_mode] return int(list_mode([x['mode'] for x in rcus_mode])) + def set_mode(mode): """ - Set the mode on all the rsp boards + Set the mode on all the rcu boards :param mode: the mode to be set :type mode: int @@ -645,11 +698,128 @@ def set_mode(mode): for i in range(10): time.sleep(3) outmode = query_most_common_mode() - logger.info('current rsp mode is {}'.format(outmode)) + logger.info('current rcu mode is {}'.format(outmode)) if mode == outmode: logger.info('mode changed correctly to {}'.format(outmode)) return True - raise Exception('Cannot change rsp mode') + raise Exception('Cannot change rcu mode') + + +def _single_element_hba_delay_string(element_id): + """ + Generate the HBA delay string to select a single element id + :param element_id: the element id to be selected + :return: the element id string + >>> _single_element_hba_delay_string(0) + '128,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2' + >>> _single_element_hba_delay_string(15) + '2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,128' + >>> _single_element_hba_delay_string(8) + '2,2,2,2,2,2,2,2,128,2,2,2,2,2,2,2' + >>> _single_element_hba_delay_string(-1) + Traceback (most recent call last): + ... + ValueError: the element id -1 out of range [0, 15] + >>> _single_element_hba_delay_string(18) + Traceback (most recent call last): + ... + ValueError: the element id 18 out of range [0, 15] + """ + if element_id < 0 or element_id > _NUM_HBA_ELEMENTS: + raise ValueError('the element id %d out of range [0, 15]' % element_id) + + return ",".join([_ELEMENT_OFF_CODE for _ in range(element_id)] + + [_ELEMENT_ON_ZERO_DELAY] + + [_ELEMENT_OFF_CODE for _ in range(element_id + 1, _NUM_HBA_ELEMENTS)]) + + +def _tile_to_rcu_ids(tile_id): + """ + RCU ids for a given tile id (both polarizations) + :param tile_id: the id of the tile + :return: the list of the rcu ids corresponding to the tile_id + + >>> _tile_to_rcu_ids(1) + [2,3] + >>> _tile_to_rcu_ids(4) + [8,9] + """ + return [2 * tile_id, 2 * tile_id + 1] + + +def _rcu_selection_string(element_id, station_type): + """ + Generate the rcu selection string to select a series of rcu to be set for the HBA single element mode + :param element_id: the element id to be selected + :return: the element id string + >>> _rcu_selection_string(0, 'remote') + '0,1' + >>> _rcu_selection_string(8, 'remote') + '14,15,32,33,60,61' + >>> _rcu_selection_string(15, 'remote') + '42,43,58,59,88,89,94,95' + >>> _rcu_selection_string(8, 'international') + '8,9,34,35,114,115,116,117' + >>> _rcu_selection_string(9, 'core') + '42,43,90,91' + >>> _rcu_selection_string(8, 'core') + '' + >>> _rcu_selection_string(-1, 'core') + Traceback (most recent call last): + ... 
+ ValueError: the element id -1 is out of range [0, 15] + >>> _rcu_selection_string(18, 'core') + Traceback (most recent call last): + ... + ValueError: the element id 18 is out of range [0, 15] + """ + if element_id < 0 or element_id >= _NUM_HBA_ELEMENTS: + raise ValueError('the element id %d is out of range [0, 15]' % element_id) + + num_tiles = _NUM_TILES[station_type] + elements_list = _OptimumElements[station_type] + rcu_to_be_selected = [] + for tile in range(num_tiles): + if elements_list[tile] == element_id: + # convert tile number to RCU number + rcu_to_be_selected += _tile_to_rcu_ids(tile) + + rcu_ctrl_string = ','.join(map(str, rcu_to_be_selected)) + return rcu_ctrl_string + + +def detect_station_type(): + hostname = socket.gethostname() + if hostname.startswith('RS'): + station_type = 'remote' + elif hostname.startswith('CS'): + station_type = 'core' + else: + station_type = 'international' + logger.info('Station type detected is %s', station_type) + return station_type + + +def set_single_hba_element(station_type): + """ + Activate a single element in the HBA tile + + :return: None + """ + if station_type not in _NUM_TILES: + raise ValueError('the station type "%s" does not exist' % station_type) + + logger.info('selecting a single element only') + for element_id in range(_NUM_HBA_ELEMENTS): + rcu_to_select = _rcu_selection_string(element_id, station_type) + if rcu_to_select == '': + continue + delay_to_set = _single_element_hba_delay_string(element_id) + + issue_rspctl_command(['--hbadelay={}'.format(delay_to_set), + '--select={}'.format(rcu_to_select)]) + time.sleep(_SLEEP_TIME_SINGLE_ELEMENT_SELECTION) + def set_xcsubband(subband): """ @@ -659,7 +829,7 @@ :type subband: string """ logger.info('switching rcu xcsubband to %d', subband) - issue_rspctl_command(["--xcsubband={}".format(subband)]) + issue_rspctl_command(['--xcsubband={}'.format(subband)]) logger.debug('xcsubband change command issued') for i in range(10): time.sleep(1) @@ -670,7 +840,8 @@ return True raise Exception('Cannot change rsp xcsubband to {}'.format(subband)) -def produce_xcstatistics(integration_time = 1, duration = 1, add_options = None, output_directory = "./"): + +def produce_xcstatistics(integration_time=1, duration=1, add_options=None, output_directory="./"): """ Execute the command to compute the xcstatistics with a given integration and duration. It is also possible to specify an output directory and additional options. @@ -690,13 +861,15 @@ res = query_xcstatistics(add_options) return res + def batch_produce_xcstatistics(integration_time, duration, - wait_time = None, - xcsub_bands = None, - mode = None, - add_options = None, - output_directory = "./"): + wait_time=None, + xcsub_bands=None, + mode=None, + add_options=None, + output_directory="./", + select_single_element=False): """ Produces the xcstatistics for a list of integration_times durations and wait_times on the given set of xcsubband storing everything in the output directory. 
@@ -719,6 +892,9 @@ if mode != -2: set_mode(mode) + if select_single_element: + set_single_hba_element(station_type=STATION_TYPE) + for ind, (i, d, w) in enumerate(zip(integration_time, duration, wait_time)): if not xcsub_bands: produce_xcstatistics(i, d, add_options, output_directory) @@ -729,51 +905,79 @@ time.sleep(w) + # ----------------------------------MAIN CODE LOGIC -def setup_logging(): +def setup_logging(log_level): """ Setup the logging system """ logging.basicConfig( - format = '%(asctime)s - %(name)s: %(message)s', - datefmt = "%m/%d/%Y %I:%M:%S %p", - level = logging.DEBUG) - - + format='%(asctime)s - %(name)s: %(message)s', + datefmt="%m/%d/%Y %I:%M:%S %p", + level=log_level) + + __MODE_NOT_SET_DEFAULT = -2 -def init(): +def init(log_level=logging.DEBUG): """ Init phase of the program """ - setup_logging() + global STATION_TYPE + setup_logging(log_level=log_level) + STATION_TYPE = detect_station_type() + def setup_command_argument_parser(): parser = argparse.ArgumentParser( - formatter_class=argparse.RawDescriptionHelpFormatter, - description = "es: rspctlprobe.py --mode 3 --xcstatistics --xcsubband 100:400:50 --integration 5 --duration 5 --wait 3600 --loops 24 --directory /localhome/data/") - - parser.add_argument('--xcstatistics', action = 'store_true') - parser.add_argument('--integration', type = int, default = [1], nargs = '+') - parser.add_argument('--duration', type = int, default = [1], nargs = '+') - parser.add_argument('--xcangle', default = 'False') - parser.add_argument('--directory', default = os.getcwd()) - parser.add_argument('--wait', type = int, default = [0], nargs = '+') - parser.add_argument('--xcsubband', type = str, default = "") - parser.add_argument('--loops', type = int, default = 1) - parser.add_argument('--mode', type = int, default = __MODE_NOT_SET_DEFAULT) + formatter_class=argparse.RawDescriptionHelpFormatter, + description="Example complete tile: rspctlprobe.py --mode 5 --xcstatistics --xcsubband 100:400:25 --integration 5 --duration 5 --wait 3600 --loops 24 --directory /localhome/data/ \n\n" + "Example single element: rspctlprobe.py --mode 5 --single --xcstatistics --xcsubband 100:400:25 --integration 5 --duration 5 --wait 3600 --loops 24 --directory /localhome/data/") + + parser.add_argument('--xcstatistics', action='store_true') + parser.add_argument('--integration', type=int, default=[1], nargs='+') + parser.add_argument('--duration', type=int, default=[1], nargs='+') + parser.add_argument('--xcangle', default='False') + parser.add_argument('--directory', default=os.getcwd()) + parser.add_argument('--wait', type=int, default=[0], nargs='+') + parser.add_argument('--xcsubband', type=str, default="") + parser.add_argument('--loops', type=int, default=1) + parser.add_argument('--mode', type=int, default=__MODE_NOT_SET_DEFAULT) + parser.add_argument('--single', action='store_true', help='select a single HBA element') return parser + +def check_input_validity(arguments): + if arguments.single: + current_mode = query_most_common_mode() + if current_mode not in _HBA_MODES and arguments.mode == __MODE_NOT_SET_DEFAULT: + logger.error('single element selection cannot be done for non-HBA modes (codes 5, 6, 7): current mode is %d', + current_mode) + raise SystemExit('single element selection cannot be done for non-HBA modes (codes 5, 6, 7)') + + +def xcsubband_specification_to_list(xcsubbands_string): + if ":" in xcsubbands_string: + start, end, step = map(int, 
xcsubbands_string.split(":")) + xcsub_bands = [int(i) for i in range(start, end + step, step)] + elif "," in xcsubbands_string: + xcsub_bands = [int(i) for i in xcsubbands_string.split(",")] + else: + xcsub_bands = [int(xcsubbands_string)] + return xcsub_bands + + def parse_and_execute_command_arguments(): """ Parses the command line arguments and execute the procedure linked :return: :rtype: """ + global STATION_TYPE parser = setup_command_argument_parser() program_arguments = parser.parse_args() - + check_input_validity(program_arguments) if program_arguments.xcstatistics: options = {} if program_arguments.xcangle: @@ -781,34 +985,31 @@ def parse_and_execute_command_arguments(): try: if program_arguments.xcsubband: - if ":" in program_arguments.xcsubband: - start, end, step = map(int, program_arguments.xcsubband.split(":")) - xcsub_bands = [int(i) for i in range(start, end+step, step)] - elif "," in program_arguments.xcsubband: - xcsub_bands = [int(i) for i in program_arguments.xcsubband.split(",")] - else: - xcsub_bands = [int(program_arguments.xcsubband)] + xcsub_bands = xcsubband_specification_to_list(program_arguments.xcsubband) for i in range(program_arguments.loops): batch_produce_xcstatistics(program_arguments.integration, program_arguments.duration, - wait_time = program_arguments.wait, - xcsub_bands = xcsub_bands, - mode = program_arguments.mode, - add_options = options, - output_directory = program_arguments.directory) + wait_time=program_arguments.wait, + xcsub_bands=xcsub_bands, + mode=program_arguments.mode, + add_options=options, + output_directory=program_arguments.directory, + select_single_element=program_arguments.single) else: for i in range(program_arguments.loops): batch_produce_xcstatistics(program_arguments.integration, program_arguments.duration, - wait_time = program_arguments.wait, - mode = program_arguments.mode, - add_options = options, - output_directory = program_arguments.directory) + wait_time=program_arguments.wait, + mode=program_arguments.mode, + add_options=options, + output_directory=program_arguments.directory, + select_single_element=program_arguments.single) + if program_arguments.mode != __MODE_NOT_SET_DEFAULT: # SWITCH BACK TO MODE 0 AT THE END IF MODE SWITCH WAS SET - set_mode(0) + set_mode(0) except Exception as e: logger.error('error executing rspctl : %s', e) logger.error('traceback \n%s', traceback.format_exc()) @@ -816,11 +1017,13 @@ def parse_and_execute_command_arguments(): else: parser.error('please specify a task') + def main(): - init() - logging.basicConfig(format = '%(asctime)s ' + socket.gethostname() + ' %(levelname)s %(message)s', - level = logging.INFO) + init(log_level=logging.INFO) + logging.basicConfig(format='%(asctime)s ' + socket.gethostname() + ' %(levelname)s %(message)s', + level=logging.INFO) parse_and_execute_command_arguments() + if __name__ == '__main__': main() diff --git a/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc b/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc index a2462a0717583c1669d07e6ead0ab81c00c3c330..5a9bc3f4bea1cadc352584deeb3ff09fba52e036 100644 --- a/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc +++ b/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc @@ -127,9 +127,17 @@ std::string TMSSBridge::getParsetAsText(int subtask_id) bool TMSSBridge::setSubtaskState(int subtask_id, const string& state) { string queryStr = "/api/subtask/" + to_string(subtask_id) + "/"; + string json_doc = "{ \"state\": \"/api/subtask_state/" + state +"/\""; + if(state == "finishing") { + // set stop_time to 'now' upon finished to 
diff --git a/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc b/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc index a2462a0717583c1669d07e6ead0ab81c00c3c330..5a9bc3f4bea1cadc352584deeb3ff09fba52e036 100644 --- a/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc +++ b/MAC/APL/MainCU/src/MACScheduler/TMSSBridge.cc @@ -127,9 +127,17 @@ std::string TMSSBridge::getParsetAsText(int subtask_id) bool TMSSBridge::setSubtaskState(int subtask_id, const string& state) { string queryStr = "/api/subtask/" + to_string(subtask_id) + "/"; + string json_doc = "{ \"state\": \"/api/subtask_state/" + state +"/\""; + if(state == "finishing") { + // set stop_time to 'now' upon 'finishing', to get an actual record of when the observation stopped + ptime now = from_time_t(time(0)); + json_doc += ", \"stop_time\": \"" + to_iso_extended_string(now) + "\""; + } + json_doc += " }"; + string result; - if(httpQuery(queryStr, result, "PATCH", "{ \"state\": \"/api/subtask_state/" + state +"/\" }")) { - LOG_INFO_STR("Updated subtask id=" << subtask_id << " to status=" << state); + if(httpQuery(queryStr, result, "PATCH", json_doc)) { + LOG_INFO_STR("Updated subtask state id=" << subtask_id << " with patch: " << json_doc); return true; } diff --git a/MAC/Deployment/data/StaticMetaData/CableDelays/CS030-CableDelays.conf b/MAC/Deployment/data/StaticMetaData/CableDelays/CS030-CableDelays.conf index 204a334e12b246e4f005a907e37f7efe939fd414..cfc24531fd144ec059c9175a806576c27127163f 100644 --- a/MAC/Deployment/data/StaticMetaData/CableDelays/CS030-CableDelays.conf +++ b/MAC/Deployment/data/StaticMetaData/CableDelays/CS030-CableDelays.conf @@ -16,6 +16,10 @@ # 115m 465.5254 # 130m 530.6981 # +# T25 has 122 meter coax instead of 115 meter. New delay added to the table (M.J. Norden, 24-9-2020) +#50 115 465.5254 80 326.9640 115 493.8617 +#51 115 465.5254 80 326.9640 115 493.8617 +# # LBL LBH HBA #RCUnr len delay len delay len delay #----------------------------------------------------------------------- @@ -69,8 +73,8 @@ 47 115 465.5254 80 326.9640 115 465.5254 48 115 465.5254 80 326.9640 115 465.5254 49 115 465.5254 80 326.9640 115 465.5254 -50 115 465.5254 80 326.9640 115 465.5254 -51 115 465.5254 80 326.9640 115 465.5254 +50 115 465.5254 80 326.9640 120 493.8617 +51 115 465.5254 80 326.9640 120 493.8617 52 115 465.5254 115 465.5254 115 465.5254 53 115 465.5254 115 465.5254 115 465.5254 54 80 326.9640 115 465.5254 115 465.5254 diff --git a/QA/QA_Service/lib/qa_service.py b/QA/QA_Service/lib/qa_service.py index 18bd13f9c1f44378b90bdd5e6f99919627123012..992ddb000178fcf1fff3cb93cedc9b7d5a91ac25 100644 --- a/QA/QA_Service/lib/qa_service.py +++ b/QA/QA_Service/lib/qa_service.py @@ -69,6 +69,10 @@ class QAFilteringOTDBBusListener(OTDBBusListener): class QAFilteringTMSSSubTaskBusListener(TMSSBusListener): class QAFilteringTMSSSubTaskEventMessageHandler(UsingToBusMixin, TMSSEventMessageHandler): + def __init__(self): + UsingToBusMixin.__init__(self) + TMSSEventMessageHandler.__init__(self) + def _send_qa_command_message(self, subtask_id: int, command_subject: str): with TMSSsession.create_from_dbcreds_for_ldap() as tmsssession: tmsssession.set_subtask_status(subtask_id, 'queueing') diff --git a/QA/QA_Service/test/t_qa_service.py b/QA/QA_Service/test/t_qa_service.py index fe5bfc908acd25b0225f6a6747277302564efa44..8daf86ce36f8c27fb947bac2a843051e2204e205 100755 --- a/QA/QA_Service/test/t_qa_service.py +++ b/QA/QA_Service/test/t_qa_service.py @@ -96,7 +96,8 @@ class TestQAService(unittest.TestCase): cls.tmp_exchange = TemporaryExchange("%s_%s" % (cls.__name__, cls.TEST_UUID)) cls.tmp_exchange.open() - cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address) + cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address, + start_postgres_listener=True, start_ra_test_environment=True) cls.tmss_test_env.start() cls.tmss_test_env.populate_schemas() diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/rarpc.py b/SAS/ResourceAssignment/ResourceAssigner/lib/rarpc.py index 75fe6059ed1fc2f9098c774c600d3439e7810960..f5711b4db9753551debdda209e418df0c022cdc2 100755 --- a/SAS/ResourceAssignment/ResourceAssigner/lib/rarpc.py +++ b/SAS/ResourceAssignment/ResourceAssigner/lib/rarpc.py @@ -30,7 
+30,7 @@ to assign resources to these tasks. import logging logger = logging.getLogger(__name__) -from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME +from lofar.messaging.config import DEFAULT_BROKER, DEFAULT_BUSNAME from lofar.messaging.rpc import RPCClientContextManagerMixin, RPCClient diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py b/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py index e97993b95a5533e282a1c2b106dbd514abc9b071..6875b8004895b7302e338e31d3a1e32df31aefe2 100755 --- a/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py +++ b/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py @@ -158,7 +158,7 @@ class ResourceAssigner(object): if spec.status == 'approved': # Only needed to send misc field info (storagemanager) to OTDB logger.info('Task otdb_id=%s tmss_id=%s is only approved, no resource assignment needed yet' % (otdb_id, tmss_id)) self._send_task_status_notification(spec, 'approved') - return + return True #TODO have Specification propagate to the estimator? if self._schedule_resources(spec, specification_tree): # Cleanup the data of any previous run of the task diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py b/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py index ea374250236b38524742631147c1c98879f7867b..a2d282ce204f6d1361443d231c8f13b0865a26df 100644 --- a/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py +++ b/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py @@ -283,13 +283,21 @@ class BasicScheduler(object): # up more resources as a by-product, in which case other conflicts can simply be shifted to those newly freed # resources. conflict_claims = self.radb.getResourceClaims(task_ids=[self.task_id], status="conflict", extended=True) - logger.info("Resulting claims in conflict before resolution: %s", conflict_claims) - - if conflict_claims and not any([self._resolve_conflict(c) for c in conflict_claims]): - if need_all or len(conflict_claims) == len(tentative_claims): - # Could not resolve any conflict - raise ScheduleException("Could not resolve one or more conflicting claims: #tentative_claims=%s #conflict_claims=%s conflict_claims=%s" % ( - len(tentative_claims), len(conflict_claims), conflict_claims)) + if conflict_claims: + for conflict_claim in conflict_claims: + logger.warning("conflicting_claim: %s\nresource:%s\noverlapping_claims:%s\noverlapping_tasks:%s", conflict_claim, + self.radb.getResources(resource_ids=[conflict_claim['resource_id']], + include_availability=True, + claimable_capacity_lower_bound=conflict_claim['starttime'], + claimable_capacity_upper_bound=conflict_claim['endtime'])[0], + self.radb.get_overlapping_claims(conflict_claim['id']), + self.radb.get_overlapping_tasks(conflict_claim['id'])) + + if not any([self._resolve_conflict(c) for c in conflict_claims]): + if need_all or len(conflict_claims) == len(tentative_claims): + # Could not resolve any conflict + raise ScheduleException("Could not resolve one or more conflicting claims: #tentative_claims=%s #conflict_claims=%s conflict_claims=%s" % ( + len(tentative_claims), len(conflict_claims), conflict_claims)) # remove conflicting claims (allowing the next iteration to propose alternatives). Note that _handle_conflicts # could have reduced the number of conflicting claims. 
diff --git a/SAS/ResourceAssignment/ResourceAssigner/test/ra_test_environment.py b/SAS/ResourceAssignment/ResourceAssigner/test/ra_test_environment.py index 5046b4eb97f63c4354418a1352c9e4803c641054..fc053cfb2674659aac93c62c0861fa436d109ca9 100644 --- a/SAS/ResourceAssignment/ResourceAssigner/test/ra_test_environment.py +++ b/SAS/ResourceAssignment/ResourceAssigner/test/ra_test_environment.py @@ -41,7 +41,7 @@ class RATestEnvironment: exchange: str=os.environ.get("RA_EXCHANGE", DEFAULT_BUSNAME), broker: str=os.environ.get("RA_BROKER", DEFAULT_BROKER)): self.radb_test_instance = RADBTestDatabaseInstance() self.radb = self.radb_test_instance.create_database_connection() - self.radb_service = createRADBService(dbcreds=self.radb_test_instance.dbcreds, exchange=exchange, broker=broker) + self.radb_service = createRADBService(dbcreds=self.radb_test_instance.dbcreds, exchange=exchange, broker=broker, num_threads=1) self.re_service = createEstimatorService(exchange=exchange, broker=broker) self.ra_service = RAService(radbcreds=self.radb_test_instance.dbcreds, exchange=exchange, broker=broker) diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py index 29358de27d660b822a48c48a705f5dd0ec6ff135..285933e110ad53c2a94c719bc15bb8386932059d 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py @@ -659,7 +659,7 @@ class RADatabase(PostgresDatabaseConnection): return self._cursor.rowcount > 0 - def _to_fields_and_value_placeholders_strings(self, fields: collections.Iterable) -> (str, str): + def _to_fields_and_value_placeholders_strings(self, fields: collections.abc.Iterable) -> (str, str): """convert a list of fields (column names) into a tuple of a comma-separated string and a comma-separated placeholder string For usage with prepared statements (postgres mogrify)""" fields_str = ', '.join(fields) diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbbuslistener.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbbuslistener.py index c0412d60c699d267af1993a0e185c43904ff2165..7a833e499e9449b557333504e3bf394e5515353d 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbbuslistener.py +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbbuslistener.py @@ -124,8 +124,12 @@ if __name__ == '__main__': logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + class ExampleRADBEventMessageHandler(RADBEventMessageHandler): + def onTaskUpdated(self, updated_task): + logger.info("Example task updated: %s", updated_task) + from lofar.messaging import BusListenerJanitor - with BusListenerJanitor(RADBEventMessageBusListener()): + with BusListenerJanitor(RADBEventMessageBusListener(handler_type=ExampleRADBEventMessageHandler)): waitForInterrupt() __all__ = ["RADBEventMessageBusListener", "RADBEventMessageHandler"] diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py index 6a1786252db82892e650c2e24899cc6836046570..e0e853db3c9267aa8e46e3a12263ab24f36ee671 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radbpglistener.py @@ -122,14 +122,13 @@ class RADBPGListener(PostgresListener): r = {k:r[k] for k in ['id', 'total_capacity', 'available_capacity', 'used_capacity']} 
self._sendNotification('ResourceCapacityUpdated', r) - def __enter__(self): - super(RADBPGListener, self).__enter__() + def start(self): + super(RADBPGListener, self).start() self.radb.connect() self.event_bus.open() - return self - def __exit__(self, exc_type, exc_val, exc_tb): - super(RADBPGListener, self).__exit__(exc_type, exc_val, exc_tb) + def stop(self): + super(RADBPGListener, self).stop() self.radb.disconnect() self.event_bus.close() diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py index 8ecc6fb4b4ebbb380d168e83e695431d4db9af91..422174462c3ddb222fc7437c38c3548c7eefb5ed 100755 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py @@ -55,8 +55,14 @@ class RADBTestDatabaseInstance(PostgresTestDatabaseInstance): for sql_path in sql_createdb_paths: logger.debug("setting up database. applying sql file: %s", sql_path) with open(sql_path) as sql: + # temporarily suppress logging of queries to prevent the log from being spammed with the entire sql schema + logging.getLogger('lofar.common.postgres').disabled = True + db.executeQuery(sql.read()) + # revert temporarily suppressed logging + logging.getLogger('lofar.common.postgres').disabled = False + def create_database_connection(self) -> RADatabase: self.radb = RADatabase(self.dbcreds) return self.radb diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py index 399b826974ae0275845fc6f639a66be40dddd980..f1ec6d530f2797ee805874072688562d4f103c21 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py @@ -161,12 +161,12 @@ class ResourceEstimatorHandler(ServiceMessageHandler): return self.get_subtree_estimate(specification_tree) -def createService(exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER): +def createService(exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER, num_threads=1): return RPCService(service_name=DEFAULT_RESOURCEESTIMATOR_SERVICENAME, handler_type=ResourceEstimatorHandler, exchange=exchange, broker=broker, - num_threads=1) + num_threads=num_threads) def main(): diff --git a/SAS/ResourceAssignment/ResourceAssignmentService/service.py b/SAS/ResourceAssignment/ResourceAssignmentService/service.py index 732404dbadbe236fb8668ae75ed32b62e021a6c9..0ec23d0a9638704c53a74677b4599fc6f91605cf 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentService/service.py +++ b/SAS/ResourceAssignment/ResourceAssignmentService/service.py @@ -386,13 +386,13 @@ class RADBServiceMessageHandler(ServiceMessageHandler): return { 'resource_claimable_capacity': resource_claimable_capacity} -def createService(exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER, dbcreds=None): +def createService(exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER, dbcreds=None, num_threads=4): return RPCService(DEFAULT_RADB_SERVICENAME, RADBServiceMessageHandler, handler_kwargs={'dbcreds': dbcreds}, exchange=exchange, broker=broker, - num_threads=4) + num_threads=num_threads) def main(): # make sure we run in UTC timezone diff --git a/SAS/TMSS/client/lib/populate.py b/SAS/TMSS/client/lib/populate.py index ed05353790cb9db1ccdeeed71f0b5589201ca502..bb39f6967617e077aa4c8d00f425534cbfc4d95c 100644 --- a/SAS/TMSS/client/lib/populate.py +++ b/SAS/TMSS/client/lib/populate.py @@ -5,6 +5,7 
@@ import json from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession from lofar.common import json_utils import os +from concurrent.futures import ThreadPoolExecutor def populate_schemas_main(): from optparse import OptionParser @@ -34,7 +35,9 @@ def populate_schemas(schema_dir: str=None, templates_filename: str=None): with TMSSsession.create_from_dbcreds_for_ldap() as client: base_url = client.base_url.rstrip('/').rstrip('api').rstrip('/') - for template in templates: + + # define upload method for parallel execution (see below) + def upload_template(template): try: with open(os.path.join(schema_dir, template.pop('file_name'))) as schema_file: try: @@ -69,7 +72,7 @@ else: template['schema'] = json_schema - logger.info("Uploading template template='%s' name='%s' version='%s'", template, name, version) + logger.info("Uploading template name='%s' version='%s'", name, version) client.post_template(template_path=template_name, name=name, @@ -81,3 +84,13 @@ except Exception as e: logger.error(e) + # TODO: make parallel upload work. Right now it sometimes fails due to interdependencies and non-deterministic upload order. + # do parallel upload + # with ThreadPoolExecutor() as executor: + # executor.map(upload_template, templates) + + # for now, do sequential upload + for template in templates: + upload_template(template) + + diff --git a/SAS/TMSS/client/lib/tmss_http_rest_client.py b/SAS/TMSS/client/lib/tmss_http_rest_client.py index 48df33a1cab208a1892a4f5aa47ed11e73b066e0..6d8a1f647edac91a3b3974a5551788ec98de9850 100644 --- a/SAS/TMSS/client/lib/tmss_http_rest_client.py +++ b/SAS/TMSS/client/lib/tmss_http_rest_client.py @@ -5,7 +5,7 @@ import requests from http.client import responses import os import json -from datetime import datetime +from datetime import datetime, timedelta from lofar.common.datetimeutils import formatDatetime # usage example: @@ -94,8 +94,12 @@ class TMSSsession(object): def set_subtask_status(self, subtask_id: int, status: str) -> {}: '''set the status for the given subtask, and return the subtask with its new state, or raise on error''' + json_doc = {'state': "%s/subtask_state/%s/" % (self.base_url, status)} + if status == 'finishing': + json_doc['stop_time'] = datetime.utcnow().isoformat() + response = self.session.patch(url='%s/subtask/%s/' % (self.base_url, subtask_id), - json={'state': "%s/subtask_state/%s/" % (self.base_url, status)}, + json=json_doc, params={'format':'json'}) if response.status_code >= 200 and response.status_code < 300: 
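
A short usage sketch of the set_subtask_status change above (the subtask id is illustrative): when a subtask is moved to 'finishing', the client now also records the actual stop time, mirroring what TMSSBridge::setSubtaskState does on the C++ side.

from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession

with TMSSsession.create_from_dbcreds_for_ldap() as session:
    # PATCHes /api/subtask/123/ with {'state': '<base_url>/subtask_state/finishing/',
    #                                 'stop_time': '<datetime.utcnow() as ISO-8601>'}
    session.set_subtask_status(123, 'finishing')
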
self.session.get(url=full_url, params=params, timeout=100000) - logger.info("%s %s %s on %s", response.request.method.upper(), response.status_code, responses.get(response.status_code), response.request.url) + logger.info("%s %s %s in %.1fms%s on %s", response.request.method.upper(), response.status_code, responses.get(response.status_code), + response.elapsed.total_seconds()*1000, ' SLOW!' if response.elapsed > timedelta(seconds=1) else '', + response.request.url) if response.status_code >= 200 and response.status_code < 300: result = json.loads(response.content.decode('utf-8')) @@ -239,7 +246,7 @@ class TMSSsession(object): def specify_observation_task(self, task_id: int) -> requests.Response: """specify observation for the given draft task by just doing a REST API call """ - result = self.session.get(url='%s/api/task/%s/specify_observation' % (self.base_url, task_id)) + result = self.session.get(url=self.get_full_url_for_path('/task/%s/specify_observation' % (task_id,))) if result.status_code >= 200 and result.status_code < 300: return result.content.decode('utf-8') raise Exception("Could not specify observation for task %s.\nResponse: %s" % (task_id, result)) @@ -257,7 +264,7 @@ class TMSSsession(object): def get_setting(self, setting_name: str) -> {}: """get the value of a TMSS setting. returns the setting value upon success, or raises.""" - response = self.session.get(url='%s/setting/%s/' % (self.base_url, setting_name), + response = self.session.get(url=self.get_full_url_for_path('/setting/%s/' % (setting_name,)), params={'format': 'json'}) if response.status_code >= 200 and response.status_code < 300: @@ -269,7 +276,7 @@ class TMSSsession(object): def set_setting(self, setting_name: str, setting_value: bool) -> {}: """set a value for a TMSS setting. returns the setting value upon success, or raises.""" - response = self.session.patch(url='%s/setting/%s/' % (self.base_url, setting_name), + response = self.session.patch(url=self.get_full_url_for_path('/setting/%s/' % (setting_name,)), json={'value': setting_value}) if response.status_code >= 200 and response.status_code < 300: @@ -289,7 +296,7 @@ class TMSSsession(object): json_data['template'] = json.loads(template) if isinstance(template, str) else template json_data.update(**kwargs) - response = self.session.post(url='%s/%s/' % (self.base_url, template_path), json=json_data) + response = self.session.post(url=self.get_full_url_for_path(template_path), json=json_data) if response.status_code == 201: logger.info("created new template: %s", json.loads(response.text)['url']) else: @@ -302,7 +309,7 @@ class TMSSsession(object): new_feedback = feedback else: new_feedback = "%s\n%s" % (existing_feedback, feedback) - response = self.session.patch(url='%s/subtask/%s/' % (self.base_url, subtask_id), + response = self.session.patch(url=self.get_full_url_for_path('/subtask/%s/' % (subtask_id,)), json={'raw_feedback': new_feedback}, params={'format': 'json'}) @@ -316,7 +323,7 @@ class TMSSsession(object): def process_subtask_feedback_and_set_finished(self, subtask_id: int) -> {}: '''process the raw_feedback of a given subtask and set the subtask to finished on succes. 
Return the subtask with its new state, or raise an error''' - response = self.session.post(url='%s/subtask/%s/process_feedback_and_set_finished' % (self.base_url, subtask_id), + response = self.session.post(url=self.get_full_url_for_path('/subtask/%s/process_feedback_and_set_finished' % (subtask_id,)), params={'format': 'json'}) if response.status_code >= 200 and response.status_code < 300: diff --git a/SAS/TMSS/client/lib/tmssbuslistener.py b/SAS/TMSS/client/lib/tmssbuslistener.py index 81448e9a16c97e4cfb5f91213a218dde91f9edaf..75d63297e8d5dfff5403d560c6cbc3843ffcd71e 100644 --- a/SAS/TMSS/client/lib/tmssbuslistener.py +++ b/SAS/TMSS/client/lib/tmssbuslistener.py @@ -45,6 +45,7 @@ TMSS_TASKDRAFT_OBJECT_EVENT_PREFIX = _TMSS_EVENT_PREFIX_TEMPLATE % TMSS_SCHEDULINGUNITBLUEPRINT_OBJECT_EVENT_PREFIX = _TMSS_EVENT_PREFIX_TEMPLATE % 'SchedulingUnitBlueprint.Object' TMSS_SCHEDULINGUNITBLUEPRINT_STATUS_EVENT_PREFIX = _TMSS_EVENT_PREFIX_TEMPLATE % 'SchedulingUnitBlueprint.Status' TMSS_SCHEDULINGUNITDRAFT_OBJECT_EVENT_PREFIX = _TMSS_EVENT_PREFIX_TEMPLATE % 'SchedulingUnitDraft.Object' +TMSS_SETTING_OBJECT_EVENT_PREFIX = _TMSS_EVENT_PREFIX_TEMPLATE % 'Setting.Object' TMSS_ALL_OBJECT_EVENTS_FILTER = _TMSS_EVENT_PREFIX_TEMPLATE % '.*.Object.#' TMSS_ALL_STATUS_EVENTS_FILTER = _TMSS_EVENT_PREFIX_TEMPLATE % '.*.Status.#' TMSS_ALL_EVENTS_FILTER = _TMSS_EVENT_PREFIX_TEMPLATE % '#' @@ -55,13 +56,20 @@ class TMSSEventMessageHandler(AbstractMessageHandler): Base-type messagehandler for handling all TMSS event messages. Typical usage is to derive your own subclass from TMSSEventMessageHandler and implement the specific on<SomeMessage> methods that you are interested in. ''' + + def __init__(self, log_event_messages: bool=False) -> None: + self.log_event_messages = log_event_messages + super().__init__() + + def handle_message(self, msg: EventMessage): if not isinstance(msg, EventMessage): raise ValueError("%s: Ignoring non-EventMessage: %s" % (self.__class__.__name__, msg)) stripped_subject = msg.subject.replace(_TMSS_EVENT_PREFIX_TEMPLATE%('',), '') - logger.info("%s %s: %s" % (self.__class__.__name__, stripped_subject, single_line_with_single_spaces(msg.content))) + if self.log_event_messages: + logger.info("%s %s: %s" % (self.__class__.__name__, stripped_subject, single_line_with_single_spaces(msg.content))) # sorry, very big if/elif/else tree. # it just maps all possible event subjects for all possible objects and statuses onto handler methods. 
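Usage note for the TMSSEventMessageHandler changes above: handlers consume these events by subclassing and overriding only the hooks they need. A minimal sketch, assuming the module is importable as lofar.sas.tmss.client.tmssbuslistener (consistent with the client imports elsewhere in this change set); the class name and print statement are illustrative, not part of this change:

from lofar.sas.tmss.client.tmssbuslistener import TMSSEventMessageHandler

class SettingWatchingHandler(TMSSEventMessageHandler):
    def __init__(self):
        # opt in to per-event logging, which the change above makes optional (default: off)
        super().__init__(log_event_messages=True)

    def onSettingUpdated(self, name: str, value):
        # called via handle_message when a 'Setting.Object.Updated' event arrives (wired up in the hunk below)
        print("TMSS setting '%s' changed to '%s'" % (name, value))

Keeping the subject-to-method mapping centralized in handle_message is what lets subclasses stay this small.
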
@@ -93,6 +101,8 @@ class TMSSEventMessageHandler(AbstractMessageHandler): self.onSchedulingUnitDraftCreated(**msg.content) elif stripped_subject == 'SchedulingUnitDraft.Object.Updated': self.onSchedulingUnitDraftUpdated(**msg.content) + elif stripped_subject == 'SchedulingUnitDraft.Object.Constraints.Updated': + self.onSchedulingUnitDraftConstraintsUpdated(**msg.content) elif stripped_subject == 'SchedulingUnitDraft.Object.Deleted': self.onSchedulingUnitDraftDeleted(**msg.content) elif stripped_subject.startswith('SubTask.Status.'): @@ -101,6 +111,8 @@ class TMSSEventMessageHandler(AbstractMessageHandler): self.onTaskBlueprintStatusChanged(**msg.content) elif stripped_subject.startswith('SchedulingUnitBlueprint.Status.'): self.onSchedulingUnitBlueprintStatusChanged(**msg.content) + elif stripped_subject == 'Setting.Object.Updated': + self.onSettingUpdated(**msg.content) else: raise MessageHandlerUnknownSubjectError("TMSSBusListener.handleMessage: unknown subject: %s" % msg.subject) @@ -192,6 +204,12 @@ class TMSSEventMessageHandler(AbstractMessageHandler): ''' pass + def onSchedulingUnitDraftConstraintsUpdated(self, id: int, scheduling_constraints_doc: dict): + '''onSchedulingUnitDraftConstraintsUpdated is called upon receiving a SchedulingUnitDraft.Object.Constraints.Updated message, which is sent when the constraints on a SchedulingUnitDraft are updated. + :param id: the TMSS id of the SchedulingUnitDraft + ''' + pass + def onSchedulingUnitDraftDeleted(self, id: int): '''onSchedulingUnitDraftDeleted is called upon receiving a SchedulingUnitDraft.Object.Deleted message, which is sent when a SchedulingUnitDrafts was created. :param id: the TMSS id of the SchedulingUnitDraft @@ -216,6 +234,13 @@ class TMSSEventMessageHandler(AbstractMessageHandler): ''' pass + def onSettingUpdated(self, name: str, value): + '''onSettingUpdated is called upon receiving a Setting.Object.Updated message, which is sent when a Setting was updated.
+ :param name: the name of the Setting + ''' + pass + + class TMSSBusListener(BusListener): def __init__(self, diff --git a/SAS/TMSS/docker-compose-scu199.yml b/SAS/TMSS/docker-compose-scu199.yml index 0778331fa0f4cbdbc15cf49c1c3c88273b98b4db..85cfd2d27d6fd292129294551405937b511a07bf 100644 --- a/SAS/TMSS/docker-compose-scu199.yml +++ b/SAS/TMSS/docker-compose-scu199.yml @@ -7,7 +7,7 @@ services: env_file: - ./.env network_mode: "host" - command: bash -c 'source /opt/lofar/lofarinit.sh && ALLOWED_HOSTS=* tmss_test_environment -H 0.0.0.0 -P `hostname -f` -p 8008 --data' + command: bash -c 'source /opt/lofar/lofarinit.sh && ALLOWED_HOSTS=* tmss_test_environment -H 0.0.0.0 -P `hostname -f` -p 8008 -sSd' ports: - "8008:8008" testprovider: diff --git a/SAS/TMSS/frontend/tmss_webapp/package.json b/SAS/TMSS/frontend/tmss_webapp/package.json index d2e77ea1090cfdd2614c1bcfbd452cab25913554..8230388bc6f2c1f16805cf9a25ba4f05f6b6b000 100644 --- a/SAS/TMSS/frontend/tmss_webapp/package.json +++ b/SAS/TMSS/frontend/tmss_webapp/package.json @@ -9,6 +9,7 @@ "@apidevtools/json-schema-ref-parser": "^9.0.6", "@fortawesome/fontawesome-free": "^5.13.1", "@json-editor/json-editor": "^2.3.0", + "@kevincobain2000/json-to-html-table": "^1.0.1", "@testing-library/jest-dom": "^4.2.4", "@testing-library/react": "^9.3.2", "@testing-library/user-event": "^7.1.2", @@ -45,6 +46,7 @@ "react-transition-group": "^2.5.1", "reactstrap": "^8.5.1", "styled-components": "^5.1.1", + "suneditor-react": "^2.14.4", "typescript": "^3.9.5", "yup": "^0.29.1" }, diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js b/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js index 55e28fb7b7e7ed6c24d1bec42d4ff6a0bcca47bd..e9c0b245f5eaeeaf5bd579a4c42c6b67e46eae44 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/JSONEditor/JEditor.js @@ -195,7 +195,9 @@ function Jeditor(props) { */ function setEditorOutput(){ const editorOutput = editorRef.current.getValue(); - const formattedOutput = updateOutput(_.cloneDeep(editorOutput)); + /* Sends editor output without formatting if requested */ + const formatOutput = props.formatOutput===undefined?true:props.formatOutput; + const formattedOutput = formatOutput?updateOutput(_.cloneDeep(editorOutput)):_.cloneDeep(editorOutput); const editorValidationErrors = editorRef.current.validate(); if (props.callback) { // editorRef.current for accessing fields in parent to add classname for enabling and disabling diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js b/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js index a045bc5f47a2b7559902a5ba1f0b9f996bafff9a..d611b7857cf365ed32c8eee56b0936e8eb7e5b87 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/Timeline/CalendarTimeline.js @@ -5,7 +5,8 @@ import Timeline, { SidebarHeader, DateHeader, CustomMarker, - CursorMarker + CursorMarker, + CustomHeader } from 'react-calendar-timeline'; import containerResizeDetector from 'react-calendar-timeline/lib/resize-detector/container'; import moment from 'moment'; @@ -73,7 +74,7 @@ export class CalendarTimeline extends Component { group: group, items: props.items || [], //>>>>>> Properties to pass to react-calendar-timeline component - stackItems: props.stackItems || true, + stackItems: props.stackItems || false, zoomAllowed: props.zoomAllowed || true, minZoom: 
props.minZoom || (1 * 60 * 1000), // One Minute maxZoom: props.maxZoom || (32 * 24 * 60 * 60 * 1000), // 32 hours @@ -83,7 +84,7 @@ export class CalendarTimeline extends Component { prevZoomRange: null, lineHeight: props.rowHeight || 50, // Row line height sidebarWidth: props.sidebarWidth || 200, - timeSteps: props.timeSteps || {minute: 60}, + timeSteps: props.timeSteps || {minute: 1}, canMove: props.itemsMovable || false, canResize: props.itemsResizable || false, canchangeGroup: props.itemGroupChangeable || true, @@ -126,6 +127,7 @@ export class CalendarTimeline extends Component { this.renderLSTDateHeader = this.renderLSTDateHeader.bind(this); this.renderCursor = this.renderCursor.bind(this); this.renderItem = this.renderItem.bind(this); + this.renderNormalSuntimeHeader = this.renderNormalSuntimeHeader.bind(this); //<<<<<<< Custom Renderer Functions //>>>>>> Functions of this component @@ -183,7 +185,7 @@ export class CalendarTimeline extends Component { currentLST: this.state.currentLST?this.state.currentLST.add(1, 'second'):null}); } if (this.state.isLive) { - this.props.dateRangeCallback(this.state.defaultStartTime.add(1, 'second'), this.state.defaultEndTime.add(1, 'second')); + this.changeDateRange(this.state.defaultStartTime.add(1, 'second'), this.state.defaultEndTime.add(1, 'second')); // const result = this.props.dateRangeCallback(this.state.defaultStartTime.add(1, 'second'), this.state.defaultEndTime.add(1, 'second')); // let group = DEFAULT_GROUP.concat(result.group); } @@ -257,6 +259,14 @@ export class CalendarTimeline extends Component { :`Week (${this.state.timelineStartDate.week()}) / Day`}</div> <div style={{height:'30px'}}>{this.state.dayHeaderVisible?`UTC(Hr)`:`UTC(Day)`}</div> <div style={{height:'30px'}}>{this.state.dayHeaderVisible?`LST(Hr)`:`LST(Day)`}</div> + {this.state.viewType === UIConstants.timeline.types.NORMAL && + <div className="p-grid" + style={{height:this.props.showSunTimings?'30px':'0px', paddingTop:'10px', paddingLeft:'10px'}}> + <div className="col-4" style={{marginTop:'2px', paddingLeft:'5px', backgroundColor:'yellow', color: '#212529'}}>Sunrise</div> + <div className="col-4" style={{marginTop:'2px', paddingLeft:'5px', backgroundColor:'orange', color: '#212529'}}>Sunset</div> + <div className="col-4" style={{marginTop:'2px', paddingLeft:'5px', backgroundColor:'blue'}}>Night</div> + </div> + } </div> ); } @@ -345,7 +355,7 @@ export class CalendarTimeline extends Component { return <div {...getIntervalProps()} className="rct-dateHeader" style={divStyle}> { (this.state.timeHeaderLabelVisibile)? (showBorder)? 
- <span> + <span key={`utchead-${displayValue}`}> {displayValue} </span>: <> @@ -420,6 +430,121 @@ } } + /** Custom renderer to show sunrise, sunset and night times */ + renderNormalSuntimeHeader({ + headerContext: { intervals }, + getRootProps, + getIntervalProps, + showPeriod, + data, + }) { + const sunTimeMap = this.state.sunTimeMap; + return ( + <div {...getRootProps()}> + {intervals.map(interval => { + const dayStyle = { + lineHeight: '30px', + backgroundColor: 'white', + color: 'white' + } + const nightStyle = { + lineHeight: '30px', + backgroundColor: 'blue', + color: 'blue' + } + const sunriseStyle = { + lineHeight: '30px', + backgroundColor: 'yellow', + color: 'yellow' + } + const sunsetStyle = { + lineHeight: '30px', + backgroundColor: 'orange', + color: 'orange' + } + // Get the intervals UTC date format and time + const intervalDate = interval.startTime.clone().utc().format("YYYYMMDDT12:00:00"); + const intervalTime = interval.startTime.clone().utc(); + // Get the suntime for the UTC date + const intervalDateSunTime = sunTimeMap[intervalDate]; + let intervalStyle = dayStyle; + // If suntime is available display suntime blocks + if (intervalDateSunTime) { + // Set 15 minutes duration for sunrise and sunset and create blocks accordingly + if (intervalTime.isBefore(intervalDateSunTime.sunrise) || + intervalTime.isAfter(intervalDateSunTime.sunset.clone().add(14, 'minutes'))) { + intervalStyle = nightStyle; + } else if (intervalTime.isSame(intervalDateSunTime.sunrise) || + intervalTime.isBefore(intervalDateSunTime.sunrise.clone().add(15, 'minutes'))) { + intervalStyle = sunriseStyle; + } else if (intervalTime.isSame(intervalDateSunTime.sunset) || + (intervalTime.isAfter(intervalDateSunTime.sunset) && + intervalTime.isBefore(intervalDateSunTime.sunset.clone().add(15, 'minutes')))) { + intervalStyle = sunsetStyle; + } + return ( + <div + {...getIntervalProps({ + interval, + style: intervalStyle + })} + > + </div> + ) + } else { + return (""); + } + })} + </div> + ) + } + + /** + * Function to render sunrise timings on the timeline view in normal view. + * @param {Array} sunRiseTimings + */ + renderSunriseMarkers(sunRiseTimings) { + return ( + <> + {sunRiseTimings && sunRiseTimings.length>0 && sunRiseTimings.map((item, index) => ( + <CustomMarker key={"sunrise-"+index} date={item}> + {({ styles, date }) => { + const customStyles = { + ...styles, + backgroundColor: 'yellow', + width: '3px' + } + return <div style={customStyles} /> + }} + </CustomMarker> + ))} + </> + ); + } + + /** + * Function to render sunset timings on the timeline view in normal view. + * @param {Array} sunSetTimings + */ + renderSunsetMarkers(sunSetTimings) { + return ( + <> + {sunSetTimings && sunSetTimings.length>0 && sunSetTimings.map((item, index) => ( + <CustomMarker key={"sunset-"+index} date={item}> + {({ styles, date }) => { + const customStyles = { + ...styles, + backgroundColor: 'orange', + width: '3px' + } + return <div style={customStyles} /> + }} + </CustomMarker> + ))} + </> + ); + } + /** Custom Render function to pass to the CursorMarker component to display cursor labels on cursor movement */ renderCursor({ styles, date }) { const utc = moment(date).utc(); @@ -474,13 +599,16 @@ export class CalendarTimeline extends Component { color: item.color, // borderColor, borderStyle: "solid", - borderWidth: 1, + borderWidth: item.type==="SUNTIME"?0:0, borderRadius: 3, borderLeftWidth: itemContext.selected ? 3 : 1, - borderRightWidth: itemContext.selected ?
3 : 1 + borderRightWidth: itemContext.selected ? 3 : 1, + opacity: item.type==="SUNTIME"?0.6:1 }, onMouseDown: () => { - this.onItemClick(item); + if (item.type !== "SUNTIME") { + this.onItemClick(item); + } } })} > @@ -550,7 +678,7 @@ export class CalendarTimeline extends Component { } this.loadLSTDateHeaderMap(newVisibleTimeStart, newVisibleTimeEnd, this.state.lstDateHeaderUnit); updateScrollCanvas(newVisibleTimeStart.valueOf(), newVisibleTimeEnd.valueOf()); - this.props.dateRangeCallback(newVisibleTimeStart, newVisibleTimeEnd); + this.changeDateRange(newVisibleTimeStart, newVisibleTimeEnd); // this.setState({defaultStartTime: moment(visibleTimeStart), defaultEndTime: moment(visibleTimeEnd)}) this.setState({defaultStartTime: newVisibleTimeStart, defaultEndTime: newVisibleTimeEnd}); } @@ -565,6 +693,83 @@ export class CalendarTimeline extends Component { } } + /** + * Function to call the parent function callback and fetch new data. It also retrieves sunrise and sunset time. + * @param {moment} startTime + * @param {moment} endTime + */ + async changeDateRange(startTime, endTime, refreshData) { + if (this.props.showSunTimings && this.state.viewType===UIConstants.timeline.types.NORMAL) { + this.setNormalSuntimings(startTime, endTime); + } + const result = await this.props.dateRangeCallback(startTime, endTime, refreshData); + if (!this.props.showSunTimings && this.state.viewType === UIConstants.timeline.types.NORMAL) { + result.items = await this.addStationSunTimes(startTime, endTime, result.group, result.items); + } + return result; + } + + /** + * Function to set sunrise and sunset timings in Normal view. + * @param {moment} startTime + * @param {moment} endTime + */ + setNormalSuntimings(startTime, endTime) { + let sunRiseTimings = [], sunSetTimings = [], sunTimeMap={}; + const noOfDays = endTime.diff(startTime, 'days'); + for (const number of _.range(noOfDays+1)) { + const date = startTime.clone().add(number, 'days').hours(12).minutes(0).seconds(0); + const formattedDate = date.format("YYYYMMDDTHH:mm:ss"); + UtilService.getSunTimings(formattedDate+"Z").then(timings => { + const sunriseTime = moment.utc(timings.sun_rise.split('.')[0]); + const sunsetTime = moment.utc(timings.sun_set.split('.')[0]); + if (moment.utc(timings.sun_rise).isAfter(startTime)) { + sunRiseTimings.push(sunriseTime); + } + if (moment.utc(timings.sun_set).isBefore(endTime)) { + sunSetTimings.push(sunsetTime); + } + sunTimeMap[formattedDate] = {sunrise: sunriseTime, sunset: sunsetTime}; + this.setState({sunRiseTimings: sunRiseTimings, sunSetTimings: sunSetTimings, sunTimeMap: sunTimeMap}); + }); + } + } + + async addStationSunTimes(startTime, endTime, stationGroup, items) { + const noOfDays = endTime.diff(startTime, 'days'); + let sunItems = _.cloneDeep(items); + for (const number of _.range(noOfDays+1)) { + for (const station of stationGroup) { + const date = startTime.clone().add(number, 'days').hours(12).minutes(0).seconds(0); + const timings = await UtilService.getSunTimings(date.format("YYYYMMDDTHH:mm:ss")+"Z", station.id); + let sunriseItem = { id: `sunrise-${number}-${station.id}`, + group: station.id, + title: timings.sun_rise, + project: "", + name: "", + duration: "", + start_time: moment.utc(timings.sun_rise), + end_time: moment.utc(timings.sun_rise).add(5, 'minutes'), + bgColor: "yellow", + selectedBgColor: "yellow", + type: "SUNTIME"}; + sunItems.push(sunriseItem); + let sunsetItem = _.cloneDeep(sunriseItem); + sunsetItem.id = `sunset-${number}-${station.id}`; + sunsetItem.start_time = 
moment.utc(timings.sun_set); + sunsetItem.end_time = moment.utc(timings.sun_set).add(5, 'minutes'); + sunsetItem.bgColor = "orange"; + sunsetItem.selectedBgColor = "orange"; + sunItems.push(sunsetItem); + + } + } + if (!this.props.showSunTimings && this.state.viewType === UIConstants.timeline.types.NORMAL) { + items = sunItems; + } + return items; + } + /** * Resets the timeline view to default zoom and move to the current timeline */ @@ -572,7 +777,7 @@ if (this.state.viewType===UIConstants.timeline.types.NORMAL) { const startTime = moment().utc().add(-24, 'hours'); const endTime = moment().utc().add(24, 'hours'); - let result = await this.props.dateRangeCallback(startTime, endTime); + let result = await this.changeDateRange(startTime, endTime); let group = DEFAULT_GROUP.concat(result.group); this.setState({defaultStartTime: startTime, defaultEndTime: endTime, zoomLevel: DEFAULT_ZOOM_LEVEL, dayHeaderVisible: true, @@ -596,6 +801,7 @@ async changeZoomLevel(zoomLevel, isTimelineZoom) { zoomLevel = zoomLevel?zoomLevel: DEFAULT_ZOOM_LEVEL; const newZoomLevel = _.find(ZOOM_LEVELS, {'name': zoomLevel}); + this.setState({isTimelineZoom: isTimelineZoom}); let startTime = this.state.defaultStartTime; let endTime = this.state.defaultEndTime; if (zoomLevel === 'Custom') { @@ -626,10 +832,10 @@ } } this.loadLSTDateHeaderMap(startTime, endTime, 'hour'); - let result = await this.props.dateRangeCallback(startTime, endTime); + let result = await this.changeDateRange(startTime, endTime); let group = DEFAULT_GROUP.concat(result.group); this.setState({zoomLevel: zoomLevel, defaultStartTime: startTime, defaultEndTime: endTime, - isTimelineZoom: isTimelineZoom, zoomRange: null, + isTimelineZoom: true, zoomRange: null, dayHeaderVisible: true, weekHeaderVisible: false, lstDateHeaderUnit: 'hour', group: group, items: result.items}); } @@ -650,7 +856,7 @@ newVisibleTimeStart = this.state.timelineStartDate.clone().hours(0).minutes(0).seconds(0); newVisibleTimeEnd = newVisibleTimeStart.clone().add(visibleTimeDiff/1000, 'seconds'); } - let result = await this.props.dateRangeCallback(newVisibleTimeStart, newVisibleTimeEnd); + let result = await this.changeDateRange(newVisibleTimeStart, newVisibleTimeEnd); this.loadLSTDateHeaderMap(newVisibleTimeStart, newVisibleTimeEnd, 'hour'); let group = DEFAULT_GROUP.concat(result.group); this.setState({defaultStartTime: newVisibleTimeStart, @@ -673,7 +879,7 @@ newVisibleTimeEnd = this.state.timelineEndDate.clone().hours(23).minutes(59).minutes(59); newVisibleTimeStart = newVisibleTimeEnd.clone().add((-1 * visibleTimeDiff/1000), 'seconds'); } - let result = await this.props.dateRangeCallback(visibleTimeStart, visibleTimeEnd); + let result = await this.changeDateRange(visibleTimeStart, visibleTimeEnd); this.loadLSTDateHeaderMap(newVisibleTimeStart, newVisibleTimeEnd, 'hour'); let group = DEFAULT_GROUP.concat(result.group); this.setState({defaultStartTime: newVisibleTimeStart, @@ -731,7 +937,7 @@ dayHeaderVisible: dayHeaderVisible, weekHeaderVisible: weekHeaderVisible, lstDateHeaderUnit: lstDateHeaderUnit }); - const result = await this.props.dateRangeCallback(startDate, endDate); + const result = await this.changeDateRange(startDate, endDate); let group =
DEFAULT_GROUP.concat(result.group); this.setState({group: group, items: result.items}); this.loadLSTDateHeaderMap(startDate, endDate, lstDateHeaderUnit); @@ -749,7 +955,7 @@ let endDate = this.state.group[this.state.group.length-1].value.clone().add(direction * 7, 'days').hours(23).minutes(59).seconds(59); let timelineStart = this.state.timelineStartDate.clone().add(direction * 7, 'days'); let timelineEnd = this.state.timelineEndDate.clone().add(direction * 7, 'days'); - const result = await this.props.dateRangeCallback(startDate, endDate, true); + const result = await this.changeDateRange(startDate, endDate, true); let group = DEFAULT_GROUP.concat(result.group); let dayHeaderVisible = this.state.dayHeaderVisible; let weekHeaderVisible = this.state.weekHeaderVisible; @@ -772,7 +978,12 @@ * as objects * @param {Object} props */ - updateTimeline(props) { + async updateTimeline(props) { + if (!this.props.showSunTimings && this.state.viewType === UIConstants.timeline.types.NORMAL) { + props.items = await this.addStationSunTimes(this.state.defaultStartTime, this.state.defaultEndTime, props.group, props.items); + } else if(this.props.showSunTimings && this.state.viewType === UIConstants.timeline.types.NORMAL) { + this.setNormalSuntimings(this.state.defaultStartTime, this.state.defaultEndTime); + } this.setState({group: DEFAULT_GROUP.concat(props.group), items: props.items}); } @@ -884,6 +1095,13 @@ // This method will render once but will not update the values after fetching from server // <DateHeader unit={this.state.lstDateHeaderUnit} intervalRenderer={this.renderLSTDateHeader}></DateHeader> } + {/* Suntime Header in normal view with sunrise, sunset and night time */} + {this.props.showSunTimings && this.state.viewType === UIConstants.timeline.types.NORMAL && this.state.sunTimeMap && + <CustomHeader height={30} unit="minute" + children={({ headerContext: { intervals }, getRootProps, getIntervalProps, showPeriod, data})=> { + return this.renderNormalSuntimeHeader({ headerContext: { intervals }, getRootProps, getIntervalProps, showPeriod, data})}}> + </CustomHeader> + } </TimelineHeaders> <TimelineMarkers> @@ -898,6 +1116,15 @@ return <div style={customStyles} /> }} </CustomMarker> + {/* Show sunrise and sunset markers for normal timeline view (not station view and week view) */} + {this.props.showSunTimings && this.state.viewType===UIConstants.timeline.types.NORMAL && + <> + {/* Sunrise time line markers */} + { this.renderSunriseMarkers(this.state.sunRiseTimings)} + {/* Sunset time line markers */} + { this.renderSunsetMarkers(this.state.sunSetTimings)} + </> + } {this.state.showCursor?
<CursorMarker> {this.renderCursor} diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/_overrides.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/_overrides.scss index 330a00a4d7520597c039ec2d3cb010075797b93b..4ec1204d72a8ead8c5565e6457231059a9e82108 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/layout/_overrides.scss +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/_overrides.scss @@ -109,6 +109,34 @@ .hide { display: none !important; } +.grouping { + padding: 0 15px; +} +.grouping fieldset { + border-width: 1px; + border-style: double; + border-color: #ccc; + border-image: initial; + padding: 10px; + width: 100%; +} +.grouping legend { + width: auto; +} + +.comments{ + margin: 90px; + position: absolute; + margin-left: 15px; +} +.qaButton{ + margin-left: 0; + position: relative; + top: 350px; +} +.plots{ + padding-left: 2px; +} diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/layout.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/layout.scss index 5980a6378545ea73371eef791355b27aba58dd6d..e098f79b4c332c0f90d52c99d6ed41fe01215e72 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/layout/layout.scss +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/layout.scss @@ -1,3 +1,4 @@ @import "./_variables"; @import "./sass/_layout"; -@import "./_overrides"; \ No newline at end of file +@import "./_overrides"; +@import "./sass/stations"; \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_layout.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_layout.scss index 0f0e755f9b2b3bd8503ab89d0d9a195b54186fec..302208b3dad39e6c4ebf092e614e502fbd131e37 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_layout.scss +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_layout.scss @@ -14,5 +14,6 @@ @import "./_viewtable"; @import "./_pageheader"; @import "./timeline"; -@import "./_aggrid" +@import "./_aggrid"; +@import "./suSummary"; // @import "./splitpane"; diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_stations.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_stations.scss new file mode 100644 index 0000000000000000000000000000000000000000..e0c2e01575b7d261bb353d311a22730592c8af06 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_stations.scss @@ -0,0 +1,84 @@ +.grouping { + padding: 0 15px; +} +.grouping fieldset { + border-width: 1px; + border-style: double; + border-color: #ccc; + border-image: initial; + padding: 10px; + width: 100%; +} +.grouping legend { + width: auto; +} +.grouping .selected_stations { + margin-top: 10px; +} +.selected_stations .info { + background-color: transparent !important; + border: none !important; + padding: 0; + width: auto !important; + top: 2px; + span { + font-size: 14px !important; + padding: 0 !important; + } +} +.text-caps { + text-transform: capitalize; +} +.station-container { + padding: 10px; + max-height: 200px; + overflow-y: auto; + label { + display: block; + } +} +.custom-label { + padding-left: 8px !important; +} +.custom-value { + padding-right: 8px !important; +} +.custom-field { + padding-left: 30px !important; +} +.error-message { + font-size: 12px; + color: red; +} +.custom-missingstation-label{ + padding-left: 4px !important; +} + +.customMissingStationLabel{ + padding-left: 22px !important; +} +#missingStation{ + width: 110%; +} +.station_header { + // padding-left: 22px !important; +} +#stationgroup-label{ + padding-left: 5px; +} +.custom-station-wrapper { + position: relative; +} +.custom-remove { + position: absolute; + left: -12px; + background-color: transparent 
!important; + border: none !important; + padding: 0; + width: auto !important; + top: 2px; + span { + font-size: 14px !important; + padding: 0 !important; + } +} diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_suSummary.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_suSummary.scss new file mode 100644 index 0000000000000000000000000000000000000000..c2fb7b46ab00ed3721db70e61b834f5c9d17b412 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_suSummary.scss @@ -0,0 +1,27 @@ +.constraints-summary>div { + overflow: scroll; + max-height: 500px; + margin-bottom: 10px; +} + +.constraints-summary table { + border: 1px solid lightgray; + text-transform: capitalize; + margin-bottom: 0px; +} + +.table-sm>tbody>tr>td:first-child { + font-weight: bold; +} + +.table-sm>tbody>tr>td>span { + font-weight: normal; +} + +.task-summary>label { + margin-bottom: 0px; +} + +.task-summary #block_container { + margin-top: 0px; +} \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss index 1c5e635be11f8ff590511f5b9407cbc7242a250a..25e0ca50ba4e4546260f615e0ccb3150dc01d50a 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_timeline.scss @@ -1,3 +1,9 @@ +.sticky { + position: sticky; + top:49px; + z-index:1000; +} + .rct-sidebar-row { font-size: 14px; } @@ -6,6 +12,10 @@ background-color: #f0f0f0; } +.rct-item { + border: none !important; +} + .timeline-view-toolbar { margin-left: 10px; } diff --git a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_topbar.scss b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_topbar.scss index 310133c2db197d0c33a8db3e045e256ffe19c868..e687ef7dae3dec800531b48174fbc34b7790b0dc 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_topbar.scss +++ b/SAS/TMSS/frontend/tmss_webapp/src/layout/sass/_topbar.scss @@ -3,7 +3,7 @@ height: 50px; padding: .7em 1.5em 0em 1.5em; color: #ffffff; - z-index: 999; + z-index: 2000; right: 0; @include clearfix(); @include linear-gradient-left($topbarLeftBgColor,$topbarRightBgColor); diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js index a19914a42ebf219f36ff9f1c38369ff082b01eba..5de435d85b1f8e16a09f7e43c85ffaffd3da6227 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Scheduling.Constraints.js @@ -187,6 +187,7 @@ export default (props) => { callback: onEditForm, initValue: initialValue, disabled: props.disable, + formatOutput: props.formatOutput, parentFunction: parentFunction })} </> diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Stations.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Stations.js new file mode 100644 index 0000000000000000000000000000000000000000..7a7a354a7fd5872a6ee6256abba1b92749a22538 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/Stations.js @@ -0,0 +1,340 @@ + + +import React, { useState, useEffect } from 'react'; +import _ from 'lodash'; +import {MultiSelect} from 'primereact/multiselect'; +import { OverlayPanel } from 'primereact/overlaypanel'; +import {InputText} from 'primereact/inputtext'; +import { Button } from 'primereact/button'; +import UIConstants from '../../utils/ui.constants'; +import ScheduleService from 
'../../services/schedule.service'; +/* eslint-disable react-hooks/exhaustive-deps */ +/* +const props = { + selectedStations, + stationOptions, + selectedStrategyId, + observStrategies, + customStations +} +*/ + +export default (props) => { + const { tooltipOptions } = UIConstants; + let op; + + const [selectedStations, setSelectedStations] = useState([]); + const [stationOptions, setStationOptions] = useState([]); + const [customStations, setCustomStations] = useState([]); + const [customStationsOptions, setCustomStationsOptions] = useState([]); + const [stations, setStations] = useState([]); + const [missing_StationFieldsErrors, setmissing_StationFieldsErrors] = useState([]); + const [state, setState] = useState({ + Custom: { + stations: [] + } + }); + + useEffect(() => { + if (props.stationGroup && props.stationGroup.length) { + getAllStations(); + } else { + reset(); + } + }, [props.stationGroup]); + + // Resetting the stations + const reset = () => { + setStations([]); + setSelectedStations([]); + setStationOptions([]); + setCustomStations([]); + setCustomStationsOptions([]); + setmissing_StationFieldsErrors([]); + }; + + /** + * Fetches all stations + */ + const getAllStations = async () => { + const stationGroup = await ScheduleService.getStationGroup(); + + const promises = []; + stationGroup.forEach(st => { + promises.push(ScheduleService.getStations(st.value)) + }); + Promise.all(promises).then(responses => { + getStationsDetails(stationGroup, responses); + }); + setStationOptions(stationGroup); + }; + + /** + * Construct and set appropriate values for each station by finding the station in station_group + * like error, missing fields, etc. + * Also constructs the stations for the custom group by merging all the stations + */ + const getStationsDetails = (stations, responses) => { + let stationState = { + Custom: { + stations: [] + } + }; + let custom_Stations = []; + setStationOptions(stations); + let selected_Stations = []; + responses.forEach((response, index) => { + const StationName = stations[index].value; + const missing_StationFields = props.stationGroup.find(i => { + if (i.stations.length === response.stations.length && i.stations[0] === response.stations[0]) { + i.stationType = StationName; + return true; + } + return false; + }); + // If missing fields are present, it matched a station type; otherwise it is a custom group... + if (missing_StationFields) { + selected_Stations = [...selected_Stations, StationName]; + } + stationState ={ + ...stationState, + [StationName]: { + stations: response.stations, + missing_StationFields: missing_StationFields ? missing_StationFields.max_nr_missing : '' + }, + Custom: { + stations: [...stationState['Custom'].stations, ...response.stations], + }, + }; + // Collect in a Set to avoid duplicates; otherwise we would have to loop multiple times.
+ custom_Stations = new Set([...custom_Stations, ...response.stations]); + }); + // Find the custom one + const custom_stations = props.stationGroup.filter(i => !i.stationType); + stationState = { + ...stationState + }; + setCustomStations(custom_stations); + setSelectedStationGroup([...selected_Stations]); + setState(stationState); + let custom_stations_options = Array.from(custom_Stations); + // Changing array of strings into array of objects to support filter in primereact multiselect + custom_stations_options = custom_stations_options.map(i => ({ value: i })); + setCustomStationsOptions(custom_stations_options); + if (props.onUpdateStations) { + updateSchedulingComp(stationState, [...selected_Stations], missing_StationFieldsErrors, custom_stations); + } + }; + + /** + * Method will trigger on change of station group multiselect. + * At the same time it will update the parent component + * *param value* -> array of strings + */ + const setSelectedStationGroup = (value) => { + setSelectedStations(value); + if (props.onUpdateStations) { + updateSchedulingComp(state, value, missing_StationFieldsErrors, customStations); + } + }; + + /** + * Method will trigger on change of custom station dropdown. + */ + const onChangeCustomSelectedStations = (value, index) => { + const custom_selected_options = [...customStations]; + custom_selected_options[index].stations = value; + if (value < custom_selected_options[index].max_nr_missing || !value.length) { + custom_selected_options[index].error = true; + } else { + custom_selected_options[index].error = false; + } + setCustomStations(custom_selected_options); + updateSchedulingComp(state, selectedStations, missing_StationFieldsErrors, custom_selected_options); + }; + + /** + * Method will trigger on click of info icon to show overlay + * param event -> HTML event object + * param key -> string - selected station + */ + const showStations = (event, key) => { + op.toggle(event); + setStations((state[key] && state[key].stations ) || []); + }; + + /** + * Method will trigger on change of missing fields. + * Stores all field errors in an array of strings to enable/disable the save button.
+ */ + const setNoOfmissing_StationFields = (key, value) => { + let cpmissing_StationFieldsErrors = [...missing_StationFieldsErrors]; + if (value > state[key].stations.length || value === '') { + if (!cpmissing_StationFieldsErrors.includes(key)) { + cpmissing_StationFieldsErrors.push(key); + } + } else { + cpmissing_StationFieldsErrors = cpmissing_StationFieldsErrors.filter(i => i !== key); + } + setmissing_StationFieldsErrors(cpmissing_StationFieldsErrors); + const stationState = { + ...state, + [key]: { + ...state[key], + missing_StationFields: value, + error: value > state[key].stations.length || value === '' + }, + }; + setState(stationState); + if (props.onUpdateStations) { + updateSchedulingComp(stationState, selectedStations, cpmissing_StationFieldsErrors, customStations); + } + } + + /** + * Method will trigger on change of missing fields in a custom group + * @param {*} value string + * @param {*} index number + */ + const setMissingFieldsForCustom = (value, index) => { + const custom_selected_options = [...customStations]; + if (value > custom_selected_options[index].stations.length || value === '' || !custom_selected_options[index].stations.length) { + custom_selected_options[index].error = true; + } else { + custom_selected_options[index].error = false; + } + custom_selected_options[index].touched = true; + custom_selected_options[index].max_nr_missing = value; + setCustomStations(custom_selected_options); + updateSchedulingComp(state, selectedStations, missing_StationFieldsErrors, custom_selected_options); + }; + + /** + * Method will trigger on click of Add Custom + */ + const addCustom = () => { + const custom_selected_options = [...customStations]; + custom_selected_options.push({ + stations: [], + max_nr_missing: 0, + error: true + }); + setCustomStations(custom_selected_options); + updateSchedulingComp(state, selectedStations, missing_StationFieldsErrors, custom_selected_options); + }; + + const updateSchedulingComp = (param_State, param_SelectedStations, param_missing_StationFieldsErrors, param_Custom_selected_options) => { + const isError = param_missing_StationFieldsErrors.length || param_Custom_selected_options.filter(i => i.error).length; + props.onUpdateStations(param_State, param_SelectedStations, isError, param_Custom_selected_options); + }; + /** + * Method to remove the custom stations + * @param {*} index number + */ + const removeCustomStations = (index) => { + const custom_selected_options = [...customStations]; + custom_selected_options.splice(index,1); + setCustomStations(custom_selected_options); + updateSchedulingComp(state, selectedStations, missing_StationFieldsErrors, custom_selected_options); + }; + + return ( + <div className="p-field p-grid grouping p-fluid"> + <fieldset> + <legend> + <label>Stations<span style={{color:'red'}}>*</span></label> + </legend> + {!props.view && <div className="col-sm-12 p-field p-grid" data-testid="stations"> + <div className="col-md-6 d-flex"> + <label htmlFor="stationgroup" className="col-sm-6 station_header">Station Groups</label> + <div className="col-sm-6"> + <MultiSelect data-testid="stations" id="stations" optionLabel="value" optionValue="value" filter={true} + tooltip="Select Stations" tooltipOptions={tooltipOptions} + value={selectedStations} + options={stationOptions} + placeholder="Select Stations" + onChange={(e) => setSelectedStationGroup(e.value)} + /> + </div> + </div> + <div className="add-custom"> + <Button onClick={addCustom} label="Add Custom" icon="pi pi-plus" disabled={!stationOptions.length}/>
+ </div> + </div>} + {selectedStations.length ? <div className="col-sm-12 selected_stations" data-testid="selected_stations"> + {<div className="col-sm-12"><label style={{paddingLeft: '8px'}}>Maximum number of stations that can be missed in the selected groups</label></div>} + <div className="col-sm-12 p-0 d-flex flex-wrap"> + {selectedStations.map(i => ( + <div className="p-field p-grid col-md-6" key={i}> + <label className="col-sm-6 text-caps"> + {i} + <Button icon="pi pi-info-circle" className="p-button-rounded p-button-secondary p-button-text info" onClick={(e) => showStations(e, i)} /> + </label> + <div className="col-sm-6"> + <InputText id="missingstation" data-testid="name" + className={(state[i] && state[i].error) ?'input-error':''} + tooltip="Max No. of Missing Stations" tooltipOptions={tooltipOptions} maxLength="128" + placeholder="Max No. of Missing Stations" + value={state[i] ? state[i].missing_StationFields : ''} + disabled={props.view} + onChange={(e) => setNoOfmissing_StationFields(i, e.target.value)}/> + {(state[i] && state[i].error) && <span className="error-message">{state[i].missing_StationFields ? `Max. no of missing stations is ${state[i] ? state[i].stations.length : 0}` : 'Max. no of missing stations required'}</span>} + </div> + </div> + ))} + {customStations.map((stat, index) => ( + <div className="p-field p-grid col-md-12 custom-station-wrapper" key={index}> + {!props.view && <Button icon="pi pi-trash" className="p-button-secondary p-button-text custom-remove" onClick={() => removeCustomStations(index)} />} + + <div className="col-md-6 p-field p-grid"> + <label className="col-sm-6 text-caps custom-label"> + Custom {index + 1} + </label> + <div className="col-sm-6 pr-8 custom-value"> + <MultiSelect data-testid="custom_stations" id="custom_stations" filter + tooltip="Select Stations" tooltipOptions={tooltipOptions} + value={stat.stations} + options={customStationsOptions} + placeholder="Select Stations" + disabled={props.view} + optionLabel="value" + optionValue="value" + onChange={(e) => onChangeCustomSelectedStations(e.value, index)} + /> + </div> + </div> + <div className="col-md-6 p-field p-grid"> + <label className="col-sm-6 customMissingStationLabel"> + Maximum No. Of Missing Stations + </label> + <div className="col-sm-6 pr-8 custom-field"> + <InputText id="missingStation" data-testid="name" + className={(stat.error && stat.touched) ?'input-error':''} + tooltip="Max Number of Missing Stations" tooltipOptions={tooltipOptions} + placeholder="Max Number of Missing Stations" + value={stat.max_nr_missing} + disabled={props.view} + onChange={(e) => setMissingFieldsForCustom(e.target.value, index)}/> + {(stat.error && stat.touched) && <span className="error-message">{stat.max_nr_missing ? `Max. no of missing stations is ${stat.stations.length}` : 'Max. no of missing stations required'}</span>} + {/* {props.view && + <span className="info">Max No. 
of Missing Stations</span>} */} + + </div> + </div> + </div> + ))} + </div> + + </div> : null} + <OverlayPanel ref={(el) => op = el} dismissable style={{width: '450px'}}> + <div className="station-container"> + {(stations || []).map(i => ( + <label>{i}</label> + ))} + </div> + </OverlayPanel> + </fieldset> + </div> + ); +}; \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js index c9145d81439dfe3417fbba1765138d492bd2321f..a76501c5c154cadef3782037eb877ff3db06c865 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/ViewSchedulingUnit.js @@ -9,9 +9,11 @@ import PageHeader from '../../layout/components/PageHeader'; import ViewTable from './../../components/ViewTable'; import ScheduleService from '../../services/schedule.service'; import moment from 'moment'; +import _ from 'lodash'; import SchedulingConstraint from './Scheduling.Constraints'; import { Dialog } from 'primereact/dialog'; import TaskStatusLogs from '../Task/state_logs'; +import Stations from './Stations'; class ViewSchedulingUnit extends Component{ constructor(props){ @@ -24,7 +26,7 @@ class ViewSchedulingUnit extends Component{ paths: [{ "View": "/task", }], - + missingStationFieldsErrors: [], defaultcolumns: [ { status_logs: "Status Logs", tasktype:{ @@ -70,16 +72,19 @@ class ViewSchedulingUnit extends Component{ "Relative Start Time (HH:mm:ss)": "filter-input-75", "Relative End Time (HH:mm:ss)": "filter-input-75", "Status":"filter-input-100" - }] + }], + stationGroup: [] } this.actions = [ {icon: 'fa-window-close',title:'Click to Close Scheduling Unit View', link: this.props.history.goBack} ]; + this.stations = []; this.constraintTemplates = []; if (this.props.match.params.type === 'draft') { this.actions.unshift({icon: 'fa-edit', title: 'Click to edit', props : { pathname:`/schedulingunit/edit/${ this.props.match.params.id}`} }); } else { + this.actions.unshift({icon: 'fa-sitemap',title :'View Workflow',props :{pathname:`/schedulingunit/${this.props.match.params.id}/workflow`}}); this.actions.unshift({icon: 'fa-lock', title: 'Cannot edit blueprint'}); } if (this.props.match.params.id) { @@ -90,7 +95,7 @@ class ViewSchedulingUnit extends Component{ } } - componentDidMount(){ + async componentDidMount(){ let schedule_id = this.state.scheduleunitId; let schedule_type = this.state.scheduleunitType; if (schedule_type && schedule_id) { @@ -101,6 +106,8 @@ class ViewSchedulingUnit extends Component{ </button> ); }; + this.stations = await ScheduleService.getStationGroup(); + this.setState({stationOptions: this.stations}); this.getScheduleUnit(schedule_type, schedule_id) .then(schedulingUnit =>{ if (schedulingUnit) { @@ -114,11 +121,13 @@ class ViewSchedulingUnit extends Component{ task.status_logs = task.tasktype === "Blueprint"?subtaskComponent(task):""; return task; }); + const targetObservation = _.find(tasks, (task)=> {return task.template.type_value==='observation' && task.tasktype.toLowerCase()===schedule_type && task.specifications_doc.station_groups}); this.setState({ scheduleunit : schedulingUnit, schedule_unit_task : tasks, isLoading: false, - }); + stationGroup: targetObservation?targetObservation.specifications_doc.station_groups:[] + }, this.getAllStations); }); } else { this.setState({ @@ -128,12 +137,12 @@ class ViewSchedulingUnit extends Component{ }); } } - + getScheduleUnitTasks(type, 
scheduleunit){ if(type === 'draft') - return ScheduleService.getTasksBySchedulingUnit(scheduleunit.id); + return ScheduleService.getTasksBySchedulingUnit(scheduleunit.id, true); else - return ScheduleService.getTaskBlueprintsBySchedulingUnit(scheduleunit); + return ScheduleService.getTaskBlueprintsBySchedulingUnit(scheduleunit, true); } getScheduleUnit(type, id){ if(type === 'draft') @@ -205,6 +214,13 @@ class ViewSchedulingUnit extends Component{ </div> </> } + + {<Stations + stationGroup={this.state.stationGroup} + targetObservation={this.state.targetObservation} + view + />} + {this.state.scheduleunit && this.state.scheduleunit.scheduling_constraints_doc && <SchedulingConstraint disable constraintTemplate={this.state.constraintSchema} initValue={this.state.scheduleunit.scheduling_constraints_doc} />} <div> <h3>Tasks Details</h3> diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js index 8f2b1dd4dd1408d676e2d18e849caa8ae9a18aea..f70b3eb5b38d6797bc9b578dd2e660c9a8379b54 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.js @@ -20,6 +20,8 @@ import TaskService from '../../services/task.service'; import UIConstants from '../../utils/ui.constants'; import PageHeader from '../../layout/components/PageHeader'; import SchedulingConstraint from './Scheduling.Constraints'; +import Stations from './Stations'; + /** * Component to create a new SchedulingUnit from Observation strategy template */ @@ -33,6 +35,10 @@ export class SchedulingUnitCreate extends Component { redirect: null, // URL to redirect errors: [], // Form Validation errors schedulingSets: [], // Scheduling set of the selected project + missing_StationFieldsErrors: [], // Validation for max no.of missing station + stationOptions: [], + stationGroup: [], + customSelectedStations: [], // custom stations schedulingUnit: { project: (props.match?props.match.params.project:null) || null, }, @@ -42,7 +48,7 @@ export class SchedulingUnitCreate extends Component { constraintSchema:null, validEditor: false, // For JSON editor validation validFields: {}, // For Form Validation - } + }; this.projects = []; // All projects to load project dropdown this.schedulingSets = []; // All scheduling sets to be filtered for project this.observStrategies = []; // All Observing strategy templates @@ -77,14 +83,16 @@ export class SchedulingUnitCreate extends Component { ScheduleService.getSchedulingSets(), ScheduleService.getObservationStrategies(), TaskService.getTaskTemplates(), - ScheduleService.getSchedulingConstraintTemplates()] + ScheduleService.getSchedulingConstraintTemplates(), + ScheduleService.getStationGroup()] Promise.all(promises).then(responses => { this.projects = responses[0]; this.schedulingSets = responses[1]; this.observStrategies = responses[2]; this.taskTemplates = responses[3]; this.constraintTemplates = responses[4]; - // Setting first value as constraint template + this.stations = responses[5]; + // Setting first value as constraint template this.constraintStrategy(this.constraintTemplates[0]); if (this.state.schedulingUnit.project) { const projectSchedSets = _.filter(this.schedulingSets, {'project_id': this.state.schedulingUnit.project}); @@ -105,7 +113,7 @@ export class SchedulingUnitCreate extends Component { schedulingUnit.project = projectName; this.setState({schedulingUnit: schedulingUnit, schedulingSets: projectSchedSets, validForm: 
this.validateForm('project')}); } - + /** * Function called when observation strategy template is changed. * It generates the JSON schema for JSON editor and defult vales for the parameters to be captured * */ async changeStrategy (strategyId) { const observStrategy = _.find(this.observStrategies, {'id': strategyId}); + let station_group = []; const tasks = observStrategy.template.tasks; let paramsOutput = {}; let schema = { type: 'object', additionalProperties: false, @@ -128,6 +137,9 @@ const taskTemplate = _.find(this.taskTemplates, {'name': task['specifications_template']}); schema['$id'] = taskTemplate.schema['$id']; schema['$schema'] = taskTemplate.schema['$schema']; + if (taskTemplate.type_value==='observation' && task.specifications_doc.station_groups) { + station_group = task.specifications_doc.station_groups; + } let index = 0; for (const param of observStrategy.template.parameters) { if (param.refs[0].indexOf(`/tasks/${taskName}`) > 0) { @@ -164,7 +176,7 @@ } } - this.setState({observStrategy: observStrategy, paramsSchema: schema, paramsOutput: paramsOutput}); + this.setState({observStrategy: observStrategy, paramsSchema: schema, paramsOutput: paramsOutput, stationGroup: station_group}); // Function called to clear the JSON Editor fields and reload with new schema if (this.state.editorFunction) { @@ -278,7 +290,7 @@ if (Object.keys(validFields).length === Object.keys(this.formRules).length) { validForm = true; } - return validForm; + return validForm && !this.state.missing_StationFieldsErrors; } /** @@ -308,9 +320,25 @@ } } } - /* for (let type in constStrategy.sky.transit_offset) { - constStrategy.sky.transit_offset[type] = constStrategy.sky.transit_offset[type] * 60; - }*/ + //station + const station_groups = []; + (this.state.selectedStations || []).forEach(key => { + let station_group = {}; + const stations = this.state[key] ? this.state[key].stations : []; + const max_nr_missing = parseInt(this.state[key] ?
this.state[key].missing_StationFields : 0); + station_group = { + stations, + max_nr_missing + }; + station_groups.push(station_group); + }); + + this.state.customSelectedStations.forEach(station => { + station_groups.push({ + stations: station.stations, + max_nr_missing:parseInt(station.max_nr_missing) + }); + }); UnitConversion.degreeToRadians(constStrategy.sky); @@ -319,8 +347,14 @@ export class SchedulingUnitCreate extends Component { observStrategy.template.parameters.forEach(async(param, index) => { $refs.set(observStrategy.template.parameters[index]['refs'][0], this.state.paramsOutput['param_' + index]); }); + for (const taskName in observStrategy.template.tasks) { + let task = observStrategy.template.tasks[taskName]; + if (task.specifications_doc.station_groups) { + task.specifications_doc.station_groups = station_groups; + } + } const const_strategy = {scheduling_constraints_doc: constStrategy, id: this.constraintTemplates[0].id, constraint: this.constraintTemplates[0]}; - const schedulingUnit = await ScheduleService.saveSUDraftFromObservStrategy(observStrategy, this.state.schedulingUnit, const_strategy); + const schedulingUnit = await ScheduleService.saveSUDraftFromObservStrategy(observStrategy, this.state.schedulingUnit, const_strategy, station_groups); if (schedulingUnit) { // this.growl.show({severity: 'success', summary: 'Success', detail: 'Scheduling Unit and tasks created successfully!'}); const dialog = {header: 'Success', detail: 'Scheduling Unit and Tasks are created successfully. Do you want to create another Scheduling Unit?'}; @@ -358,21 +392,41 @@ export class SchedulingUnitCreate extends Component { name: '', description: '', project: this.props.match.params.project || null, - scheduling_constraints_template_id: this.constraintTemplates[0].id - }, + scheduling_constraints_template_id: this.constraintTemplates[0].id, + }, projectDisabled: (this.props.match.params.project? 
true:false), observStrategy: {}, + selectedStations: null, paramsOutput: null, validEditor: false, validFields: {}, constraintSchema: null, - touched:false - }, () => { + touched:false, + stationGroup: [] + }, () => { this.constraintStrategy(this.constraintTemplates[0]); }); + this.state.editorFunction(); + } + onUpdateStations = (state, selectedStations, missingStationFieldsErrors, customSelectedStations) => { + this.setState({ + ...state, + selectedStations, + missingStationFieldsErrors, + customSelectedStations + }, () => { + this.setState({ + validForm: this.validateForm() + }); + }); + }; + render() { if (this.state.redirect) { return <Redirect to={ {pathname: this.state.redirect} }></Redirect> @@ -475,8 +529,11 @@ export class SchedulingUnitCreate extends Component { </div> </div> - - </div> + <Stations + stationGroup={this.state.stationGroup} + onUpdateStations={this.onUpdateStations.bind(this)} + /> + </div> {this.state.constraintSchema && <div className="p-fluid"> <div className="p-grid"> <div className="p-col-12"> diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.scheduleset.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.scheduleset.js index 885e56615b0bc9c712a7da26cae5a247482ab486..2e330e0fbc70c7708dd629f2a17e1229e8507b18 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.scheduleset.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/create.scheduleset.js @@ -2,8 +2,6 @@ import React, {Component} from 'react'; import { Link, Redirect } from 'react-router-dom'; import _ from 'lodash'; -import PageHeader from '../../layout/components/PageHeader'; - import {Dropdown} from 'primereact/dropdown'; import { Button } from 'primereact/button'; import {Dialog} from 'primereact/components/dialog/Dialog'; @@ -24,9 +22,9 @@ import 'ag-grid-community/dist/styles/ag-grid.css'; import 'ag-grid-community/dist/styles/ag-theme-alpine.css'; import UnitConverter from '../../utils/unit.converter' import Validator from '../../utils/validator'; - +import PageHeader from '../../layout/components/PageHeader'; /** - * Component to create / update Scheduling Unit Draft using Spreadsheet + * Component to create / update Scheduling Unit Drafts using Spreadsheet */ export class SchedulingSetCreate extends Component { constructor(props) { @@ -37,27 +35,22 @@ export class SchedulingSetCreate extends Component { this.tmpRowData = []; this.state = { - projectDisabled: (props.match?(props.match.params.project? 
true:false):false), isLoading: true, // Flag for loading spinner dialog: { header: '', detail: ''}, // Dialog properties redirect: null, // URL to redirect errors: [], // Form Validation errors clipboard: [], // Maintaining grid data while Ctrl+C/V - schedulingUnit: { project: (props.match?props.match.params.project:null) || null, }, - schedulingSets: [], schedulingUnitList: [], selectedSchedulingSetId: null, - // selectedStrategyId: null, - // selectedProjectId: null, observStrategy: {}, totalCount: 0, - validEditor: false, // For JSON editor validation - validFields: {}, // For Form Validation + validEditor: false, + validFields: {}, noOfSU: 10, //ag-grid columnMap: [], @@ -81,7 +74,7 @@ export class SchedulingSetCreate extends Component { editable: true, flex: 1, sortable: true, minWidth: 100, resizable: true, }, rowSelection: 'multiple', - // ag grid show row index + // ag grid to show row index components: { rowIdRenderer: function (params) { return 1 + params.rowIndex; @@ -103,7 +96,7 @@ export class SchedulingSetCreate extends Component { this.cancelCreate = this.cancelCreate.bind(this); this.clipboardEvent = this.clipboardEvent.bind(this); this.reset = this.reset.bind(this); - + this.projects = []; // All projects to load project dropdown this.schedulingSets = []; // All scheduling sets to be filtered for project this.observStrategies = []; // All Observing strategy templates @@ -204,6 +197,9 @@ export class SchedulingSetCreate extends Component { this.state.gridApi.redrawRows(); } + /** + * Resolve JSON Schema + */ async resolveSchema(schema){ let properties = schema.properties; schema.definitions = schema.definitions?schema.definitions:{}; @@ -247,76 +243,18 @@ export class SchedulingSetCreate extends Component { return schema; } + /** * Function to generate AG-Grid column definition. 
* @param {number} strategyId */ - async createGridColums(scheduleUnit){ - let strategyId = scheduleUnit.observation_strategy_template_id; - let tasksToUpdate = {}; - const observStrategy = _.find(this.observStrategies, {'id': strategyId}); - const tasks = observStrategy.template.tasks; - let paramsOutput = {}; - let schema = { type: 'object', additionalProperties: false, - properties: {}, definitions:{} - }; - - let taskDrafts= []; - await ScheduleService.getTasksDraftBySchedulingUnitId(scheduleUnit.id).then(response =>{ - taskDrafts= response.data.results; - }) - - for (const taskName in tasks) { - const task = tasks[taskName]; - const taskDraft = taskDrafts.find(taskD => taskD.name === taskName); - if (taskDraft) { - task.specifications_doc = taskDraft.specifications_doc; - } - //Resolve task from the strategy template - const $taskRefs = await $RefParser.resolve(task); - - // Identify the task specification template of every task in the strategy template - const taskTemplate = _.find(this.taskTemplates, {'name': task['specifications_template']}); - schema['$id'] = taskTemplate.schema['$id']; - schema['$schema'] = taskTemplate.schema['$schema']; - let index = 0; - for (const param of observStrategy.template.parameters) { - if (param.refs[0].indexOf(`/tasks/${taskName}`) > 0) { - tasksToUpdate[taskName] = taskName; - // Resolve the identified template - const $templateRefs = await $RefParser.resolve(taskTemplate); - let property = { }; - let tempProperty = null; - const taskPaths = param.refs[0].split("/"); - // Get the property type from the template and create new property in the schema for the parameters - try { - const parameterRef = param.refs[0]; - tempProperty = $templateRefs.get(parameterRef); - } catch(error) { - tempProperty = _.cloneDeep(taskTemplate.schema.properties[taskPaths[4]]); - if (tempProperty.type === 'array') { - tempProperty = tempProperty.items.properties[taskPaths[6]]; - } - property = tempProperty; - } - property.title = param.name; - property.default = $taskRefs.get(param.refs[0].replace(`#/tasks/${taskName}`, '#')); - paramsOutput[`param_${index}`] = property.default; - schema.properties[`param_${index}`] = property; - // Set property defintions taken from the task template in new schema - for (const definitionName in taskTemplate.schema.definitions) { - schema.definitions[definitionName] = taskTemplate.schema.definitions[definitionName]; - } - } - index++; - } - } - + async createGridColumns(scheduleUnit){ + let schema = await this.getTaskSchema(scheduleUnit); schema = await this.resolveSchema(schema); // AG Grid Cell Specific Properties const cellProps =[]; - cellProps['angle1'] = {type:'numberValueColumn', cellRenderer: 'timeInputMask',cellEditor: 'timeInputMask' }; - cellProps['angle2'] = {type:'numberValueColumn', cellRenderer: 'degreeInputMask',cellEditor: 'degreeInputMask' }; + cellProps['angle1'] = {type:'numberValueColumn', cellRenderer: 'timeInputMask',cellEditor: 'timeInputMask', valueSetter: 'valueSetter' }; + cellProps['angle2'] = {type:'numberValueColumn', cellRenderer: 'degreeInputMask',cellEditor: 'degreeInputMask', valueSetter: 'valueSetter' }; cellProps['angle3'] = {cellEditor: 'numericEditor',}; cellProps['direction_type'] = {cellEditor: 'agSelectCellEditor',default: schema.definitions.pointing.properties.direction_type.default, cellEditorParams: { @@ -324,6 +262,8 @@ export class SchedulingSetCreate extends Component { }, }; //Ag-grid Colums definition + + let colKeyOrder = []; let columnMap = []; let colProperty = {}; let columnDefs = [ @@ 
-344,6 +284,8 @@ export class SchedulingSetCreate extends Component { ], } ]; + colKeyOrder.push("suname"); + colKeyOrder.push("sudesc"); colProperty ={'ID':'id', 'Name':'suname', 'Description':'sudesc'}; columnMap['Scheduling Unit'] = colProperty; @@ -356,15 +298,15 @@ export class SchedulingSetCreate extends Component { let childern = []; colProperty = {}; - let childalais = property.title; - childalais = _.lowerCase(childalais).split(' ').map(x => x[0]).join(''); + let childalias = property.title; + childalias = _.lowerCase(childalias).split(' ').map(x => x[0]).join(''); const paramKeys = Object.keys(property.default); paramKeys.forEach(key =>{ - colProperty[key] = childalais+key; + colProperty[key] = childalias+key; let cellAttr = {}; cellAttr['headerName'] = definitions[key].title; - cellAttr['field'] = childalais+key; - + cellAttr['field'] = childalias+key; + colKeyOrder.push(childalias+key); let cellKeys = Object.keys(cellProps[key]); for(const cellKey of cellKeys){ cellAttr[cellKey] = cellProps[key][cellKey]; @@ -377,12 +319,79 @@ export class SchedulingSetCreate extends Component { }) columnMap[property.title] = colProperty; } + colProperty ={'From':'bfrom', 'Until':'buntil'}; + columnMap['Between'] = colProperty; this.setState({ columnDefs:columnDefs, columnMap:columnMap, + colKeyOrder:colKeyOrder }) + } + async getTaskSchema(scheduleUnit){ + let strategyId = scheduleUnit.observation_strategy_template_id; + let tasksToUpdate = {}; + const observStrategy = _.find(this.observStrategies, {'id': strategyId}); + const tasks = observStrategy.template.tasks; + let paramsOutput = {}; + let schema = { type: 'object', additionalProperties: false, + properties: {}, definitions:{} + }; + + let taskDrafts= []; + await ScheduleService.getTasksDraftBySchedulingUnitId(scheduleUnit.id).then(response =>{ + taskDrafts= response.data.results; + }) + + for (const taskName in tasks) { + const task = tasks[taskName]; + const taskDraft = taskDrafts.find(taskD => taskD.name === taskName); + if (taskDraft) { + task.specifications_doc = taskDraft.specifications_doc; + } + //Resolve task from the strategy template + const $taskRefs = await $RefParser.resolve(task); + + // Identify the task specification template of every task in the strategy template + const taskTemplate = _.find(this.taskTemplates, {'name': task['specifications_template']}); + schema['$id'] = taskTemplate.schema['$id']; + schema['$schema'] = taskTemplate.schema['$schema']; + let index = 0; + for (const param of observStrategy.template.parameters) { + if (param.refs[0].indexOf(`/tasks/${taskName}`) > 0) { + tasksToUpdate[taskName] = taskName; + // Resolve the identified template + const $templateRefs = await $RefParser.resolve(taskTemplate); + let property = { }; + let tempProperty = null; + const taskPaths = param.refs[0].split("/"); + // Get the property type from the template and create new property in the schema for the parameters + try { + const parameterRef = param.refs[0]; + tempProperty = $templateRefs.get(parameterRef); + } catch(error) { + tempProperty = _.cloneDeep(taskTemplate.schema.properties[taskPaths[4]]); + if (tempProperty.type === 'array') { + tempProperty = tempProperty.items.properties[taskPaths[6]]; + } + property = tempProperty; + } + property.title = param.name; + property.default = $taskRefs.get(param.refs[0].replace(`#/tasks/${taskName}`, '#')); + paramsOutput[`param_${index}`] = property.default; + schema.properties[`param_${index}`] = property; + // Set property defintions taken from the task template in new 
schema + for (const definitionName in taskTemplate.schema.definitions) { + schema.definitions[definitionName] = taskTemplate.schema.definitions[definitionName]; + } + } + index++; + } + } + return schema; + } + /** * Function to prepare ag-grid row data. @@ -391,15 +400,11 @@ export class SchedulingSetCreate extends Component { if(this.state.schedulingUnitList.length===0){ return; } - // const observStrategy = _.find(this.observStrategies, {'id': this.state.schedulingUnitList[0].observation_strategy_template_id}); - // this.setState({observStrategy: observStrategy}); - this.tmpRowData = []; let totalSU = this.state.noOfSU; let paramsOutput = {}; //refresh column header - await this.createGridColums(this.state.schedulingUnitList[0]); - + await this.createGridColumns(this.state.schedulingUnitList[0]); let observationPropsList = []; for(const scheduleunit of this.state.schedulingUnitList){ let observationProps = { @@ -467,7 +472,7 @@ export class SchedulingSetCreate extends Component { * @param {String} cell -> contains Row ID, Column Name, Value, isDegree */ async updateAngle(rowIndex, field, value, isDegree, isValid){ - let row = this.state.rowData[rowIndex] + let row = this.state.rowData[rowIndex]; row[field] = value; row['isValid'] = isValid; //Converted value for Angle 1 & 2, set in SU Row @@ -477,10 +482,37 @@ export class SchedulingSetCreate extends Component { await this.setState({ rowData: tmpRowData }); - // console.log('rowdata', this.state.rowData) + } - + /** + * Read data from the clipboard + */ + async readClipBoard(){ + try{ + const queryOpts = { name: 'clipboard-read', allowWithoutGesture: true }; + const permissionStatus = await navigator.permissions.query(queryOpts); + let data = await navigator.clipboard.readText(); + return data; + }catch(err){ + console.log("Error",err); + } + } + + /** + * Check whether the content is valid JSON + * @param {*} jsonData + */ + async isJsonData(jsonData){ + try{ + let jsonObj = JSON.parse(jsonData); + return true; + }catch(err){ + console.log("error :",err) + return false; + } + } + /** * Copy data to/from clipboard * @param {*} e @@ -490,42 +522,77 @@ export class SchedulingSetCreate extends Component { var ctrl = e.ctrlKey ? e.ctrlKey : ((key === 17) ? 
true : false); // ctrl detection if ( key == 86 && ctrl ) { // Ctrl+V - let emptyRow = this.state.emptyRow; this.tmpRowData = this.state.rowData; let dataRowCount = this.state.totalCount; - for(const row of this.state.clipboard){ - let copyRow = _.cloneDeep(row); - copyRow['id'] = 0; - this.tmpRowData[dataRowCount] = copyRow; - dataRowCount++; - } - - let tmpNoOfSU= this.state.noOfSU; - if(dataRowCount >= tmpNoOfSU){ - tmpNoOfSU = dataRowCount+10; - //Create additional empty row at the end - for(let i= this.tmpRowData.length; i<=tmpNoOfSU; i++){ - this.tmpRowData.push(emptyRow); + try { + let clipboardData = ''; + try{ + //Read Clipboard Data + clipboardData = await this.readClipBoard(); + }catch(err){ + console.log("error :",err); + } + if(clipboardData){ + let suGridRowData= this.state.emptyRow; + clipboardData = _.trim(clipboardData); + let suRows = clipboardData.split("\n"); + suRows.forEach(line =>{ + let colCount = 0; + suGridRowData ={}; + let suRow = line.split("\t"); + suGridRowData['id']= 0; + suGridRowData['isValid']= true; + for(const key of this.state.colKeyOrder){ + suGridRowData[key]= suRow[colCount]; + colCount++; + } + this.tmpRowData[dataRowCount]= (suGridRowData); + dataRowCount++ + }) + } + let emptyRow = this.state.emptyRow; + let tmpNoOfSU= this.state.noOfSU; + if(dataRowCount >= tmpNoOfSU){ + tmpNoOfSU = dataRowCount+10; + //Create additional empty row at the end + for(let i= this.tmpRowData.length; i<=tmpNoOfSU; i++){ + this.tmpRowData.push(emptyRow); + } } - } - await this.setState({ - rowData: this.tmpRowData, - noOfSU: this.tmpRowData.length, - totalCount: dataRowCount, - }) + await this.setState({ + rowData: this.tmpRowData, + noOfSU: this.tmpRowData.length, + totalCount: dataRowCount, + }) + + this.state.gridApi.setRowData(this.state.rowData); + this.state.gridApi.redrawRows(); - this.state.gridApi.setRowData(this.state.rowData) - this.state.gridApi.redrawRows(); + }catch (err) { + console.error('Error: ', err); + } } else if ( key == 67 && ctrl ) { - //Ctrl+C = Store the data into local state + //Ctrl+C var selectedRows = this.state.gridApi.getSelectedRows(); - this.setState({ - clipboard : selectedRows - }) + let clipboardData = ''; + for(const rowData of selectedRows){ + var line = ''; + for(const key of this.state.colKeyOrder){ + line += rowData[key] + '\t'; + } + line = _.trim(line); + clipboardData += line + '\r\n'; + } + clipboardData = _.trim(clipboardData); + const queryOpts = { name: 'clipboard-write', allowWithoutGesture: true }; + await navigator.permissions.query(queryOpts); + await navigator.clipboard.writeText(clipboardData); } } + + /** * Function to create Scheduling unit */ @@ -571,12 +638,11 @@ export class SchedulingSetCreate extends Component { index++; } if(!validRow){ - continue + continue; } observStrategy.template.parameters.forEach(async(param, index) => { $refs.set(observStrategy.template.parameters[index]['refs'][0], paramsOutput['param_' + index]); }); - if(suRow.id >0 && suRow.suname.length>0 && suRow.sudesc.length>0){ newSU = _.find(this.state.schedulingUnitList, {'id': suRow.id}); newSU['name'] = suRow.suname; @@ -597,14 +663,8 @@ export class SchedulingSetCreate extends Component { if((newSUCount+existingSUCount)>0){ const dialog = {header: 'Success', detail: '['+newSUCount+'] Scheduling Units are created & ['+existingSUCount+'] Scheduling Units are updated successfully.'}; this.setState({ dialogVisible: true, dialog: dialog}); - /* let schedulingUnitList= await 
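The Ctrl+C/Ctrl+V handlers above define a simple interchange format: one grid row per line, cells tab-separated, columns in the fixed colKeyOrder. A minimal round-trip sketch of that format (Python for illustration; the key names are hypothetical):

```python
# Sketch of the clipboard format used by the grid above: rows separated by
# newlines, cells by tabs, columns in a fixed key order.
COL_KEY_ORDER = ["suname", "sudesc", "tp1angle1", "tp1angle2"]  # hypothetical keys

def rows_to_clipboard(rows):
    # Ctrl+C: serialise the selected rows in column order, tab-separated.
    return "\r\n".join("\t".join(str(row.get(key, "")) for key in COL_KEY_ORDER) for row in rows)

def clipboard_to_rows(text):
    # Ctrl+V: split into lines and cells, then map cells back by the same order.
    return [dict(zip(COL_KEY_ORDER, line.split("\t"))) for line in text.strip().splitlines()]
```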
ScheduleService.getSchedulingBySet(this.state.selectedSchedulingSetId); - schedulingUnitList = _.filter(schedulingUnitList,{'observation_strategy_template_id': this.state.observStrategy.id}) ; - this.setState({ - schedulingUnitList: schedulingUnitList - }) - this.prepareScheduleUnitListForGrid();*/ }else{ - this.growl.show({severity: 'error', summary: 'Warring', detail: 'Scheduling Units are not create/update '}); + this.growl.show({severity: 'error', summary: 'Warning', detail: 'No Scheduling Units were created or updated'}); } }catch(err){ this.growl.show({severity: 'error', summary: 'Error Occurred', detail: 'Unable to create/update Scheduling Units'}); @@ -612,19 +672,18 @@ } /** - * Refresh the grid with updated data, it helps to make next update to make immediatly for the same filter + * Refresh the grid with updated data */ async reset() { - //this.setState({dialogVisible: false}); let schedulingUnitList= await ScheduleService.getSchedulingBySet(this.state.selectedSchedulingSetId); - schedulingUnitList = _.filter(schedulingUnitList,{'observation_strategy_template_id': this.state.observStrategy.id}) ; - this.setState({ - schedulingUnitList: schedulingUnitList, - dialogVisible: false - }) - await this.prepareScheduleUnitListForGrid(); - this.state.gridApi.setRowData(this.state.rowData) - this.state.gridApi.redrawRows(); + schedulingUnitList = _.filter(schedulingUnitList,{'observation_strategy_template_id': this.state.observStrategy.id}) ; + this.setState({ + schedulingUnitList: schedulingUnitList, + dialogVisible: false + }) + await this.prepareScheduleUnitListForGrid(); + this.state.gridApi.setRowData(this.state.rowData); + this.state.gridApi.redrawRows(); } /** @@ -634,15 +693,16 @@ this.setState({redirect: '/schedulingunit'}); } - onGridReady (params) { - this.setState({ + async onGridReady (params) { + await this.setState({ gridApi:params.api, gridColumnApi:params.columnApi, }) + this.state.gridApi.hideOverlay(); } async setNoOfSUint(value){ - if(value>=0 && value<501){ + if(value >= 0 && value < 501){ await this.setState({ noOfSU: value }) @@ -701,7 +761,8 @@ validateEditor() { return this.validEditor?true:false; } - + + render() { if (this.state.redirect) { return <Redirect to={ {pathname: this.state.redirect} }></Redirect> @@ -772,27 +833,27 @@ </div> </div> <> - <div className="ag-theme-alpine" style={ { height: '500px', marginBottom: '10px' } } onKeyDown={this.clipboardEvent}> - <AgGridReact - suppressClipboardPaste={false} - columnDefs={this.state.columnDefs} - columnTypes={this.state.columnTypes} - defaultColDef={this.state.defaultColDef} - rowSelection={this.state.rowSelection} - onGridReady={this.onGridReady} - rowData={this.state.rowData} - frameworkComponents={this.state.frameworkComponents} - context={this.state.context} - components={this.state.components} - modules={this.state.modules} - enableRangeSelection={true} - rowSelection={this.state.rowSelection} - - //onSelectionChanged={this.onSelectionChanged.bind(this)} - > - - </AgGridReact> - </div> + {this.state.observStrategy.id && + <div className="ag-theme-alpine" style={ { height: '500px', marginBottom: '10px' } } onKeyDown={this.clipboardEvent}> + <AgGridReact + suppressClipboardPaste={false} + columnDefs={this.state.columnDefs} + columnTypes={this.state.columnTypes} + defaultColDef={this.state.defaultColDef} + 
rowSelection={this.state.rowSelection} + onGridReady={this.onGridReady} + rowData={this.state.rowData} + frameworkComponents={this.state.frameworkComponents} + context={this.state.context} + components={this.state.components} + modules={this.state.modules} + enableRangeSelection={true} + > + + </AgGridReact> + </div> + } </> <div className="p-grid p-justify-start"> <div className="p-col-1"> diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/edit.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/edit.js index f3205da562ad3e81bf3654e0bd59bf30cd8fbd5d..f2f56627824593d0f270148148f2e718fb2d41ff 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/edit.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/edit.js @@ -14,6 +14,7 @@ import AppLoader from '../../layout/components/AppLoader'; import PageHeader from '../../layout/components/PageHeader'; import Jeditor from '../../components/JSONEditor/JEditor'; import UnitConversion from '../../utils/unit.converter'; +import Stations from './Stations'; import ProjectService from '../../services/project.service'; import ScheduleService from '../../services/schedule.service'; @@ -40,7 +41,10 @@ export class EditSchedulingUnit extends Component { constraintSchema:null, validEditor: false, // For JSON editor validation validFields: {}, // For Form Validation - observStrategyVisible: false + observStrategyVisible: false, + missingStationFieldsErrors: [], // Validation for max no. of missing stations + stationGroup: [], + customSelectedStations: [] // Custom Stations } this.projects = []; // All projects to load project dropdown this.schedulingSets = []; // All scheduling sets to be filtered for project @@ -126,6 +130,9 @@ export class EditSchedulingUnit extends Component { } index++; } + if (taskTemplate.type_value==='observation' && task.specifications_doc.station_groups) { + tasksToUpdate[taskName] = taskName; + } } this.setState({observStrategy: observStrategy, paramsSchema: schema, paramsOutput: paramsOutput, tasksToUpdate: tasksToUpdate}); @@ -142,7 +149,8 @@ export class EditSchedulingUnit extends Component { TaskService.getTaskTemplates(), ScheduleService.getSchedulingUnitDraftById(this.props.match.params.id), ScheduleService.getTasksDraftBySchedulingUnitId(this.props.match.params.id), - ScheduleService.getSchedulingConstraintTemplates() + ScheduleService.getSchedulingConstraintTemplates(), + ScheduleService.getStationGroup() ]; Promise.all(promises).then(responses => { this.projects = responses[0]; @@ -150,11 +158,16 @@ export class EditSchedulingUnit extends Component { this.observStrategies = responses[2]; this.taskTemplates = responses[3]; this.constraintTemplates = responses[6]; + this.stations = responses[7]; responses[4].project = this.schedulingSets.find(i => i.id === responses[4].scheduling_set_id).project_id; this.setState({ schedulingUnit: responses[4], taskDrafts: responses[5].data.results, observStrategyVisible: responses[4].observation_strategy_template_id?true:false }); if (responses[4].observation_strategy_template_id) { this.changeStrategy(responses[4].observation_strategy_template_id); + const targetObservation = responses[5].data.results.find(task => {return task.specifications_doc.station_groups?true:false}); + this.setState({ + stationGroup: targetObservation?targetObservation.specifications_doc.station_groups:[] + }); } if (this.state.schedulingUnit.project) { const projectSchedSets = _.filter(this.schedulingSets, {'project_id': 
this.state.schedulingUnit.project}); @@ -261,7 +274,7 @@ export class EditSchedulingUnit extends Component { if (Object.keys(validFields).length === Object.keys(this.formRules).length) { validForm = true; } - return validForm; + return validForm && !this.state.missingStationFieldsErrors; } /** @@ -305,7 +318,26 @@ export class EditSchedulingUnit extends Component { }); const schUnit = { ...this.state.schedulingUnit }; schUnit.scheduling_constraints_doc = constStrategy; - const schedulingUnit = await ScheduleService.updateSUDraftFromObservStrategy(observStrategy,schUnit,this.state.taskDrafts, this.state.tasksToUpdate); + //station + const station_groups = []; + (this.state.selectedStations || []).forEach(key => { + let station_group = {}; + const stations = this.state[key] ? this.state[key].stations : []; + const max_nr_missing = parseInt(this.state[key] ? this.state[key].missing_StationFields : 0); + station_group = { + stations, + max_nr_missing + }; + station_groups.push(station_group); + }); + this.state.customSelectedStations.forEach(station => { + station_groups.push({ + stations: station.stations, + max_nr_missing: parseInt(station.max_nr_missing) + }); + }); + + const schedulingUnit = await ScheduleService.updateSUDraftFromObservStrategy(observStrategy,schUnit,this.state.taskDrafts, this.state.tasksToUpdate, station_groups); if (schedulingUnit) { // this.growl.show({severity: 'success', summary: 'Success', detail: 'Scheduling Unit and tasks edited successfully!'}); this.props.history.push({ @@ -318,7 +350,8 @@ export class EditSchedulingUnit extends Component { this.growl.show({severity: 'error', summary: 'Error Occured', detail: 'Template Missing.'}); } } - + + /** * Cancel SU creation and redirect */ @@ -330,6 +363,19 @@ export class EditSchedulingUnit extends Component { this.setState({ constraintSchema: schema, initValue: initValue}); } + onUpdateStations = (state, selectedStations, missingStationFieldsErrors, customSelectedStations) => { + this.setState({ + ...state, + selectedStations, + missingStationFieldsErrors, + customSelectedStations + }, () => { + this.setState({ + validForm: this.validateForm() + }); + }); + }; + render() { if (this.state.redirect) { return <Redirect to={ {pathname: this.state.redirect} }></Redirect> @@ -430,6 +476,14 @@ export class EditSchedulingUnit extends Component { </div> </div> </div> + + + <Stations + stationGroup={this.state.stationGroup} + onUpdateStations={this.onUpdateStations.bind(this)} + /> + + {this.state.constraintSchema && <div className="p-fluid"> <div className="p-grid"> <div className="p-col-12"> diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/summary.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/summary.js index 2c304f86ec3cd0815e33e5f057e49c7335fcf812..7263fc7b3441386d301f4bba6e5e1e82aedd368c 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/summary.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Scheduling/summary.js @@ -1,7 +1,10 @@ import React, {Component} from 'react'; import { Link } from 'react-router-dom/cjs/react-router-dom.min'; import moment from 'moment'; +import _ from 'lodash'; import ViewTable from '../../components/ViewTable'; +import { JSONToHTMLTable } from '@kevincobain2000/json-to-html-table' +import SchedulingConstraints from './Scheduling.Constraints'; /** * Component to view summary of the scheduling unit with limited task details @@ -13,20 +16,106 @@ export class SchedulingUnitSummary extends Component { this.state = { schedulingUnit: props.schedulingUnit || 
null } + this.constraintsOrder = ['scheduler','time','daily','sky']; this.closeSUDets = this.closeSUDets.bind(this); + this.setConstraintsEditorOutput = this.setConstraintsEditorOutput.bind(this); } componentDidMount() {} + /** + * Function to close the summary panel by calling the parent's callback. + */ closeSUDets() { if(this.props.closeCallback) { this.props.closeCallback(); } } + /** + * Order the properties in the constraint object in the predefined order + * @param {Object} constraintsDoc + */ + getOrderedConstraints(constraintsDoc, constraintsOrder) { + let orderedConstraints = {}; + for(const constraintKey of constraintsOrder) { + /* Format the object to remove empty values */ + const constraint = this.getFormattedConstraint(constraintsDoc[constraintKey]); + if (constraint) { + orderedConstraints[constraintKey] = constraint; + } + } + return orderedConstraints; + } + + /** + * Format the constraint object, i.e. remove empty values so that only available values are shown. + * @param {Object} constraint + */ + getFormattedConstraint(constraint) { + if (constraint) { + const objectType = typeof constraint; + switch(objectType) { + case "string": { + try { + const dateConstraint = moment.utc(constraint); + if (dateConstraint.isValid()) { + constraint = dateConstraint.format("YYYY-MM-DD HH:mm:ss"); + } + } catch (error) {} + break; + } + case "boolean": { + constraint = constraint?constraint:null; + break; + } + case "object": { + if (Array.isArray(constraint)) { + let newArray = []; + for (let arrayObj of constraint) { + arrayObj = this.getFormattedConstraint(arrayObj); + if (arrayObj) { + newArray.push(arrayObj); + } + } + constraint = newArray.length > 0?newArray:null; + } else { + let newObject = {}; + let keys = _.keys(constraint); + if (keys.indexOf('from')>=0 && keys.indexOf('to')>=0) { + constraint = this.getOrderedConstraints(constraint, ['from', 'to']); + } + for (const objectKey of _.keys(constraint)) { + let object = this.getFormattedConstraint(constraint[objectKey]); + if (object) { + newObject[objectKey] = object; + } + } + constraint = (!_.isEmpty(newObject))? newObject:null; + } + break; + } + default: {} + } + } + return constraint; + } + + /** + * Gets the output from the SchedulingConstraints editor without output formatting so that the values entered in the + * editor can be shown in the summary without any conversion. 
+ * @param {Object} jsonOutput + */ + setConstraintsEditorOutput(jsonOutput) { + this.setState({constraintsDoc: jsonOutput}); + } + render() { const schedulingUnit = this.props.schedulingUnit; const suTaskList = this.props.suTaskList; + const constraintsTemplate = this.props.constraintsTemplate; + // After receiving output from the SchedulingConstraint editor order and format it to display + let constraintsDoc = this.state.constraintsDoc?this.getOrderedConstraints(this.state.constraintsDoc, this.constraintsOrder):null; return ( <React.Fragment> { schedulingUnit && @@ -44,7 +133,25 @@ export class SchedulingUnitSummary extends Component { <div className="col-8">{moment.utc(schedulingUnit.stop_time).format("DD-MMM-YYYY HH:mm:ss")}</div> <div className="col-4"><label>Status:</label></div> <div className="col-8">{schedulingUnit.status}</div> - <div className="col-12"> + {constraintsTemplate && schedulingUnit.suDraft.scheduling_constraints_doc && + <> + {/* SchedulingConstraints editor to pass the scheduling_constraints_doc and get the editor output to User entry format and conversions */} + <div style={{display: "none"}}> + <SchedulingConstraints constraintTemplate={constraintsTemplate} disable + formatOutput={false} initValue={schedulingUnit.suDraft.scheduling_constraints_doc} + callback={this.setConstraintsEditorOutput} /> + </div> + {/* Scheduling Constraint Display in table format */} + {constraintsDoc && + <div className="col-12 constraints-summary"> + <label>Constraints:</label> + <JSONToHTMLTable data={constraintsDoc} tableClassName="table table-sm"/> + </div> + } + </> + } + <div className="col-12 task-summary"> + <label>Tasks:</label> <ViewTable data={suTaskList} defaultcolumns={[{id: "ID", start_time:"Start Time", stop_time:"End Time", status: "Status", @@ -60,7 +167,7 @@ export class SchedulingUnitSummary extends Component { showColumnFilter={false} allowColumnSelection={false} /> - </div> + </div> </div> } </React.Fragment> diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js index 16524edc0f57b879a3302545d2ba588c2fcc087e..5ab65e6a7b09886f107dc78943236999c1836156 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/view.js @@ -79,7 +79,7 @@ export class TimelineView extends Component { const suBlueprint = _.find(suBlueprints, {'id': suBlueprintId}); suBlueprint['actionpath'] = `/schedulingunit/view/blueprint/${suBlueprintId}`; suBlueprint.suDraft = suDraft; - suBlueprint.project = project; + suBlueprint.project = project.name; suBlueprint.suSet = suSet; suBlueprint.durationInSec = suBlueprint.duration; suBlueprint.duration = UnitConverter.getSecsToHHmmss(suBlueprint.duration); @@ -122,8 +122,8 @@ export class TimelineView extends Component { // suBlueprint.status = diffOfCurrAndStart>=0?"FINISHED":"DEFINED"; let item = { id: suBlueprint.id, group: suBlueprint.suDraft.id, - title: `${suBlueprint.project.name} - ${suBlueprint.suDraft.name} - ${(suBlueprint.durationInSec/3600).toFixed(2)}Hrs`, - project: suBlueprint.project.name, + title: `${suBlueprint.project} - ${suBlueprint.suDraft.name} - ${(suBlueprint.durationInSec/3600).toFixed(2)}Hrs`, + project: suBlueprint.project, name: suBlueprint.suDraft.name, duration: suBlueprint.durationInSec?`${(suBlueprint.durationInSec/3600).toFixed(2)}Hrs`:"", start_time: moment.utc(suBlueprint.start_time), @@ -158,6 +158,11 @@ export class TimelineView extends Component { } 
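A worked example of the constraint formatting performed by getOrderedConstraints and getFormattedConstraint in summary.js above (field names and values are illustrative, not taken from an actual template): empty leaves are pruned, parseable date strings are reformatted, 'from'/'to' pairs keep that order, and the top level follows constraintsOrder.

```python
# Hypothetical before/after of the summary's constraint formatting.
raw_doc = {
    "sky": {"transit_offset": {"to": 300, "from": -300}, "min_distance": {}},
    "daily": {"require_day": False},
    "time": {"at": "2020-08-14T10:00:00Z"},
    "scheduler": "dynamic",
}
formatted_doc = {
    "scheduler": "dynamic",                                # top level reordered per constraintsOrder
    "time": {"at": "2020-08-14 10:00:00"},                 # date string reformatted
    "sky": {"transit_offset": {"from": -300, "to": 300}},  # 'from' ordered before 'to'
}
# "daily" disappears: its only value was false, and empty objects are dropped.
```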
this.setState({suTaskList: _.sortBy(taskList, "id"), isSummaryLoading: false}) }); + // Get the scheduling constraint template of the selected SU block + ScheduleService.getSchedulingConstraintTemplate(suBlueprint.suDraft.scheduling_constraints_template_id) + .then(suConstraintTemplate => { + this.setState({suConstraintTemplate: suConstraintTemplate}); + }); } } } @@ -289,6 +294,11 @@ export class TimelineView extends Component { } } + setStationView(e) { + this.closeSUDets(); + this.setState({stationView: e.value}); + } + render() { if (this.state.redirect) { return <Redirect to={ {pathname: this.state.redirect} }></Redirect> @@ -313,7 +323,7 @@ export class TimelineView extends Component { data={this.state.suBlueprintList} defaultcolumns={[{name: "Name", start_time:"Start Time", stop_time:"End Time"}]} - optionalcolumns={[{description: "Description", duration:"Duration (HH:mm:ss)", actionpath: "actionpath"}]} + optionalcolumns={[{project:"Project",description: "Description", duration:"Duration (HH:mm:ss)", actionpath: "actionpath"}]} columnclassname={[{"Start Time":"filter-input-50", "End Time":"filter-input-50", "Duration (HH:mm:ss)" : "filter-input-50",}]} defaultSortColumn= {[{id: "Start Time", desc: false}]} @@ -340,7 +350,7 @@ export class TimelineView extends Component { </div> <div className="timeline-view-toolbar"> <label>Station View</label> - <InputSwitch checked={this.state.stationView} onChange={(e) => {this.closeSUDets();this.setState({stationView: e.value})}} /> + <InputSwitch checked={this.state.stationView} onChange={(e) => {this.setStationView(e)}} /> </div> <Timeline ref={(tl)=>{this.timeline=tl}} group={this.state.group} @@ -348,6 +358,7 @@ export class TimelineView extends Component { currentUTC={this.state.currentUTC} rowHeight={30} itemClickCallback={this.onItemClick} dateRangeCallback={this.dateRangeCallback} + showSunTimings={!this.state.stationView} className="timeline-toolbar-margin-top-0"></Timeline> </div> {/* Details Panel */} @@ -356,6 +367,7 @@ export class TimelineView extends Component { style={{borderLeft: "1px solid #efefef", marginTop: "0px", backgroundColor: "#f2f2f2"}}> {this.state.isSummaryLoading?<AppLoader /> : <SchedulingUnitSummary schedulingUnit={suBlueprint} suTaskList={this.state.suTaskList} + constraintsTemplate={this.state.suConstraintTemplate} closeCallback={this.closeSUDets}></SchedulingUnitSummary> } </div> diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js index 4e11e19eaca14e596b889f35c882306b0352210d..b4b3ae65ec5a753e2b52bff6c251f59c3d60f71e 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Timeline/week.view.js @@ -139,8 +139,8 @@ export class WeekTimelineView extends Component { let item = { id: `${suBlueprint.id}-${suBlueprint.start_time}`, // group: suBlueprint.suDraft.id, group: moment.utc(suBlueprint.start_time).format("MMM DD ddd"), - title: `${suBlueprint.project.name} - ${(suBlueprint.durationInSec/3600).toFixed(2)}Hrs - ${antennaSet}`, - project: suBlueprint.project.name, + title: `${suBlueprint.project} - ${(suBlueprint.durationInSec/3600).toFixed(2)}Hrs - ${antennaSet}`, + project: suBlueprint.project, name: suBlueprint.suDraft.name, band: antennaSet, duration: suBlueprint.durationInSec?`${(suBlueprint.durationInSec/3600).toFixed(2)}Hrs`:"", @@ -176,6 +176,11 @@ export class WeekTimelineView extends Component { } this.setState({suTaskList: _.sortBy(taskList, 
"id"), isSummaryLoading: false}) }); + // Get the scheduling constraint template of the selected SU block + ScheduleService.getSchedulingConstraintTemplate(suBlueprint.suDraft.scheduling_constraints_template_id) + .then(suConstraintTemplate => { + this.setState({suConstraintTemplate: suConstraintTemplate}); + }); } } } @@ -359,6 +364,7 @@ export class WeekTimelineView extends Component { style={{borderLeft: "1px solid #efefef", marginTop: "0px", backgroundColor: "#f2f2f2"}}> {this.state.isSummaryLoading?<AppLoader /> : <SchedulingUnitSummary schedulingUnit={suBlueprint} suTaskList={this.state.suTaskList} + constraintsTemplate={this.state.suConstraintTemplate} closeCallback={this.closeSUDets} location={this.props.location}></SchedulingUnitSummary> } diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Workflow/QAreporting.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Workflow/QAreporting.js new file mode 100644 index 0000000000000000000000000000000000000000..5ffe01ea22288a4b72aa794753358c400d8862c6 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Workflow/QAreporting.js @@ -0,0 +1,97 @@ +import React, { Component } from 'react'; +import PageHeader from '../../layout/components/PageHeader'; +import {Growl} from 'primereact/components/growl/Growl'; +import { Button } from 'primereact/button'; +// import AppLoader from '../../layout/components/AppLoader'; +import SunEditor from 'suneditor-react'; +import 'suneditor/dist/css/suneditor.min.css'; // Import Sun Editor's CSS File +import {Dropdown} from 'primereact/dropdown'; +// import {InputText} from 'primereact/inputtext'; +import ScheduleService from '../../services/schedule.service'; +import { Link } from 'react-router-dom'; + +class QAreporting extends Component{ + + constructor(props) { + super(props); + this.state={}; + } + + componentDidMount() { + ScheduleService.getSchedulingUnitBlueprintById(this.props.match.params.id) + .then(schedulingUnit => { + this.setState({schedulingUnit: schedulingUnit}); + }) + } + + render() { + return ( + <React.Fragment> + <Growl ref={(el) => this.growl = el} /> + <PageHeader location={this.props.location} title={'QA Reporting (TO)'} actions={[{icon:'fa-window-close',link:this.props.history.goBack, title:'Click to Close Workflow', props:{ pathname: '/schedulingunit/view'}}]}/> + {this.state.schedulingUnit && + <> + <div> + <div className="p-fluid"> + <div className="p-field p-grid"> + <label htmlFor="suStatus" className="col-lg-2 col-md-2 col-sm-12">Scheduling Unit</label> + <div className="col-lg-3 col-md-3 col-sm-12"> + <Link to={ { pathname:`/schedulingunit/view/blueprint/${this.state.schedulingUnit.id}`}}>{this.state.schedulingUnit.name}</Link> + </div> + <div className="col-lg-1 col-md-1 col-sm-12"></div> + <label htmlFor="suStatus" className="col-lg-2 col-md-2 col-sm-12">Scheduling Unit Status</label> + <div className="col-lg-3 col-md-3 col-sm-12"> + {/* <InputText id="suStatus" data-testid="name" disabled + value={this.state.schedulingUnit.status}/> */} + <span>{this.state.schedulingUnit.status}</span> + </div> + </div> + <div className="p-field p-grid"> + <label htmlFor="assignTo" className="col-lg-2 col-md-2 col-sm-12">Assign To </label> + <div className="col-lg-3 col-md-3 col-sm-12" data-testid="assignTo" > + <Dropdown inputId="projCat" optionLabel="value" optionValue="value" + options={[{value: 'User 1'},{value: 'User 2'},{value: 'User 3'}]} + placeholder="Assign To" /> + </div> + <div className="col-lg-1 col-md-1 col-sm-12"></div> + <label htmlFor="viewPlots" className="col-lg-2 
col-md-2 col-sm-12">View Plots</label> + <div className="col-lg-3 col-md-3 col-sm-12" style={{paddingLeft:'2px'}}> + <label className="col-sm-10 " > + <a href="https://proxy.lofar.eu/inspect/HTML/" target="_blank">Inspection plots</a> + </label> + <label className="col-sm-10 "> + <a href="https://proxy.lofar.eu/qa" target="_blank">Adder plots</a> + </label> + <label className="col-sm-10 "> + <a href=" https://proxy.lofar.eu/lofmonitor/" target="_blank">Station Monitor</a> + </label> + </div> + </div> + <div className="p-grid" style={{padding: '10px'}}> + <label htmlFor="comments" >Comments</label> + <div className="col-lg-12 col-md-12 col-sm-12"></div> + <SunEditor height="250" enableToolbar={true} + setOptions={{ + buttonList: [ + ['undo', 'redo', 'bold', 'underline', 'fontColor', 'table', 'link', 'image', 'video','italic', 'strike', 'subscript', + 'superscript','outdent', 'indent','fullScreen', 'showBlocks', 'codeView','preview', 'print','removeFormat'] + ] + }} /> + </div> + </div> + <div className="p-grid" style={{marginTop: '20px'}}> + <div className="p-col-1"> + <Button label="Save" className="p-button-primary" icon="pi pi-check" /> + </div> + <div className="p-col-1"> + <Button label="Cancel" className="p-button-danger" icon="pi pi-times" /> + </div> + </div> + + </div> + </> + } + </React.Fragment> + )}; +} +export default QAreporting; \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/index.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/index.js index d226ed315f5bd573ccf6d06f7bdf45c2592223b5..a5c43e081e64a4c5801d01d50d1ef4ad9d4b35d7 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/index.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/index.js @@ -15,7 +15,8 @@ import SchedulingUnitCreate from './Scheduling/create'; import EditSchedulingUnit from './Scheduling/edit'; import { CycleList, CycleCreate, CycleView, CycleEdit } from './Cycle'; import {TimelineView, WeekTimelineView} from './Timeline'; -import SchedulingSetCreate from './Scheduling/create.scheduleset' +import SchedulingSetCreate from './Scheduling/create.scheduleset'; +import QAreporting from './Workflow/QAreporting'; export const routes = [ { @@ -150,7 +151,13 @@ export const routes = [ path: "/schedulingset/schedulingunit/create", component: SchedulingSetCreate, name: 'Scheduling Set Add' - } + }, + { + path: "/schedulingunit/:id/workflow", + component: QAreporting, + name: 'QA Reporting (TO)', + title: 'QA Reporting (TO)' + } ]; export const RoutedContent = () => { diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js index 0b77e10fc80469dba49272d93eec6e6bc720459e..1ace4022edceedeffc7a6916b1749edaf81cc4fa 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/services/schedule.service.js @@ -91,7 +91,7 @@ const ScheduleService = { } return taskblueprintsList; }, - getTasksBySchedulingUnit: async function(id){ + getTasksBySchedulingUnit: async function(id, loadTemplate){ let scheduletasklist=[]; // let taskblueprints = []; // Common keys for Task and Blueprint @@ -115,10 +115,13 @@ const ScheduleService = { } scheduletask['created_at'] = moment(task['created_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss"); scheduletask['updated_at'] = moment(task['updated_at'], moment.ISO_8601).format("YYYY-MMM-DD HH:mm:ss"); - + scheduletask['specifications_doc'] = task['specifications_doc']; scheduletask.duration = 
moment.utc((scheduletask.duration || 0)*1000).format('HH:mm:ss'); scheduletask.relative_start_time = moment.utc(scheduletask.relative_start_time*1000).format('HH:mm:ss'); scheduletask.relative_stop_time = moment.utc(scheduletask.relative_stop_time*1000).format('HH:mm:ss'); + if (loadTemplate) { + scheduletask.template = await TaskService.getTaskTemplate(task.specifications_template_id); + } //Fetch blueprint details for Task Draft const draftBlueprints = await TaskService.getDraftsTaskBlueprints(task.id); // let filteredblueprints = _.filter(taskblueprints, function(o) { @@ -140,7 +143,9 @@ const ScheduleService = { taskblueprint.duration = moment.utc((taskblueprint.duration || 0)*1000).format('HH:mm:ss'); taskblueprint.relative_start_time = moment.utc(taskblueprint.relative_start_time*1000).format('HH:mm:ss'); taskblueprint.relative_stop_time = moment.utc(taskblueprint.relative_stop_time*1000).format('HH:mm:ss'); - + if (loadTemplate) { + taskblueprint.template = scheduletask.template; + } //Add Blue print details to array scheduletasklist.push(taskblueprint); } @@ -219,7 +224,16 @@ const ScheduleService = { return []; }; }, - saveSUDraftFromObservStrategy: async function(observStrategy, schedulingUnit, constraint) { + getSchedulingConstraintTemplate: async function(id){ + try { + const response = await axios.get(`/api/scheduling_constraints_template/${id}`); + return response.data; + } catch(error) { + console.error(error); + return null; + }; + }, + saveSUDraftFromObservStrategy: async function(observStrategy, schedulingUnit, constraint,station_groups) { try { // Create the scheduling unit draft with observation strategy and scheduling set const url = `/api/scheduling_unit_observing_strategy_template/${observStrategy.id}/create_scheduling_unit/?scheduling_set_id=${schedulingUnit.scheduling_set_id}&name=${schedulingUnit.name}&description=${schedulingUnit.description}` @@ -249,13 +263,17 @@ const ScheduleService = { }; }, - updateSUDraftFromObservStrategy: async function(observStrategy,schedulingUnit,tasks,tasksToUpdate) { + updateSUDraftFromObservStrategy: async function(observStrategy,schedulingUnit,tasks,tasksToUpdate,station_groups) { try { delete schedulingUnit['duration']; + schedulingUnit = await this.updateSchedulingUnitDraft(schedulingUnit); for (const taskToUpdate in tasksToUpdate) { let task = tasks.find(task => { return task.name === taskToUpdate}); task.specifications_doc = observStrategy.template.tasks[taskToUpdate].specifications_doc; + if (task.specifications_doc.station_groups) { + task.specifications_doc.station_groups = station_groups; + } delete task['duration']; delete task['relative_start_time']; delete task['relative_stop_time']; @@ -332,6 +350,36 @@ const ScheduleService = { console.error('[project.services.getSchedulingUnitBySet]',error); } }, + getStationGroup: async function() { + try { + // const response = await axios.get('/api/station_type/'); + // return response.data.results; + return [{ + value: 'Dutch' + },{ + value: 'International' + },{ + value: 'Core' + },{ + value: 'Remote' + },{ + value: 'Superterp' + }] + } catch(error) { + console.error(error); + return []; + }; + }, + getStations: async function(e) { + try { + // const response = await axios.get('/api/station_groups/stations/1/dutch'); + const response = await axios.get(`/api/station_groups/stations/1/${e}`); + return response.data; + } catch(error) { + console.error(error); + return []; + } + }, getProjectList: async function() { try { const response = await axios.get('/api/project/'); @@ -342,5 
+390,4 @@ const ScheduleService = { } } - export default ScheduleService; \ No newline at end of file diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/util.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/util.service.js index 4d0d81cba4207dafe52925d3e049a92c1e946426..5d629c58d0dbbc340bf084c0f57ff31f0868cee2 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/services/util.service.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/services/util.service.js @@ -1,3 +1,4 @@ +import moment from 'moment'; const axios = require('axios'); axios.defaults.headers.common['Authorization'] = 'Basic dGVzdDp0ZXN0'; @@ -37,6 +38,33 @@ const UtilService = { } catch(error) { console.error(error); } + }, + getSunTimings: async(timestamp, station) => { + try { + let stationTimestamp = (station?`${station}-`:"") + timestamp; + let localSunTimeMap = localStorage.getItem('SUN_TIME_MAP'); + if (localSunTimeMap) { + localSunTimeMap = JSON.parse(localSunTimeMap); + if (localSunTimeMap[stationTimestamp]) { + return Promise.resolve(localSunTimeMap[stationTimestamp]); + } + } else { + localSunTimeMap = {}; + } + // const url = `/api/sun_rise_and_set/${timestamp}`; + // const sunTimings = (await axios.get(url)).data; + let sunTimings = {sun_rise: moment.utc(moment(timestamp, "YYYYMMDDTHH:mm:ss")).format('YYYY-MM-DDT06:30:00.sssss')+"Z", + sun_set: moment.utc(moment(timestamp, "YYYYMMDDTHH:mm:ss")).format('YYYY-MM-DDT17:00:00.sssss')+"Z"}; + if (station==="CS001") { + sunTimings = {sun_rise: moment.utc(moment(timestamp, "YYYYMMDDTHH:mm:ss")).format('YYYY-MM-DDT05:30:00.sssss')+"Z", + sun_set: moment.utc(moment(timestamp, "YYYYMMDDTHH:mm:ss")).format('YYYY-MM-DDT16:00:00.sssss')+"Z"}; + } + localSunTimeMap[stationTimestamp] = sunTimings; + localStorage.setItem('SUN_TIME_MAP', JSON.stringify(localSunTimeMap)); + return sunTimings; + } catch(error) { + console.error(error); + } } } diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/workflow.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/workflow.service.js new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/SAS/TMSS/services/CMakeLists.txt b/SAS/TMSS/services/CMakeLists.txt index b1cdad1bc8906d3ba0302fe6c867a6eb8bff9df1..7ca90e1a5220ba1c278a45e986029e408c2506d6 100644 --- a/SAS/TMSS/services/CMakeLists.txt +++ b/SAS/TMSS/services/CMakeLists.txt @@ -1,4 +1,4 @@ -lofar_add_package(TMSSSubtaskSchedulingService subtask_scheduling) +lofar_add_package(TMSSSchedulingService scheduling) lofar_add_package(TMSSFeedbackHandlingService feedback_handling) lofar_add_package(TMSSPostgresListenerService tmss_postgres_listener) diff --git a/SAS/TMSS/services/scheduling/CMakeLists.txt b/SAS/TMSS/services/scheduling/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..34de269349de481543af911fa1ad28162fb07b2f --- /dev/null +++ b/SAS/TMSS/services/scheduling/CMakeLists.txt @@ -0,0 +1,11 @@ +lofar_package(TMSSSchedulingService 0.1 DEPENDS TMSSClient PyCommon pyparameterset PyMessaging) + +lofar_find_package(PythonInterp 3.4 REQUIRED) + +include(FindPythonModule) +find_python_module(astroplan REQUIRED) # pip3 install astroplan + +add_subdirectory(lib) +add_subdirectory(bin) +add_subdirectory(test) + diff --git a/SAS/TMSS/services/scheduling/bin/CMakeLists.txt b/SAS/TMSS/services/scheduling/bin/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..a84d2f43814392f07e0b938b47c91e386e95fe4f --- /dev/null +++ 
b/SAS/TMSS/services/scheduling/bin/CMakeLists.txt @@ -0,0 +1,4 @@ +lofar_add_bin_scripts(tmss_scheduling_service) + +# supervisord config files +lofar_add_sysconf_files(tmss_scheduling_service.ini DESTINATION supervisord.d) diff --git a/SAS/TMSS/services/scheduling/bin/tmss_scheduling_service b/SAS/TMSS/services/scheduling/bin/tmss_scheduling_service new file mode 100755 index 0000000000000000000000000000000000000000..5f4d206b4a453635cb8f5ffcab9234b5b468da30 --- /dev/null +++ b/SAS/TMSS/services/scheduling/bin/tmss_scheduling_service @@ -0,0 +1,57 @@ +#!/usr/bin/python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + + +import os +from optparse import OptionParser +import logging +logger = logging.getLogger(__name__) + +from lofar.messaging.config import DEFAULT_BROKER, DEFAULT_BUSNAME + +def main(): + # make sure we run in UTC timezone + os.environ['TZ'] = 'UTC' + + logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + + # Check the invocation arguments + parser = OptionParser('%prog [options]', + description='run the tmss_scheduling_service, which automatically schedules the defined successor subtasks for finished subtasks, and dynamically schedules schedulable scheduling units') + parser.add_option('-q', '--broker', dest='broker', type='string', default=DEFAULT_BROKER, help='Address of the messaging broker, default: %default') + parser.add_option('--exchange', dest='exchange', type='string', default=DEFAULT_BUSNAME, help='Name of the exchange on the messaging broker, default: %default') + parser.add_option('-t', '--tmss_client_credentials_id', dest='tmss_client_credentials_id', type='string', + default=os.environ.get("TMSS_CLIENT_DBCREDENTIALS", "TMSSClient"), + help='the credentials id for the file in ~/.lofar/dbcredentials which holds the TMSS http REST api url and credentials, default: %default') + (options, args) = parser.parse_args() + + os.environ["DJANGO_SETTINGS_MODULE"] = "lofar.sas.tmss.tmss.settings" + import django + django.setup() + + from lofar.common.util import waitForInterrupt + from lofar.sas.tmss.services.scheduling.subtask_scheduling import create_subtask_scheduling_service + from lofar.sas.tmss.services.scheduling.dynamic_scheduling import create_dynamic_scheduling_service + + with create_subtask_scheduling_service(options.exchange, options.broker, options.tmss_client_credentials_id): + with create_dynamic_scheduling_service(options.exchange, options.broker): + waitForInterrupt() + +if __name__ == '__main__': + main() diff --git a/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service.ini b/SAS/TMSS/services/scheduling/bin/tmss_scheduling_service.ini similarity index 100% rename from SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service.ini 
rename to SAS/TMSS/services/scheduling/bin/tmss_scheduling_service.ini diff --git a/SAS/TMSS/services/scheduling/lib/CMakeLists.txt b/SAS/TMSS/services/scheduling/lib/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..f4808987873979c7d600174fca802f167d1689a6 --- /dev/null +++ b/SAS/TMSS/services/scheduling/lib/CMakeLists.txt @@ -0,0 +1,13 @@ +lofar_find_package(PythonInterp 3.4 REQUIRED) +include(PythonInstall) + +set(_py_files + dynamic_scheduling.py + subtask_scheduling.py + constraints/__init__.py + constraints/template_constraints_v1.py + ) + +python_install(${_py_files} + DESTINATION lofar/sas/tmss/services/scheduling) + diff --git a/SAS/TMSS/services/scheduling/lib/constraints/__init__.py b/SAS/TMSS/services/scheduling/lib/constraints/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..49f9857f8f2630dee58271dd8b59596fe168f702 --- /dev/null +++ b/SAS/TMSS/services/scheduling/lib/constraints/__init__.py @@ -0,0 +1,238 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2020 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it +# and/or modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +# + +""" +This __init__ module for this constraints python package defines the 'API' to: + - filter a list of schedulable scheduling_units by checking their constraints: see method filter_scheduling_units_using_constraints + - sort a (possibly filtered) list of schedulable scheduling_units evaluating their constraints and computing a 'fitness' score: see method sort_scheduling_units_scored_by_constraints +These main methods are used in the dynamic_scheduler to pick the next best scheduling unit, and compute the midterm schedule. + +Currently we have only one SchedulingConstraintsTemplate in TMSS, named 'constraints', version 1. +But it is envisioned that we will get more templates. +So, based on the template, the actual filter and score methods are selected from a specific module. +By convention we use one module per template. Currently we have and use only one module, template_constraints_v1.py. + +If/when we add a new SchedulingConstraintsTemplate, then we should add a new module with the specific filter and score methods, +and add an extra 'if' in the strategy pattern used here. 
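A rough sketch of that per-template dispatch follows; the attribute path from a scheduling unit to its constraints template is an assumption here, and only UnknownTemplateException and template_constraints_v1 are taken from this change:

```python
# Hypothetical sketch of the strategy-pattern 'if' described above; not the
# actual implementation, which follows below in this module.
from lofar.sas.tmss.tmss.exceptions import UnknownTemplateException
from . import template_constraints_v1

def can_run_within_timewindow(scheduling_unit, lower_bound, upper_bound) -> bool:
    template = scheduling_unit.draft.scheduling_constraints_template  # assumed attribute path
    if template.name == 'constraints' and template.version == 1:
        # delegate to the module implementing this template version
        return template_constraints_v1.can_run_within_timewindow(scheduling_unit, lower_bound, upper_bound)
    # no constraint solver module for this template (yet); callers handle this exception
    raise UnknownTemplateException("no constraint solver for template '%s' version %s" % (template.name, template.version))
```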
(see below for implementation)
+"""
+
+import logging
+logger = logging.getLogger(__name__)
+from datetime import datetime
+from typing import NamedTuple
+
+from lofar.sas.tmss.tmss.tmssapp import models
+from lofar.sas.tmss.tmss.exceptions import *
+
+################## main data struct and methods ##################
+
+class ScoredSchedulingUnit(NamedTuple):
+    '''struct for collecting the scores per constraint plus a weighted_score for a scheduling_unit at the given start_time'''
+    scheduling_unit: models.SchedulingUnitBlueprint
+    scores: dict
+    start_time: datetime
+    weighted_score: float
+
+
+def filter_scheduling_units_using_constraints(scheduling_units: [models.SchedulingUnitBlueprint], lower_bound: datetime, upper_bound: datetime) -> [models.SchedulingUnitBlueprint]:
+    """
+    Filter the given scheduling_units on whether their constraints can be met within the given time window.
+    If one or more scheduling units can run only within this time window and not after it, then only those exclusively runnable scheduling units are returned.
+    :param lower_bound: evaluate and score the constraints at and after lower_bound. The returned units have a start_time guaranteed at or after lower_bound.
+    :param upper_bound: evaluate and score the constraints before upper_bound. The returned units have a stop_time guaranteed before upper_bound.
+    :param scheduling_units: evaluate/filter these scheduling_units.
+    Returns a list of the scheduling_units whose constraints can be met within the given time window.
+    """
+    runnable_scheduling_units = []
+    runnable_exclusive_in_this_window_scheduling_units = []
+
+    for scheduling_unit in scheduling_units:
+        try:
+            if can_run_within_timewindow(scheduling_unit, lower_bound, upper_bound):
+                runnable_scheduling_units.append(scheduling_unit)
+
+                # if a scheduling unit cannot run after this window, then apparently it is limited to run exclusively in this time window.
+                earliest_possible_start_time = get_earliest_possible_start_time(scheduling_unit, lower_bound)
+                if not can_run_after(scheduling_unit, earliest_possible_start_time+scheduling_unit.duration):
+                    runnable_exclusive_in_this_window_scheduling_units.append(scheduling_unit)
+        except UnknownTemplateException as e:
+            # TODO: how do we notify the user that we cannot dynamically schedule this scheduling unit due to an unknown template?
+            # current pragmatic solution: log a warning, and set the scheduling unit state to error via its schedulable subtasks.
+            # This ensures that the unit is not schedulable anymore, and forces the user to take action.
+            # For example, the user can choose a different template,
+            # or submit a feature request to implement constraint solvers for this new template.
+            logger.warning(e)
+            for subtask in models.Subtask.independent_subtasks().filter(task_blueprint__scheduling_unit_blueprint_id=scheduling_unit.id).all():
+                subtask.status = models.SubtaskState.objects.get(value=models.SubtaskState.Choices.ERROR.value)
+                subtask.save()
+
+    # if we have scheduling unit(s) that can run exclusively in this time window (and not afterwards), then return only these.
+    if runnable_exclusive_in_this_window_scheduling_units:
+        return runnable_exclusive_in_this_window_scheduling_units
+
+    # there are no exclusive units, so return all runnable_scheduling_units
+    return runnable_scheduling_units
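The filtering above and the scoring/sorting below together form the public API of this package. A minimal usage sketch (hypothetical: candidate_units stands for a list of schedulable SchedulingUnitBlueprints, and a configured Django/TMSS context is assumed):

    from datetime import datetime, timedelta

    window_start = datetime.utcnow()
    window_stop = window_start + timedelta(hours=24)  # illustrative 24h search window

    # step 1: keep only the units whose constraints can be met in the window
    runnable = filter_scheduling_units_using_constraints(candidate_units, window_start, window_stop)

    # step 2: score them and pick the best one, with its proposed start_time
    best = get_best_scored_scheduling_unit_scored_by_constraints(runnable, window_start, window_stop)
    if best is not None:
        print("next: unit id=%s at %s (weighted_score=%.2f)" % (best.scheduling_unit.id, best.start_time, best.weighted_score))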
+
+
+def get_best_scored_scheduling_unit_scored_by_constraints(scheduling_units: [models.SchedulingUnitBlueprint], lower_bound_start_time:datetime, upper_bound_stop_time:datetime) -> ScoredSchedulingUnit:
+    """
+    get the best scored schedulable scheduling_unit which can run within the given time window from the given scheduling_units.
+    :param lower_bound_start_time: evaluate and score the constraints at and after lower_bound_start_time. The returned unit has a start_time guaranteed at or after lower_bound_start_time.
+    :param upper_bound_stop_time: evaluate and score the constraints before upper_bound_stop_time. The returned unit has a stop_time guaranteed before upper_bound_stop_time.
+    :param scheduling_units: evaluate these scheduling_units.
+    Returns a ScoredSchedulingUnit struct with the best next schedulable scheduling unit and its proposed start_time where it best fits its constraints.
+    """
+    sorted_scored_scheduling_units = sort_scheduling_units_scored_by_constraints(scheduling_units, lower_bound_start_time, upper_bound_stop_time)
+
+    if sorted_scored_scheduling_units:
+        # they are sorted best to worst, so return/use the first.
+        best_scored_scheduling_unit = sorted_scored_scheduling_units[0]
+        return best_scored_scheduling_unit
+
+    return None
+
+
+def sort_scheduling_units_scored_by_constraints(scheduling_units: [models.SchedulingUnitBlueprint], lower_bound_start_time: datetime, upper_bound_stop_time: datetime) -> [ScoredSchedulingUnit]:
+    """
+    Compute the score and proposed start_time for all given scheduling_units. Return them sorted by their weighted_score, best first.
+    :param lower_bound_start_time: evaluate and score the constraints at and after lower_bound_start_time. The returned units have a start_time guaranteed at or after lower_bound_start_time.
+    :param upper_bound_stop_time: evaluate and score the constraints before upper_bound_stop_time. The returned units have a stop_time guaranteed before upper_bound_stop_time.
+    :param scheduling_units: evaluate these scheduling_units.
+    Returns a list of ScoredSchedulingUnit structs with the score details, a weighted_score and a proposed start_time where each unit best fits its constraints.
+    """
+
+    scored_scheduling_units = []
+    for scheduling_unit in scheduling_units:
+        try:
+            scored_scheduling_unit = compute_scores(scheduling_unit, lower_bound_start_time, upper_bound_stop_time)
+
+            # check and ensure that the proposed start_time is within the required [lower_bound_start_time, upper_bound_stop_time] window.
+            schedulable_unit = scored_scheduling_unit.scheduling_unit
+            proposed_start_time = scored_scheduling_unit.start_time
+            proposed_stop_time = proposed_start_time + schedulable_unit.duration
+
+            if proposed_start_time < lower_bound_start_time:
+                raise DynamicSchedulingException("The best next schedulable scheduling_unit id=%s has a proposed start_time '%s' before the given lower bound '%s'" % (
+                    schedulable_unit.id, proposed_start_time, lower_bound_start_time))
+
+            if proposed_stop_time > upper_bound_stop_time:
+                raise DynamicSchedulingException("The best next schedulable scheduling_unit id=%s has a proposed stop_time '%s' after the given upper bound '%s'" % (
+                    schedulable_unit.id, proposed_stop_time, upper_bound_stop_time))
+
+            scored_scheduling_units.append(scored_scheduling_unit)
+        except (UnknownTemplateException, DynamicSchedulingException) as e:
+            # TODO: how do we notify the user that we cannot dynamically schedule this scheduling unit due to an unknown template?
+            # current pragmatic solution: log a warning, and set the scheduling unit state to error via its schedulable subtasks.
+            # This ensures that the unit is not schedulable anymore, and forces the user to take action.
+            # For example, the user can choose a different template,
+            # or submit a feature request to implement constraint solvers for this new template.
+            logger.warning(e)
+            for subtask in models.Subtask.independent_subtasks().filter(task_blueprint__scheduling_unit_blueprint_id=scheduling_unit.id).all():
+                subtask.status = models.SubtaskState.objects.get(value=models.SubtaskState.Choices.ERROR.value)
+                subtask.save()
+
+    return sorted(scored_scheduling_units, key=lambda x: x.weighted_score, reverse=True)
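The best-first ordering relies only on the weighted_score field of the ScoredSchedulingUnit tuple. A toy illustration of that sort, runnable without a database (None stands in for a real SchedulingUnitBlueprint, since NamedTuple annotations are not enforced at runtime):

    from datetime import datetime

    now = datetime.utcnow()
    a = ScoredSchedulingUnit(scheduling_unit=None, scores={'time': 0.4}, start_time=now, weighted_score=0.4)
    b = ScoredSchedulingUnit(scheduling_unit=None, scores={'time': 0.9}, start_time=now, weighted_score=0.9)

    ranked = sorted([a, b], key=lambda x: x.weighted_score, reverse=True)
    assert [u.weighted_score for u in ranked] == [0.9, 0.4]  # best first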
+
+
+################## helper methods #################################################################
+#                                                                                                 #
+# these helper methods are selected by a strategy pattern based on the template name and version  #
+# The actual implementations can be found in the other module(s) in this package                  #
+# Currently we only have one template with one implementation in template_constraints_v1.py       #
+#                                                                                                 #
+###################################################################################################
+
+def can_run_within_timewindow(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool:
+    '''Check if the given scheduling_unit can run somewhere within the given time window, depending on the unit's constraints-template/doc.'''
+    constraints_template = scheduling_unit.draft.scheduling_constraints_template
+
+    # choose the appropriate method based on the template (strategy pattern), or raise
+    if constraints_template.name == 'constraints' and constraints_template.version == 1:
+        # import here to prevent circular imports. Do not worry about performance loss, because Python only imports once and then uses a cache.
+        from . import template_constraints_v1
+        return template_constraints_v1.can_run_within_timewindow(scheduling_unit, lower_bound, upper_bound)
+
+    # TODO: if we get more constraint templates or versions, then add a check here and import and use the new module with the constraint methods for that specific template.
(strategy pattern) + + raise UnknownTemplateException("Cannot check if scheduling_unit id=%s can run between '%s' and '%s', because we have no constraint checker for scheduling constraints template '%s' version=%s" % ( + scheduling_unit.id, lower_bound, upper_bound, constraints_template.name, constraints_template.version)) + + +def can_run_after(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime) -> bool: + '''Check if the given scheduling_unit can run somewhere after the given lowerbound timestamp depending on the sub's constrains-template/doc.''' + constraints_template = scheduling_unit.draft.scheduling_constraints_template + + # choose appropriate method based on template (strategy pattern), or raise + if constraints_template.name == 'constraints' and constraints_template.version == 1: + # import here to prevent circular imports. Do not worry about performance loss, cause python only imports once and then uses a cache. + from . import template_constraints_v1 + return template_constraints_v1.can_run_after(scheduling_unit, lower_bound) + + # TODO: if we get more constraint templates or versions, then add a check here and import and use the new module with the constraint methods for that specific template. (strategy pattern) + + raise UnknownTemplateException("Cannot check if scheduling_unit id=%s can run after '%s', because we have no constraint checker for scheduling constraints template '%s' version=%s" % ( + scheduling_unit.id, lower_bound, constraints_template.name, constraints_template.version)) + + + +def compute_scores(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound:datetime, upper_bound:datetime) -> ScoredSchedulingUnit: + '''Compute the "fitness" scores per constraint for the given scheduling_unit at the given starttime depending on the sub's constrains-template/doc.''' + constraints_template = scheduling_unit.draft.scheduling_constraints_template + + # choose appropriate method based on template (strategy pattern), or raise + if constraints_template.name == 'constraints' and constraints_template.version == 1: + # import here to prevent circular imports. Do not worry about performance loss, cause python only imports once and then uses a cache. + from . import template_constraints_v1 + return template_constraints_v1.compute_scores(scheduling_unit, lower_bound, upper_bound) + + # TODO: if we get more constraint templates or versions, then add a check here and import and use the new module with the constraint methods for that specific template. (strategy pattern) + + raise UnknownTemplateException("Cannot compute scores for scheduling_unit id=%s, because we have no score computation method for scheduling constraints template '%s' version=%s" % ( + scheduling_unit.id, constraints_template.name, constraints_template.version)) + + +def get_earliest_possible_start_time(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime) -> datetime: + '''determine the earliest possible start_time for the given scheduling unit, taking into account all its constraints''' + constraints_template = scheduling_unit.draft.scheduling_constraints_template + + # choose appropriate method based on template (strategy pattern), or raise + if constraints_template.name == 'constraints' and constraints_template.version == 1: + # import here to prevent circular imports. Do not worry about performance loss, cause python only imports once and then uses a cache. + from . 
import template_constraints_v1 + return template_constraints_v1.get_earliest_possible_start_time(scheduling_unit, lower_bound) + + # TODO: if we get more constraint templates or versions, then add a check here and import and use the new module with the constraint methods for that specific template. (strategy pattern) + + raise UnknownTemplateException("Cannot compute earliest possible start_time for scheduling_unit id=%s, because we have no constraint checker for scheduling constraints template '%s' version=%s" % ( + scheduling_unit.id, constraints_template.name, constraints_template.version)) + + +def get_min_earliest_possible_start_time(scheduling_units: [models.SchedulingUnitBlueprint], lower_bound: datetime) -> datetime: + '''deterimine the earliest possible starttime over all given scheduling units, taking into account all their constraints''' + try: + return min(get_earliest_possible_start_time(scheduling_unit, lower_bound) for scheduling_unit in scheduling_units) + except ValueError: + return lower_bound + + + + diff --git a/SAS/TMSS/services/scheduling/lib/constraints/template_constraints_v1.py b/SAS/TMSS/services/scheduling/lib/constraints/template_constraints_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..247f89851ccdda58cdb07b98639c1349c45825fc --- /dev/null +++ b/SAS/TMSS/services/scheduling/lib/constraints/template_constraints_v1.py @@ -0,0 +1,211 @@ +#!/usr/bin/env python3 + +# dynamic_scheduling.py +# +# Copyright (C) 2020 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it +# and/or modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +# +# $Id: $ + +""" +""" + +import logging +logger = logging.getLogger(__name__) +from datetime import datetime, timedelta +from dateutil import parser + +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.sas.tmss.tmss.tmssapp.conversions import create_astroplan_observer_for_station, Time, timestamps_and_stations_to_sun_rise_and_set + +from . 
import ScoredSchedulingUnit
+
+def can_run_within_timewindow(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool:
+    '''determine if the given scheduling_unit can run within the given time window, evaluating all constraints from the "constraints" version 1 template'''
+    if has_manual_scheduler_constraint(scheduling_unit):
+        return False
+
+    if not can_run_within_timewindow_with_time_constraints(scheduling_unit, lower_bound, upper_bound):
+        return False
+
+    if not can_run_within_timewindow_with_sky_constraints(scheduling_unit, lower_bound, upper_bound):
+        return False
+
+    if not can_run_within_timewindow_with_daily_constraints(scheduling_unit, lower_bound, upper_bound):
+        return False
+
+    return True
+
+
+def can_run_after(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime) -> bool:
+    '''Check if the given scheduling_unit can run somewhere after the given lower_bound timestamp, depending on the unit's constraints-template/doc.'''
+    constraints = scheduling_unit.draft.scheduling_constraints_doc
+    if 'before' in constraints['time']:
+        before = parser.parse(constraints['time']['before'], ignoretz=True)
+        return before > lower_bound
+
+    return True
+
+# only expose the can_run_within_timewindow and can_run_after methods, and keep the details hidden from this module's importers, who do not need these implementation details
+__all__ = ['can_run_within_timewindow', 'can_run_after']
+
+
+def has_manual_scheduler_constraint(scheduling_unit: models.SchedulingUnitBlueprint) -> bool:
+    '''evaluate the scheduler constraint. Should this unit be manually scheduled?'''
+    constraints = scheduling_unit.draft.scheduling_constraints_doc
+    return constraints.get('scheduler', '') == 'manual'
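All helpers in this module read the unit's scheduling_constraints_doc. For orientation, an illustrative (hand-written, not schema-validated) version-1 document could look as follows; the field names match the lookups in this module, but the authoritative shape is defined by the 'constraints' SchedulingConstraintsTemplate schema in TMSS:

    example_constraints_doc = {
        'scheduler': 'dynamic',  # 'manual' keeps the unit out of dynamic scheduling; manual units use time['at']
        'time': {'after': '2020-11-01T12:00:00',     # parsed with dateutil's parser.parse(..., ignoretz=True)
                 'before': '2020-11-02T12:00:00'},
        'daily': {'require_day': False, 'require_night': True},
        'sky': {}  # sky constraints are not evaluated yet, see the TODOs below
    }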
+
+
+def can_run_within_timewindow_with_daily_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool:
+    '''evaluate the daily constraint'''
+    constraints = scheduling_unit.draft.scheduling_constraints_doc
+    if not (constraints['daily']['require_day'] or constraints['daily']['require_night']):
+        # no day/night restrictions, can run any time
+        return True
+
+    if constraints['daily']['require_day'] or constraints['daily']['require_night']:
+        # TODO: TMSS-254 and TMSS-255
+        # TODO: take avoid_twilight into account
+        # Please note that this first crude proof of concept treats sunset/sunrise as 'events',
+        # whereas in our definition they are transition periods. See: TMSS-435
+
+        # Ugly code. Should be improved. Works for the demo.
+        # create a series of timestamps in the window of opportunity, and evaluate whether they all fall during day or during night
+        possible_start_time = get_earliest_possible_start_time(scheduling_unit, lower_bound)
+
+        # TODO: use the specified total observation duration, and ignore pipelines which don't care about day/night
+        possible_stop_time = possible_start_time + scheduling_unit.duration
+        timestamps = [possible_start_time]
+        while timestamps[-1] < possible_stop_time - timedelta(hours=8):
+            timestamps.append(timestamps[-1] + timedelta(hours=8))
+        timestamps.append(possible_stop_time)
+
+        LOFAR_CENTER_OBSERVER = create_astroplan_observer_for_station('CS002')
+        if constraints['daily']['require_night'] and all(LOFAR_CENTER_OBSERVER.is_night(timestamp) for timestamp in timestamps):
+            return True
+
+        if constraints['daily']['require_day'] and all(not LOFAR_CENTER_OBSERVER.is_night(timestamp) for timestamp in timestamps):
+            return True
+
+    return False
+
+
+def can_run_within_timewindow_with_time_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool:
+    '''evaluate the time constraint(s)'''
+    constraints = scheduling_unit.draft.scheduling_constraints_doc
+    # TODO: TMSS-244 (and more?), evaluate the constraints in constraints['time']
+    if has_manual_scheduler_constraint(scheduling_unit):
+        at = parser.parse(constraints['time']['at'], ignoretz=True)
+        return at >= lower_bound and at+scheduling_unit.duration <= upper_bound
+
+    if 'before' in constraints['time']:
+        before = parser.parse(constraints['time']['before'], ignoretz=True)
+        return before <= upper_bound-scheduling_unit.duration
+
+    if 'after' in constraints['time']:
+        after = parser.parse(constraints['time']['after'], ignoretz=True)
+        return lower_bound >= after
+
+    # if 'between' in constraints['time']:
+    #     betweens = [ dateutil.parser.parse(constraints['time']['between'])
+    #     return lower_bound >= after
+
+    return True  # for now, ignore the remaining time constraints.
+
+
+def can_run_within_timewindow_with_sky_constraints(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime, upper_bound: datetime) -> bool:
+    '''evaluate the sky constraint(s)'''
+    constraints = scheduling_unit.draft.scheduling_constraints_doc
+    # TODO: TMSS-245 TMSS-250 (and more?), evaluate the constraints in constraints['sky']
+    # maybe even split this method into sub methods for the very distinct sky constraints: min_calibrator_elevation, min_target_elevation, transit_offset & min_distance
+    return True  # for now, ignore sky constraints.
+
+
+def get_earliest_possible_start_time(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound: datetime) -> datetime:
+    constraints = scheduling_unit.draft.scheduling_constraints_doc
+
+    try:
+        if has_manual_scheduler_constraint(scheduling_unit) and 'at' in constraints['time']:
+            at = parser.parse(constraints['time']['at'], ignoretz=True)
+            return at
+
+        if 'after' in constraints['time']:
+            return parser.parse(constraints['time']['after'], ignoretz=True)
+
+        if constraints['daily']['require_day'] or constraints['daily']['require_night']:
+
+            # TODO: TMSS-254 and TMSS-255
+            # TODO: take avoid_twilight into account
+            # for now, use the incorrect proof of concept which works for the demo
+            # but...
this should be rewritten completely using Joerns new sun_events + LOFAR_CENTER_OBSERVER = create_astroplan_observer_for_station('CS002') + sun_events = timestamps_and_stations_to_sun_rise_and_set(timestamps=[lower_bound], stations=['CS002'])['CS002'] + sun_set = sun_events['sunset'][0]['start'] + sun_rise = sun_events['sunrise'][0]['end'] + if constraints['daily']['require_day']: + if lower_bound+scheduling_unit.duration > sun_set: + return LOFAR_CENTER_OBSERVER.sun_rise_time(time=Time(sun_set), which='next').to_datetime() + if lower_bound >= sun_rise: + return lower_bound + return sun_rise + + if constraints['daily']['require_night']: + if lower_bound+scheduling_unit.duration < sun_rise: + return lower_bound + if lower_bound >= sun_set: + return lower_bound + return sun_set + except Exception as e: + logger.exception(str(e)) + + # no constraints dictating starttime? make a guesstimate. + return lower_bound + + +def compute_scores(scheduling_unit: models.SchedulingUnitBlueprint, lower_bound:datetime, upper_bound:datetime) -> ScoredSchedulingUnit: + '''Compute the "fitness" scores per constraint for the given scheduling_unit at the given starttime depending on the sub's constrains-template/doc.''' + constraints = scheduling_unit.draft.scheduling_constraints_doc + + # TODO: add compute_scores methods for each type of constraint + # TODO: take start_time into account. For example, an LST constraint yields a better score when the starttime is such that the center of the obs is at LST. + # TODO: TMSS-??? (and more?), compute score using the constraints in constraints['daily'] + # TODO: TMSS-244 (and more?), compute score using the constraints in constraints['time'] + # TODO: TMSS-245 TMSS-250 (and more?), compute score using the constraints in constraints['sky'] + + # for now (as a proof of concept and sort of example), just return 1's + scores = {'daily': 1.0, + 'time': 1.0, + 'sky': 1.0 } + + # add "common" scores which do not depend on constraints, such as project rank and creation date + # TODO: should be normalized! + scores['project_rank'] = scheduling_unit.draft.scheduling_set.project.priority_rank + #scores['age'] = (datetime.utcnow() - scheduling_unit.created_at).total_seconds() + + try: + # TODO: apply weights. Needs some new weight model in django, probably linked to constraints_template. + # for now, just average the scores + weighted_score = sum(scores.values())/len(scores) + except: + weighted_score = 1 + + return ScoredSchedulingUnit(scheduling_unit=scheduling_unit, + scores=scores, + weighted_score=weighted_score, + start_time=get_earliest_possible_start_time(scheduling_unit, lower_bound)) + diff --git a/SAS/TMSS/services/scheduling/lib/dynamic_scheduling.py b/SAS/TMSS/services/scheduling/lib/dynamic_scheduling.py new file mode 100644 index 0000000000000000000000000000000000000000..a15475960a3e94e18d3dbe0afbf2bd7c93dc3fc5 --- /dev/null +++ b/SAS/TMSS/services/scheduling/lib/dynamic_scheduling.py @@ -0,0 +1,328 @@ +#!/usr/bin/env python3 + +# dynamic_scheduling.py +# +# Copyright (C) 2020 +# ASTRON (Netherlands Institute for Radio Astronomy) +# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it +# and/or modify it under the terms of the GNU General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+# +# The LOFAR software suite is distributed in the hope that it will be +# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. +# +# $Id: $ + +""" +""" + +import os +import logging +logger = logging.getLogger(__name__) +from datetime import datetime, timedelta, time + +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.sas.tmss.tmss.tmssapp.tasks import schedule_independent_subtasks_in_scheduling_unit_blueprint, unschedule_subtasks_in_scheduling_unit_blueprint +from lofar.sas.tmss.tmss.tmssapp.subtasks import update_subtasks_start_times_for_scheduling_unit, clear_defined_subtasks_start_stop_times_for_scheduling_unit +from lofar.sas.tmss.client.tmssbuslistener import * +from lofar.common.datetimeutils import round_to_second_precision +from threading import Thread, Event + +from lofar.sas.tmss.services.scheduling.constraints import * + +# LOFAR needs to have a gap in between observations to (re)initialize hardware. +DEFAULT_INTER_OBSERVATION_GAP = timedelta(seconds=60) + +################## core dynamic scheduling methods ################################################ +# # +# This module starts with the core dynamic scheduling methods which are used in the dynamic # +# scheduling service. These high level methods only filter/score/sort in a generic way. # +# The detailed concrete filter/score/sort methods are pick by a strategy pattern in the # +# constraints package based on each scheduling unit's scheduling_constrains template. # +# # +################################################################################################### + +def find_best_next_schedulable_unit(scheduling_units:[models.SchedulingUnitBlueprint], lower_bound_start_time: datetime, upper_bound_stop_time: datetime) -> ScoredSchedulingUnit: + """ + find the best schedulable scheduling_unit which can run withing the given time window from the given scheduling_units. + :param lower_bound_start_time: evaluate the constrains at and after lower_bound_start_time. The returned unit has a start_time guaranteed at or after lower_bound_start_time. + :param upper_bound_stop_time: evaluate the constrains before upper_bound_stop_time. The returned unit has a stop_time guaranteed before upper_bound_stop_time. + :param scheduling_units: evaluate these scheduling_units. + Returns a ScoredSchedulingUnit struct with the best next schedulable scheduling unit and its proposed start_time where it best fits its contraints. + """ + # ensure upper is greater than or equal to lower + upper_bound_stop_time = max(lower_bound_start_time, upper_bound_stop_time) + + filtered_scheduling_units = filter_scheduling_units_using_constraints(scheduling_units, lower_bound_start_time, upper_bound_stop_time) + + if filtered_scheduling_units: + best_scored_scheduling_unit = get_best_scored_scheduling_unit_scored_by_constraints(filtered_scheduling_units, lower_bound_start_time, upper_bound_stop_time) + return best_scored_scheduling_unit + + # no filtered scheduling units found... 
+ logger.debug("No schedulable scheduling units found which meet the requirements between '%s' and '%s'", lower_bound_start_time, upper_bound_stop_time) + return None + + +def schedule_next_scheduling_unit() -> models.SchedulingUnitBlueprint: + '''find the best next schedulable scheduling unit and try to schedule it. + Overlapping existing scheduled units are unscheduled if their score is lower. + :return: the scheduled scheduling unit.''' + + # --- setup of needed variables --- + schedulable_units = get_schedulable_scheduling_units() + + # estimate the lower_bound_start_time + lower_bound_start_time = get_min_earliest_possible_start_time(schedulable_units, datetime.utcnow()) + + # estimate the upper_bound_stop_time, which may give us a small timewindow before any next scheduled unit, or a default window of a day + try: + upper_bound_stop_time = max(su.start_time for su in get_scheduled_scheduling_units(lower=lower_bound_start_time, upper=lower_bound_start_time + timedelta(days=1))) + except ValueError: + upper_bound_stop_time = lower_bound_start_time + timedelta(days=1) + + # no need to irritate user in log files with subsecond scheduling precision + lower_bound_start_time = round_to_second_precision(lower_bound_start_time) + upper_bound_stop_time = max(round_to_second_precision(upper_bound_stop_time), lower_bound_start_time) + + # --- core routine --- + while lower_bound_start_time < upper_bound_stop_time: + try: + # try to find the best next scheduling_unit + logger.info("schedule_next_scheduling_unit: searching for best scheduling unit to schedule between '%s' and '%s'", lower_bound_start_time, upper_bound_stop_time) + best_scored_scheduling_unit = find_best_next_schedulable_unit(schedulable_units, lower_bound_start_time, upper_bound_stop_time) + if best_scored_scheduling_unit: + best_scheduling_unit = best_scored_scheduling_unit.scheduling_unit + best_scheduling_unit_score = best_scored_scheduling_unit.weighted_score + best_start_time = best_scored_scheduling_unit.start_time + + # make start_time "look nice" for us humans + best_start_time = round_to_second_precision(best_start_time) + + logger.info("schedule_next_scheduling_unit: found best candidate id=%s '%s' weighted_score=%s start_time=%s", + best_scheduling_unit.id, best_scheduling_unit.name, best_scheduling_unit_score, best_start_time) + + if unschededule_blocking_scheduled_units_if_needed_and_possible(best_scored_scheduling_unit): + # no (old) scheduled scheduling_units in the way, so schedule our candidate! + scheduled_scheduling_unit = schedule_independent_subtasks_in_scheduling_unit_blueprint(best_scheduling_unit, start_time=best_start_time) + + logger.info("schedule_next_scheduling_unit: scheduled best candidate id=%s '%s' score=%s start_time=%s", + best_scheduling_unit.id, best_scheduling_unit.name, best_scheduling_unit_score, best_start_time) + return scheduled_scheduling_unit + + except SubtaskSchedulingException as e: + logger.error("Could not schedule scheduling_unit id=%s name='%s'. Error: %s", best_scheduling_unit.id, best_scheduling_unit.name, e) + + # nothing was found, or an error occurred. + # seach again... 
(loop) with the remaining schedulable_units and a new lower_bound_start_time
+        schedulable_units = get_schedulable_scheduling_units()
+        lower_bound_start_time = get_min_earliest_possible_start_time(schedulable_units, lower_bound_start_time + timedelta(hours=1))
+
+
+def assign_start_stop_times_to_schedulable_scheduling_units(lower_bound_start_time: datetime):
+    '''Estimate the mid-term schedule: assign a proposed start/stop time to each schedulable scheduling unit, in best-next order.'''
+    logger.info("Estimating mid-term schedule...")
+
+    scheduling_units = get_schedulable_scheduling_units()
+
+    upper_bound_stop_time = lower_bound_start_time + timedelta(days=365)
+
+    # update the start_times of the remaining ones (so they form a queue, and can be visualized in a timeline)
+    while scheduling_units and lower_bound_start_time < upper_bound_stop_time:
+        best_scored_scheduling_unit = find_best_next_schedulable_unit(scheduling_units, lower_bound_start_time, upper_bound_stop_time)
+
+        if best_scored_scheduling_unit:
+            scheduling_unit = best_scored_scheduling_unit.scheduling_unit
+            start_time = round_to_second_precision(best_scored_scheduling_unit.start_time)
+            logger.info("mid-term schedule: next scheduling unit id=%s '%s' start_time=%s", scheduling_unit.id, scheduling_unit.name, start_time)
+            update_subtasks_start_times_for_scheduling_unit(scheduling_unit, start_time)
+
+            # advance the lower_bound_start_time to the unit's stop_time plus the inter-observation gap
+            lower_bound_start_time = scheduling_unit.stop_time + DEFAULT_INTER_OBSERVATION_GAP
+
+            scheduling_units.remove(scheduling_unit)
+        else:
+            # search again in a later timeslot
+            min_earliest_possible_start_time = get_min_earliest_possible_start_time(scheduling_units, lower_bound_start_time+timedelta(minutes=10))
+            if min_earliest_possible_start_time > lower_bound_start_time:
+                lower_bound_start_time = min_earliest_possible_start_time
+            else:
+                # the search window cannot be advanced any further, so the remaining units cannot be placed
+                logger.warning("Cannot assign start/stop times to the remaining scheduling units for the mid-term schedule...")
+                for su in scheduling_units:
+                    logger.warning("Remaining scheduling unit: id=%s '%s'", su.id, su.name)
+
+                    # clear the start/stop times, so these units don't show up in the timeline,
+                    # and we can show them in a separate list in which the user can tweak the constraints
+                    clear_defined_subtasks_start_stop_times_for_scheduling_unit(su)
+                break
+
+    logger.info("Estimating mid-term schedule... finished")
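do_dynamic_schedule below rounds its search start up to whole seconds with inline timedelta arithmetic (which advances a timestamp that is already on a whole second by one full second). The operation can be expressed as a small standalone helper; a sketch, with ceil_to_second as a hypothetical name that is not part of lofar.common.datetimeutils:

    from datetime import datetime, timedelta

    def ceil_to_second(t: datetime) -> datetime:
        # round up to the next whole second; whole-second timestamps are returned unchanged
        if t.microsecond == 0:
            return t
        return t + timedelta(microseconds=1000000 - t.microsecond)

    assert ceil_to_second(datetime(2020, 1, 1, 12, 0, 0, 250000)) == datetime(2020, 1, 1, 12, 0, 1)
    assert ceil_to_second(datetime(2020, 1, 1, 12, 0, 0)) == datetime(2020, 1, 1, 12, 0, 0)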
finished") + + +def do_dynamic_schedule() -> models.SchedulingUnitBlueprint: + '''do a full update of the schedule: schedule next scheduling unit and assign start stop times to remaining schedulable scheduling units''' + logger.info("Updating dynamic schedule....") + scheduled_unit = schedule_next_scheduling_unit() + + # determine next possible start time for remaining scheduling_units + if scheduled_unit: + lower_bound_start_time = scheduled_unit.stop_time + DEFAULT_INTER_OBSERVATION_GAP + else: + try: + scheduled_units = get_scheduled_scheduling_units(datetime.utcnow(), datetime.utcnow()) + lower_bound_start_time = max([s.stop_time for s in scheduled_units if s.stop_time is not None]) + DEFAULT_INTER_OBSERVATION_GAP + except: + lower_bound_start_time = datetime.utcnow() + + # round up to next nearest second + lower_bound_start_time += timedelta(microseconds=1000000-lower_bound_start_time.microsecond) + + # determine mid-term schedule by assigning start/stop times to remaining schedulable units using the same search strategy + assign_start_stop_times_to_schedulable_scheduling_units(lower_bound_start_time) + logger.info("Finished updating dynamic schedule") + + return scheduled_unit + + +################## service/messagebug handler class ############################################### + +class TMSSDynamicSchedulingMessageHandler(TMSSEventMessageHandler): + ''' + The TMSSDynamicSchedulingMessageHandler reacts to TMSS EventMessages by triggering a new full update of the dynamic + schedule. + The actual schedule-update method runs on a backround thread, and can take some time to complete ranging from a + few seconds to several minutes. In the mean time new EventMessages may be received. These are handled by raising a flag + that signals the schedule-update-thread that a new full update is needed. This way, a burst of Events results in + a single update, and it also ensures that we always compute the schedule with the latest data. + ''' + + def __init__(self): + super().__init__(log_event_messages=True) + self._scheduling_thread = None + self._scheduling_thread_running = False + self._do_schedule_event = Event() + + def start_handling(self): + # start the background thread which waits until the _do_schedule_event event is set upon receiving to the correct TMSS EVentMessages. + self._scheduling_thread = Thread(target=TMSSDynamicSchedulingMessageHandler._scheduling_loop, kwargs={'self':self}) + self._scheduling_thread.daemon = True + self._scheduling_thread_running = True + self._scheduling_thread.start() + super().start_handling() + + def stop_handling(self): + self._scheduling_thread_running = False + self._scheduling_thread.join() + self._scheduling_thread = None + super().stop_handling() + + def onSchedulingUnitBlueprintStatusChanged(self, id: int, status: str): + if status in ["schedulable", "observed", "finished", "cancelled"]: + logger.info("onSchedulingUnitBlueprintStatusChanged(id=%s, status=%s): triggering update of dynamic schedule...", id, status) + # scheduling takes a long time, longer then creating many scheduling units in bulk + # so, we do not create a complete new schedule for each new unit, + # but we only trigger a new schedule update. + # This way we are sure that the latest units are always taken into account while scheduling, but we do not waste cpu cylces. 
+
+    def onSchedulingUnitDraftConstraintsUpdated(self, id: int, scheduling_constraints_doc: dict):
+        affected_scheduling_units = models.SchedulingUnitBlueprint.objects.filter(draft__id=id).all()
+        for scheduling_unit in affected_scheduling_units:
+            if scheduling_unit.status == 'scheduled':
+                unschedule_subtasks_in_scheduling_unit_blueprint(scheduling_unit)
+
+        self._do_schedule_event.set()
+
+    def onSettingUpdated(self, name: str, value: bool):
+        if name == models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value and value:
+            logger.info("%s was set to %s: triggering update of dynamic schedule...", name, value)
+            self._do_schedule_event.set()
+
+    def _scheduling_loop(self):
+        while self._scheduling_thread_running:
+            if self._do_schedule_event.wait(timeout=10):
+                self._do_schedule_event.clear()
+                try:
+                    if models.Setting.objects.get(name=models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value).value:
+                        do_dynamic_schedule()
+                    else:
+                        logger.warning("Skipping update of dynamic schedule because the setting %s=%s", models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value, models.Setting.objects.get(name=models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value).value)
+                except Exception as e:
+                    logger.exception(str(e))
+                    # just continue processing events. better luck next time...
+
+
+def create_dynamic_scheduling_service(exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER):
+    return TMSSBusListener(handler_type=TMSSDynamicSchedulingMessageHandler,
+                           handler_kwargs=None,
+                           exchange=exchange,
+                           broker=broker)
+
+
+################## helper methods #################################################################
+
+def get_schedulable_scheduling_units() -> [models.SchedulingUnitBlueprint]:
+    '''get a list of all schedulable scheduling_units'''
+    defined_independent_subtasks = models.Subtask.independent_subtasks().filter(state__value='defined')
+    defined_independent_subtask_ids = defined_independent_subtasks.values('task_blueprint__scheduling_unit_blueprint_id').distinct().all()
+    scheduling_units = models.SchedulingUnitBlueprint.objects.filter(id__in=defined_independent_subtask_ids).select_related('draft', 'draft__scheduling_constraints_template').all()
+    return [su for su in scheduling_units if su.status == 'schedulable']
+
+
+def get_scheduled_scheduling_units(lower:datetime=None, upper:datetime=None) -> [models.SchedulingUnitBlueprint]:
+    '''get a list of all scheduled scheduling_units'''
+    scheduled_subtasks = models.Subtask.objects.filter(state__value='scheduled')
+    if lower is not None:
+        scheduled_subtasks = scheduled_subtasks.filter(stop_time__gte=lower)
+    if upper is not None:
+        scheduled_subtasks = scheduled_subtasks.filter(start_time__lte=upper)
+    return list(models.SchedulingUnitBlueprint.objects.filter(id__in=scheduled_subtasks.values('task_blueprint__scheduling_unit_blueprint_id').distinct()).all())
+
+
+def unschededule_blocking_scheduled_units_if_needed_and_possible(candidate: ScoredSchedulingUnit) -> bool:
+    '''check if there are any already scheduled units in the way, and unschedule them if allowed.
Return True if nothing is blocking anymore.''' + # check any previously scheduled units, and unschedule if needed/allowed + scheduled_scheduling_units = get_scheduled_scheduling_units(lower=candidate.start_time, + upper=candidate.start_time + candidate.scheduling_unit.duration) + + # check if we can and need to unschedule the blocking units + for scheduled_scheduling_unit in scheduled_scheduling_units: + scheduled_score = compute_scores(scheduled_scheduling_unit, candidate.start_time, candidate.start_time + candidate.scheduling_unit.duration) + + if candidate.weighted_score > scheduled_score.weighted_score: + # ToDo: also check if the scheduled_scheduling_unit is manually/dynamically scheduled + logger.info("unscheduling id=%s '%s' because it is in the way and has a lower score than the best candidate id=%s '%s' score=%s start_time=%s", + scheduled_scheduling_unit.id, scheduled_scheduling_unit.name, + candidate.scheduling_unit.id, candidate.scheduling_unit.name, candidate.weighted_score, candidate.scheduling_unit.start_time) + + unschedule_subtasks_in_scheduling_unit_blueprint(scheduled_scheduling_unit) + + # check again... are still there any scheduled_scheduling_units in the way? + scheduled_scheduling_units = get_scheduled_scheduling_units(lower=candidate.start_time, + upper=candidate.start_time + candidate.scheduling_unit.duration) + if scheduled_scheduling_units: + # accept current solution with current scheduled_scheduling_units + logger.info("keeping current scheduled unit(s) which have a better (or equal) score: %s", "; ".join( + "id=%s '%s' start_time='%s'" % (su.id, su.name, su.start_time) for su in scheduled_scheduling_units)) + + # indicate there are still blocking units + return False + + # all clear, nothing is blocking anymore + return True + + + diff --git a/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py b/SAS/TMSS/services/scheduling/lib/subtask_scheduling.py similarity index 95% rename from SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py rename to SAS/TMSS/services/scheduling/lib/subtask_scheduling.py index 524a616a86fa35fca2351278a1d69b1df46d882f..af80ff8c94b1576407ede4b51df456d52cb0a495 100644 --- a/SAS/TMSS/services/subtask_scheduling/lib/subtask_scheduling.py +++ b/SAS/TMSS/services/scheduling/lib/subtask_scheduling.py @@ -77,7 +77,7 @@ class TMSSSubTaskSchedulingEventMessageHandler(TMSSEventMessageHandler): except Exception as e: logger.error(e) -def create_service(exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER, tmss_client_credentials_id: str=None): +def create_subtask_scheduling_service(exchange: str=DEFAULT_BUSNAME, broker: str=DEFAULT_BROKER, tmss_client_credentials_id: str=None): return TMSSBusListener(handler_type=TMSSSubTaskSchedulingEventMessageHandler, handler_kwargs={'tmss_client_credentials_id': tmss_client_credentials_id}, exchange=exchange, @@ -99,7 +99,7 @@ def main(): help='the credentials id for the file in ~/.lofar/dbcredentials which holds the TMSS http REST api url and credentials, default: %default') (options, args) = parser.parse_args() - with create_service(options.exchange, options.broker, options.tmss_client_credentials_id): + with create_subtask_scheduling_service(options.exchange, options.broker, options.tmss_client_credentials_id): waitForInterrupt() if __name__ == '__main__': diff --git a/SAS/TMSS/services/scheduling/test/CMakeLists.txt b/SAS/TMSS/services/scheduling/test/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..e3547f4fe0a484e2a395b50411e3e7d8b7486879 --- 
/dev/null +++ b/SAS/TMSS/services/scheduling/test/CMakeLists.txt @@ -0,0 +1,11 @@ +# $Id: CMakeLists.txt 32679 2015-10-26 09:31:56Z schaap $ + +if(BUILD_TESTING) + include(LofarCTest) + + lofar_add_test(t_subtask_scheduling_service) + lofar_add_test(t_dynamic_scheduling) + + set_tests_properties(t_subtask_scheduling_service PROPERTIES TIMEOUT 300) + set_tests_properties(t_dynamic_scheduling PROPERTIES TIMEOUT 300) +endif() diff --git a/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.py b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.py new file mode 100755 index 0000000000000000000000000000000000000000..81acf398781285a91fefad08e53db84778fc256e --- /dev/null +++ b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.py @@ -0,0 +1,274 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +import unittest +import uuid + +import logging +logger = logging.getLogger(__name__) + +from lofar.common.test_utils import skip_integration_tests +if skip_integration_tests(): + exit(3) + +TEST_UUID = uuid.uuid1() + +from datetime import datetime, timedelta +from lofar.common.json_utils import get_default_json_object_for_schema, add_defaults_to_json_object_for_schema +from lofar.messaging.messagebus import TemporaryExchange, BusListenerJanitor + +tmp_exchange = TemporaryExchange("t_dynamic_scheduling_%s" % (TEST_UUID,)) +tmp_exchange.open() + +# override DEFAULT_BUSNAME +import lofar +lofar.messaging.config.DEFAULT_BUSNAME = tmp_exchange.address + +from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment +tmss_test_env = TMSSTestEnvironment(exchange=tmp_exchange.address, + populate_schemas=True, populate_test_data=False, + start_postgres_listener=True, start_subtask_scheduler=False, + start_ra_test_environment=True, enable_viewflow=False, + start_dynamic_scheduler=False) # do not start the dynamic scheduler in the testenv, because it is the object-under-test. 
+tmss_test_env.start() + +def tearDownModule(): + tmss_test_env.stop() + tmp_exchange.close() + +from lofar.sas.tmss.test.tmss_test_data_django_models import * +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprints_and_subtasks_from_scheduling_unit_draft +from lofar.sas.tmss.tmss.tmssapp.subtasks import unschedule_subtask +from lofar.common.postgres import PostgresDatabaseConnection + +# the module under test +from lofar.sas.tmss.services.scheduling.dynamic_scheduling import * + + +class TestDynamicScheduling(unittest.TestCase): + ''' + Tests for the Dynamic Scheduling + ''' + @classmethod + def setUpClass(cls) -> None: + # make some re-usable projects with high/low priority + cls.project_low = models.Project.objects.create(**Project_test_data("dynamic scheduling test project %s"% (uuid.uuid4(),), priority_rank=1)) + cls.project_medium = models.Project.objects.create(**Project_test_data("dynamic scheduling test project %s"% (uuid.uuid4(),), priority_rank=2)) + cls.project_high = models.Project.objects.create(**Project_test_data("dynamic scheduling test project %s"% (uuid.uuid4(),), priority_rank=3)) + cls.scheduling_set_low = models.SchedulingSet.objects.create(**SchedulingSet_test_data(project=cls.project_low)) + cls.scheduling_set_medium = models.SchedulingSet.objects.create(**SchedulingSet_test_data(project=cls.project_medium)) + cls.scheduling_set_high = models.SchedulingSet.objects.create(**SchedulingSet_test_data(project=cls.project_high)) + + def setUp(self) -> None: + # wipe all radb entries (via cascading deletes) in between tests, so the tests don't influence each other + with PostgresDatabaseConnection(tmss_test_env.ra_test_environment.radb_test_instance.dbcreds) as radb: + radb.executeQuery('DELETE FROM resource_allocation.specification;') + radb.executeQuery('TRUNCATE resource_allocation.resource_usage;') + radb.commit() + + # wipe all scheduling_unit_drafts in between tests, so the tests don't influence each other + for scheduling_set in [self.scheduling_set_low, self.scheduling_set_medium, self.scheduling_set_high]: + for scheduling_unit_draft in scheduling_set.scheduling_unit_drafts.all(): + for scheduling_unit_blueprint in scheduling_unit_draft.scheduling_unit_blueprints.all(): + for task_blueprint in scheduling_unit_blueprint.task_blueprints.all(): + for subtask in task_blueprint.subtasks.all(): + try: + if subtask.state.value == models.SubtaskState.Choices.SCHEDULED.value: + unschedule_subtask(subtask) + except Exception as e: + logger.exception(e) + for output in subtask.outputs.all(): + for dataproduct in output.dataproducts.all(): + dataproduct.delete() + for consumer in output.consumers.all(): + consumer.delete() + output.delete() + for input in subtask.inputs.all(): + input.delete() + subtask.delete() + task_blueprint.draft.delete() + task_blueprint.delete() + scheduling_unit_blueprint.delete() + scheduling_unit_draft.delete() + + @staticmethod + def create_simple_observation_scheduling_unit(name:str=None, scheduling_set=None, + obs_duration:int=60, + constraints=None): + constraints_template = models.SchedulingConstraintsTemplate.objects.get(name="constraints") + constraints = add_defaults_to_json_object_for_schema(constraints or {}, constraints_template.schema) + + strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="Simple Observation") + scheduling_unit_spec = add_defaults_to_json_object_for_schema(strategy_template.template, + 
strategy_template.scheduling_unit_template.schema) + scheduling_unit_spec['tasks']['Observation']['specifications_doc']['duration'] = obs_duration + + # add the scheduling_unit_doc to a new SchedulingUnitDraft instance, and were ready to use it! + return models.SchedulingUnitDraft.objects.create(name=name, + scheduling_set=scheduling_set, + requirements_template=strategy_template.scheduling_unit_template, + requirements_doc=scheduling_unit_spec, + observation_strategy_template=strategy_template, + scheduling_constraints_doc=constraints, + scheduling_constraints_template=constraints_template) + + + def test_three_simple_observations_no_constraints_different_project_priority(self): + scheduling_unit_draft_low = self.create_simple_observation_scheduling_unit("scheduling unit low", scheduling_set=self.scheduling_set_low) + scheduling_unit_blueprint_low = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_low) + + scheduling_unit_draft_medium = self.create_simple_observation_scheduling_unit("scheduling unit medium", scheduling_set=self.scheduling_set_medium) + scheduling_unit_blueprint_medium = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_medium) + + scheduling_unit_draft_high = self.create_simple_observation_scheduling_unit("scheduling unit high", scheduling_set=self.scheduling_set_high) + scheduling_unit_blueprint_high = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_high) + + # call the method-under-test. + scheduled_scheduling_unit = do_dynamic_schedule() + + # we expect the scheduling_unit with the highest project rank to be scheduled first + self.assertIsNotNone(scheduled_scheduling_unit) + self.assertEqual(scheduling_unit_blueprint_high.id, scheduled_scheduling_unit.id) + + # check the results + # we expect the sub_high to be scheduled + scheduling_unit_blueprint_low.refresh_from_db() + scheduling_unit_blueprint_medium.refresh_from_db() + scheduling_unit_blueprint_high.refresh_from_db() + self.assertEqual(scheduling_unit_blueprint_low.status, 'schedulable') + self.assertEqual(scheduling_unit_blueprint_medium.status, 'schedulable') + self.assertEqual(scheduling_unit_blueprint_high.status, 'scheduled') + + # check the scheduled subtask + upcoming_scheduled_subtasks = models.Subtask.objects.filter(state__value='scheduled', + task_blueprint__scheduling_unit_blueprint__in=(scheduling_unit_blueprint_low, + scheduling_unit_blueprint_medium, + scheduling_unit_blueprint_high)).all() + self.assertEqual(1, upcoming_scheduled_subtasks.count()) + self.assertEqual(scheduling_unit_blueprint_high.id, upcoming_scheduled_subtasks[0].task_blueprint.scheduling_unit_blueprint.id) + + # check scheduling_unit_blueprint_low starts after the scheduled scheduling_unit_blueprint_high + self.assertGreater(scheduling_unit_blueprint_low.start_time, scheduling_unit_blueprint_medium.start_time) + self.assertGreater(scheduling_unit_blueprint_medium.start_time, scheduling_unit_blueprint_high.start_time) + + # ensure DEFAULT_INTER_OBSERVATION_GAP between them + self.assertGreaterEqual(scheduling_unit_blueprint_medium.start_time - scheduling_unit_blueprint_high.stop_time, DEFAULT_INTER_OBSERVATION_GAP) + self.assertGreaterEqual(scheduling_unit_blueprint_low.start_time - scheduling_unit_blueprint_medium.stop_time, DEFAULT_INTER_OBSERVATION_GAP) + + + def test_time_bound_unit_wins_even_at_lower_priority(self): + # create two schedunits, one with high one with low prio. 
+ # first create them without any further constraints, and check if high prio wins. + scheduling_unit_draft_low = self.create_simple_observation_scheduling_unit("scheduling unit low", scheduling_set=self.scheduling_set_low) + scheduling_unit_blueprint_low = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_low) + + scheduling_unit_draft_high = self.create_simple_observation_scheduling_unit("scheduling unit high", scheduling_set=self.scheduling_set_high) + scheduling_unit_blueprint_high = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_high) + + now = datetime.utcnow() + tomorrow = now+timedelta(days=1) + + # call the method-under-test. + best_scored_scheduling_unit = find_best_next_schedulable_unit([scheduling_unit_blueprint_low, scheduling_unit_blueprint_high], now, tomorrow) + + # we expect the scheduling_unit with the highest project rank to be scheduled first + self.assertEqual(scheduling_unit_blueprint_high.id, best_scored_scheduling_unit.scheduling_unit.id) + + #now update the low prio unit with a time constraint, "forcing" it to be run in a very thight upcoming time window. + scheduling_unit_draft_low.scheduling_constraints_doc['time'] = { 'before': (now+scheduling_unit_draft_low.duration).isoformat()+'Z' } + scheduling_unit_draft_low.save() + scheduling_unit_blueprint_low.refresh_from_db() + + # call the method-under-test. + best_scored_scheduling_unit = find_best_next_schedulable_unit([scheduling_unit_blueprint_low, scheduling_unit_blueprint_high], now, tomorrow) + + # now we expect the scheduling_unit with the lowest project rank to be scheduled first because it can only run within this limited timewindow + self.assertEqual(scheduling_unit_draft_low.id, best_scored_scheduling_unit.scheduling_unit.id) + + + # update the low prio unit. enlarge the time window constraint a bit, so both low and high prio units can fit + # this should result that the high prio goes first, and the low prio (which now fits as well) goes second + scheduling_unit_draft_low.scheduling_constraints_doc['time'] = { 'before': (now+scheduling_unit_draft_low.duration+scheduling_unit_draft_high.duration).isoformat()+'Z' } + scheduling_unit_draft_low.save() + scheduling_unit_blueprint_low.refresh_from_db() + + # call the method-under-test. 
+        best_scored_scheduling_unit = find_best_next_schedulable_unit([scheduling_unit_blueprint_low, scheduling_unit_blueprint_high], now, tomorrow)
+
+        # now we expect the unit with the highest project rank to be scheduled first again, because both units fit within the enlarged time window
+        self.assertEqual(scheduling_unit_blueprint_high.id, best_scored_scheduling_unit.scheduling_unit.id)
+
+        # call the method-under-test again, but search after the first unit (should return the low prio unit)
+        stop_time_of_first = best_scored_scheduling_unit.start_time + best_scored_scheduling_unit.scheduling_unit.duration
+        best_scored_scheduling_unit = find_best_next_schedulable_unit([scheduling_unit_blueprint_low, scheduling_unit_blueprint_high], stop_time_of_first, tomorrow)
+        self.assertEqual(scheduling_unit_blueprint_low.id, best_scored_scheduling_unit.scheduling_unit.id)
+
+
+    def test_manual_constraint_is_preventing_scheduling_unit_from_being_scheduled_dynamically(self):
+        scheduling_unit_draft_manual = self.create_simple_observation_scheduling_unit("scheduling unit manual low", scheduling_set=self.scheduling_set_low,
+                                                                                      constraints={'scheduler': 'manual'})
+        scheduling_unit_blueprint_manual = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_manual)
+        self.assertEqual(scheduling_unit_blueprint_manual.status, "schedulable")
+
+        # call the method-under-test.
+        scheduled_scheduling_unit = do_dynamic_schedule()
+
+        # we expect no scheduling_unit to be scheduled, because the only one has the 'manual' scheduler constraint
+        self.assertIsNone(scheduled_scheduling_unit)
+
+        # check the results
+        scheduling_unit_blueprint_manual.refresh_from_db()
+        self.assertEqual(scheduling_unit_blueprint_manual.status, 'schedulable')
+
+
+    def test_manually_scheduled_blocking_dynamically_scheduled(self):
+        scheduling_unit_draft_manual = self.create_simple_observation_scheduling_unit("scheduling unit manual low", scheduling_set=self.scheduling_set_low,
+                                                                                      constraints={'scheduler': 'manual'})
+        scheduling_unit_blueprint_manual = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_manual)
+        self.assertEqual(scheduling_unit_blueprint_manual.status, "schedulable")
+
+        schedule_independent_subtasks_in_scheduling_unit_blueprint(scheduling_unit_blueprint_manual, datetime.utcnow())
+        self.assertEqual(scheduling_unit_blueprint_manual.status, "scheduled")
+
+        scheduling_unit_draft_high = self.create_simple_observation_scheduling_unit("scheduling unit online high", scheduling_set=self.scheduling_set_high)
+        scheduling_unit_blueprint_high = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_high)
+
+        # call the method-under-test.
+
+    def test_manually_scheduled_blocking_dynamically_scheduled(self):
+        scheduling_unit_draft_manual = self.create_simple_observation_scheduling_unit("scheduling unit manual low", scheduling_set=self.scheduling_set_low,
+                                                                                      constraints={'scheduler': 'manual'})
+        scheduling_unit_blueprint_manual = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_manual)
+        self.assertEqual(scheduling_unit_blueprint_manual.status, "schedulable")
+
+        schedule_independent_subtasks_in_scheduling_unit_blueprint(scheduling_unit_blueprint_manual, datetime.utcnow())
+        self.assertEqual(scheduling_unit_blueprint_manual.status, "scheduled")
+
+        scheduling_unit_draft_high = self.create_simple_observation_scheduling_unit("scheduling unit online high", scheduling_set=self.scheduling_set_high)
+        scheduling_unit_blueprint_high = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft_high)
+
+        # call the method-under-test.
+        scheduled_scheduling_unit = do_dynamic_schedule()
+
+        # we expect no scheduling_unit to be scheduled, because the manually scheduled unit is in the way
+        self.assertIsNone(scheduled_scheduling_unit)
+
+        # check the results
+        # we expect scheduling_unit_blueprint_high to still be schedulable (it was not scheduled), but placed after the manual unit
+        scheduling_unit_blueprint_high.refresh_from_db()
+        self.assertEqual(scheduling_unit_blueprint_high.status, 'schedulable')
+
+        # check that scheduling_unit_blueprint_high starts after the manually scheduled scheduling_unit_blueprint_manual
+        self.assertGreater(scheduling_unit_blueprint_high.start_time, scheduling_unit_blueprint_manual.start_time)
+
+        # ensure DEFAULT_INTER_OBSERVATION_GAP between them
+        self.assertGreaterEqual(scheduling_unit_blueprint_high.start_time - scheduling_unit_blueprint_manual.stop_time, DEFAULT_INTER_OBSERVATION_GAP)
+
+logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)
+
+if __name__ == '__main__':
+    # run the unit tests
+    unittest.main()
diff --git a/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.run b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.run
new file mode 100755
index 0000000000000000000000000000000000000000..d0831a318c2949b8a6990c0cef62fa6ea3bac68b
--- /dev/null
+++ b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.run
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+python3 t_dynamic_scheduling.py
+
diff --git a/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.sh b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ee5a97caed28fae29660df70d067fd9170658d70
--- /dev/null
+++ b/SAS/TMSS/services/scheduling/test/t_dynamic_scheduling.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+./runctest.sh t_dynamic_scheduling
\ No newline at end of file
diff --git a/SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.py b/SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.py
new file mode 100755
index 0000000000000000000000000000000000000000..57d3ca6f86bbc6ab3b9e5d5a7de7c051e75e2650
--- /dev/null
+++ b/SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.py
@@ -0,0 +1,253 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+import unittest
+import uuid
+
+import logging
+logger = logging.getLogger('lofar.'+__name__)
+
+from lofar.common.test_utils import skip_integration_tests
+if skip_integration_tests():
+    exit(3)
+
+from lofar.messaging.messagebus import TemporaryExchange, BusListenerJanitor
+from lofar.sas.tmss.services.scheduling.subtask_scheduling import create_subtask_scheduling_service
+from lofar.common.json_utils import get_default_json_object_for_schema, add_defaults_to_json_object_for_schema
+
+from time import sleep
+from datetime import datetime, timedelta
+
+class TestSubtaskSchedulingService(unittest.TestCase):
+    '''
+    Tests for the SubtaskSchedulingService
+    '''
+    @classmethod
+    def setUpClass(cls) -> None:
+        cls.TEST_UUID = uuid.uuid1()
+
+        cls.tmp_exchange = TemporaryExchange("%s_%s" % (cls.__name__, cls.TEST_UUID))
+        cls.tmp_exchange.open()
+
+        # override DEFAULT_BUSNAME
+        import lofar
+        lofar.messaging.config.DEFAULT_BUSNAME = cls.tmp_exchange.address
+
+        # import here, and not at top of module, because DEFAULT_BUSNAME needs to be set before importing
+        from lofar.sas.resourceassignment.resourceassigner.test.ra_test_environment import RATestEnvironment
+        from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment
+        from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator
+
+        cls.ra_test_env = RATestEnvironment(exchange=cls.tmp_exchange.address)
+        cls.ra_test_env.start()
+
+        cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address, populate_schemas=True, populate_test_data=False,
+                                                start_subtask_scheduler=True, start_postgres_listener=True, start_ra_test_environment=True,
+                                                start_dynamic_scheduler=False, enable_viewflow=False)
+        cls.tmss_test_env.start()
+
+        cls.test_data_creator = TMSSRESTTestDataCreator(cls.tmss_test_env.django_server.url,
+                                                        (cls.tmss_test_env.ldap_server.dbcreds.user, cls.tmss_test_env.ldap_server.dbcreds.password))
+
+    @classmethod
+    def tearDownClass(cls) -> None:
+        cls.tmss_test_env.stop()
+        cls.ra_test_env.stop()
+        cls.tmp_exchange.close()
+
+    @staticmethod
+    def wait_for_subtask_to_get_status(tmss_client, subtask_id, expected_status, timeout=30):
+        '''helper method to poll for a subtask's status.
+        raises TimeoutError if expected_status is not met within timeout seconds.
+        returns the subtask when expected_status is met.'''
+        start = datetime.utcnow()
+        subtask = tmss_client.get_subtask(subtask_id)
+        while subtask['state_value'] != expected_status:
+            sleep(0.5)
+            logger.info("Waiting for subtask id=%s to get status '%s'. Current status='%s'. Polling...", subtask_id, expected_status, subtask['state_value'])
+            subtask = tmss_client.get_subtask(subtask_id)
+            if datetime.utcnow() - start > timedelta(seconds=timeout):
+                raise TimeoutError("timeout while waiting for subtask id=%s to get status '%s'. It currently has status '%s'" % (
+                    subtask_id, expected_status, subtask['state_value']))
+        return subtask
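The polling helper above is the synchronization primitive for these event-driven tests: it blocks until the service has reacted on the bus. Typical call site (subtask id and timeout are illustrative values):

    # hypothetical call site: wait at most 60s for subtask 42 to reach 'scheduled'
    subtask = TestSubtaskSchedulingService.wait_for_subtask_to_get_status(tmss_client, 42, 'scheduled', timeout=60)
    assert subtask['state_value'] == 'scheduled'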
+
+    def test_01_for_expected_behaviour_of_two_connected_subtasks(self):
+        '''
+        This test starts a scheduling service and tmss, creates a chain of subtasks, finishes the first, and checks if the successors are then scheduled.
+        '''
+        return  # test is currently disabled: the remainder of this method is not executed
+
+        logger.info(' -- test_01_for_expected_behaviour -- ')
+
+        # create and start the service (the object under test)
+        service = create_subtask_scheduling_service(exchange=self.tmp_exchange.address, tmss_client_credentials_id=self.tmss_test_env.client_credentials.dbcreds_id)
+        with BusListenerJanitor(service):
+            # -------------------------
+            # long setup of objects....
+
+            # setup proper template
+            subtask_template_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskTemplate(subtask_type_url=self.test_data_creator.django_api_url + '/subtask_type/qa_files/'), '/subtask_template/')
+
+            # create two subtasks
+            subtask1 = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url), '/subtask/')
+            subtask2 = self.test_data_creator.post_data_and_get_response_as_json_object(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url, task_blueprint_url=subtask1['task_blueprint']), '/subtask/')
+
+            # connect them
+            output_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskOutput(subtask1['url']), '/subtask_output/')
+            input_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskInput(subtask_url=subtask2['url'], subtask_output_url=output_url), '/subtask_input/')
+
+            # ... end of long setup of objects
+            # --------------------------------
+
+            # now for the real test: set subtask1's status to finished, and check that subtask2 is then properly scheduled
+            with self.tmss_test_env.create_tmss_client() as tmss_client:
+                subtask1 = tmss_client.get_subtask(subtask1['id'])
+                subtask2 = tmss_client.get_subtask(subtask2['id'])
+
+                self.assertEqual(subtask1['state_value'], 'defined')
+                self.assertEqual(subtask2['state_value'], 'defined')
+
+                # the first subtask ran, and is now finished... set its status. This should trigger the scheduling service to schedule the second subtask.
+                tmss_client.set_subtask_status(subtask1['id'], 'finished')
+
+                subtask2 = self.wait_for_subtask_to_get_status(tmss_client, subtask2['id'], 'scheduled')
+
+                # subtask2 should now be scheduled
+                self.assertEqual(subtask2['state_value'], 'scheduled')
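test_01 relies on the service reacting to the 'finished' status event by scheduling the successor subtasks. A condensed sketch of that reaction; the handler class and the get_subtask_successors call are assumptions for illustration, not the service's actual API:

    class SubtaskEventHandlerSketch:
        def __init__(self, tmss_client):
            self.tmss_client = tmss_client

        def onSubTaskStatusChanged(self, subtask_id: int, status: str):
            # hypothetical: invoked for every subtask status event on the bus
            if status == 'finished':
                for successor in self.tmss_client.get_subtask_successors(subtask_id):  # assumed client call
                    if successor['state_value'] == 'defined':
                        self.tmss_client.schedule_subtask(successor['id'])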
+
+    def test_02_for_expected_behaviour_of_UC1_scheduling_unit(self):
+        '''
+        This test starts a scheduling service and tmss, creates the UC1 scheduling unit, and checks that each subtask gets scheduled once its predecessors are finished.
+        '''
+
+        logger.info(' -- test_02_for_expected_behaviour_of_UC1_scheduling_unit -- ')
+
+        # import here, and not at top of module, because the tmsstestenv needs to be running before importing
+        from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprints_and_subtasks_from_scheduling_unit_draft
+        from lofar.sas.tmss.tmss.tmssapp.subtasks import update_subtasks_start_times_for_scheduling_unit
+        from lofar.sas.tmss.tmss.tmssapp import models
+        from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingSet_test_data
+
+        # create and start the service (the object under test)
+        service = create_subtask_scheduling_service(exchange=self.tmp_exchange.address, tmss_client_credentials_id=self.tmss_test_env.client_credentials.dbcreds_id)
+        with BusListenerJanitor(service):
+            # -------------------------
+            # setup of objects: create the UC1 scheduling unit, and then select the first runnable subtasks
+            strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines")
+            spec = add_defaults_to_json_object_for_schema(strategy_template.template, strategy_template.scheduling_unit_template.schema)
+            scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(name="UC1 CTC+pipelines",
+                                                                              scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data()),
+                                                                              requirements_template=strategy_template.scheduling_unit_template,
+                                                                              requirements_doc=spec,
+                                                                              observation_strategy_template=strategy_template,
+                                                                              scheduling_constraints_doc=get_default_json_object_for_schema(models.SchedulingConstraintsTemplate.objects.get(name="constraints").schema),
+                                                                              scheduling_constraints_template=models.SchedulingConstraintsTemplate.objects.get(name="constraints"))
+
+            scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft)
+
+            # assign some non-overlapping start times, so the tasks can be scheduled
+            update_subtasks_start_times_for_scheduling_unit(scheduling_unit_blueprint, datetime.utcnow())
+
+            # scheduling_unit_blueprint now has task_blueprints and subtasks
+            # "unpack" the whole graph, so we can "walk" it and see if the correct subtasks are scheduled once their predecessors are finished
+            obs_cal1 = scheduling_unit_blueprint.task_blueprints.get(name="Calibrator Observation 1")
+            obs_cal1_st_obs = obs_cal1.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.OBSERVATION.value)))
+            obs_cal1_st_qa1 = obs_cal1.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_FILES.value)))
+            obs_cal1_st_qa2 = obs_cal1.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_PLOTS.value)))
+
+            pl_cal1 = scheduling_unit_blueprint.task_blueprints.get(name="Pipeline 1")
+            pl_cal1_st = pl_cal1.subtasks.first()
+
+            obs_tgt = scheduling_unit_blueprint.task_blueprints.get(name="Target Observation")
+            obs_tgt_st_obs = obs_tgt.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.OBSERVATION.value)))
+            obs_tgt_st_qa1 = obs_tgt.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_FILES.value)))
+            obs_tgt_st_qa2 =
obs_tgt.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_PLOTS.value))) + + pl_tgt1 = scheduling_unit_blueprint.task_blueprints.get(name="Pipeline target1") + pl_tgt1_st = pl_tgt1.subtasks.first() + + pl_tgt2 = scheduling_unit_blueprint.task_blueprints.get(name="Pipeline target2") + pl_tgt2_st = pl_tgt2.subtasks.first() + + obs_cal2 = scheduling_unit_blueprint.task_blueprints.get(name="Calibrator Observation 2") + obs_cal2_st_obs = obs_cal2.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.OBSERVATION.value))) + obs_cal2_st_qa1 = obs_cal2.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_FILES.value))) + obs_cal2_st_qa2 = obs_cal2.subtasks.get(specifications_template_id__in=models.SubtaskTemplate.objects.filter(type=models.SubtaskType.objects.get(value=models.SubtaskType.Choices.QA_PLOTS.value))) + + pl_cal2 = scheduling_unit_blueprint.task_blueprints.get(name="Pipeline 2") + pl_cal2_st = pl_cal2.subtasks.first() + + # define the graph in an iterable way: as tuples of a subtask-successors-pair + # the graph is also ordered in a 'data-flow' direction + graph = (# calibrator1 obs, qa and pipeline + (obs_cal1_st_obs, (obs_cal1_st_qa1, pl_cal1_st)), + (obs_cal1_st_qa1, (obs_cal1_st_qa2,)), + (obs_cal1_st_qa2, tuple()), + (pl_cal1_st, tuple()), + #target obs, qa and pipelines + (obs_tgt_st_obs, (obs_tgt_st_qa1, pl_tgt1_st, pl_tgt2_st)), + (obs_tgt_st_qa1, (obs_tgt_st_qa2,)), + (obs_tgt_st_qa2, tuple()), + (pl_tgt1_st, tuple()), + (pl_tgt2_st, tuple()), + # calibrator2 obs, qa and pipeline + (obs_cal2_st_obs, (obs_cal2_st_qa1, pl_cal2_st)), + (obs_cal2_st_qa1, (obs_cal2_st_qa2,)), + (obs_cal2_st_qa2, tuple()), + (pl_cal2_st, tuple()) ) + + logger.info(" --- test_02_for_expected_behaviour_of_UC1_scheduling_unit setup finished. starting actual test ---") + # ... end of long setup of objects + # -------------------------------- + + # now for the real test: use only the http rest api to check statuses and call schedule methods + with self.tmss_test_env.create_tmss_client() as tmss_client: + # walk the graph in a "data-flow" direction + for subtask, successors in graph: + # get up-to-date subtask via the rest client + subtask1 = tmss_client.get_subtask(subtask.id) + logger.info("subtask id=%s status=%s successors: %s", subtask1['id'], subtask1['state_value'], ','.join(str(s.id) for s in successors)) + + if subtask1['state_value'] == 'defined': + for successor in successors: + # get up-to-date subtask via the rest client + subtask2 = tmss_client.get_subtask(successor.id) + self.assertEqual(subtask2['state_value'], 'defined') + + # simulate that some scheduler schedules the first subtask (which does not depend on predecessors)... + if len(tmss_client.get_subtask_predecessors(subtask1['id'])) == 0: + subtask1 = tmss_client.schedule_subtask(subtask1['id']) + self.assertEqual(subtask1['state_value'], 'scheduled') + + if subtask1['state_value'] == 'scheduled': + # simulate that the first subtask ran, and is now finished... + # cycle over the 'run time' statuses, concluding with status to finished. + # The finished status should trigger the scheduling service to schedule the successor subtask(s). 
+ for status in ['queueing', 'queued', 'starting', 'started', 'finishing', 'finished']: + tmss_client.set_subtask_status(subtask1['id'], status) + + for successor in successors: + # get up-to-date subtask via the rest client + subtask2 = self.wait_for_subtask_to_get_status(tmss_client, successor.id, 'scheduled') + self.assertEqual(subtask2['state_value'], 'scheduled') + + +if __name__ == '__main__': + #run the unit tests + unittest.main() diff --git a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.run b/SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.run similarity index 100% rename from SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.run rename to SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.run diff --git a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.sh b/SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.sh similarity index 100% rename from SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.sh rename to SAS/TMSS/services/scheduling/test/t_subtask_scheduling_service.sh diff --git a/SAS/TMSS/services/subtask_scheduling/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/CMakeLists.txt deleted file mode 100644 index 460e356bc2c99121eb41a48fc27fad7d20a51fac..0000000000000000000000000000000000000000 --- a/SAS/TMSS/services/subtask_scheduling/CMakeLists.txt +++ /dev/null @@ -1,8 +0,0 @@ -lofar_package(TMSSSubtaskSchedulingService 0.1 DEPENDS TMSSClient PyCommon pyparameterset PyMessaging) - -lofar_find_package(PythonInterp 3.4 REQUIRED) - -add_subdirectory(lib) -add_subdirectory(bin) -add_subdirectory(test) - diff --git a/SAS/TMSS/services/subtask_scheduling/bin/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/bin/CMakeLists.txt deleted file mode 100644 index 07e30a532f710dd1242ba026ad12e9ce014f1125..0000000000000000000000000000000000000000 --- a/SAS/TMSS/services/subtask_scheduling/bin/CMakeLists.txt +++ /dev/null @@ -1,4 +0,0 @@ -lofar_add_bin_scripts(tmss_subtask_scheduling_service) - -# supervisord config files -lofar_add_sysconf_files(tmss_subtask_scheduling_service.ini DESTINATION supervisord.d) diff --git a/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service b/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service deleted file mode 100755 index 2ecd686a25fd88e45094bf4cda143e41de1fb61d..0000000000000000000000000000000000000000 --- a/SAS/TMSS/services/subtask_scheduling/bin/tmss_subtask_scheduling_service +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/python3 - -# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) -# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands -# -# This file is part of the LOFAR software suite. -# The LOFAR software suite is free software: you can redistribute it and/or -# modify it under the terms of the GNU General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# The LOFAR software suite is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
- - -from lofar.sas.tmss.services.subtask_scheduling import main - -if __name__ == "__main__": - main() diff --git a/SAS/TMSS/services/subtask_scheduling/lib/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/lib/CMakeLists.txt deleted file mode 100644 index 7cf0b591612ccb75bc2a73c1a6f9d1d8a2c2d9da..0000000000000000000000000000000000000000 --- a/SAS/TMSS/services/subtask_scheduling/lib/CMakeLists.txt +++ /dev/null @@ -1,10 +0,0 @@ -lofar_find_package(PythonInterp 3.4 REQUIRED) -include(PythonInstall) - -set(_py_files - subtask_scheduling.py - ) - -python_install(${_py_files} - DESTINATION lofar/sas/tmss/services) - diff --git a/SAS/TMSS/services/subtask_scheduling/test/CMakeLists.txt b/SAS/TMSS/services/subtask_scheduling/test/CMakeLists.txt deleted file mode 100644 index b9da06a5dc6b27fde81e26c6cc5ba027cae2d821..0000000000000000000000000000000000000000 --- a/SAS/TMSS/services/subtask_scheduling/test/CMakeLists.txt +++ /dev/null @@ -1,7 +0,0 @@ -# $Id: CMakeLists.txt 32679 2015-10-26 09:31:56Z schaap $ - -if(BUILD_TESTING) - include(LofarCTest) - - lofar_add_test(t_subtask_scheduling_service) -endif() diff --git a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py b/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py deleted file mode 100755 index 84d85d879019b0a5d09832d7cf5815f53ef12a2b..0000000000000000000000000000000000000000 --- a/SAS/TMSS/services/subtask_scheduling/test/t_subtask_scheduling_service.py +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) -# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands -# -# This file is part of the LOFAR software suite. -# The LOFAR software suite is free software: you can redistribute it and/or -# modify it under the terms of the GNU General Public License as published -# by the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# The LOFAR software suite is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. 
- -import unittest -import uuid - -import logging -logger = logging.getLogger(__name__) -logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) - -from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment -from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator - -from lofar.messaging.messagebus import TemporaryExchange, BusListenerJanitor -from lofar.sas.tmss.services.subtask_scheduling import create_service -from lofar.common.test_utils import integration_test -from time import sleep -from datetime import datetime, timedelta - -@integration_test -class TestSubtaskSchedulingService(unittest.TestCase): - ''' - Tests for the SubtaskSchedulingService - ''' - @classmethod - def setUpClass(cls) -> None: - cls.TEST_UUID = uuid.uuid1() - - cls.tmp_exchange = TemporaryExchange("%s_%s" % (cls.__name__, cls.TEST_UUID)) - cls.tmp_exchange.open() - - cls.tmss_test_env = TMSSTestEnvironment(exchange=cls.tmp_exchange.address) - cls.tmss_test_env.start() - - cls.test_data_creator = TMSSRESTTestDataCreator(cls.tmss_test_env.django_server.url, - (cls.tmss_test_env.ldap_server.dbcreds.user, cls.tmss_test_env.ldap_server.dbcreds.password)) - - @classmethod - def tearDownClass(cls) -> None: - cls.tmss_test_env.stop() - cls.tmp_exchange.close() - - def test_01_for_expected_behaviour(self): - ''' - This test starts a scheduling service and tmss, creates a chain of subtasks, finishes the first, and checks if the successors are then scheduled. - ''' - - logger.info(' -- test_01_for_expected_behaviour -- ') - - # create and start the service (the object under test) - service = create_service(exchange=self.tmp_exchange.address, tmss_client_credentials_id=self.tmss_test_env.client_credentials.dbcreds_id) - with BusListenerJanitor(service): - # ------------------------- - # long setup of objects.... - - # setup proper template - subtask_template_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskTemplate(subtask_type_url=self.test_data_creator.django_api_url + '/subtask_type/qa_files/'), '/subtask_template/') - - # create two subtasks - subtask1_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url), '/subtask/') - subtask2_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.Subtask(state="defined", specifications_template_url=subtask_template_url), '/subtask/') - - # ugly - subtask1_id = subtask1_url.split('/')[subtask1_url.split('/').index('subtask') + 1] - subtask2_id = subtask2_url.split('/')[subtask2_url.split('/').index('subtask') + 1] - - # connect them - output_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskOutput(subtask1_url), '/subtask_output/') - input_url = self.test_data_creator.post_data_and_get_url(self.test_data_creator.SubtaskInput(subtask_url=subtask2_url, subtask_output_url=output_url), '/subtask_input/') - - # ... end of long setup of objects - # -------------------------------- - - # now for the real test: set subtask1_id status to finished, and check that subtask2 is then properly scheduled - with self.tmss_test_env.create_tmss_client() as tmss_client: - subtask1 = tmss_client.get_subtask(subtask1_id) - subtask2 = tmss_client.get_subtask(subtask2_id) - - self.assertEqual(subtask1['state_value'], 'defined') - self.assertEqual(subtask2['state_value'], 'defined') - - # the first subtask ran, and is now finished... set it's status. 
This should trigger the scheduling service to schedule the second subtask. - tmss_client.set_subtask_status(subtask1_id, 'finished') - - # allow some time for the scheduling service to do its thing... - start = datetime.utcnow() - while subtask2['state_value'] != 'scheduled': - subtask2 = tmss_client.get_subtask(subtask2_id) - sleep(0.5) - if datetime.utcnow() - start > timedelta(seconds=2): - raise TimeoutError() - - # subtask2 should now be scheduled - self.assertEqual(subtask2['state_value'], 'scheduled') - -if __name__ == '__main__': - #run the unit tests - unittest.main() diff --git a/SAS/TMSS/services/tmss_postgres_listener/bin/tmss_postgres_listener_service b/SAS/TMSS/services/tmss_postgres_listener/bin/tmss_postgres_listener_service old mode 100755 new mode 100644 diff --git a/SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py b/SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py index 3cf20c24ec7ed26321f2c8acc85e09a14961b6eb..51532b9390cc3e2b54a2f637f4bc26faf992b4e7 100644 --- a/SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py +++ b/SAS/TMSS/services/tmss_postgres_listener/lib/tmss_postgres_listener.py @@ -54,7 +54,7 @@ class TMSSPGListener(PostgresListener): self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_subtask', 'delete')) self.subscribe('tmssapp_subtask_delete', self.onSubTaskDeleted) - self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_subtask', 'update', 'state_id')) + self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_subtask', 'update', column_name='state_id', quote_column_value=True)) self.subscribe('tmssapp_subtask_update_column_state_id', self.onSubTaskStateUpdated) @@ -107,6 +107,13 @@ class TMSSPGListener(PostgresListener): self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_schedulingunitdraft', 'delete')) self.subscribe('tmssapp_schedulingunitdraft_delete', self.onSchedulingUnitDraftDeleted) + self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_schedulingunitdraft', 'update', column_name='scheduling_constraints_doc', quote_column_value=False)) + self.subscribe('tmssapp_schedulingunitdraft_update_column_scheduling_constraints_doc'[:63], self.onSchedulingUnitDraftConstraintsUpdated) + + # Settings + self.executeQuery(makePostgresNotificationQueries('', 'tmssapp_setting', 'update', id_column_name='name_id', quote_id_value=True, column_name='value', quote_column_value=True)) + self.subscribe('tmssapp_setting_update_column_value', self.onSettingUpdated) + return super().start() def __exit__(self, exc_type, exc_val, exc_tb): @@ -191,6 +198,17 @@ class TMSSPGListener(PostgresListener): def onSchedulingUnitDraftDeleted(self, payload = None): self._sendNotification(TMSS_SCHEDULINGUNITDRAFT_OBJECT_EVENT_PREFIX+'.Deleted', payload) + def onSchedulingUnitDraftConstraintsUpdated(self, payload = None): + # convert payload string to nested json doc + self._sendNotification(TMSS_SCHEDULINGUNITDRAFT_OBJECT_EVENT_PREFIX+'.Constraints.Updated', payload) + + def onSettingUpdated(self, payload = None): + payload = json.loads(payload) + payload['name'] = payload['name_id'] + del payload['name_id'] + payload['value'] = payload['value'] in ('true', 'True', 't') + self._sendNotification(TMSS_SETTING_OBJECT_EVENT_PREFIX+'.Updated', payload) + def create_service(dbcreds, exchange=DEFAULT_BUSNAME, broker=DEFAULT_BROKER): '''create a TMSSPGListener instance''' @@ -216,7 +234,7 @@ def main(): parser.add_option_group(group) 
parser.add_option_group(dbcredentials.options_group(parser)) - parser.set_defaults(dbcredentials=os.environ.get('TMSS_CLIENT_DBCREDENTIALS', 'TMSS')) + parser.set_defaults(dbcredentials=os.environ.get('TMSS_DBCREDENTIALS', 'TMSS')) (options, args) = parser.parse_args() dbcreds = dbcredentials.parse_options(options) diff --git a/SAS/TMSS/src/CMakeCache.txt b/SAS/TMSS/src/CMakeCache.txt deleted file mode 100644 index 0b2dc14cb11f159cf34cbf5f5ad840ce0aaab7d0..0000000000000000000000000000000000000000 --- a/SAS/TMSS/src/CMakeCache.txt +++ /dev/null @@ -1,326 +0,0 @@ -# This is the CMakeCache file. -# For build in directory: /lofar/SAS/TMSS/src -# It was generated by CMake: /usr/bin/cmake -# You can edit this file to change values found and used by cmake. -# If you do not want to change any of the values, simply exit the editor. -# If you do want to change a value, simply edit, save, and exit the editor. -# The syntax for the file is as follows: -# KEY:TYPE=VALUE -# KEY is the name of a variable in the cache. -# TYPE is a hint to GUIs for the type of VALUE, DO NOT EDIT TYPE!. -# VALUE is the current value for the KEY. - -######################## -# EXTERNAL cache entries -######################## - -//No help, variable specified on the command line. -BUILD_PACKAGES:UNINITIALIZED=TMSS - -//Path to a program. -CMAKE_AR:FILEPATH=/usr/bin/ar - -//For backwards compatibility, what version of CMake commands and -// syntax should this version of CMake try to support. -CMAKE_BACKWARDS_COMPATIBILITY:STRING=2.4 - -//Choose the type of build, options are: None(CMAKE_CXX_FLAGS or -// CMAKE_C_FLAGS used) Debug Release RelWithDebInfo MinSizeRel. -CMAKE_BUILD_TYPE:STRING= - -//Enable/Disable color output during build. -CMAKE_COLOR_MAKEFILE:BOOL=ON - -//CXX compiler. -CMAKE_CXX_COMPILER:FILEPATH=/usr/bin/c++ - -//Flags used by the compiler during all build types. -CMAKE_CXX_FLAGS:STRING= - -//Flags used by the compiler during debug builds. -CMAKE_CXX_FLAGS_DEBUG:STRING=-g - -//Flags used by the compiler during release minsize builds. -CMAKE_CXX_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG - -//Flags used by the compiler during release builds (/MD /Ob1 /Oi -// /Ot /Oy /Gs will produce slightly less optimized but smaller -// files). -CMAKE_CXX_FLAGS_RELEASE:STRING=-O3 -DNDEBUG - -//Flags used by the compiler during Release with Debug Info builds. -CMAKE_CXX_FLAGS_RELWITHDEBINFO:STRING=-O2 -g -DNDEBUG - -//C compiler. -CMAKE_C_COMPILER:FILEPATH=/usr/bin/cc - -//Flags used by the compiler during all build types. -CMAKE_C_FLAGS:STRING= - -//Flags used by the compiler during debug builds. -CMAKE_C_FLAGS_DEBUG:STRING=-g - -//Flags used by the compiler during release minsize builds. -CMAKE_C_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG - -//Flags used by the compiler during release builds (/MD /Ob1 /Oi -// /Ot /Oy /Gs will produce slightly less optimized but smaller -// files). -CMAKE_C_FLAGS_RELEASE:STRING=-O3 -DNDEBUG - -//Flags used by the compiler during Release with Debug Info builds. -CMAKE_C_FLAGS_RELWITHDEBINFO:STRING=-O2 -g -DNDEBUG - -//Flags used by the linker. -CMAKE_EXE_LINKER_FLAGS:STRING=' ' - -//Flags used by the linker during debug builds. -CMAKE_EXE_LINKER_FLAGS_DEBUG:STRING= - -//Flags used by the linker during release minsize builds. -CMAKE_EXE_LINKER_FLAGS_MINSIZEREL:STRING= - -//Flags used by the linker during release builds. -CMAKE_EXE_LINKER_FLAGS_RELEASE:STRING= - -//Flags used by the linker during Release with Debug Info builds. 
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO:STRING= - -//Enable/Disable output of compile commands during generation. -CMAKE_EXPORT_COMPILE_COMMANDS:BOOL=OFF - -//Install path prefix, prepended onto install directories. -CMAKE_INSTALL_PREFIX:PATH=/usr/local - -//Path to a program. -CMAKE_LINKER:FILEPATH=/usr/bin/ld - -//Path to a program. -CMAKE_MAKE_PROGRAM:FILEPATH=/usr/bin/gmake - -//Flags used by the linker during the creation of modules. -CMAKE_MODULE_LINKER_FLAGS:STRING=' ' - -//Flags used by the linker during debug builds. -CMAKE_MODULE_LINKER_FLAGS_DEBUG:STRING= - -//Flags used by the linker during release minsize builds. -CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL:STRING= - -//Flags used by the linker during release builds. -CMAKE_MODULE_LINKER_FLAGS_RELEASE:STRING= - -//Flags used by the linker during Release with Debug Info builds. -CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO:STRING= - -//Path to a program. -CMAKE_NM:FILEPATH=/usr/bin/nm - -//Path to a program. -CMAKE_OBJCOPY:FILEPATH=/usr/bin/objcopy - -//Path to a program. -CMAKE_OBJDUMP:FILEPATH=/usr/bin/objdump - -//Value Computed by CMake -CMAKE_PROJECT_NAME:STATIC=Project - -//Path to a program. -CMAKE_RANLIB:FILEPATH=/usr/bin/ranlib - -//Flags used by the linker during the creation of dll's. -CMAKE_SHARED_LINKER_FLAGS:STRING=' ' - -//Flags used by the linker during debug builds. -CMAKE_SHARED_LINKER_FLAGS_DEBUG:STRING= - -//Flags used by the linker during release minsize builds. -CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL:STRING= - -//Flags used by the linker during release builds. -CMAKE_SHARED_LINKER_FLAGS_RELEASE:STRING= - -//Flags used by the linker during Release with Debug Info builds. -CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO:STRING= - -//If set, runtime paths are not added when installing shared libraries, -// but are added when building. -CMAKE_SKIP_INSTALL_RPATH:BOOL=NO - -//If set, runtime paths are not added when using shared libraries. -CMAKE_SKIP_RPATH:BOOL=NO - -//Flags used by the linker during the creation of static libraries. -CMAKE_STATIC_LINKER_FLAGS:STRING= - -//Flags used by the linker during debug builds. -CMAKE_STATIC_LINKER_FLAGS_DEBUG:STRING= - -//Flags used by the linker during release minsize builds. -CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL:STRING= - -//Flags used by the linker during release builds. -CMAKE_STATIC_LINKER_FLAGS_RELEASE:STRING= - -//Flags used by the linker during Release with Debug Info builds. -CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO:STRING= - -//Path to a program. -CMAKE_STRIP:FILEPATH=/usr/bin/strip - -//If true, cmake will use relative paths in makefiles and projects. -CMAKE_USE_RELATIVE_PATHS:BOOL=OFF - -//If this value is on, makefiles will be generated without the -// .SILENT directive, and all commands will be echoed to the console -// during the make. This is useful for debugging only. With Visual -// Studio IDE projects all commands are done without /nologo. -CMAKE_VERBOSE_MAKEFILE:BOOL=FALSE - -//Single output directory for building all executables. -EXECUTABLE_OUTPUT_PATH:PATH= - -//Single output directory for building all libraries. 
-LIBRARY_OUTPUT_PATH:PATH= - -//Value Computed by CMake -Project_BINARY_DIR:STATIC=/lofar/SAS/TMSS/src - -//Value Computed by CMake -Project_SOURCE_DIR:STATIC=/lofar/SAS - - -######################## -# INTERNAL cache entries -######################## - -//ADVANCED property for variable: CMAKE_AR -CMAKE_AR-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_BUILD_TOOL -CMAKE_BUILD_TOOL-ADVANCED:INTERNAL=1 -//What is the target build tool cmake is generating for. -CMAKE_BUILD_TOOL:INTERNAL=/usr/bin/gmake -//This is the directory where this CMakeCache.txt was created -CMAKE_CACHEFILE_DIR:INTERNAL=/lofar/SAS/TMSS/src -//Major version of cmake used to create the current loaded cache -CMAKE_CACHE_MAJOR_VERSION:INTERNAL=2 -//Minor version of cmake used to create the current loaded cache -CMAKE_CACHE_MINOR_VERSION:INTERNAL=8 -//Patch version of cmake used to create the current loaded cache -CMAKE_CACHE_PATCH_VERSION:INTERNAL=12 -//ADVANCED property for variable: CMAKE_COLOR_MAKEFILE -CMAKE_COLOR_MAKEFILE-ADVANCED:INTERNAL=1 -//Path to CMake executable. -CMAKE_COMMAND:INTERNAL=/usr/bin/cmake -//Path to cpack program executable. -CMAKE_CPACK_COMMAND:INTERNAL=/usr/bin/cpack -//Path to ctest program executable. -CMAKE_CTEST_COMMAND:INTERNAL=/usr/bin/ctest -//ADVANCED property for variable: CMAKE_CXX_COMPILER -CMAKE_CXX_COMPILER-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_CXX_FLAGS -CMAKE_CXX_FLAGS-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_CXX_FLAGS_DEBUG -CMAKE_CXX_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_CXX_FLAGS_MINSIZEREL -CMAKE_CXX_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELEASE -CMAKE_CXX_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELWITHDEBINFO -CMAKE_CXX_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_C_COMPILER -CMAKE_C_COMPILER-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_C_FLAGS -CMAKE_C_FLAGS-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_C_FLAGS_DEBUG -CMAKE_C_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_C_FLAGS_MINSIZEREL -CMAKE_C_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_C_FLAGS_RELEASE -CMAKE_C_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_C_FLAGS_RELWITHDEBINFO -CMAKE_C_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//Path to cache edit program executable. -CMAKE_EDIT_COMMAND:INTERNAL=/usr/bin/ccmake -//Executable file format -CMAKE_EXECUTABLE_FORMAT:INTERNAL=ELF -//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS -CMAKE_EXE_LINKER_FLAGS-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_DEBUG -CMAKE_EXE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_MINSIZEREL -CMAKE_EXE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELEASE -CMAKE_EXE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO -CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_EXPORT_COMPILE_COMMANDS -CMAKE_EXPORT_COMPILE_COMMANDS-ADVANCED:INTERNAL=1 -//Name of generator. -CMAKE_GENERATOR:INTERNAL=Unix Makefiles -//Name of generator toolset. 
-CMAKE_GENERATOR_TOOLSET:INTERNAL= -//Start directory with the top level CMakeLists.txt file for this -// project -CMAKE_HOME_DIRECTORY:INTERNAL=/lofar/SAS -//Install .so files without execute permission. -CMAKE_INSTALL_SO_NO_EXE:INTERNAL=0 -//ADVANCED property for variable: CMAKE_LINKER -CMAKE_LINKER-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_MAKE_PROGRAM -CMAKE_MAKE_PROGRAM-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS -CMAKE_MODULE_LINKER_FLAGS-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_DEBUG -CMAKE_MODULE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL -CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELEASE -CMAKE_MODULE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO -CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_NM -CMAKE_NM-ADVANCED:INTERNAL=1 -//number of local generators -CMAKE_NUMBER_OF_LOCAL_GENERATORS:INTERNAL=1 -//ADVANCED property for variable: CMAKE_OBJCOPY -CMAKE_OBJCOPY-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_OBJDUMP -CMAKE_OBJDUMP-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_RANLIB -CMAKE_RANLIB-ADVANCED:INTERNAL=1 -//Path to CMake installation. -CMAKE_ROOT:INTERNAL=/usr/share/cmake -//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS -CMAKE_SHARED_LINKER_FLAGS-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_DEBUG -CMAKE_SHARED_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL -CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELEASE -CMAKE_SHARED_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO -CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_SKIP_INSTALL_RPATH -CMAKE_SKIP_INSTALL_RPATH-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_SKIP_RPATH -CMAKE_SKIP_RPATH-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS -CMAKE_STATIC_LINKER_FLAGS-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_DEBUG -CMAKE_STATIC_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL -CMAKE_STATIC_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_RELEASE -CMAKE_STATIC_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO -CMAKE_STATIC_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_STRIP -CMAKE_STRIP-ADVANCED:INTERNAL=1 -//uname command -CMAKE_UNAME:INTERNAL=/usr/bin/uname -//ADVANCED property for variable: CMAKE_USE_RELATIVE_PATHS -CMAKE_USE_RELATIVE_PATHS-ADVANCED:INTERNAL=1 -//ADVANCED property for variable: CMAKE_VERBOSE_MAKEFILE -CMAKE_VERBOSE_MAKEFILE-ADVANCED:INTERNAL=1 - diff --git a/SAS/TMSS/src/CMakeLists.txt b/SAS/TMSS/src/CMakeLists.txt index fd5a8389a74c27f43c3def1fadb5a87813d9212f..1b99aca609835d03bc2d0a7714fac842de58eb63 100644 --- a/SAS/TMSS/src/CMakeLists.txt +++ b/SAS/TMSS/src/CMakeLists.txt @@ -1,6 +1,6 @@ 
 set(USE_PYTHON_COMPILATION Off)
 
-lofar_find_package(PythonInterp 3.4 REQUIRED)
+lofar_find_package(PythonInterp 3.5 REQUIRED)
 #lofar_find_package(PostgreSQL 9.4)
 
 include(PythonInstall)
@@ -17,6 +17,7 @@ find_python_module(django_jsonforms REQUIRED)      # pip3 install django-jsonforms
 find_python_module(django_json_widget REQUIRED)    # pip3 install django-json-widget
 find_python_module(jsoneditor REQUIRED)            # pip3 install django-jsoneditor
 find_python_module(jsonschema REQUIRED)            # pip3 install jsonschema
+find_python_module(astropy REQUIRED)               # pip3 install astropy
 
 # modules for swagger API export
 find_python_module(drf_yasg REQUIRED)              # pip install drf-yasg
diff --git a/SAS/TMSS/src/migrate_momdb_to_tmss.py b/SAS/TMSS/src/migrate_momdb_to_tmss.py
index 13efa43bbc7759f453875c51cdbfb3f9b5734fb9..e2d0c8102979755204db98ddc326c00a62a44230 100755
--- a/SAS/TMSS/src/migrate_momdb_to_tmss.py
+++ b/SAS/TMSS/src/migrate_momdb_to_tmss.py
@@ -512,8 +512,7 @@ def create_subtask_trees_for_project_in_momdb(project_mom2id, project):
         "priority": project.priority_rank,  # todo: correct to derive from project?
         # optional:
         "start_time": start_time,
-        "stop_time": stop_time,
-        "schedule_method": models.ScheduleMethod.objects.get(value="manual"),  # todo: correct? Or leave None?
+        "stop_time": stop_time
         # "created_or_updated_by_user" = None,
         # "raw_feedback" = None,
         # "do_cancel": None,
diff --git a/SAS/TMSS/src/tmss/exceptions.py b/SAS/TMSS/src/tmss/exceptions.py
index 018622ec7aa35dc6af7727fe8852013779baaf45..c918a64950632d8573d7e1cef3f2745f2383dcdc 100644
--- a/SAS/TMSS/src/tmss/exceptions.py
+++ b/SAS/TMSS/src/tmss/exceptions.py
@@ -25,3 +25,11 @@ class SubtaskSchedulingException(SchedulingException):
 
 class TaskSchedulingException(SchedulingException):
     pass
+
+class DynamicSchedulingException(SchedulingException):
+    pass
+
+class UnknownTemplateException(TMSSException):
+    '''raised when TMSS tries to base its processing routines on the chosen template, but this specific template is unknown.'''
+    pass
+
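A sketch of how the new UnknownTemplateException is meant to be raised when template-based dispatch fails; the import path, dispatch table, and generator function are illustrative, not taken from this patch:

    from lofar.sas.tmss.tmss.exceptions import UnknownTemplateException  # assumed module path

    def create_subtasks_for_task_sketch(task_blueprint):
        # hypothetical mapping from template name to a subtask-generator function
        generators = {'target observation': create_observation_subtasks}  # illustrative entry
        template_name = task_blueprint.specifications_template.name
        try:
            return generators[template_name](task_blueprint)
        except KeyError:
            raise UnknownTemplateException("Cannot create subtasks for task id=%s: unknown template '%s'" %
                                           (task_blueprint.id, template_name))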
diff --git a/SAS/TMSS/src/tmss/settings.py b/SAS/TMSS/src/tmss/settings.py
index 7f160668b40ac7164efdfaea77f44fb018e32d7d..9ba919e02252205cd5b2d7c0e83565bd2cf088c4 100644
--- a/SAS/TMSS/src/tmss/settings.py
+++ b/SAS/TMSS/src/tmss/settings.py
@@ -24,52 +24,21 @@ logger = logging.getLogger(__name__)
 LOGGING = {
     'version': 1,
     'disable_existing_loggers': False,
-    'filters': {
-        'require_debug_false': {
-            '()': 'django.utils.log.RequireDebugFalse',
-        },
-        'require_debug_true': {
-            '()': 'django.utils.log.RequireDebugTrue',
-        },
-    },
     'formatters': {
-        'django.server': {
-            '()': 'django.utils.log.ServerFormatter',
-            'format': '%(asctime)s %(levelname)s %(message)s',
-        },
-        'lofar': {
+        'lofar_formatter': {
             'format': '%(asctime)s %(levelname)s %(message)s',
         },
     },
     'handlers': {
         'console': {
             'level': 'DEBUG',
-            'filters': ['require_debug_true'],
             'class': 'logging.StreamHandler',
-        },
-        'django.server': {
-            'level': 'DEBUG',
-            'class': 'logging.StreamHandler',
-            'formatter': 'django.server',
-        },
-        'mail_admins': {
-            'level': 'ERROR',
-            'filters': ['require_debug_false'],
-            'class': 'django.utils.log.AdminEmailHandler'
-        },
-        'lofar': {
-            'level': 'DEBUG',
-            'class': 'logging.StreamHandler',
-            'formatter': 'lofar',
+            'formatter': 'lofar_formatter',
         },
     },
     'loggers': {
-        'django': {
-            'handlers': ['console', 'mail_admins'],
-            'level': 'INFO',
-        },
         'django.server': {
-            'handlers': ['django.server'],
+            'handlers': ['console'],
             'level': 'INFO',
             'propagate': False,
         },
@@ -82,13 +51,15 @@ LOGGING = {
             'level': 'DEBUG',  # change debug level as appropiate
             'propagate': False,
         },
-        'django.db.backends': {
-            'level': 'INFO',
-            'handlers': ['console'],
-        },
+        # 'django.db.backends': {  # uncomment to enable logging of each db query. Very spammy and slow, but also useful for performance improvement. Gives even more detail/insight than the django debug toolbar.
+        #     'level': 'DEBUG',
+        #     'handlers': ['console'],
+        #     'propagate': False,
+        # },
         'lofar': {
-            'handlers': ['lofar'],
+            'handlers': ['console'],
             'level': 'INFO',
+            'propagate': False,
         },
     }
 }
@@ -121,20 +92,9 @@ INSTALLED_APPS = [
     'drf_yasg',
     'django_filters',
     'material',
-    'material.frontend',
-    'viewflow',
-    'viewflow.frontend',
-    'lofar.sas.tmss.tmss.workflowapp',
+    'material.frontend'
 ]
 
-
-def show_debug_toolbar(*args, **kwargs):
-    return os.environ.get('SHOW_DJANGO_DEBUG_TOOLBAR', False)
-
-DEBUG_TOOLBAR_CONFIG = {
-    'SHOW_TOOLBAR_CALLBACK': show_debug_toolbar
-}
-
 MIDDLEWARE = [
     'django.middleware.gzip.GZipMiddleware',
     'django.middleware.security.SecurityMiddleware',
@@ -146,16 +106,25 @@ MIDDLEWARE = [
     'django.middleware.clickjacking.XFrameOptionsMiddleware'
 ]
 
+def show_debug_toolbar(*args, **kwargs):
+    return bool(os.environ.get('SHOW_DJANGO_DEBUG_TOOLBAR', False))
+
 if show_debug_toolbar():
+    DEBUG_TOOLBAR_CONFIG = { 'SHOW_TOOLBAR_CALLBACK': show_debug_toolbar }
     INSTALLED_APPS.append('debug_toolbar')
     MIDDLEWARE.insert(MIDDLEWARE.index('django.middleware.gzip.GZipMiddleware')+1, 'debug_toolbar.middleware.DebugToolbarMiddleware')
+
+if bool(os.environ.get('TMSS_ENABLE_VIEWFLOW', False)):
+    INSTALLED_APPS.extend(['viewflow', 'viewflow.frontend', 'lofar.sas.tmss.tmss.workflowapp'])
+
+
 ROOT_URLCONF = 'lofar.sas.tmss.tmss.urls'
 
 TEMPLATES = [
     {
         'BACKEND': 'django.template.backends.django.DjangoTemplates',
-        'DIRS': [BASE_DIR, os.path.join(BASE_DIR, 'templates'), os.path.join(os.environ.get('LOFARROOT'), 'SAS/TMSS/frontend','tmss_webapp')],
+        'DIRS': [BASE_DIR, os.path.join(BASE_DIR, 'templates'), os.path.join(os.environ.get('LOFARROOT'), 'lib64/python3.6/site-packages/lofar/SAS/TMSS/frontend','tmss_webapp')],
         'APP_DIRS': True,
         'OPTIONS': {
             'context_processors': [
@@ -169,7 +138,7 @@ TEMPLATES = [
 ]
 
 STATICFILES_DIRS = [
-    os.path.join(os.environ.get('LOFARROOT'), 'SAS/TMSS/frontend','tmss_webapp/build/static')
+    os.path.join(os.environ.get('LOFARROOT'), 'lib64/python3.6/site-packages/lofar/SAS/TMSS/frontend','tmss_webapp/build/static')
 ]
 
 WSGI_APPLICATION = 'lofar.sas.tmss.tmss.wsgi.application'
diff --git a/SAS/TMSS/src/tmss/tmssapp/conversions.py b/SAS/TMSS/src/tmss/tmssapp/conversions.py
index ce112f7b30b8f697baf91d4da9202899703715ba..ab8437eb1fa304aa6690c73d83974e6fb9130a24 100644
--- a/SAS/TMSS/src/tmss/tmssapp/conversions.py
+++ b/SAS/TMSS/src/tmss/tmssapp/conversions.py
@@ -1,11 +1,12 @@
 from astropy.time import Time
 import astropy.units
-from lofar.lta.sip import station_coordinates
 from datetime import datetime
 from astropy.coordinates.earth import EarthLocation
 from astropy.coordinates import Angle
 from astroplan.observer import Observer
+import logging
+logger = logging.getLogger(__name__)
 
 def create_astroplan_observer_for_station(station: str) -> Observer:
     '''
@@ -13,7 +14,7 @@ def create_astroplan_observer_for_station(station: str) -> Observer:
     :param station: a station name, e.g.
"CS002" :return: astroplan.observer.Observer object ''' - + from lofar.lta.sip import station_coordinates coords = station_coordinates.parse_station_coordinates()["%s_LBA" % station.upper()] location = EarthLocation.from_geocentric(x=coords['x'], y=coords['y'], z=coords['z'], unit=astropy.units.m) observer = Observer(location, name="LOFAR", timezone="UTC") @@ -46,6 +47,8 @@ def timestamps_and_stations_to_sun_rise_and_set(timestamps: [datetime], stations for timestamp in timestamps: observer = create_astroplan_observer_for_station(station) sunrise_start = observer.sun_rise_time(time=Time(timestamp), which='previous') + if sunrise_start.to_datetime().date() < timestamp.date(): + sunrise_start = observer.sun_rise_time(time=Time(timestamp), horizon=-angle_to_horizon, which='nearest') if sunrise_start.to_datetime().date() < timestamp.date(): sunrise_start = observer.sun_rise_time(time=Time(timestamp), horizon=-angle_to_horizon, which='next') sunrise_end = observer.sun_rise_time(time=Time(timestamp), horizon=angle_to_horizon, which='next') @@ -71,6 +74,8 @@ def local_sidereal_time_for_utc_and_station(timestamp: datetime = None, :param kind: 'mean' or 'apparent' :return: """ + from lofar.lta.sip import station_coordinates + if timestamp is None: timestamp = datetime.utcnow() station_coords = station_coordinates.parse_station_coordinates() diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py index 7faae82d9b3a694ab79ebddd5b3d0c5676d8ffac..f375f739ae5a435eff01474107753925b5b4208f 100644 --- a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py +++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 3.0.7 on 2020-10-29 16:37 +# Generated by Django 3.0.9 on 2020-11-17 13:44 from django.conf import settings import django.contrib.postgres.fields @@ -17,34 +17,6 @@ class Migration(migrations.Migration): ] operations = [ - migrations.CreateModel( - name='SchedulingUnitBlueprintSummary', - fields=[ - ('id', models.IntegerField(primary_key=True, serialize=False)), - ('sub_id', models.IntegerField()), - ('taskblueprint_id', models.IntegerField()), - ('task_type', models.CharField(max_length=128)), - ('derived_task_status', models.CharField(max_length=128)), - ], - options={ - 'db_table': 'tmssapp_schedulingunitblueprintsummary', - 'managed': False, - }, - ), - migrations.CreateModel( - name='TaskBlueprintSummary', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('taskblueprint_id', models.IntegerField()), - ('subtask_id', models.IntegerField()), - ('substate', models.CharField(max_length=128)), - ('subtask_type', models.CharField(max_length=128)), - ], - options={ - 'db_table': 'tmssapp_taskblueprintsummary', - 'managed': False, - }, - ), migrations.CreateModel( name='Algorithm', fields=[ @@ -483,15 +455,6 @@ class Migration(migrations.Migration): 'abstract': False, }, ), - migrations.CreateModel( - name='ScheduleMethod', - fields=[ - ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)), - ], - options={ - 'abstract': False, - }, - ), migrations.CreateModel( name='SchedulingConstraintsTemplate', fields=[ @@ -630,7 +593,6 @@ class Migration(migrations.Migration): ('stop_time', models.DateTimeField(help_text='Stop this subtask at the specified time (NULLable).', null=True)), ('specifications_doc', django.contrib.postgres.fields.jsonb.JSONField(help_text='Final specifications, as input for the 
controller.')), ('do_cancel', models.DateTimeField(help_text='Timestamp when the subtask has been ordered to cancel (NULLable).', null=True)), - ('priority', models.IntegerField(help_text='Absolute priority of this subtask (higher value means more important).')), ('raw_feedback', models.CharField(help_text='The raw feedback for this Subtask', max_length=1048576, null=True)), ], options={ @@ -852,9 +814,9 @@ class Migration(migrations.Migration): ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), ('time_offset', models.IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.')), - ('first', models.ForeignKey(help_text='First Task Draft to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='first_to_connect', to='tmssapp.TaskDraft')), + ('first', models.ForeignKey(help_text='First Task Draft to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='first_scheduling_relation', to='tmssapp.TaskDraft')), ('placement', models.ForeignKey(help_text='Task scheduling relation placement.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingRelationPlacement')), - ('second', models.ForeignKey(help_text='Second Task Draft to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_to_connect', to='tmssapp.TaskDraft')), + ('second', models.ForeignKey(help_text='Second Task Draft to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_scheduling_relation', to='tmssapp.TaskDraft')), ], options={ 'abstract': False, @@ -868,9 +830,9 @@ class Migration(migrations.Migration): ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), ('time_offset', models.IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.')), - ('first', models.ForeignKey(help_text='First Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='first_to_connect', to='tmssapp.TaskBlueprint')), + ('first', models.ForeignKey(help_text='First Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='first_scheduling_relation', to='tmssapp.TaskBlueprint')), ('placement', models.ForeignKey(default='after', help_text='Task scheduling relation placement.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingRelationPlacement')), - ('second', models.ForeignKey(help_text='Second Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_to_connect', to='tmssapp.TaskBlueprint')), + ('second', models.ForeignKey(help_text='Second Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_scheduling_relation', to='tmssapp.TaskBlueprint')), ], options={ 'abstract': False, @@ -1075,11 +1037,6 @@ class Migration(migrations.Migration): name='global_identifier', field=models.ForeignKey(editable=False, help_text='The global unique identifier for LTA SIP.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SIPidentifier'), ), - migrations.AddField( - model_name='subtask', - name='schedule_method', - field=models.ForeignKey(help_text='Which method to use for scheduling this Subtask. 
One of (MANUAL, BATCH, DYNAMIC).', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ScheduleMethod'), - ), migrations.AddField( model_name='subtask', name='specifications_template', diff --git a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py index 6e58f28a9dcd373dc38be715dd609274e2e6deb1..188b5c3086547549a8f527febaf37f6749044238 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py @@ -47,6 +47,7 @@ class SubtaskState(AbstractChoice): DEFINED = "defined" SCHEDULING = "scheduling" SCHEDULED = "scheduled" + UNSCHEDULING = "unscheduling" QUEUEING = "queueing" QUEUED = "queued" STARTING = "starting" @@ -92,16 +93,6 @@ class Algorithm(AbstractChoice): AES256 = 'aes256' -class ScheduleMethod(AbstractChoice): - """Defines the model and predefined list of possible Algorithm's for DataproductHash. - The items in the Choices class below are automagically populated into the database via a data migration.""" - - class Choices(Enum): - MANUAL = 'manual' - BATCH = 'batch' - DYNAMIC = 'dynamic' - - # # Templates # @@ -152,8 +143,6 @@ class Subtask(BasicCommon): task_blueprint = ForeignKey('TaskBlueprint', related_name='subtasks', null=True, on_delete=SET_NULL, help_text='Task Blueprint to which this Subtask belongs.') specifications_template = ForeignKey('SubtaskTemplate', null=False, on_delete=PROTECT, help_text='Schema used for specifications_doc.') do_cancel = DateTimeField(null=True, help_text='Timestamp when the subtask has been ordered to cancel (NULLable).') - priority = IntegerField(help_text='Absolute priority of this subtask (higher value means more important).') - schedule_method = ForeignKey('ScheduleMethod', null=False, on_delete=PROTECT, help_text='Which method to use for scheduling this Subtask. One of (MANUAL, BATCH, DYNAMIC).') cluster = ForeignKey('Cluster', null=True, on_delete=PROTECT, help_text='Where the Subtask is scheduled to run (NULLable).') # resource_claim = ForeignKey("ResourceClaim", null=False, on_delete=PROTECT) # todo <-- how is this external reference supposed to work? created_or_updated_by_user = ForeignKey(User, null=True, editable=False, on_delete=PROTECT, help_text='The user who created / updated the subtask.') @@ -166,6 +155,34 @@ class Subtask(BasicCommon): # keep original state for logging self.__original_state_id = self.state_id + @property + def duration(self) -> timedelta: + '''the duration of this subtask (stop-start), or 0 if start/stop are None''' + if self.start_time is None or self.stop_time is None: + return timedelta(seconds=0) + return self.stop_time - self.start_time + + @property + def specified_duration(self) -> timedelta: + '''get the specified (or estimated) duration of this subtask based on the specified task duration and the subtask type''' + if self.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value: + # observations have a specified duration, so grab it from the spec. + return timedelta(seconds=self.task_blueprint.specifications_doc.get('duration', 0)) + + if self.specifications_template.type.value == SubtaskType.Choices.PIPELINE.value: + # pipelines usually do not have a specified duration, so make a guess (half the obs duration?). + return timedelta(seconds=self.task_blueprint.specifications_doc.get('duration', max(p.specified_duration.total_seconds() for p in self.predecessors)/2)) + + # other subtasktypes usually depend on cpu/data/network etc. 
So, make a guess (for now) + return timedelta(minutes=5) + + @staticmethod + def independent_subtasks() -> QuerySet: + '''return a QuerySet of all subtasks with no input (i.e. which are "independent" because they have no predecessors) + If you want the result, add .all() like so: Subtask.independent_subtasks().all() + ''' + return Subtask.objects.filter(inputs=None) + @property def successors(self) -> QuerySet: '''return the connected successor subtask(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets) @@ -188,6 +205,20 @@ class Subtask(BasicCommon): "INNER JOIN tmssapp_subtaskinput as st_input on st_input.producer_id = st_output.id\n" "WHERE st_input.subtask_id = %s", params=[self.id])) + @property + def input_dataproducts(self) -> QuerySet: + '''return the input dataproduct(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets) + If you want the result, add .all() like so: my_subtask.input_dataproducts.all() + ''' + return Dataproduct.objects.filter(subtaskinput__subtask_id=self.id) + + @property + def output_dataproducts(self) -> QuerySet: + '''return the output dataproduct(s) as queryset (over which you can perform extended queries, or return via the serializers/viewsets) + If you want the result, add .all() like so: my_subtask.output_dataproducts.all() + ''' + return Dataproduct.objects.filter(producer__subtask_id=self.id) + def save(self, force_insert=False, force_update=False, using=None, update_fields=None): creating = self._state.adding # True on create, False on update @@ -204,16 +235,10 @@ class Subtask(BasicCommon): if duplicate_names: raise ValidationError("Pointings defined in the same Subtask must have unique names. Duplicate names %s in subtask id=%s." % (duplicate_names, self.pk)) - # check if we have a start time or there were predecessors + # check if we have a start time when scheduling if self.state.value == SubtaskState.Choices.SCHEDULED.value and self.__original_state_id == SubtaskState.Choices.SCHEDULING.value: if self.start_time is None: - if self.predecessors.all().count() == 0: raise SubtaskSchedulingException("Cannot schedule subtask id=%s when start time is 'None'." % (self.pk, )) - else: - self.start_time = datetime.utcnow() - - if self.state.value == SubtaskState.Choices.FINISHING.value: - self.stop_time = datetime.utcnow() super().save(force_insert, force_update, using, update_fields)
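# Illustrative sketch (not part of the original patch): how the new Subtask helpers above
# can be combined, e.g. in a Django shell with the tmssapp models loaded. The subtask id
# below is a hypothetical placeholder.
from lofar.sas.tmss.tmss.tmssapp.models.scheduling import Subtask

# "independent" subtasks have no inputs, so they are candidates for getting a start_time directly
for subtask in Subtask.independent_subtasks().all():
    # specified_duration is spec-derived (or estimated); duration is stop_time - start_time
    print(subtask.id, subtask.specified_duration, subtask.duration)

my_subtask = Subtask.objects.get(id=123)        # hypothetical id
inputs = my_subtask.input_dataproducts.all()    # dataproducts consumed via its inputs
outputs = my_subtask.output_dataproducts.all()  # dataproducts produced by its outputs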
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/src/tmss/tmssapp/models/specification.py index 7ec6f980a09fb9dd3d765efb164611a5d898b8a6..f2f04f15009885ff04473a3601256dd878108803 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/specification.py @@ -21,6 +21,7 @@ from django.urls import reverse as revese_url from collections import Counter from django.utils.functional import cached_property + # # Common # @@ -150,12 +151,11 @@ class SchedulingRelationPlacement(AbstractChoice): BEFORE = "before" PARALLEL = "parallel" - class Flag(AbstractChoice): """Defines the model and predefined list of possible Flags to be used in Setting. The items in the Choices class below are automagically populated into the database via a data migration.""" class Choices(Enum): - AUTOSCHEDULE = "allow_scheduling_observations" + DYNAMIC_SCHEDULING_ENABLED = "dynamic_scheduling_enabled" @@ -375,35 +375,6 @@ class DefaultTaskRelationSelectionTemplate(BasicCommon): name = CharField(max_length=128, unique=True) template = ForeignKey("TaskRelationSelectionTemplate", on_delete=PROTECT) - -# -# DatabaseView objects -# -class TaskBlueprintSummary(Model): - taskblueprint_id = IntegerField() - subtask_id = IntegerField() - substate = CharField(max_length=128) - subtask_type = CharField(max_length=128) - - class Meta: - managed = False - db_table = 'tmssapp_taskblueprintsummary' - - -class SchedulingUnitBlueprintSummary(Model): - # Using in an id and ForeignKey is not common for a view BUT the id is a 'dummy' to be able to use in Django - # https://resources.rescale.com/using-database-views-in-django-orm/ - # otherwise an exception will be thrown - id = IntegerField(primary_key=True) - sub_id = IntegerField() - taskblueprint_id = IntegerField() - task_type = CharField(max_length=128) - derived_task_status = CharField(max_length=128) - - class Meta: - managed = False - db_table = 'tmssapp_schedulingunitblueprintsummary' - # # Instance Objects # @@ -813,7 +784,7 @@ class TaskDraft(NamedCommon): def relative_start_time(self) -> datetime.timedelta: '''return the earliest relative start time of all subtasks of this task ''' - scheduling_relations = list(self.first_to_connect.all()) + list(self.second_to_connect.all()) + scheduling_relations = list(self.first_scheduling_relation.all()) + list(self.second_scheduling_relation.all()) for scheduling_relation in scheduling_relations: # sometimes self._id does not exist so use self.id instead to avoid Exception if hasattr(self, '_id'): @@ -934,9 +905,11 @@ class TaskBlueprint(NamedCommon): @cached_property def relative_start_time(self) -> datetime.timedelta: - '''return the earliest relative start time of all subtasks of this task + '''The relative start time is relative to the start time of the 'start' of the parent scheduling unit. + It's based on the scheduling_relations from the scheduling_unit's specification, + and hence it determines the order in which the tasks within the unit should be executed. ''' - scheduling_relations = list(self.first_to_connect.all()) + list(self.second_to_connect.all()) + scheduling_relations = list(self.first_scheduling_relation.all()) + list(self.second_scheduling_relation.all()) for scheduling_relation in scheduling_relations: # sometimes self._id does not exist so use self.id instead to avoid Exception if hasattr(self, '_id'): @@ -960,7 +933,8 @@ class TaskBlueprint(NamedCommon): @cached_property def relative_stop_time(self) -> datetime.timedelta: - '''return the latest relative stop time of all subtasks of this task + '''The relative_stop_time is the relative_start_time+duration. + See relative_start_time for an explanation of its intended usage. 
''' # todo: when it was added, check if subtask.specifications_template.type.value == TaskType.Choices.OBSERVATION.value: try: @@ -1010,7 +984,7 @@ class TaskBlueprint(NamedCommon): return "defined" if len([s for s in subtasks if s['state'] == 'finished']) == nr_of_subtasks: - return "finished" + return "finished" if any(s for s in subtasks if s['state'] in ('cancelling', 'cancelled')): return "cancelled" @@ -1069,8 +1043,8 @@ class TaskRelationBlueprint(BasicCommon): class TaskSchedulingRelationBlueprint(BasicCommon): - first = ForeignKey('TaskBlueprint', related_name='first_to_connect', on_delete=CASCADE, help_text='First Task Blueprint to connect.') - second = ForeignKey('TaskBlueprint', related_name='second_to_connect', on_delete=CASCADE, help_text='Second Task Blueprint to connect.') + first = ForeignKey('TaskBlueprint', related_name='first_scheduling_relation', on_delete=CASCADE, help_text='First Task Blueprint to connect.') + second = ForeignKey('TaskBlueprint', related_name='second_scheduling_relation', on_delete=CASCADE, help_text='Second Task Blueprint to connect.') placement = ForeignKey('SchedulingRelationPlacement', null=False, default="after", on_delete=PROTECT, help_text='Task scheduling relation placement.') time_offset = IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.') @@ -1083,8 +1057,8 @@ class TaskSchedulingRelationBlueprint(BasicCommon): class TaskSchedulingRelationDraft(BasicCommon): - first = ForeignKey('TaskDraft', related_name='first_to_connect', on_delete=CASCADE, help_text='First Task Draft to connect.') - second = ForeignKey('TaskDraft', related_name='second_to_connect', on_delete=CASCADE, help_text='Second Task Draft to connect.') + first = ForeignKey('TaskDraft', related_name='first_scheduling_relation', on_delete=CASCADE, help_text='First Task Draft to connect.') + second = ForeignKey('TaskDraft', related_name='second_scheduling_relation', on_delete=CASCADE, help_text='Second Task Draft to connect.') placement = ForeignKey('SchedulingRelationPlacement', null=False, on_delete=PROTECT, help_text='Task scheduling relation placement.') time_offset = IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.') diff --git a/SAS/TMSS/src/tmss/tmssapp/populate.py b/SAS/TMSS/src/tmss/tmssapp/populate.py index b786248f34773046434364d3ddc887ecd6d59e3a..05ec07e83f2f102caa1f65d1bcadf8ffb3447935 100644 --- a/SAS/TMSS/src/tmss/tmssapp/populate.py +++ b/SAS/TMSS/src/tmss/tmssapp/populate.py @@ -35,12 +35,12 @@ def populate_choices(apps, schema_editor): :return: None ''' for choice_class in [Role, Datatype, Dataformat, CopyReason, - SubtaskState, SubtaskType, StationType, Algorithm, ScheduleMethod, SchedulingRelationPlacement, + SubtaskState, SubtaskType, StationType, Algorithm, SchedulingRelationPlacement, Flag, ProjectCategory, PeriodCategory, Quantity, TaskType]: choice_class.objects.bulk_create([choice_class(value=x.value) for x in choice_class.Choices]) def populate_settings(apps, schema_editor): - Setting.objects.create(name=Flag.objects.get(value='allow_scheduling_observations'), value=True) + Setting.objects.create(name=Flag.objects.get(value='dynamic_scheduling_enabled'), value=False) def populate_test_data(): """ @@ -52,7 +52,7 @@ def populate_test_data(): # only add (with expensive setup time) example data when developing/testing and we're not unittesting if isTestEnvironment() or isDevelopmentEnvironment(): from lofar.sas.tmss.tmss.exceptions import 
TMSSException - from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingSet_test_data, SchedulingUnitDraft_test_data + from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingUnitDraft_test_data from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprints_and_subtasks_from_scheduling_unit_draft, create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_draft from lofar.sas.tmss.tmss.tmssapp.subtasks import schedule_subtask from lofar.common.json_utils import get_default_json_object_for_schema @@ -60,48 +60,51 @@ def populate_test_data(): constraints_template = models.SchedulingConstraintsTemplate.objects.get(name="constraints") constraints_spec = get_default_json_object_for_schema(constraints_template.schema) - strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines") - - # create a Test Scheduling Set UC1 under project TMSS-Commissioning - tmss_project = models.Project.objects.get(name="TMSS-Commissioning") - for set_nr in range(2): - scheduling_set_data = SchedulingSet_test_data(name="Test Scheduling Set UC1 example %s" % (set_nr,), project=tmss_project) - scheduling_set = models.SchedulingSet.objects.create(**scheduling_set_data) - scheduling_set.tags = ["TEST", "UC1"] - scheduling_set.save() - - logger.info('created test scheduling_set: %s', scheduling_set.name) - - for unit_nr in range(5): - - # the 'template' in the strategy_template is a predefined json-data blob which validates against the given scheduling_unit_template - # a user might 'upload' a partial json-data blob, so add all the known defaults - scheduling_unit_spec = add_defaults_to_json_object_for_schema(strategy_template.template, strategy_template.scheduling_unit_template.schema) - - # add the scheduling_unit_doc to a new SchedulingUnitDraft instance, and were ready to use it! 
- scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(name="UC1 test scheduling unit %s.%s" % (set_nr+1, unit_nr+1), - scheduling_set=scheduling_set, - requirements_template=strategy_template.scheduling_unit_template, - requirements_doc=scheduling_unit_spec, - observation_strategy_template=strategy_template, - scheduling_constraints_doc=constraints_spec, - scheduling_constraints_template=constraints_template) - scheduling_unit_draft.tags = ["TEST", "UC1"] - scheduling_unit_draft.save() - - logger.info('created test scheduling_unit_draft: %s', scheduling_unit_draft.name) - - try: - if set_nr==0 and unit_nr==0: - scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) - scheduled_subtasks = models.Subtask.objects.filter(task_blueprint__scheduling_unit_blueprint=scheduling_unit_blueprint, task_blueprint__name='Calibrator Observation 1', specifications_template__type='observation').all() - for subtask in scheduled_subtasks: - schedule_subtask(subtask) - else: - create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) - - except TMSSException as e: - logger.exception(e) + uc1_strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="UC1 CTC+pipelines") + simple_strategy_template = models.SchedulingUnitObservingStrategyTemplate.objects.get(name="Simple Observation") + + projects = models.Project.objects.order_by('-priority_rank').all() + for tmss_project in projects: + if 'Commissioning' not in tmss_project.tags: + continue + + for scheduling_set in tmss_project.scheduling_sets.all(): + for unit_nr in range(2): + for strategy_template in [uc1_strategy_template, simple_strategy_template]: + # the 'template' in the strategy_template is a predefined json-data blob which validates against the given scheduling_unit_template + # a user might 'upload' a partial json-data blob, so add all the known defaults + scheduling_unit_spec = add_defaults_to_json_object_for_schema(strategy_template.template, strategy_template.scheduling_unit_template.schema) + + # limit target obs duration for demo data + if strategy_template == uc1_strategy_template: + scheduling_unit_spec['tasks']['Calibrator Observation 1']['specifications_doc']['duration'] = 2*60 + scheduling_unit_spec['tasks']['Target Observation']['specifications_doc']['duration'] = 2*3600 + scheduling_unit_spec['tasks']['Calibrator Observation 2']['specifications_doc']['duration'] = 2*60 + elif strategy_template == simple_strategy_template: + scheduling_unit_spec['tasks']['Observation']['specifications_doc']['duration'] = 5*60 + + # set some constraints, so the dynamic scheduler has something to chew on. + # DISABLED for now, because the 'daily' constraint solver is not ready yet. + # constraints_spec['daily']['require_day'] = unit_nr%2==0 + # constraints_spec['daily']['require_night'] = unit_nr%2==1 + # constraints_spec['daily']['avoid_twilight'] = unit_nr%4>1 + + # add the scheduling_unit_doc to a new SchedulingUnitDraft instance, and we're ready to use it! 
+ scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(name="%s %s %0d" % ('UC1' if strategy_template==uc1_strategy_template else 'Obs', tmss_project.name, unit_nr+1), + scheduling_set=scheduling_set, + description="Test scheduling unit", + requirements_template=strategy_template.scheduling_unit_template, + requirements_doc=scheduling_unit_spec, + observation_strategy_template=strategy_template, + scheduling_constraints_doc=constraints_spec, + scheduling_constraints_template=constraints_template) + + logger.info('created test scheduling_unit_draft: %s', scheduling_unit_draft.name) + + try: + create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + except TMSSException as e: + logger.exception(e) except ImportError: pass @@ -136,14 +139,22 @@ def populate_cycles(apps, schema_editor): def populate_projects(apps, schema_editor): - tmss_project = models.Project.objects.create(name="TMSS-Commissioning", - description="Project for all TMSS tests and commissioning", - priority_rank=1.0, - can_trigger=False, - private_data=True, - expert=True, - filler=False) - tmss_project.cycles.set([models.Cycle.objects.get(name="Cycle 14")]) + from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingSet_test_data + + for name, rank in (("high", 3), ("normal", 2), ("low", 1)): + tmss_project = models.Project.objects.create(name=name, + description="Project for all TMSS tests and commissioning (%s priority)" % (name,), + priority_rank=rank, + can_trigger=False, + private_data=True, + expert=True, + filler=False) + tmss_project.tags = ["Commissioning"] + tmss_project.cycles.set([models.Cycle.objects.get(name="Cycle 14")]) + tmss_project.save() + + # for convenience, create a schedulingset for each project + models.SchedulingSet.objects.create(**SchedulingSet_test_data(name="Test Scheduling Set", project=tmss_project)) def populate_resources(apps, schema_editor): diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/common_schema_template-datetime-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/common_schema_template-datetime-1.json new file mode 100644 index 0000000000000000000000000000000000000000..d12982a89ed3b79fc306a4a26c2c667f60662e6a --- /dev/null +++ b/SAS/TMSS/src/tmss/tmssapp/schemas/common_schema_template-datetime-1.json @@ -0,0 +1,33 @@ +{ + "$id": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1#", + "$schema": "http://json-schema.org/draft-06/schema#", + "title": "datetime", + "description": "This schema defines datetime objects like timestamp and timewindow.", + "version": 1, + "type": "object", + "definitions": { + "timestamp": { + "description": "A timestamp defined in UTC", + "type": "string", + "pattern": "\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d(\\.\\d+)?Z?", + "format": "date-time" + }, + "timewindow": { + "type": "object", + "description": "A timewindow interval: [from, to)", + "properties": { + "from": { + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timestamp" + }, + "to": { + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timestamp" + } + }, + "additionalProperties": false, + "required": [ + "from", + "to" + ] + } + } +} \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json b/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json index 77a916705c8df50c069f5929e11fc03d5586acf7..9caf086d923d583720925e44d47dfbc255f95885 100644 --- 
a/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json +++ b/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling_constraints_template-constraints-1.json @@ -5,29 +5,6 @@ "description": "This schema defines the scheduling constraints for a scheduling unit", "version": 1, "definitions": { - "timestamp": { - "description": "A timestamp defined in UTC", - "type": "string", - "pattern": "\\d{4}-[01]\\d-[0-3]\\dT[0-2]\\d:[0-5]\\d:[0-5]\\d(\\.\\d+)?Z", - "format": "date-time" - }, - "timewindow": { - "type": "object", - "description": "A timewindow interval: [from, to)", - "properties": { - "from": { - "$ref": "#/definitions/timestamp" - }, - "to": { - "$ref": "#/definitions/timestamp" - } - }, - "additionalProperties": false, - "required": [ - "from", - "to" - ] - }, "distance_on_sky": { "type": "number", "minimum": 0, @@ -40,38 +17,39 @@ } }, "type": "object", + "default": {}, "properties": { "scheduler": { "name": "Scheduler", - "description": "Which scheduling system will schedule this", + "description": "Schedule manually at the 'time.at' moment, or dynamically, taking all time constraints into consideration.", "type": "string", "enum": [ "manual", - "online" + "dynamic" ], - "default": "online" + "default": "dynamic" }, "time": { "type": "object", "default": {}, "properties": { "at": { - "description": "Start at this moment", - "$ref": "#/definitions/timestamp" + "description": "Start at this moment. Requires 'scheduler' to be set to 'manual'.", + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timestamp" }, "after": { "description": "Start after this moment", - "$ref": "#/definitions/timestamp" + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timestamp" }, "before": { "description": "End before this moment", - "$ref": "#/definitions/timestamp" + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timestamp" }, "between": { "description": "Run within one of these time windows", "type": "array", "items": { - "$ref": "#/definitions/timewindow" + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timewindow" }, "minItems":0, "uniqueItems":true, @@ -81,7 +59,7 @@ "description": "Do NOT run within any of these time windows", "type": "array", "items": { - "$ref": "#/definitions/timewindow" + "$ref": "http://tmss.lofar.org/api/schemas/commonschematemplate/datetime/1/#/definitions/timewindow" }, "minItems":0, "uniqueItems":true, @@ -129,6 +107,7 @@ "transit_offset": { "description": "Offset window to LST centering", "type": "object", + "default": {}, "properties": { "from": { "type": "number", @@ -145,6 +124,7 @@ }, "min_distance": { "type": "object", + "default": {}, "properties": { "sun": { "$ref": "#/definitions/distance_on_sky",
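# Illustrative sketch (not part of the original patch): two constraints docs that should
# validate against the schema above, assuming the shared datetime $refs resolve.
# 'manual' pairs with an explicit 'time.at'; 'dynamic' (the new default) leaves the start
# moment to the dynamic scheduling service, within the given windows.
manual_constraints = {"scheduler": "manual",
                      "time": {"at": "2020-11-01T12:00:00Z"}}
dynamic_constraints = {"scheduler": "dynamic",
                       "time": {"between": [{"from": "2020-11-01T00:00:00Z",
                                             "to": "2020-11-02T00:00:00Z"}]}}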
"enabled": true, + "nr_of_subbands": -1, + "nr_of_timestamps": 256 + } + }, + "duration": 600, + "correlator": { + "storage_cluster": "CEP4", + "integration_time": 1, + "channels_per_subband": 64 + }, + "antenna_set": "HBA_DUAL_INNER", + "filter": "HBA_110_190", + "stations": ["CS001"], + "tile_beam": { + "direction_type": "J2000", + "angle1": 0.42, + "angle2": 0.43, + "angle3": 0.44 + }, + "SAPs": [ + { + "name": "target0", + "digital_pointing": { + "direction_type": "J2000", + "angle1": 0.42, + "angle2": 0.43, + "angle3": 0.44 + }, + "subbands": [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15] + } + ] + }, + "specifications_template": "target observation" + } + }, + "task_relations": [ + ], + "task_scheduling_relations": [ + ], + "parameters": [ + { + "refs": [ + "#/tasks/Observation/specifications_doc/duration" + ], + "name": "Duration" + }, + { + "refs": [ + "#/tasks/Observation/specifications_doc/SAPs/0/digital_pointing" + ], + "name": "Target Pointing" + }, + { + "refs": [ + "#/tasks/Observation/specifications_doc/tile_beam" + ], + "name": "Tile Beam" + } + ] +} \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json b/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json index 27f52ee1913374218f741e80cd33a3ac96a84e06..6e1d2c710101efe1a396935340fcdee899fe3ded 100644 --- a/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json +++ b/SAS/TMSS/src/tmss/tmssapp/schemas/templates.json @@ -1,4 +1,8 @@ [ + { + "file_name": "common_schema_template-datetime-1.json", + "template": "common_schema_template" + }, { "file_name": "common_schema_template-pointing-1.json", "template": "common_schema_template" @@ -96,6 +100,10 @@ "realtime": true, "queue": false }, + { + "file_name": "scheduling_constraints_template-constraints-1.json", + "template": "scheduling_constraints_template" + }, { "file_name": "UC1-scheduling-unit-observation-strategy.json", "template": "scheduling_unit_observing_strategy_template", @@ -106,8 +114,13 @@ "version": 1 }, { - "file_name": "scheduling_constraints_template-constraints-1.json", - "template": "scheduling_constraints_template" + "file_name": "simple-observation-scheduling-unit-observation-strategy.json", + "template": "scheduling_unit_observing_strategy_template", + "scheduling_unit_template_name": "scheduling unit", + "scheduling_unit_template_version": "1", + "name": "Simple Observation", + "description": "This observation strategy template defines a single simple Target observation.", + "version": 1 }, { "file_name": "sap_template-1.json", diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py index 85d7bd21c54ca2ad78badd911131847c11fb3375..e70f7585074cf5c87edce6ae0c8d10f7475d712e 100644 --- a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py @@ -40,12 +40,6 @@ class AlgorithmSerializer(RelationalHyperlinkedModelSerializer): fields = '__all__' -class ScheduleMethodSerializer(RelationalHyperlinkedModelSerializer): - class Meta: - model = models.ScheduleMethod - fields = '__all__' - - class SubtaskTemplateSerializer(AbstractTemplateSerializer): class Meta: model = models.SubtaskTemplate diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py index 279d0ae76212c863bdbb69146dc0cebe9c375612..0c215aa57d1915e0660bd31572775bd3992d00d9 100644 --- a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py @@ 
-259,7 +259,7 @@ class FlagSerializer(RelationalHyperlinkedModelSerializer): fields = '__all__' -class SettingSerializer(RelationalHyperlinkedModelSerializer): +class SettingSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = models.Setting fields = '__all__' @@ -334,7 +334,7 @@ class TaskDraftSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.TaskDraft fields = '__all__' - extra_fields = ['task_blueprints', 'produced_by', 'consumed_by', 'first_to_connect', 'second_to_connect', 'duration', 'relative_start_time', 'relative_stop_time'] + extra_fields = ['task_blueprints', 'produced_by', 'consumed_by', 'first_scheduling_relation', 'second_scheduling_relation', 'duration', 'relative_start_time', 'relative_stop_time'] class TaskBlueprintSerializer(RelationalHyperlinkedModelSerializer): @@ -347,7 +347,7 @@ class TaskBlueprintSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.TaskBlueprint fields = '__all__' - extra_fields = ['subtasks', 'produced_by', 'consumed_by', 'first_to_connect', 'second_to_connect', 'duration', + extra_fields = ['subtasks', 'produced_by', 'consumed_by', 'first_scheduling_relation', 'second_scheduling_relation', 'duration', 'start_time', 'stop_time', 'relative_start_time', 'relative_stop_time', 'status'] diff --git a/SAS/TMSS/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/src/tmss/tmssapp/subtasks.py index 18690cde9c0eae546da912da38bf134996d68edd..506eafd5a8aa495d4ac5a44b20dc6084ae2892a6 100644 --- a/SAS/TMSS/src/tmss/tmssapp/subtasks.py +++ b/SAS/TMSS/src/tmss/tmssapp/subtasks.py @@ -250,8 +250,6 @@ def create_observation_control_subtask_from_task_blueprint(task_blueprint: TaskB "task_blueprint": task_blueprint, "specifications_template": subtask_template, "tags": [], - "priority": 1, - "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), "cluster": Cluster.objects.get(name=cluster_name) } subtask = Subtask.objects.create(**subtask_data) @@ -313,8 +311,6 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask) "task_blueprint": observation_subtask.task_blueprint, "specifications_template": qafile_subtask_template, "specifications_doc": qafile_subtask_spec, - "priority": 1, - "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), "cluster": observation_subtask.cluster} qafile_subtask = Subtask.objects.create(**qafile_subtask_data) @@ -336,7 +332,6 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask) def create_qaplots_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> Subtask: - qafile_subtasks = [st for st in task_blueprint.subtasks.all() if st.specifications_template.type.value == SubtaskType.Choices.QA_FILES.value] if qafile_subtasks: qafile_subtask = qafile_subtasks[0] # TODO: decide what to do when there are multiple qafile subtasks? 
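# Illustrative sketch (not part of the original patch): with 'priority' and
# 'schedule_method' removed from the model, the subtask_data dicts in the create_*
# functions above shrink to the remaining fields, along the lines of:
subtask_data = {"task_blueprint": task_blueprint,            # from the enclosing function
                "specifications_template": subtask_template,
                "specifications_doc": subtask_specs,
                "tags": [],
                "cluster": Cluster.objects.get(name=cluster_name)}
subtask = Subtask.objects.create(**subtask_data)
# manual-vs-dynamic scheduling is now expressed per scheduling unit in its
# scheduling constraints doc ('scheduler': 'manual'|'dynamic'), not per subtask.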
@@ -378,8 +373,6 @@ def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subta "task_blueprint": qafile_subtask.task_blueprint, "specifications_template": qaplots_subtask_template, "specifications_doc": qaplots_subtask_spec_doc, - "priority": 1, - "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), "cluster": qafile_subtask.cluster} qaplots_subtask = Subtask.objects.create(**qaplots_subtask_data) @@ -425,8 +418,6 @@ def create_preprocessing_subtask_from_task_blueprint(task_blueprint: TaskBluepri "task_blueprint": task_blueprint, "specifications_template": subtask_template, "specifications_doc": subtask_specs, - "priority": 1, - "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), "cluster": Cluster.objects.get(name=cluster_name) } subtask = Subtask.objects.create(**subtask_data) @@ -471,8 +462,6 @@ def create_ingest_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> "task_blueprint": task_blueprint, "specifications_template": subtask_template, "specifications_doc": subtask_specs, - "priority": 1, - "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), "cluster": Cluster.objects.get(name=cluster_name)} subtask = Subtask.objects.create(**subtask_data) @@ -520,6 +509,35 @@ def schedule_subtask(subtask: Subtask) -> Subtask: raise SubtaskSchedulingException("Cannot schedule subtask id=%d because there is no schedule-method known for this subtasktype=%s." % (subtask.pk, subtask.specifications_template.type.value)) + except Exception as e: + try: + # set the subtask to state 'ERROR'... + subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.ERROR.value) + subtask.save() + except Exception as e2: + logger.error(e2) + finally: + # ... and re-raise the original exception (wrapped) + raise SubtaskSchedulingException("Error while scheduling subtask id=%d: %s" % (subtask.pk, str(e))) + + +def unschedule_subtask(subtask: Subtask) -> Subtask: + '''unschedule the given subtask, removing all output dataproducts, and setting its state back to 'defined'.''' + if subtask.state.value != SubtaskState.Choices.SCHEDULED.value: + raise SubtaskSchedulingException("Cannot unschedule subtask id=%d because it is not SCHEDULED. Current state=%s" % (subtask.pk, subtask.state.value)) + + try: + subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.UNSCHEDULING.value) + subtask.save() + + for output in subtask.outputs.all(): + output.dataproducts.all().delete() + #TODO: delete dataproduct transforms + + _assign_or_unassign_resources(subtask) + + subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.DEFINED.value) + subtask.save() except Exception as e: try: # set the subtask to state 'ERROR'... @@ -531,6 +549,62 @@ def schedule_subtask(subtask: Subtask) -> Subtask: # ... 
and re-raise the original exception raise +def unschedule_subtasks_in_task_blueprint(task_blueprint: TaskBlueprint): + '''Convenience method: Unschedule (and return) all scheduled subtasks in the task_blueprint''' + scheduled_subtasks = list(task_blueprint.subtasks.filter(state__value=SubtaskState.Choices.SCHEDULED.value).all()) + for subtask in scheduled_subtasks: + unschedule_subtask(subtask) + + return scheduled_subtasks + + +def schedule_subtask_and_update_successor_start_times(subtask: Subtask) -> Subtask: + scheduled_subtask = schedule_subtask(subtask) + shift_successors_until_after_stop_time(scheduled_subtask) + return scheduled_subtask + + +def update_subtasks_start_times_for_scheduling_unit(scheduling_unit: SchedulingUnitBlueprint, start_time: datetime): + for task_blueprint in scheduling_unit.task_blueprints.all(): + defined_independent_subtasks = task_blueprint.subtasks.filter(state__value='defined').filter(inputs=None).all() + for subtask in defined_independent_subtasks: + update_start_time_and_shift_successors_until_after_stop_time(subtask, start_time + subtask.task_blueprint.relative_start_time) + + +def update_start_time_and_shift_successors_until_after_stop_time(subtask: Subtask, start_time: datetime): + subtask.start_time = start_time + subtask.stop_time = subtask.start_time + subtask.specified_duration + subtask.save() + + shift_successors_until_after_stop_time(subtask) + + +def shift_successors_until_after_stop_time(subtask: Subtask): + for successor in subtask.successors: + # by default, let the successor directly follow this task... + successor_start_time = subtask.stop_time + + # ... but adjust it if there is a scheduling_relation with an offset. + # so, check if these successive subtasks have different task_blueprint parents + if subtask.task_blueprint.id != successor.task_blueprint.id: + relations = (TaskSchedulingRelationBlueprint.objects.filter(first=subtask.task_blueprint, second=successor.task_blueprint) | + TaskSchedulingRelationBlueprint.objects.filter(first=successor.task_blueprint, second=subtask.task_blueprint)).all() + if relations: + # there should be only one scheduling relation between the tasks + relation = relations[0] + successor_start_time += timedelta(seconds=relation.time_offset) + + # update the start time and recurse to shift the successor's successors as well + update_start_time_and_shift_successors_until_after_stop_time(successor, successor_start_time) + + +def clear_defined_subtasks_start_stop_times_for_scheduling_unit(scheduling_unit: SchedulingUnitBlueprint): + '''set start/stop times of all the subtasks in the scheduling unit to None''' + for task_blueprint in scheduling_unit.task_blueprints.all(): + defined_subtasks = task_blueprint.subtasks.filter(state__value='defined').all() + for subtask in defined_subtasks: + subtask.start_time = None + subtask.stop_time = None + subtask.save() + def check_prerequities_for_scheduling(subtask: Subtask) -> bool: if subtask.state.value != SubtaskState.Choices.DEFINED.value: @@ -541,14 +615,12 @@ def check_prerequities_for_scheduling(subtask: Subtask) -> bool: raise SubtaskSchedulingException("Cannot schedule subtask id=%d because its predecessor id=%s is not FINISHED but state=%s" % (subtask.pk, predecessor.pk, predecessor.state.value)) - # check if settings allow scheduling observations - setting = Setting.objects.get(name='allow_scheduling_observations') - if not setting.value: - raise SubtaskSchedulingException("Cannot schedule subtask id=%d because setting %s=%s does not allow that." 
% - (subtask.pk, setting.name, setting.value)) - return True + +def _assign_or_unassign_resources(subtask: Subtask): + if subtask.state.value not in [SubtaskState.Choices.SCHEDULING.value, SubtaskState.Choices.UNSCHEDULING.value]: + raise SubtaskSchedulingException("Cannot assign resources for subtask id=%d because it is not in (UN)SCHEDULING state. " + "Current state=%s" % (subtask.pk, subtask.state.value)) def _create_ra_specification(_subtask): # Should we do something with station list, for 'detecting' conflicts it can be empty @@ -825,7 +897,7 @@ def get_previous_related_task_blueprint_with_time_offset(task_blueprint): previous_related_task_blueprint = None time_offset = 0 - scheduling_relations = list(task_blueprint.first_to_connect.all()) + list(task_blueprint.second_to_connect.all()) + scheduling_relations = list(task_blueprint.first_scheduling_relation.all()) + list(task_blueprint.second_scheduling_relation.all()) for scheduling_relation in scheduling_relations: if scheduling_relation.first.id == task_blueprint.id and scheduling_relation.placement.value == "after": previous_related_task_blueprint = TaskBlueprint.objects.get(id=scheduling_relation.second.id) @@ -890,17 +962,18 @@ def schedule_observation_subtask(observation_subtask: Subtask): observation_subtask.save() # step 1a: check start/stop times + # start time should be known. If not, raise. Then the user and/or scheduling service should supply a properly calculated/estimated start_time first. if observation_subtask.start_time is None: - next_start_time = calculate_start_time(observation_subtask) - logger.info("observation id=%s has no starttime. assigned default: %s", observation_subtask.pk, formatDatetime(next_start_time)) - observation_subtask.start_time = next_start_time + raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s because it has no start_time" % (observation_subtask.pk, + observation_subtask.specifications_template.type)) + + if observation_subtask.specified_duration < timedelta(seconds=1): + raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s because its specified duration is too short: %s" % (observation_subtask.pk, + observation_subtask.specifications_template.type, + observation_subtask.specified_duration)) - if observation_subtask.stop_time is None: - duration_in_sec = observation_subtask.task_blueprint.specifications_doc["duration"] - logger.info("Duration of observation id=%s is %d seconds", observation_subtask.pk, duration_in_sec) - stop_time = observation_subtask.start_time + timedelta(seconds=duration_in_sec) - logger.info("observation id=%s has no stop_time. assigned default: %s", observation_subtask.pk, formatDatetime(stop_time)) - observation_subtask.stop_time = stop_time + # always update the stop_time according to the spec + observation_subtask.stop_time = observation_subtask.start_time + observation_subtask.specified_duration # step 2: define input dataproducts # TODO: are there any observations that take input dataproducts? @@ -931,23 +1004,24 @@ } }, specifications_template=SAPTemplate.objects.get(name="SAP")) - sap.save() - for sb_nr in pointing['subbands']: - dp = Dataproduct.objects.create(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr), - directory=directory, - dataformat=Dataformat.objects.get(value="MeasurementSet"), - datatype=Datatype.objects.get(value="visibilities"), # todo: is this correct? 
- producer=subtask_output, - specifications_doc={"sap": [str(sap_nr)]}, # todo: set correct value. This will be provided by the RA somehow - specifications_template=dataproduct_specifications_template, - feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema), - feedback_template=dataproduct_feedback_template, - size=0 if sb_nr%10==0 else 1024*1024*1024*sb_nr, - expected_size=1024*1024*1024*sb_nr, - sap=sap) - dp.save() + + Dataproduct.objects.bulk_create([Dataproduct(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr), + directory=directory, + dataformat=Dataformat.objects.get(value="MeasurementSet"), + datatype=Datatype.objects.get(value="visibilities"), + producer=subtask_output, + specifications_doc={"sap": [str(sap_nr)]}, + specifications_template=dataproduct_specifications_template, + feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema), + feedback_template=dataproduct_feedback_template, + size=0 if sb_nr%10==0 else 1024*1024*1024*sb_nr, + expected_size=1024*1024*1024*sb_nr, + sap=sap) for sb_nr in pointing['subbands']]) + # step 4: resource assigner (if possible) - assign_resources(observation_subtask) + assign_or_unassign_resources(observation_subtask) + + # TODO: TMSS-382: evaluate the scheduled stations and see if the requirements given in the subtask.task_blueprint.specifications_doc are met for the station_groups and max_nr_missing. # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it) observation_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value)
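# Illustrative sketch (not part of the original patch): the switch from per-row
# .objects.create()/.save() to a single .objects.bulk_create() above replaces one INSERT
# per subband dataproduct with one multi-row INSERT. The shape of the pattern:
unsaved = [Dataproduct(filename="L%d_SAP%03d_SB%03d_uv.MS" % (observation_subtask.id, sap_nr, sb_nr),
                       producer=subtask_output, sap=sap)   # remaining fields as in the patch above
           for sb_nr in pointing['subbands']]
Dataproduct.objects.bulk_create(unsaved)                   # one query for all rows
# Caveat (documented Django behaviour): bulk_create() does not call Model.save() and does
# not send pre/post-save signals, so any save()-side effects are skipped.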
assigned default: %s", pipeline_subtask.pk, formatDatetime(stop_time)) - pipeline_subtask.stop_time = stop_time + if pipeline_subtask.specified_duration < timedelta(seconds=1): + raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s because its specified duration is too short: %s" % (pipeline_subtask.pk, + pipeline_subtask.specifications_template.type, + pipeline_subtask.specified_duration)) + + # always update the stop_time according to the spec + pipeline_subtask.stop_time = pipeline_subtask.start_time + pipeline_subtask.specified_duration # step 2: link input dataproducts if pipeline_subtask.inputs.count() == 0: @@ -1008,31 +1085,35 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask): # step 3: create output dataproducts, and link these to the output # TODO: create them from the spec, instead of "copying" the input filename - output_dps = [] + dataformat = Dataformat.objects.get(value="MeasurementSet") + input_dps = list(pipeline_subtask_input.dataproducts.all()) + output_dp_objects = [] for input_dp in pipeline_subtask_input.dataproducts.all(): if '_' in input_dp.filename and input_dp.filename.startswith('L'): filename = "L%s_%s" % (pipeline_subtask.pk, input_dp.filename.split('_', 1)[1]) else: filename = "L%s_%s" % (pipeline_subtask.pk, input_dp.filename) - output_dp = Dataproduct.objects.create(filename=filename, - directory=input_dp.directory.replace(str(pipeline_subtask_input.producer.subtask.pk), str(pipeline_subtask.pk)), - dataformat=Dataformat.objects.get(value="MeasurementSet"), - datatype=Datatype.objects.get(value="visibilities"), # todo: is this correct? - producer=pipeline_subtask_output, - specifications_doc={}, - specifications_template=dataproduct_specifications_template, - feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema), - feedback_template=dataproduct_feedback_template, - sap=input_dp.sap) - output_dp.save() - DataproductTransform.objects.create(input=input_dp, output=output_dp, identity=False) - output_dps.append(output_dp) - + output_dp = Dataproduct(filename=filename, + directory=input_dp.directory.replace(str(pipeline_subtask_input.producer.subtask.pk), str(pipeline_subtask.pk)), + dataformat=dataformat, + datatype=Datatype.objects.get(value="visibilities"), # todo: is this correct? 
+ producer=pipeline_subtask_output, + specifications_doc=get_default_json_object_for_schema(dataproduct_specifications_template.schema), + specifications_template=dataproduct_specifications_template, + feedback_doc=get_default_json_object_for_schema(dataproduct_feedback_template.schema), + feedback_template=dataproduct_feedback_template, + sap=input_dp.sap) + output_dp_objects.append(output_dp) + + output_dps = Dataproduct.objects.bulk_create(output_dp_objects) pipeline_subtask_output.dataproducts.set(output_dps) + transforms = [DataproductTransform(input=input_dp, output=output_dp, identity=False) for input_dp,output_dp in zip(input_dps, output_dps)] + DataproductTransform.objects.bulk_create(transforms) + # step 4: resource assigner (if possible) - assign_resources(pipeline_subtask) + assign_or_unassign_resources(pipeline_subtask) # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it) pipeline_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value) @@ -1100,14 +1181,17 @@ def create_and_schedule_subtasks_from_task_blueprint(task_blueprint: TaskBluepri create_subtasks_from_task_blueprint(task_blueprint) return schedule_independent_subtasks_in_task_blueprint(task_blueprint) -def schedule_independent_subtasks_in_task_blueprint(task_blueprint: TaskBlueprint) -> [Subtask]: - '''Convenience method: Schedule the subtasks in the task_blueprint that are not dependend on predecessors''' - subtasks = list(task_blueprint.subtasks.all()) - for subtask in subtasks: - if len(subtask.predecessors.all()) == len(subtask.predecessors.filter(state__value='finished').all()): - schedule_subtask(subtask) - return subtasks +def schedule_independent_subtasks_in_task_blueprint(task_blueprint: TaskBlueprint, start_time: datetime=None) -> [Subtask]: + '''Convenience method: Schedule (and return) the subtasks in the task_blueprint that are not dependend on any predecessors''' + independent_subtasks = list(Subtask.independent_subtasks().filter(task_blueprint_id=task_blueprint.id, state__value=SubtaskState.Choices.DEFINED.value).all()) + + for subtask in independent_subtasks: + if start_time is not None: + subtask.start_time = start_time + schedule_subtask_and_update_successor_start_times(subtask) + + return independent_subtasks def _generate_subtask_specs_from_preprocessing_task_specs(preprocessing_task_specs, default_subtask_specs): diff --git a/SAS/TMSS/src/tmss/tmssapp/tasks.py b/SAS/TMSS/src/tmss/tmssapp/tasks.py index 7f7ad51bd577466945b176d334aaeda4feffa880..987f89153e14aa5f91c90993cb00ee70b780dd79 100644 --- a/SAS/TMSS/src/tmss/tmssapp/tasks.py +++ b/SAS/TMSS/src/tmss/tmssapp/tasks.py @@ -1,12 +1,14 @@ from lofar.sas.tmss.tmss.exceptions import * from lofar.sas.tmss.tmss.tmssapp import models -from lofar.sas.tmss.tmss.tmssapp.subtasks import create_and_schedule_subtasks_from_task_blueprint, create_subtasks_from_task_blueprint, schedule_independent_subtasks_in_task_blueprint -from lofar.sas.tmss.tmss.tmssapp.models.specification import TaskBlueprint, SchedulingUnitBlueprint, SchedulingUnitDraft, TaskDraft, SchedulingRelationPlacement +from lofar.sas.tmss.tmss.tmssapp.subtasks import unschedule_subtasks_in_task_blueprint +from lofar.sas.tmss.tmss.tmssapp.models.specification import TaskBlueprint, SchedulingUnitBlueprint from lofar.sas.tmss.tmss.tmssapp.subtasks import create_and_schedule_subtasks_from_task_blueprint, create_subtasks_from_task_blueprint, schedule_independent_subtasks_in_task_blueprint from functools import cmp_to_key +import 
os from copy import deepcopy from lofar.common.json_utils import add_defaults_to_json_object_for_schema import logging +from datetime import datetime logger = logging.getLogger(__name__) @@ -18,8 +20,8 @@ def create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_ logger.debug("create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft.id=%s name='%s')", scheduling_unit_draft.pk, scheduling_unit_draft.name) scheduling_unit_blueprint = SchedulingUnitBlueprint.objects.create( - name="%s (SchedulingUnitBlueprint)" % (scheduling_unit_draft.name,), - description="%s (SchedulingUnitBlueprint)" % (scheduling_unit_draft.description or "<no description>",), + name=scheduling_unit_draft.name, + description=scheduling_unit_draft.description, requirements_doc=scheduling_unit_draft.requirements_doc, do_cancel=False, draft=scheduling_unit_draft, @@ -90,7 +92,7 @@ def create_scheduling_unit_draft_from_scheduling_unit_blueprint(scheduling_unit_ def copy_task_draft(task_draft: models.TaskDraft, copy_reason: str) -> models.TaskDraft: - + task_template_name = task_draft.specifications_template task_template = models.TaskTemplate.objects.get(name=task_template_name) @@ -114,7 +116,7 @@ def copy_task_blueprint_to_task_draft(task_blueprint:models.TaskBlueprint ) -> m :raises Exception if instantiate fails. """ logger.debug("Create Task Draft from Task Blueprint (id=%s)", task_blueprint.pk) - + original_task_draft = task_blueprint.draft task_template_name = original_task_draft.specifications_template task_template = models.TaskTemplate.objects.get(name=task_template_name) @@ -276,7 +278,7 @@ def create_task_blueprint_from_task_draft(task_draft: models.TaskDraft) -> model task_relation_blueprint.pk, producing_task_blueprint.pk, consuming_task_blueprint.pk,) # Do the same 'trick' for Task Scheduling Relation Draft to Blueprint - task_draft_scheduling_relations = list(task_draft.first_to_connect.all()) + list(task_draft.second_to_connect.all()) + task_draft_scheduling_relations = list(task_draft.first_scheduling_relation.all()) + list(task_draft.second_scheduling_relation.all()) for task_scheduling_relation_draft in task_draft_scheduling_relations: for first_task_blueprint in task_scheduling_relation_draft.first.task_blueprints.all(): for second_task_blueprint in task_scheduling_relation_draft.second.task_blueprints.all(): @@ -360,21 +362,35 @@ def create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(schedulin # refresh so all related fields are updated. 
scheduling_unit_blueprint.refresh_from_db() + return scheduling_unit_blueprint def create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint: '''Convenience method: Create the scheduling_unit_blueprint's task_blueprint(s), then create the task_blueprint's subtasks, and schedule the ones that are not dependent on predecessors''' scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint) + scheduling_unit_blueprint = schedule_independent_subtasks_in_scheduling_unit_blueprint(scheduling_unit_blueprint) + return scheduling_unit_blueprint + + +def schedule_independent_subtasks_in_scheduling_unit_blueprint(scheduling_unit_blueprint: SchedulingUnitBlueprint, start_time: datetime=None) -> models.SchedulingUnitBlueprint: + '''Convenience method: Schedule the subtasks in the scheduling_unit_blueprint that are not dependent on predecessors''' task_blueprints = list(scheduling_unit_blueprint.task_blueprints.all()) - # sort task_blueprint(s) in 'data-flow'-order, - # because successors can depend on predecessors, so the first tbp's need to be subtask'd first. - task_blueprints.sort(key=cmp_to_key(lambda tbp_a, tbp_b: -1 if tbp_a in tbp_b.predecessors else 1 if tbp_b in tbp_a.predecessors else 0)) + for task_blueprint in task_blueprints: + # guard against the default start_time=None, in which case no per-task offset can be added + schedule_independent_subtasks_in_task_blueprint(task_blueprint, start_time=None if start_time is None else start_time+task_blueprint.relative_start_time) + + scheduling_unit_blueprint.refresh_from_db() + return scheduling_unit_blueprint + + +def unschedule_subtasks_in_scheduling_unit_blueprint(scheduling_unit_blueprint: SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint: + '''Convenience method: Unschedule all scheduled subtasks in the scheduling_unit_blueprint''' + task_blueprints = list(scheduling_unit_blueprint.task_blueprints.all()) for task_blueprint in task_blueprints: - schedule_independent_subtasks_in_task_blueprint(task_blueprint) + unschedule_subtasks_in_task_blueprint(task_blueprint) - # refresh so all related fields are updated. scheduling_unit_blueprint.refresh_from_db() return scheduling_unit_blueprint + 
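# Illustrative sketch (not part of the original patch): the convenience functions above
# make (un)scheduling symmetric at the scheduling-unit level. A round trip could look
# like this (the start time is a placeholder):
from datetime import datetime
from lofar.sas.tmss.tmss.tmssapp.tasks import (schedule_independent_subtasks_in_scheduling_unit_blueprint,
                                               unschedule_subtasks_in_scheduling_unit_blueprint)

blueprint = schedule_independent_subtasks_in_scheduling_unit_blueprint(blueprint,
                                                                       start_time=datetime(2020, 11, 1, 12, 0))
# each task starts at start_time + its relative_start_time; successors are then shifted
# to after their predecessors' stop times by the subtasks.py helpers.
blueprint = unschedule_subtasks_in_scheduling_unit_blueprint(blueprint)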
diff --git a/SAS/TMSS/src/tmss/tmssapp/views.py b/SAS/TMSS/src/tmss/tmssapp/views.py index 851a625197765c401e1cc54db50c4b33d986b2e7..af0fdfe11678419260d4603c189b25c4e7b49e74 100644 --- a/SAS/TMSS/src/tmss/tmssapp/views.py +++ b/SAS/TMSS/src/tmss/tmssapp/views.py @@ -35,7 +35,7 @@ def subtask_parset(request, subtask_pk:int): def index(request): - return render(request, os.path.join(os.environ.get('LOFARROOT'), 'SAS/TMSS/frontend','tmss_webapp/build/index.html')) + return render(request, os.path.join(os.environ.get('LOFARROOT'), 'lib64/python3.6/site-packages/lofar/SAS/TMSS/frontend','tmss_webapp/build/index.html')) #return render(request, "../../../frontend/frontend_poc/build/index.html") @@ -147,7 +147,7 @@ def get_sun_rise_and_set(request): timestamps = [datetime.utcnow()] else: timestamps = timestamps.split(',') - timestamps = [dateutil.parser.parse(timestamp) for timestamp in timestamps] # isot to datetime + timestamps = [dateutil.parser.parse(timestamp, ignoretz=True) for timestamp in timestamps] # isot to datetime if stations is None: stations = ['CS002'] else: diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py index 2bc7b1814e5c667bcdd9fae7bea322e7696cdf82..bcd3eaf22671451c5d005e36c178c56f66b1c0f3 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py @@ -35,8 +35,6 @@ from lofar.sas.tmss.tmss.tmssapp.renderers import PlainTextRenderer from rest_framework.views import APIView from rest_framework.decorators import api_view, renderer_classes -from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduct -from lofar.lta.sip import visualizer class TextPlainAutoSchema(SwaggerAutoSchema): def get_produces(self): @@ -79,10 +77,6 @@ class AlgorithmViewSet(LOFARViewSet): queryset = models.Algorithm.objects.all() serializer_class = serializers.AlgorithmSerializer -class ScheduleMethodViewSet(LOFARViewSet): - queryset = models.ScheduleMethod.objects.all() - serializer_class = serializers.ScheduleMethodSerializer - class SubtaskTemplateFilter(filters.FilterSet): class Meta: model = models.SubtaskTemplate @@ -145,7 +139,8 @@ class SubtaskViewSet(LOFARViewSet): filter_class = SubTaskFilter ordering = ('start_time',) - queryset = queryset.prefetch_related('state') + # performance boost: select the related models in a single db call. + queryset = queryset.select_related('state', 'specifications_template', 'specifications_template__type', 'cluster', 'created_or_updated_by_user') @swagger_auto_schema(auto_schema=TextPlainAutoSchema, responses={200: 'A LOFAR parset for this subtask (as plain text)', @@ -292,6 +287,9 @@ class SubtaskNestedViewSet(LOFARNestedViewSet): filter_class = SubTaskFilter ordering = ('start_time',) + # performance boost: select the related models in a single db call. 
+ queryset = queryset.select_related('state', 'specifications_template', 'specifications_template__type', 'cluster', 'created_or_updated_by_user') + def get_queryset(self): if 'task_blueprint_id' in self.kwargs: task_blueprint = get_object_or_404(models.TaskBlueprint, pk=self.kwargs['task_blueprint_id']) @@ -318,6 +316,7 @@ class DataproductViewSet(LOFARViewSet): @action(methods=['get'], detail=True, url_name="sip") def sip(self, request, pk=None): dataproduct = get_object_or_404(models.Dataproduct, pk=pk) + from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduct return HttpResponse(generate_sip_for_dataproduct(dataproduct).get_prettyxml(), content_type='application/xml') @swagger_auto_schema(responses={200: 'The SIP graph for this dataproduct', @@ -326,6 +325,8 @@ class DataproductViewSet(LOFARViewSet): @action(methods=['get'], detail=True, url_name="sip_graph") def sip_graph(self, request, pk=None): dataproduct = get_object_or_404(models.Dataproduct, pk=pk) + from lofar.lta.sip import visualizer + from lofar.sas.tmss.tmss.tmssapp.adapters.sip import generate_sip_for_dataproduct return HttpResponse(visualizer.visualize_sip(generate_sip_for_dataproduct(dataproduct)), content_type='image/svg+xml') diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py index 6e292b61afa714df6356cf528da69ebc18a555f3..f4f1e95ddbe38152855429597c6360be6448e4dc 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py @@ -318,6 +318,10 @@ class SchedulingUnitDraftViewSet(LOFARViewSet): .select_related('copy_reason') \ .select_related('scheduling_set') + # use select_related for forward related references + queryset = queryset.select_related('copy_reason', 'scheduling_set', 'requirements_template', 'observation_strategy_template', 'scheduling_constraints_template') + + @swagger_auto_schema(responses={201: 'The Created SchedulingUnitBlueprint, see Location in Response header', 403: 'forbidden'}, operation_description="Carve SchedulingUnitDraft in stone, and make an (uneditable) blueprint out of it.") @@ -594,6 +598,9 @@ class SchedulingUnitBlueprintViewSet(LOFARViewSet): # prefetch all reverse related references from other models on their related_name to avoid a ton of duplicate queries queryset = queryset.prefetch_related('task_blueprints') + # use select_related for forward related references + queryset = queryset.select_related('requirements_template', 'draft') + @swagger_auto_schema(responses={201: "This SchedulingUnitBlueprint, with references to its created TaskBlueprints and (scheduled) Subtasks.", 403: 'forbidden'}, operation_description="Create TaskBlueprint(s) for this scheduling unit, create subtasks, and schedule the ones that are not dependent on predecessors.")
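# Illustrative sketch (not part of the original patch): the query pattern these viewsets
# rely on. select_related() JOINs forward FK/one-to-one relations into the main query;
# prefetch_related() fetches reverse/many relations in one extra query per relation.
# Together they avoid the N+1 queries a serializer would otherwise fire per row.
queryset = models.Subtask.objects.all() \
                          .select_related('state', 'specifications_template')  # 1 JOINed query
queryset = queryset.prefetch_related('inputs')   # +1 query, instead of 1 per subtask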
queryset.prefetch_related('first_to_connect__placement') \ - .prefetch_related('second_to_connect__placement') + queryset = queryset.prefetch_related('first_scheduling_relation__placement') \ + .prefetch_related('second_scheduling_relation__placement') # select all references to other models to avoid even more duplicate queries queryset = queryset.select_related('copies') \ @@ -771,15 +778,19 @@ class TaskBlueprintViewSet(LOFARViewSet): serializer_class = serializers.TaskBlueprintSerializer # prefetch all reverse related references from other models on their related_name to avoid a ton of duplicate queries - queryset = queryset.prefetch_related('first_to_connect')\ - .prefetch_related('second_to_connect')\ + queryset = queryset.prefetch_related('first_scheduling_relation')\ + .prefetch_related('second_scheduling_relation')\ .prefetch_related('produced_by')\ .prefetch_related('consumed_by')\ .prefetch_related('subtasks') # prefetch nested references in reverse models to avoid duplicate lookup queries - queryset = queryset.prefetch_related('first_to_connect__placement') \ - .prefetch_related('second_to_connect__placement') + queryset = queryset.prefetch_related('first_scheduling_relation__placement') \ + .prefetch_related('second_scheduling_relation__placement') \ + .prefetch_related('subtasks__specifications_template') + + # use select_related for forward related references + queryset = queryset.select_related('draft', 'specifications_template', 'specifications_template__type', 'scheduling_unit_blueprint') @swagger_auto_schema(responses={201: "This TaskBlueprint, with it is created subtasks", 403: 'forbidden'}, diff --git a/SAS/TMSS/src/tmss/urls.py b/SAS/TMSS/src/tmss/urls.py index 781e6af696a5bc3f0827c84b8c60286fa898112f..1327d5b5a41ba2e80d100c254ef60c7ddc91aa0b 100644 --- a/SAS/TMSS/src/tmss/urls.py +++ b/SAS/TMSS/src/tmss/urls.py @@ -31,7 +31,6 @@ from drf_yasg import openapi from datetime import datetime import os from material.frontend import urls as frontend_urls -from viewflow.flow.viewset import FlowViewSet # @@ -175,7 +174,6 @@ router.register(r'subtask_state', viewsets.SubtaskStateViewSet) router.register(r'subtask_type', viewsets.SubtaskTypeViewSet) router.register(r'station_type', viewsets.StationTypeViewSet) router.register(r'algorithm', viewsets.AlgorithmViewSet) -router.register(r'schedule_method', viewsets.ScheduleMethodViewSet) router.register(r'scheduling_relation_placement', viewsets.SchedulingRelationPlacement) # templates @@ -215,7 +213,26 @@ urlpatterns = [url(r'^api$', RedirectView.as_view(url='/api/')), url(r'^api/', include(urlpatterns)), url(r'^oidc$', RedirectView.as_view(url='/oidc/')), url(r'^oidc/', include('mozilla_django_oidc.urls')), - url(r'^workflow$', RedirectView.as_view(url='/workflow/', permanent=False)), url(r'', include(frontend_urls)), url(r'^.*', include(frontend_urlpatterns)), ] + + + +# --- +# QA Workflow steps +if bool(os.environ.get('TMSS_ENABLE_VIEWFLOW', False)): + from .workflowapp import viewsets as workflow_viewsets + + viewflow_router = OptionalSlashRouter() + viewflow_router.APIRootView = TMSSAPIRootView + + viewflow_router.register('scheduling_unit_flow/su', workflow_viewsets.SchedulingUnitFlowViewSet, basename='su') + viewflow_router.register('scheduling_unit_flow/qa_reporting_to', workflow_viewsets.QAReportingTOViewSet, basename='qa_reporting_to') + viewflow_router.register('scheduling_unit_flow/qa_reporting_sos', workflow_viewsets.QAReportingSOSViewSet, basename='qa_reporting_sos') + 
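# --- Illustrative note (not part of this patch): bool() over an environment
# variable, as in the TMSS_ENABLE_VIEWFLOW guard above, is True for ANY
# non-empty string -- including the string 'False'. A stricter parse is
# sketched below; env_flag is a hypothetical helper, not TMSS code.
import os

os.environ['TMSS_ENABLE_VIEWFLOW'] = 'False'
print(bool(os.environ.get('TMSS_ENABLE_VIEWFLOW', False)))  # True, perhaps surprisingly

def env_flag(name: str, default: bool = False) -> bool:
    """Interpret an environment variable as a boolean flag."""
    value = os.environ.get(name)
    if value is None:
        return default
    return value.strip().lower() in ('1', 'true', 'yes', 'on')

print(env_flag('TMSS_ENABLE_VIEWFLOW'))  # False
# ---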
viewflow_router.register('scheduling_unit_flow/qa_pi_verification', workflow_viewsets.PIVerificationViewSet, basename='qa_pi_verification') + viewflow_router.register('scheduling_unit_flow/qa_decide_acceptance', workflow_viewsets.DecideAcceptanceViewSet, basename='qa_decide_acceptance') + viewflow_router.register('scheduling_unit_flow/qa_scheduling_unit_process', workflow_viewsets.SchedulingUnitProcessViewSet, basename='qa_scheduling_unit_process') + + urlpatterns.extend([url(r'^workflow$', RedirectView.as_view(url='/workflow/', permanent=False)), + url(r'^workflow_api/', include(viewflow_router.urls))]) \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/CMakeLists.txt index 94b72e83e35a77ab9b16f84b7647f8ab0c8af94a..e7c3171661a6fd3927e6b4214251c21f0240d0b1 100644 --- a/SAS/TMSS/src/tmss/workflowapp/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/workflowapp/CMakeLists.txt @@ -15,3 +15,6 @@ add_subdirectory(migrations) add_subdirectory(models) add_subdirectory(flows) add_subdirectory(viewsets) +add_subdirectory(forms) +add_subdirectory(templates) +add_subdirectory(serializers) diff --git a/SAS/TMSS/src/tmss/workflowapp/flows/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/flows/CMakeLists.txt index 769f922e4781a912f1c0488c3655f6ab61363d3a..ba35dcf6abd1341333f5da54b43b2977805ef628 100644 --- a/SAS/TMSS/src/tmss/workflowapp/flows/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/workflowapp/flows/CMakeLists.txt @@ -4,7 +4,7 @@ include(PythonInstall) set(_py_files __init__.py helloworldflow.py - schedulingunitdemoflow.py + schedulingunitflow.py ) python_install(${_py_files} diff --git a/SAS/TMSS/src/tmss/workflowapp/flows/__init__.py b/SAS/TMSS/src/tmss/workflowapp/flows/__init__.py index 45516795a25730483ebfa40c1fbdb5f533df8ebe..a0ae3713747c0b28c5595736d06f4bcb800da5b5 100644 --- a/SAS/TMSS/src/tmss/workflowapp/flows/__init__.py +++ b/SAS/TMSS/src/tmss/workflowapp/flows/__init__.py @@ -1,2 +1,2 @@ from .helloworldflow import * -from .schedulingunitdemoflow import * \ No newline at end of file +from .schedulingunitflow import * \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitdemoflow.py b/SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitflow.py similarity index 57% rename from SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitdemoflow.py rename to SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitflow.py index 0a2882d7a4550ef3ff8e60b190c4074f60356795..8d01c51a15bc840bdb775acce1297938234a1611 100644 --- a/SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitdemoflow.py +++ b/SAS/TMSS/src/tmss/workflowapp/flows/schedulingunitflow.py @@ -9,6 +9,7 @@ from viewflow.flow.nodes import Signal from viewflow import mixins from .. import models +from .. import viewsets from viewflow import frontend, ThisObject from viewflow.activation import STATUS @@ -26,7 +27,7 @@ class ConditionActivation(FuncActivation): return activation class Condition(Signal): - #task_type = "HUMAN" # makes it show up in the unassigned task lists + task_type = "HUMAN" # makes it show up in the unassigned task lists activation_class = ConditionActivation def __init__(self, condition_check, signal, sender=None, task_loader=None, **kwargs): @@ -65,55 +66,14 @@ class Condition(Signal): super(Condition, self).ready() @frontend.register -class SchedulingUnitDemoFlow(Flow): - process_class = models.SchedulingUnitDemoProcess - - # 0. Start on SU instantiation - # 1. To be Manually scheduled? -> Go to 1a - # 1a. Present view to manually schedule. - # 2. 
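# --- Illustrative sketch (not part of this patch): the signal plumbing the
# StartSignal/Condition nodes above are built on. Wiring the real post_save
# needs a configured model, so a plain django.dispatch.Signal stands in here;
# this runs with just Django installed, no settings required.
from django.dispatch import Signal

scheduling_unit_saved = Signal()  # stand-in for post_save with sender=SchedulingUnit

def start_flow(sender, instance=None, created=False, **kwargs):
    # roughly what StartSignal does: when the signal fires for a newly created
    # SchedulingUnit, prepare a process activation and run the first node
    if created:
        print("would start a SchedulingUnitProcess for %r from %s" % (instance, sender))

scheduling_unit_saved.connect(start_flow)
scheduling_unit_saved.send(sender='SchedulingUnit', instance='SU-1', created=True)
# ---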
Wait on signal SU got finished/error/cancelled (might have already!!) -> - # - Wait for assignment to RO user - # View: - Present any quality plots - # - Present any error info - # - Present fixing options - # - Present choice to fix & redo, discard, or continue. - # Continue: - # View: - Present any quality plots - # - Present any error info - # - Submit quality report/score - # - Submit recommendation - # 3. - Assign ticket to Contact Author - # - Present quality plots to user - # - Present quality report/score, and recommendation - # - Submit acceptance & report - # 4. - Assign ticket to owner in step 2. - # - Present quality report/score, and recommendation - # - Present acceptance & report - # - Present choice to ingest or discard. - # Ingest: - # Set ingestable flag on SU. - # Discard: - Cancel SU (triggering garbage collection - # - # Fix & Redo: - # - Wait for user to confirm SU is fixed - # - Go to 2 - # - - # Consider adding to any/all views: - # - Present any opened JIRA tickets - # - Present opportunity to open JIRA ticket - # Note that previously submitted info can be found by clicking through the task. So - # we only need to show whats nominally needed. - # Note that orthogonally to the above flow: - # - Users need to be informed tasks are assigned to them (e-mail?) - # - Users already have an overview in viewflow of tickets assigned to them - # - We likely want to control what e-mails are sent. +class SchedulingUnitFlow(Flow): + process_class = models.SchedulingUnitProcess start = ( flow.StartSignal( post_save, this.on_save_can_start, - sender=models.SchedulingUnitDemo + sender=models.SchedulingUnit ).Next(this.wait_schedulable) ) @@ -121,39 +81,77 @@ class SchedulingUnitDemoFlow(Flow): Condition( this.check_condition, post_save, - sender=models.SchedulingUnitDemo, + sender=models.SchedulingUnit, task_loader=this.get_scheduling_unit_task ) - .Next(this.form) + .Next(this.qa_reporting_to) ) - form = ( + #QA Reporting (TO) + qa_reporting_to = ( flow.View( - UpdateProcessView, - fields=["text"] + viewsets.QAReportingTOView, + task_description='QA Reporting (TO)' ).Permission( auto_create=True - ).Next(this.approve) + ).Next(this.check_operator_accept) + ) + + #Quality Acceptable + check_operator_accept = ( + flow.If(lambda activation: activation.process.qa_reporting_to.operator_accept) + .Then(this.qa_reporting_sos) + .Else(this.mark_sub) ) - approve = ( + #QA Reporting (SOS) + qa_reporting_sos = ( flow.View( - UpdateProcessView, - fields=["approved"] + viewsets.QAReportingSOSView, + task_description='QA Reporting (SOS)' ).Permission( auto_create=True - ).Next(this.check_approve) + ).Next(this.check_sos_accept_show_pi) + ) + + #Quality Acceptable + check_sos_accept_show_pi = ( + flow.If(lambda activation: activation.process.qa_reporting_sos.sos_accept_show_pi) + .Then(this.pi_verification) + .Else(this.mark_sub) ) - check_approve = ( - flow.If(lambda activation: activation.process.approved) - .Then(this.send) - .Else(this.end) + #PI Verification + pi_verification = ( + flow.View( + viewsets.PIVerificationView, + task_description='PI Verification' + ).Permission( + auto_create=True + ).Next(this.decide_acceptance) ) - send = ( + #Decide Acceptance + decide_acceptance = ( + flow.View( + viewsets.DecideAcceptanceView, + task_description='Decide Acceptance' + ).Permission( + auto_create=True + ).Next(this.check_sos_accept_after_pi) + ) + + #Quality Acceptable + check_sos_accept_after_pi = ( + flow.If(lambda activation: activation.process.decide_acceptance.sos_accept_after_pi) + 
.Then(this.mark_sub) + .Else(this.mark_sub) + ) + + #Mark SUB Successful/failed + mark_sub = ( flow.Handler( - this.send_hello_world_request + this.do_mark_sub ).Next(this.end) ) @@ -164,14 +162,29 @@ if created: activation.prepare() activation.process.su = instance + activation.done() print("workflow started") else: print("no workflow started") return activation - def send_hello_world_request(self, activation): - print(activation.process.text) + + def do_mark_sub(self, activation): + + activation.process.can_delete = True + activation.process.results_accepted = ((activation.process.qa_reporting_to is not None and activation.process.qa_reporting_to.operator_accept) + and (activation.process.qa_reporting_sos is not None and activation.process.qa_reporting_sos.sos_accept_show_pi) + and (activation.process.decide_acceptance is not None and activation.process.decide_acceptance.sos_accept_after_pi)) + + print("end of SchedulingUnitFlow") + print("can_delete:") + print(activation.process.can_delete) + print("results_accepted:") + print(activation.process.results_accepted) + + return activation + def check_condition(self, activation, instance): if instance is None: @@ -183,5 +196,5 @@ def get_scheduling_unit_task(self, flow_task, sender, instance, **kwargs): print(kwargs) - process = models.SchedulingUnitDemoProcess.objects.get(su=instance) + process = models.SchedulingUnitProcess.objects.get(su=instance) return Task.objects.get(process=process,flow_task=flow_task) diff --git a/SAS/TMSS/src/tmss/workflowapp/forms/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/forms/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..42b8a34fd53a59e7a7a15885e307a27d4874296a --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/forms/CMakeLists.txt @@ -0,0 +1,10 @@ + +include(PythonInstall) + +set(_py_files + __init__.py + schedulingunitflow.py + ) + +python_install(${_py_files} + DESTINATION lofar/sas/tmss/tmss/workflowapp/forms) diff --git a/SAS/TMSS/src/tmss/workflowapp/forms/__init__.py b/SAS/TMSS/src/tmss/workflowapp/forms/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bfdfbc84e07beb363937412fd7fb6d5788c684d0 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/forms/__init__.py @@ -0,0 +1 @@ +from .schedulingunitflow import * \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/forms/schedulingunitflow.py b/SAS/TMSS/src/tmss/workflowapp/forms/schedulingunitflow.py new file mode 100644 index 0000000000000000000000000000000000000000..a967367b38ff77d43ffdf08fb3b30e0f824907ab --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/forms/schedulingunitflow.py @@ -0,0 +1,16 @@ +from django import forms +from material import Layout, Row, Span2 + +from ..
import models + + +class QAReportingTO(forms.ModelForm): + layout = Layout( + Row('operator_report'), + Row('operator_accept'), + ) + + class Meta: + model = models.QAReportingTO + fields = ['operator_report','operator_accept'] + #fields = '__all__' \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/workflowapp/migrations/0001_initial.py index 2e95b97379265e5eb14cfd44e85357218eb63948..1da372c3f5a8ea06e95f13d9861676f8bcdf8636 100644 --- a/SAS/TMSS/src/tmss/workflowapp/migrations/0001_initial.py +++ b/SAS/TMSS/src/tmss/workflowapp/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 3.0.9 on 2020-10-01 12:30 +# Generated by Django 3.0.9 on 2020-11-02 14:31 from django.db import migrations, models import django.db.models.deletion @@ -14,7 +14,39 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='SchedulingUnitDemo', + name='DecideAcceptance', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('sos_accept_after_pi', models.BooleanField(default=False)), + ], + ), + migrations.CreateModel( + name='PIVerification', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('pi_report', models.CharField(max_length=150)), + ('pi_accept', models.BooleanField(default=False)), + ], + ), + migrations.CreateModel( + name='QAReportingSOS', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('sos_report', models.CharField(max_length=150)), + ('quality_within_policy', models.CharField(max_length=150)), + ('sos_accept_show_pi', models.BooleanField(default=False)), + ], + ), + migrations.CreateModel( + name='QAReportingTO', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('operator_report', models.CharField(max_length=150)), + ('operator_accept', models.BooleanField(default=False)), + ], + ), + migrations.CreateModel( + name='SchedulingUnit', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50)), @@ -35,12 +67,16 @@ class Migration(migrations.Migration): bases=('viewflow.process',), ), migrations.CreateModel( - name='SchedulingUnitDemoProcess', + name='SchedulingUnitProcess', fields=[ ('process_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='viewflow.Process')), - ('text', models.CharField(max_length=150)), - ('approved', models.BooleanField(default=False)), - ('su', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflowapp.SchedulingUnitDemo')), + ('can_delete', models.BooleanField(default=False)), + ('results_accepted', models.BooleanField(default=False)), + ('decide_acceptance', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflowapp.DecideAcceptance')), + ('pi_verification', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflowapp.PIVerification')), + ('qa_reporting_sos', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflowapp.QAReportingSOS')), + ('qa_reporting_to', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, 
to='workflowapp.QAReportingTO')), + ('su', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflowapp.SchedulingUnit')), ], options={ 'abstract': False, diff --git a/SAS/TMSS/src/tmss/workflowapp/models/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/models/CMakeLists.txt index 1c94f0a15d5ade684111945ce5bb79dfe25f7a91..57e7e39aac465b8acf7b209fa3dc901ae4c2076f 100644 --- a/SAS/TMSS/src/tmss/workflowapp/models/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/workflowapp/models/CMakeLists.txt @@ -4,7 +4,7 @@ include(PythonInstall) set(_py_files __init__.py helloworldflow.py - schedulingunitdemoflow.py + schedulingunitflow.py ) python_install(${_py_files} diff --git a/SAS/TMSS/src/tmss/workflowapp/models/__init__.py b/SAS/TMSS/src/tmss/workflowapp/models/__init__.py index 45516795a25730483ebfa40c1fbdb5f533df8ebe..a0ae3713747c0b28c5595736d06f4bcb800da5b5 100644 --- a/SAS/TMSS/src/tmss/workflowapp/models/__init__.py +++ b/SAS/TMSS/src/tmss/workflowapp/models/__init__.py @@ -1,2 +1,2 @@ from .helloworldflow import * -from .schedulingunitdemoflow import * \ No newline at end of file +from .schedulingunitflow import * \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitdemoflow.py b/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitdemoflow.py deleted file mode 100644 index b9797a0b12e56ffb6f284da503f43263561522c4..0000000000000000000000000000000000000000 --- a/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitdemoflow.py +++ /dev/null @@ -1,13 +0,0 @@ -# Create your models here. - -from django.db.models import CharField, IntegerField,BooleanField, ForeignKey, CASCADE, Model -from viewflow.models import Process - -class SchedulingUnitDemo(Model): - name = CharField(max_length=50) - state = IntegerField() - -class SchedulingUnitDemoProcess(Process): - text = CharField(max_length=150) - approved = BooleanField(default=False) - su = ForeignKey(SchedulingUnitDemo, blank=True, null=True, on_delete=CASCADE) diff --git a/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitflow.py b/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitflow.py new file mode 100644 index 0000000000000000000000000000000000000000..3e340fbf8c9713fbd37daec0dc977e3d453eb69f --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/models/schedulingunitflow.py @@ -0,0 +1,38 @@ +# Create your models here. 
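# --- Illustrative sketch (not part of this patch): the acceptance rule that
# do_mark_sub (in the flow above) evaluates over the workflow-step models
# defined below. Plain namespaces stand in for the QAReportingTO/
# QAReportingSOS/DecideAcceptance rows; runnable stand-alone.
from types import SimpleNamespace as Row

def results_accepted(qa_reporting_to, qa_reporting_sos, decide_acceptance) -> bool:
    # each step must exist (the flow can short-circuit to mark_sub, leaving
    # later steps None) and must carry a positive verdict
    return ((qa_reporting_to is not None and qa_reporting_to.operator_accept) and
            (qa_reporting_sos is not None and qa_reporting_sos.sos_accept_show_pi) and
            (decide_acceptance is not None and decide_acceptance.sos_accept_after_pi))

print(results_accepted(Row(operator_accept=True),
                       Row(sos_accept_show_pi=True),
                       Row(sos_accept_after_pi=True)))           # True
print(results_accepted(Row(operator_accept=False), None, None))  # False
# ---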
+ +from django.db.models import CharField, IntegerField, BooleanField, ForeignKey, CASCADE, Model +from viewflow.models import Process + +class QAReportingTO(Model): + operator_report = CharField(max_length=150) + operator_accept = BooleanField(default=False) + + +class QAReportingSOS(Model): + sos_report = CharField(max_length=150) + quality_within_policy = CharField(max_length=150) + sos_accept_show_pi = BooleanField(default=False) + + +class PIVerification(Model): + pi_report = CharField(max_length=150) + pi_accept = BooleanField(default=False) + + +class DecideAcceptance(Model): + sos_accept_after_pi = BooleanField(default=False) + + +class SchedulingUnit(Model): + name = CharField(max_length=50) + state = IntegerField() + + +class SchedulingUnitProcess(Process): + su = ForeignKey(SchedulingUnit, blank=True, null=True, on_delete=CASCADE) + qa_reporting_to = ForeignKey(QAReportingTO, blank=True, null=True, on_delete=CASCADE) + qa_reporting_sos = ForeignKey(QAReportingSOS, blank=True, null=True, on_delete=CASCADE) + pi_verification = ForeignKey(PIVerification, blank=True, null=True, on_delete=CASCADE) + decide_acceptance = ForeignKey(DecideAcceptance, blank=True, null=True, on_delete=CASCADE) + can_delete = BooleanField(default=False) + results_accepted = BooleanField(default=False) \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/serializers/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/serializers/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..93e5e84e9ee2e14a5b311ad8f204c7d62920dae0 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/serializers/CMakeLists.txt @@ -0,0 +1,11 @@ + +include(PythonInstall) + +set(_py_files + __init__.py + schedulingunitflow.py + ) + +python_install(${_py_files} + DESTINATION lofar/sas/tmss/tmss/workflowapp/serializers) diff --git a/SAS/TMSS/src/tmss/workflowapp/serializers/__init__.py b/SAS/TMSS/src/tmss/workflowapp/serializers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bfdfbc84e07beb363937412fd7fb6d5788c684d0 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/serializers/__init__.py @@ -0,0 +1 @@ +from .schedulingunitflow import * \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/serializers/schedulingunitflow.py b/SAS/TMSS/src/tmss/workflowapp/serializers/schedulingunitflow.py new file mode 100644 index 0000000000000000000000000000000000000000..e29cf3cb9796afcce95e94e63636fe300791f5b0 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/serializers/schedulingunitflow.py @@ -0,0 +1,41 @@ +from rest_framework.serializers import ModelSerializer +from lofar.sas.tmss.tmss.workflowapp import models + +from django.views import generic +from django.forms.models import modelform_factory + + +from ..
import forms + +#Serializer for the fake Scheduling Unit used by the QA Workflow +class SchedulingUnitSerializer(ModelSerializer): + class Meta: + model = models.SchedulingUnit + fields = '__all__' + +#Serializers to access intermediate steps of the QA Workflow +#through DRF +class QAReportingTOSerializer(ModelSerializer): + class Meta: + model = models.QAReportingTO + fields = '__all__' + +class QAReportingSOSSerializer(ModelSerializer): + class Meta: + model = models.QAReportingSOS + fields = '__all__' + +class PIVerificationSerializer(ModelSerializer): + class Meta: + model = models.PIVerification + fields = '__all__' + +class DecideAcceptanceSerializer(ModelSerializer): + class Meta: + model = models.DecideAcceptance + fields = '__all__' + +class SchedulingUnitProcessSerializer(ModelSerializer): + class Meta: + model = models.SchedulingUnitProcess + fields = '__all__' \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/templates/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/templates/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..3047f7e1561039003dec0c8630b75de4ce5f3037 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/templates/CMakeLists.txt @@ -0,0 +1,10 @@ + +include(PythonInstall) + +set(_py_files + __init__.py + qa_reporting.html + ) + +python_install(${_py_files} + DESTINATION lofar/sas/tmss/tmss/workflowapp/templates) diff --git a/SAS/TMSS/src/tmss/workflowapp/templates/__init__.py b/SAS/TMSS/src/tmss/workflowapp/templates/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/SAS/TMSS/src/tmss/workflowapp/templates/qa_reporting.html b/SAS/TMSS/src/tmss/workflowapp/templates/qa_reporting.html new file mode 100644 index 0000000000000000000000000000000000000000..822e7eb45e1677261b67fda229a2848d49963cfc --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/templates/qa_reporting.html @@ -0,0 +1 @@ +{% extends 'viewflow/flow/task.html' %} diff --git a/SAS/TMSS/src/tmss/workflowapp/viewsets/CMakeLists.txt b/SAS/TMSS/src/tmss/workflowapp/viewsets/CMakeLists.txt index 7adc12fcf7a85912784409d17f37177986c94298..eaf3c5ab4b9afa9063deda344de3644dcfbc388d 100644 --- a/SAS/TMSS/src/tmss/workflowapp/viewsets/CMakeLists.txt +++ b/SAS/TMSS/src/tmss/workflowapp/viewsets/CMakeLists.txt @@ -3,7 +3,7 @@ include(PythonInstall) set(_py_files __init__.py - schedulingunitdemoflow.py + schedulingunitflow.py ) python_install(${_py_files} diff --git a/SAS/TMSS/src/tmss/workflowapp/viewsets/__init__.py b/SAS/TMSS/src/tmss/workflowapp/viewsets/__init__.py index b77c70aeb959e9d4f63c395fd1079cfbbe3bc078..bfdfbc84e07beb363937412fd7fb6d5788c684d0 100644 --- a/SAS/TMSS/src/tmss/workflowapp/viewsets/__init__.py +++ b/SAS/TMSS/src/tmss/workflowapp/viewsets/__init__.py @@ -1 +1 @@ -from .schedulingunitdemoflow import * \ No newline at end of file +from .schedulingunitflow import * \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitdemoflow.py b/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitdemoflow.py deleted file mode 100644 index da3dc24e15ff6f3bd93da9037101a718f4ebed66..0000000000000000000000000000000000000000 --- a/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitdemoflow.py +++ /dev/null @@ -1,22 +0,0 @@ -from django.shortcuts import render -from rest_framework import viewsets -from rest_framework.response import Response -from rest_framework.decorators import action -from rest_framework.serializers import ModelSerializer
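# --- Illustrative sketch (not part of this patch): what fields = '__all__'
# gives the ModelSerializers above. Assumes a configured Django project with
# DRF and the workflowapp installed; the field set is derived from the model.
from lofar.sas.tmss.tmss.workflowapp.serializers import QAReportingTOSerializer

# the serializer derives id, operator_report and operator_accept from the model
serializer = QAReportingTOSerializer(data={'operator_report': 'all stations fine',
                                           'operator_accept': True})
print(serializer.is_valid())     # True
instance = serializer.save()     # creates a QAReportingTO row
print(instance.operator_accept)  # True
# ---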
-from lofar.sas.tmss.tmss.workflowapp import models - -# Create your views here. - -class SchedulingUnitDemoSerializer(ModelSerializer): - class Meta: - model = models.SchedulingUnitDemo - fields = '__all__' - -class SchedulingUnitFlowViewSet(viewsets.ModelViewSet): - queryset = models.SchedulingUnitDemo.objects.all() - serializer_class = SchedulingUnitDemoSerializer - - @action(methods=['get'], detail=True) - def trigger(self, request, pk=None): - SchedulingUnitDemoFlow - return Response("ok") \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitflow.py b/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitflow.py new file mode 100644 index 0000000000000000000000000000000000000000..1c70e87e110fd31d5f2533712165f973d0701733 --- /dev/null +++ b/SAS/TMSS/src/tmss/workflowapp/viewsets/schedulingunitflow.py @@ -0,0 +1,117 @@ +from django.shortcuts import render, redirect +from rest_framework import viewsets +from rest_framework.response import Response +from rest_framework.decorators import action +from lofar.sas.tmss.tmss.workflowapp import models + +from django.views import generic +from viewflow.flow.views import StartFlowMixin, FlowMixin +from viewflow.decorators import flow_start_view, flow_view +from viewflow.flow.views.utils import get_next_task_url +from django.forms import CharField, CheckboxInput +from django.forms.models import modelform_factory + + +from .. import forms, models, serializers + +class SchedulingUnitFlowViewSet(viewsets.ModelViewSet): + queryset = models.SchedulingUnit.objects.all() + serializer_class = serializers.SchedulingUnitSerializer + + @action(methods=['get'], detail=True) + def trigger(self, request, pk=None): + # the flow starts itself via the post_save signal on SchedulingUnit (see its StartSignal); nothing to trigger by hand + return Response("ok") + +#Viewsets and serializers to access intermediate steps of the QA Workflow +#through DRF +class QAReportingTOViewSet(viewsets.ModelViewSet): + queryset = models.QAReportingTO.objects.all() + serializer_class = serializers.QAReportingTOSerializer + +class QAReportingSOSViewSet(viewsets.ModelViewSet): + queryset = models.QAReportingSOS.objects.all() + serializer_class = serializers.QAReportingSOSSerializer + +class PIVerificationViewSet(viewsets.ModelViewSet): + queryset = models.PIVerification.objects.all() + serializer_class = serializers.PIVerificationSerializer + +class DecideAcceptanceViewSet(viewsets.ModelViewSet): + queryset = models.DecideAcceptance.objects.all() + serializer_class = serializers.DecideAcceptanceSerializer + +class SchedulingUnitProcessViewSet(viewsets.ModelViewSet): + queryset = models.SchedulingUnitProcess.objects.all() + serializer_class = serializers.SchedulingUnitProcessSerializer + +class QAReportingTOView(FlowMixin, generic.CreateView): + template_name = 'qa_reporting.html' + model = models.QAReportingTO + fields = [ + 'operator_report', 'operator_accept' + ] + + def form_valid(self, form): + report_data = form.save(commit=False) + report_data.save() + + self.activation.process.qa_reporting_to = report_data + self.activation.process.save() + + self.activation_done() + return redirect(self.get_success_url()) + + +class QAReportingSOSView(FlowMixin, generic.CreateView): + template_name = 'qa_reporting.html' + model = models.QAReportingSOS + fields = [ + 'sos_report', 'quality_within_policy', 'sos_accept_show_pi' + ] + + def form_valid(self, form): + report_data = form.save(commit=False) + report_data.save() + + self.activation.process.qa_reporting_sos = report_data + self.activation.process.save() + + self.activation_done() + return
redirect(self.get_success_url()) + + +class PIVerificationView(FlowMixin, generic.CreateView): + template_name = 'qa_reporting.html' + model = models.PIVerification + fields = [ + 'pi_report', 'pi_accept' + ] + + def form_valid(self, form): + report_data = form.save(commit=False) + report_data.save() + + self.activation.process.pi_verification = report_data + self.activation.process.save() + + self.activation_done() + return redirect(self.get_success_url()) + + +class DecideAcceptanceView(FlowMixin, generic.CreateView): + template_name = 'qa_reporting.html' + model = models.DecideAcceptance + fields = [ + 'sos_accept_after_pi' + ] + + def form_valid(self, form): + report_data = form.save(commit=False) + report_data.save() + + self.activation.process.decide_acceptance = report_data + self.activation.process.save() + + self.activation_done() + return redirect(self.get_success_url()) \ No newline at end of file diff --git a/SAS/TMSS/test/t_scheduling.py b/SAS/TMSS/test/t_scheduling.py index b4151f5efb486abf203fd5ba68dc87eb930e8e4d..4504be5959bf66d7eb3ec1b047a95055fb5d51ed 100755 --- a/SAS/TMSS/test/t_scheduling.py +++ b/SAS/TMSS/test/t_scheduling.py @@ -34,25 +34,21 @@ if skip_integration_tests(): # before we import any django modules the DJANGO_SETTINGS_MODULE, TMSS_LDAPCREDENTIALS and TMSS_DBCREDENTIALS need to be known/set. # import and start an isolated RATestEnvironment and TMSSTestEnvironment (with fresh database and attached django and ldap server on free ports) # this automagically sets the required DJANGO_SETTINGS_MODULE, TMSS_LDAPCREDENTIALS and TMSS_DBCREDENTIALS envvars. -from lofar.sas.resourceassignment.resourceassigner.test.ra_test_environment import RATestEnvironment from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment -ra_test_env = RATestEnvironment() -tmss_test_env = TMSSTestEnvironment() +tmss_test_env = TMSSTestEnvironment(populate_schemas=True, populate_test_data=False, start_ra_test_environment=True, + start_postgres_listener=False, start_subtask_scheduler=False, start_dynamic_scheduler=False, + enable_viewflow=False) try: - ra_test_env.start() tmss_test_env.start() - tmss_test_env.populate_schemas() except: - ra_test_env.stop() tmss_test_env.stop() exit(1) # tell unittest to stop (and automagically cleanup) the test database once all testing is done. 
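# --- Illustrative sketch (not part of this patch): the module-level lifecycle
# hook t_scheduling.py relies on below. unittest calls tearDownModule() exactly
# once, after the last test in the module, so one shared environment can be
# started at import time and torn down there. Runnable stand-alone:
import unittest

shared_env = {'running': True}      # stand-in for tmss_test_env.start()

def tearDownModule():
    shared_env['running'] = False   # stand-in for tmss_test_env.stop()

class ExampleTest(unittest.TestCase):
    def test_env_is_up(self):
        self.assertTrue(shared_env['running'])

if __name__ == '__main__':
    unittest.main()
# ---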
def tearDownModule(): tmss_test_env.stop() - ra_test_env.stop() from lofar.sas.tmss.test.tmss_test_data_django_models import * @@ -73,9 +69,10 @@ def create_subtask_object_for_testing(subtask_type_value, subtask_state_value): as string (no object) For these testcases 'pipeline control' and 'observation control' is relevant """ + task_blueprint = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data(specifications_template=models.TaskTemplate.objects.get(name='target observation' if subtask_type_value=='observation' else 'preprocessing pipeline'))) subtask_template_obj = models.SubtaskTemplate.objects.get(name="%s control" % subtask_type_value) subtask_state_obj = models.SubtaskState.objects.get(value=subtask_state_value) - subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj) + subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj, task_blueprint=task_blueprint) return models.Subtask.objects.create(**subtask_data) @@ -103,11 +100,13 @@ def create_reserved_stations_for_testing(station_list): class SchedulingTest(unittest.TestCase): def setUp(self): # clean all specs/tasks/claims in RADB (cascading delete) - for spec in ra_test_env.radb.getSpecifications(): - ra_test_env.radb.deleteSpecification(spec['id']) + for spec in tmss_test_env.ra_test_environment.radb.getSpecifications(): + tmss_test_env.ra_test_environment.radb.deleteSpecification(spec['id']) def test_schedule_observation_subtask_with_enough_resources_available(self): with tmss_test_env.create_tmss_client() as client: + task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url']) + task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(task_blueprint_data, '/task_blueprint/') subtask_template = client.get_subtask_template("observation control") spec = get_default_json_object_for_schema(subtask_template['schema']) spec['stations']['digital_pointings'][0]['subbands'] = [0] @@ -117,7 +116,7 @@ class SchedulingTest(unittest.TestCase): specifications_doc=spec, cluster_url=cluster_url, start_time=datetime.utcnow()+timedelta(minutes=5), - stop_time=datetime.utcnow()+timedelta(minutes=15)) + task_blueprint_url=task_blueprint['url']) subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/') subtask_id = subtask['id'] test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/') @@ -126,7 +125,7 @@ class SchedulingTest(unittest.TestCase): subtask = client.schedule_subtask(subtask_id) self.assertEqual('scheduled', subtask['state_value']) - self.assertEqual('scheduled', ra_test_env.radb.getTask(tmss_id=subtask_id)['status']) + self.assertEqual('scheduled', tmss_test_env.ra_test_environment.radb.getTask(tmss_id=subtask_id)['status']) def test_schedule_observation_subtask_with_one_blocking_reservation_failed(self): """ @@ -168,6 +167,9 @@ class SchedulingTest(unittest.TestCase): self.assertTrue(create_reserved_stations_for_testing(['CS001','CS002','CS501','CS401' ])) with tmss_test_env.create_tmss_client() as client: + task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url']) + task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(task_blueprint_data, '/task_blueprint/') + subtask_template = client.get_subtask_template("observation control") spec = 
get_default_json_object_for_schema(subtask_template['schema']) spec['stations']['digital_pointings'][0]['subbands'] = [0] @@ -178,7 +180,7 @@ class SchedulingTest(unittest.TestCase): subtask_data = test_data_creator.Subtask(specifications_template_url=subtask_template['url'], specifications_doc=spec, cluster_url=cluster_url, - task_blueprint_url=test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/')) + task_blueprint_url=task_blueprint['url']) subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/') subtask_id = subtask['id'] test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/') @@ -190,7 +192,9 @@ class SchedulingTest(unittest.TestCase): subtask = client.get_subtask(subtask_id) self.assertEqual('error', subtask['state_value']) - self.assertEqual('conflict', ra_test_env.radb.getTask(tmss_id=subtask_id)['status']) + ra_task = tmss_test_env.ra_test_environment.radb.getTask(tmss_id=subtask_id) + self.assertIsNotNone(ra_task) + self.assertEqual('conflict', ra_task['status']) def test_schedule_observation_subtask_with_blocking_reservation_ok(self): """ @@ -227,6 +231,8 @@ class SchedulingTest(unittest.TestCase): cluster_url = client.get_path_as_json_object('/cluster/1')['url'] # setup: first create an observation, so the pipeline can have input. + obs_task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url']) + obs_task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(obs_task_blueprint_data, '/task_blueprint/') obs_subtask_template = client.get_subtask_template("observation control") obs_spec = get_default_json_object_for_schema(obs_subtask_template['schema']) obs_spec['stations']['digital_pointings'][0]['subbands'] = [0] @@ -234,19 +240,22 @@ class SchedulingTest(unittest.TestCase): obs_subtask_data = test_data_creator.Subtask(specifications_template_url=obs_subtask_template['url'], specifications_doc=obs_spec, cluster_url=cluster_url, - task_blueprint_url=test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/')) + task_blueprint_url=obs_task_blueprint['url']) obs_subtask = test_data_creator.post_data_and_get_response_as_json_object(obs_subtask_data, '/subtask/') obs_subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url']), '/subtask_output/') test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(filename="L%s_SB000.MS"%obs_subtask['id'], subtask_output_url=obs_subtask_output_url), '/dataproduct/') # now create the pipeline... 
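# --- Illustrative sketch (not part of this patch): the create-then-reference
# pattern these tests follow, shown with plain requests against a hypothetical
# TMSS test server; the URL and payloads are abbreviated, not the real schemas.
import requests

BASE = 'http://localhost:8000/api'  # hypothetical server address

# 1. create the parent resource and keep the URL the API returns for it
task_blueprint = requests.post(BASE + '/task_blueprint/',
                               json={'name': 'my_task_blueprint'}).json()

# 2. create the dependent resource with a reference to that URL, so the
#    subtask is born attached to its task blueprint
subtask = requests.post(BASE + '/subtask/',
                        json={'task_blueprint': task_blueprint['url']}).json()
print(subtask['id'])
# ---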
+ pipe_task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="preprocessing pipeline")['url']) + pipe_task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(pipe_task_blueprint_data, '/task_blueprint/') + pipe_subtask_template = client.get_subtask_template("pipeline control") pipe_spec = get_default_json_object_for_schema(pipe_subtask_template['schema']) pipe_subtask_data = test_data_creator.Subtask(specifications_template_url=pipe_subtask_template['url'], specifications_doc=pipe_spec, - task_blueprint_url=obs_subtask['task_blueprint'], + task_blueprint_url=pipe_task_blueprint['url'], cluster_url=cluster_url) pipe_subtask = test_data_creator.post_data_and_get_response_as_json_object(pipe_subtask_data, '/subtask/') @@ -261,7 +270,7 @@ class SchedulingTest(unittest.TestCase): subtask = client.schedule_subtask(pipe_subtask['id']) self.assertEqual('scheduled', subtask['state_value']) - self.assertEqual('scheduled', ra_test_env.radb.getTask(tmss_id=pipe_subtask['id'])['status']) + self.assertEqual('scheduled', tmss_test_env.ra_test_environment.radb.getTask(tmss_id=pipe_subtask['id'])['status']) def test_schedule_ingest_subtask(self): with tmss_test_env.create_tmss_client() as client: @@ -353,11 +362,13 @@ class SchedulingTest(unittest.TestCase): self.assertEqual(1, len(task_blueprint['subtasks'])) subtask = client.get_url_as_json_object(task_blueprint['subtasks'][0]) + client.session.patch(subtask['url'], {'start_time': datetime.utcnow() + timedelta(minutes=5)}) client.set_subtask_status(subtask['id'], 'defined') + subtask = client.schedule_subtask(subtask['id']) self.assertEqual('scheduled', subtask['state_value']) - self.assertEqual('scheduled', ra_test_env.radb.getTask(tmss_id=subtask['id'])['status']) + self.assertEqual('scheduled', tmss_test_env.ra_test_environment.radb.getTask(tmss_id=subtask['id'])['status']) client.set_subtask_status(subtask['id'], 'finished') @@ -371,11 +382,11 @@ class SubtaskInputOutputTest(unittest.TestCase): def setUp(self) -> None: # make sure we're allowed to schedule - setting = Setting.objects.get(name='allow_scheduling_observations') + setting = Setting.objects.get(name='dynamic_scheduling_enabled') setting.value = True setting.save() - @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks.assign_resources") + @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks.assign_or_unassign_resources") def test_schedule_pipeline_subtask_filters_predecessor_output_dataproducts_for_input(self, assign_resources_mock): # setup: # create observation subtask and outputs and dataproducts @@ -415,12 +426,14 @@ class SAPTest(unittest.TestCase): def setUp(self) -> None: # make sure we're allowed to schedule - setting = Setting.objects.get(name='allow_scheduling_observations') + setting = Setting.objects.get(name='dynamic_scheduling_enabled') setting.value = True setting.save() def test_schedule_observation_subtask_creates_sap_with_correct_pointing(self): with tmss_test_env.create_tmss_client() as client: + task_blueprint_data = test_data_creator.TaskBlueprint(template_url=client.get_task_template(name="target observation")['url']) + task_blueprint = test_data_creator.post_data_and_get_response_as_json_object(task_blueprint_data, '/task_blueprint/') subtask_template = client.get_subtask_template("observation control") spec = get_default_json_object_for_schema(subtask_template['schema']) spec['stations']['digital_pointings'][0]['subbands'] = [0] @@ -431,6 +444,7 @@ class SAPTest(unittest.TestCase): subtask_data = 
test_data_creator.Subtask(specifications_template_url=subtask_template['url'], specifications_doc=spec, cluster_url = cluster_url, + task_blueprint_url=task_blueprint['url'], start_time=datetime.utcnow() + timedelta(minutes=5), stop_time=datetime.utcnow() + timedelta(minutes=15)) subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/') @@ -438,15 +452,17 @@ class SAPTest(unittest.TestCase): test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/') - sap_count_before_scheduling = models.SAP.objects.count() + subtask_model = models.Subtask.objects.get(id=subtask_id) + self.assertEqual(0, subtask_model.output_dataproducts.values('sap').count()) + client.set_subtask_status(subtask_id, 'defined') subtask = client.schedule_subtask(subtask_id) - self.assertGreater(models.SAP.objects.count(), sap_count_before_scheduling) + self.assertEqual(1, subtask_model.output_dataproducts.values('sap').count()) self.assertEqual(models.SAP.objects.first().specifications_doc['pointing']['angle1'], pointing['angle1']) self.assertEqual(models.SAP.objects.first().specifications_doc['pointing']['angle2'], pointing['angle2']) - @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks.assign_resources") + @mock.patch("lofar.sas.tmss.tmss.tmssapp.subtasks.assign_or_unassign_resources") def test_schedule_pipeline_subtask_copies_sap_from_input_to_output(self, assign_resources_mock): # setup: # create observation subtask and outputs and dataproducts diff --git a/SAS/TMSS/test/t_subtasks.py b/SAS/TMSS/test/t_subtasks.py index b9021a86f94d25f5fcccd620daf7705c07c8d88e..0cdb95de14d749d73d32ff03728e0daacb5ce79f 100755 --- a/SAS/TMSS/test/t_subtasks.py +++ b/SAS/TMSS/test/t_subtasks.py @@ -301,7 +301,7 @@ class SubtaskInputSelectionFilteringTest(unittest.TestCase): def setUp(self) -> None: # make sure we're allowed to schedule - setting = Setting.objects.get(name='allow_scheduling_observations') + setting = Setting.objects.get(name='dynamic_scheduling_enabled') setting.value = True setting.save() @@ -371,7 +371,7 @@ class SubtaskInputSelectionFilteringTest(unittest.TestCase): class SettingTest(unittest.TestCase): def test_schedule_observation_subtask_raises_when_flag_is_false(self): - setting = Setting.objects.get(name='allow_scheduling_observations') + setting = Setting.objects.get(name='dynamic_scheduling_enabled') setting.value = False setting.save() obs_st = create_subtask_object_for_testing('observation', 'defined') diff --git a/SAS/TMSS/test/test_utils.py b/SAS/TMSS/test/test_utils.py index 7d559bb9800d4ad3112d49df59d3aa3094fec86a..1029deb3474ce830e83f3d8d0a26f07c9bf3620f 100644 --- a/SAS/TMSS/test/test_utils.py +++ b/SAS/TMSS/test/test_utils.py @@ -270,7 +270,9 @@ class TMSSTestEnvironment: def __init__(self, host: str='127.0.0.1', preferred_django_port: int=8000, public_host: str=None, exchange: str=os.environ.get("TMSS_EXCHANGE", DEFAULT_BUSNAME), broker: str=os.environ.get("TMSS_BROKER", DEFAULT_BROKER), populate_schemas:bool=False, populate_test_data:bool=False, - start_postgres_listener: bool=True): + start_ra_test_environment: bool=False, start_postgres_listener: bool=False, + start_subtask_scheduler: bool=False, start_dynamic_scheduler: bool=False, + start_pipeline_control: bool=False, enable_viewflow: bool=False): self._exchange = exchange self._broker = broker self._populate_schemas = populate_schemas @@ -284,9 +286,25 @@ class TMSSTestEnvironment: public_host=public_host) self.client_credentials = 
TemporaryCredentials(user=self.ldap_server.dbcreds.user, password=self.ldap_server.dbcreds.password) + + self._start_ra_test_environment = start_ra_test_environment + self.ra_test_environment = None + self._start_postgres_listener = start_postgres_listener self.postgres_listener = None + self._start_subtask_scheduler = start_subtask_scheduler + self.subtask_scheduler = None + + self._start_dynamic_scheduler = start_dynamic_scheduler + self.dynamic_scheduler = None + + self._start_pipeline_control = start_pipeline_control + self.pipeline_control = None + + if enable_viewflow: + os.environ['TMSS_ENABLE_VIEWFLOW'] = 'True' + # Check for correct Django version, should be at least 3.0 if django.VERSION[0] < 3: print("\nWARNING: YOU ARE USING DJANGO VERSION '%s', WHICH WILL NOT SUPPORT ALL FEATURES IN TMSS!\n" % @@ -318,12 +336,36 @@ class TMSSTestEnvironment: user.is_superuser = True user.save() + if self._start_ra_test_environment: + self.ra_test_environment = RATestEnvironment(exchange=self._exchange, broker=self._broker) + self.ra_test_environment.start() + if self._start_postgres_listener: # start the TMSSPGListener, so the changes in the database are posted as EventMessages on the bus from lofar.sas.tmss.services.tmss_postgres_listener import TMSSPGListener self.postgres_listener = TMSSPGListener(exchange=self._exchange, broker=self._broker, dbcreds=self.database.dbcreds) self.postgres_listener.start() + if self._start_subtask_scheduler: + from lofar.sas.tmss.services.scheduling.subtask_scheduling import create_subtask_scheduling_service + self.subtask_scheduler = create_subtask_scheduling_service(exchange=self._exchange, broker=self._broker) + self.subtask_scheduler.start_listening() + + if self._start_dynamic_scheduler: + from lofar.sas.tmss.services.scheduling.dynamic_scheduling import create_dynamic_scheduling_service, models + # by default, dynamic scheduling is disabled in TMSS. + # In this test environment, we do want to have it enabled. Why else would we wanna start this service? + setting = models.Setting.objects.get(name=models.Flag.Choices.DYNAMIC_SCHEDULING_ENABLED.value) + setting.value = True + setting.save() + self.dynamic_scheduler = create_dynamic_scheduling_service(exchange=self._exchange, broker=self._broker) + self.dynamic_scheduler.start_listening() + + if self._start_pipeline_control: + from lofar.mac.PipelineControl import PipelineControlTMSS + self.pipeline_control = PipelineControlTMSS(exchange=self._exchange, broker=self._broker) + self.pipeline_control.start_listening() + if self._populate_schemas or self._populate_test_data: self.populate_schemas() @@ -336,6 +378,22 @@ class TMSSTestEnvironment: self.postgres_listener.stop() self.postgres_listener = None + if self.subtask_scheduler is not None: + self.subtask_scheduler.stop_listening() + self.subtask_scheduler = None + + if self.dynamic_scheduler is not None: + self.dynamic_scheduler.stop_listening() + self.dynamic_scheduler = None + + if self.pipeline_control is not None: + self.pipeline_control.stop_listening() + self.pipeline_control = None + + if self.ra_test_environment is not None: + self.ra_test_environment.stop() + self.ra_test_environment = None + self.django_server.stop() self.ldap_server.stop() self.database.destroy() @@ -406,9 +464,15 @@ def main_test_environment(): group.add_option("-P", "--public_host", dest="public_host", type="string", default='127.0.0.1', help="expose the TMSS Django REST API via this host. 
[default=%default]") - group = OptionGroup(parser, 'Example/Test data') + group = OptionGroup(parser, 'Example/Test data, schemas and services', + description='Options to enable/create example/test data, schemas and services. ' \ + 'Without these options you get a lean and mean TMSS test environment, but then you need to run the background services yourselves, and create test data yourself. ' \ + 'For standalone commissioning/testing/playing around you need all these options.') parser.add_option_group(group) group.add_option('-d', '--data', dest='data', action='store_true', help='populate the test-database with test/example data') + group.add_option('-s', '--schemas', dest='schemas', action='store_true', help='populate the test-database with the TMSS JSON schemas') + group.add_option('-S', '--services', dest='services', action='store_true', help='start the TMSS background services.') + group.add_option('-v', '--viewflow', dest='viewflow', action='store_true', help='Enable the viewflow app for workflows on top of TMSS') group = OptionGroup(parser, 'Messaging options') parser.add_option_group(group) @@ -419,10 +483,12 @@ def main_test_environment(): logging.basicConfig(format = '%(asctime)s %(levelname)s %(message)s', level = logging.INFO) - with RATestEnvironment(exchange=options.exchange, broker=options.broker): - with TMSSTestEnvironment(host=options.host, preferred_django_port=options.port, public_host=options.public_host, - exchange=options.exchange, broker=options.broker, - populate_schemas=True, populate_test_data=options.data) as instance: + with TMSSTestEnvironment(host=options.host, preferred_django_port=options.port, public_host=options.public_host, + exchange=options.exchange, broker=options.broker, + populate_schemas=options.schemas, populate_test_data=options.data, + start_ra_test_environment=options.services, start_postgres_listener=options.services, + start_subtask_scheduler=options.services, start_dynamic_scheduler=options.services, + start_pipeline_control=options.services, enable_viewflow=options.viewflow) as tmss_test_env: # print some nice info for the user to use the test servers... # use print instead of log for clean lines. 
@@ -433,19 +499,20 @@ def main_test_environment(): print("*****************************************************") print("Test-TMSS database, LDAP and Django up and running...") print("*****************************************************") - print("DB Credentials ID: %s" % (instance.database.dbcreds_id, )) - print("LDAP Credentials ID: %s" % (instance.django_server.ldap_dbcreds_id, )) - print("TMSS Client Credentials ID: %s" % (instance.client_credentials.dbcreds_id, )) - print("Django URL: %s" % (instance.django_server.url)) + print("DB Credentials ID: %s" % (tmss_test_env.database.dbcreds_id, )) + print("LDAP Credentials ID: %s" % (tmss_test_env.django_server.ldap_dbcreds_id, )) + print("TMSS Client Credentials ID: %s" % (tmss_test_env.client_credentials.dbcreds_id, )) + print("Django URL: %s" % (tmss_test_env.django_server.url)) print() print("Example cmdlines to run tmss or tmss_manage_django:") - print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss" % (instance.database.dbcreds_id, instance.django_server.ldap_dbcreds_id)) - print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss_manage_django" % (instance.database.dbcreds_id, instance.django_server.ldap_dbcreds_id)) + print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss" % (tmss_test_env.database.dbcreds_id, tmss_test_env.django_server.ldap_dbcreds_id)) + print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss_manage_django" % (tmss_test_env.database.dbcreds_id, tmss_test_env.django_server.ldap_dbcreds_id)) print() print("Example cmdline to run tmss client call:") - print("TMSS_CLIENT_DBCREDENTIALS=%s tmss_set_subtask_state <id> <state>" % (instance.client_credentials.dbcreds_id, )) + print("TMSS_CLIENT_DBCREDENTIALS=%s tmss_set_subtask_state <id> <state>" % (tmss_test_env.client_credentials.dbcreds_id, )) print() print("Press Ctrl-C to exit (and remove the test database and django server automatically)") + waitForInterrupt() diff --git a/SAS/TMSS/test/testdata/subtasks.json b/SAS/TMSS/test/testdata/subtasks.json index 70f8b97d95e9c2c830bcc42092bcf0144a506f9e..2596021102cda14054c339f651d9b7c0c0eb7a55 100644 --- a/SAS/TMSS/test/testdata/subtasks.json +++ b/SAS/TMSS/test/testdata/subtasks.json @@ -31,11 +31,9 @@ "stop_time": "2020-01-02T12:00:00", "specifications_doc": 1, "do_cancel": null, - "priority": 1, "state": "defined", "task_blueprint": null, "specifications_template": 1, - "schedule_method": "manual", "cluster": 2, "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ], "created_at": "2020-02-24T13:19:57", @@ -50,11 +48,9 @@ "stop_time": "2020-01-03T12:00:00", "specifications_doc": 1, "do_cancel": null, - "priority": 1, "state": "defined", "task_blueprint": null, "specifications_template": 1, - "schedule_method": "manual", "cluster": 3, "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ], "created_at": "2020-02-24T13:19:57", @@ -69,11 +65,9 @@ "stop_time": "2020-01-04T12:00:00", "specifications_doc": 1, "do_cancel": null, - "priority": 1, "state": "defined", "task_blueprint": null, "specifications_template": 1, - "schedule_method": "manual", "cluster": 1, "tags": [ "loaded by fixture", "TMSS-154", "manual testing" ], "created_at": "2020-02-24T13:19:57", diff --git a/SAS/TMSS/test/tmss_test_data_django_models.py b/SAS/TMSS/test/tmss_test_data_django_models.py index f58583962a1887ddc6e3e6e136351ede386ba255..5edc2d0b9a87be9a108937fb0467fdff1476860d 100644 --- a/SAS/TMSS/test/tmss_test_data_django_models.py +++ b/SAS/TMSS/test/tmss_test_data_django_models.py @@ -118,12 +118,15 @@ def Cycle_test_data() 
-> dict: "start": datetime.utcnow().isoformat(), "stop": datetime.utcnow().isoformat()} -def Project_test_data(archive_subdirectory="my_project/") -> dict: +def Project_test_data(name: str=None, priority_rank: int = 1, archive_subdirectory="my_project/") -> dict: + if name is None: + name = 'my_project_' + str(uuid.uuid4()) + return { #"cycles": [models.Cycle.objects.create(**Cycle_test_data())], # ManyToMany, use set() - "name": 'my_project_' + str(uuid.uuid4()), + "name": name, "description": 'my description ' + str(uuid.uuid4()), "tags": [], - "priority_rank": 1.0, + "priority_rank": priority_rank, "trigger_priority": 1000, "can_trigger": False, "private_data": True, @@ -234,20 +237,26 @@ def SchedulingUnitBlueprint_test_data(name='my_scheduling_unit_blueprint', requi "do_cancel": False, "draft": models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data()) } -def TaskBlueprint_test_data(name='my_task_blueprint', task_draft: models.TaskDraft = None, scheduling_unit_blueprint: models.SchedulingUnitBlueprint = None) -> dict: +def TaskBlueprint_test_data(name='my_task_blueprint', task_draft: models.TaskDraft = None, scheduling_unit_blueprint: models.SchedulingUnitBlueprint = None, specifications_template: models.TaskTemplate=None, specifications_doc: dict=None) -> dict: if task_draft is None: task_draft = models.TaskDraft.objects.create(**TaskDraft_test_data()) + if specifications_template is None: + specifications_template = task_draft.specifications_template + + if specifications_doc is None: + specifications_doc = get_default_json_object_for_schema(specifications_template.schema) + if scheduling_unit_blueprint is None: scheduling_unit_blueprint = models.SchedulingUnitBlueprint.objects.create(**SchedulingUnitBlueprint_test_data()) return {"name": name, "description": "", "tags": [], - "specifications_doc": task_draft.specifications_doc, + "specifications_doc": specifications_doc, "do_cancel": False, "draft": task_draft, - "specifications_template": task_draft.specifications_template, + "specifications_template": specifications_template, "scheduling_unit_blueprint": scheduling_unit_blueprint} def TaskRelationBlueprint_test_data(producer: models.TaskBlueprint = None, consumer: models.TaskBlueprint = None) -> dict: @@ -375,8 +384,6 @@ def Subtask_test_data(task_blueprint: models.TaskBlueprint=None, subtask_templat "specifications_template": subtask_template, "tags": ["TMSS", "TESTING"], "do_cancel": datetime.utcnow(), - "priority": 1, - "schedule_method": models.ScheduleMethod.objects.get(value='manual'), "cluster": cluster, "raw_feedback": raw_feedback} diff --git a/SAS/TMSS/test/tmss_test_data_rest.py b/SAS/TMSS/test/tmss_test_data_rest.py index 82f35cf01ae41d98230365c02cc85fbdc0ec8908..1a16d480f10c74cd783b3ea88d39fd363b1c2cfc 100644 --- a/SAS/TMSS/test/tmss_test_data_rest.py +++ b/SAS/TMSS/test/tmss_test_data_rest.py @@ -298,8 +298,8 @@ class TMSSRESTTestDataCreator(): 'task_blueprints': [], 'produced_by': [], 'consumed_by': [], - 'first_to_connect': [], - 'second_to_connect': []} + 'first_scheduling_relation': [], + 'second_scheduling_relation': []} def TaskRelationDraft(self, producer_url=None, consumer_url=None, template_url=None, input_role_url=None, output_role_url=None, selection_doc=None): @@ -380,8 +380,8 @@ class TMSSRESTTestDataCreator(): "subtasks": [], "produced_by": [], "consumed_by": [], - 'first_to_connect': [], - 'second_to_connect': []} + 'first_scheduling_relation': [], + 'second_scheduling_relation': []} def TaskRelationBlueprint(self, draft_url=None, 
template_url=None, input_role_url=None, output_role_url=None, consumer_url=None, producer_url=None, selection_doc=None): if draft_url is None: @@ -517,8 +517,6 @@ class TMSSRESTTestDataCreator(): "specifications_template": specifications_template_url, "tags": ["TMSS", "TESTING"], "do_cancel": datetime.utcnow().isoformat(), - "priority": 1, - "schedule_method": self.django_api_url + '/schedule_method/manual', "cluster": cluster_url, "raw_feedback": raw_feedack} diff --git a/SubSystems/RAServices/CMakeLists.txt b/SubSystems/RAServices/CMakeLists.txt index fba2f3ff4837f061ce7251daaeae624ee7bddee6..43896bd6785b73d1aa7f65bb64aa004ad5f6abb8 100644 --- a/SubSystems/RAServices/CMakeLists.txt +++ b/SubSystems/RAServices/CMakeLists.txt @@ -27,7 +27,7 @@ lofar_package(RAServices ltastorageoverview QA_Service MessageLogger - TMSSSubtaskSchedulingService) + TMSSSchedulingService) # supervisord config files lofar_add_sysconf_files(RAServices.ini