diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ca2d341bd9bee6a1976a8026d522e19277228a9f..02eb22467f56319e16296528badbd15a5a893a4b 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -107,9 +107,9 @@ build_MCU_MAC:
     - cd build/gnucxx11_opt
     - cmake -DBUILD_PACKAGES=$PACKAGE -DWINCC_ROOT_DIR=/opt/WinCC_OA/3.16/ -DBLITZ_ROOT_DIR=/opt/blitz/ -DCASACORE_ROOT_DIR=/opt/casacore/ -DCMAKE_INSTALL_PREFIX=/opt/lofar ../..
    - make -j 12
-    - make DESTDIR=${CI_BUILDS_DIR}/install install
-    - cd ${CI_BUILDS_DIR}/install/opt/lofar
-    - tar --ignore-failed-read --exclude=include -czf MCU_MAC_${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHORT_SHA}.ztar *
+    - make DESTDIR=${CI_BUILDS_DIR}/${CI_COMMIT_SHORT_SHA}/install install
+    - cd ${CI_BUILDS_DIR}/${CI_COMMIT_SHORT_SHA}/install/opt/lofar
+    - tar --ignore-failed-read --exclude=include --exclude="*.ztar" -czf MCU_MAC_${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHORT_SHA}.ztar *
     - curl --insecure --upload-file MCU_MAC_${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHORT_SHA}.ztar -u upload:upload https://support.astron.nl/nexus/content/repositories/branches/nl/astron/lofar/${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHORT_SHA}/MCU_MAC_${CI_COMMIT_REF_NAME}-${CI_COMMIT_SHORT_SHA}.x86_64.ztar
   dependencies:
@@ -133,6 +133,12 @@ unit_test_TMSS:
     - SKIP_INTEGRATION_TESTS=true ctest
   dependencies:
     - build_TMSS
+  services:
+    - rabbitmq:latest
+  variables:
+    RABBITMQ_DEFAULT_USER: guest
+    RABBITMQ_DEFAULT_PASS: guest
+    LOFAR_DEFAULT_BROKER: 'rabbitmq' # override the default 'localhost', which does not work for the CI rabbitmq service.
   artifacts:
     name: unit-test-report
     when: always
diff --git a/CEP/DP3/DPPP/src/MSWriter.cc b/CEP/DP3/DPPP/src/MSWriter.cc
index a313258948a27a502f35c1df95d82b8d350fa915..0c6106cb67ef917e40bfd94828ceabb3e9fd6303 100644
--- a/CEP/DP3/DPPP/src/MSWriter.cc
+++ b/CEP/DP3/DPPP/src/MSWriter.cc
@@ -46,6 +46,7 @@
 #include <casacore/casa/Arrays/ArrayLogical.h>
 #include <casacore/casa/Containers/Record.h>
 #include <casacore/casa/OS/Path.h>
+#include <casacore/casa/version.h>
 #include <iostream>
 #include <limits>
@@ -282,7 +283,12 @@ namespace LOFAR {
         }
       }
       // Remove possible hypercolumn definitions.
+// Test for casacore version 3.1.1 or older
+#if CASACORE_MAJOR_VERSION<3 || (CASACORE_MAJOR_VERSION==3 && (CASACORE_MINOR_VERSION==0 || (CASACORE_MINOR_VERSION==1 && CASACORE_PATCH_VERSION < 2)))
       newdesc.adjustHypercolumns (SimpleOrderedMap<String,String>(String()));
+#else
+      newdesc.adjustHypercolumns (std::map<String,String>());
+#endif
       // Set data manager info.
       Record dminfo = temptable.dataManagerInfo();
       // Determine the DATA tile shape. Use all corrs and the given #channels.
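Note on the unit_test_TMSS job above: pointing LOFAR_DEFAULT_BROKER at the rabbitmq service container only works because lofar.messaging resolves its broker default from the environment. A minimal sketch of that resolution, assuming the variable names match what the CI job sets (the exact lookup lives in lofar.messaging's config module, not shown in this patch):

    import os

    # Assumed lookup: the broker defaults to 'localhost' unless the
    # environment overrides it, which is what the CI job above relies on.
    DEFAULT_BROKER = os.environ.get('LOFAR_DEFAULT_BROKER', 'localhost')
    DEFAULT_BUSNAME = os.environ.get('LOFAR_DEFAULT_BUSNAME', 'lofar')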
diff --git a/CEP/MS/src/BaselineSelect.cc b/CEP/MS/src/BaselineSelect.cc
index 0c1bdad20053e0e2f805441c8c1b7c80a770eabc..806b736e6c94eaf76cac8a25b80bc42daed2c156 100644
--- a/CEP/MS/src/BaselineSelect.cc
+++ b/CEP/MS/src/BaselineSelect.cc
@@ -45,7 +45,7 @@
 #include <casacore/measures/Measures/MPosition.h>
 #include <casacore/casa/Arrays/Matrix.h>
 #include <casacore/casa/Arrays/Vector.h>
-
+#include <casacore/casa/version.h>
 
 using namespace casacore;
@@ -126,9 +126,17 @@ namespace LOFAR {
   Vector<Int> selectedAnts1;
   Vector<Int> selectedAnts2;
   Matrix<Int> selectedBaselines;
-  MSSelectionErrorHandler* curHandler = MSAntennaParse::thisMSAErrorHandler;
+  auto curHandler = MSAntennaParse::thisMSAErrorHandler;
+#if CASACORE_MAJOR_VERSION<3 || (CASACORE_MAJOR_VERSION==3 && (CASACORE_MINOR_VERSION==0 || (CASACORE_MINOR_VERSION==1 && CASACORE_PATCH_VERSION < 2)))
+  // In casacore < 3.1.2 thisMSAErrorHandler is a raw pointer;
+  // from casacore 3.1.2 on it is a CountedPtr.
   BaselineSelectErrorHandler errorHandler (os);
   MSAntennaParse::thisMSAErrorHandler = &errorHandler;
+#else
+  CountedPtr<MSSelectionErrorHandler> errorHandler(
+      new BaselineSelectErrorHandler (os));
+  MSAntennaParse::thisMSAErrorHandler = errorHandler;
+#endif
   try {
     // Create a table expression representing the selection.
     TableExprNode node = msAntennaGramParseCommand
diff --git a/Docker/lofar-ci/Dockerfile_ci_sas b/Docker/lofar-ci/Dockerfile_ci_sas
index e49f816720424fb51a0a7c139c166f2622d881c4..1076fbe2f569d87021cc2b6291c3235e4fc20389 100644
--- a/Docker/lofar-ci/Dockerfile_ci_sas
+++ b/Docker/lofar-ci/Dockerfile_ci_sas
@@ -7,7 +7,7 @@ ARG BASE_VERSION=latest
 FROM ci_base:$BASE_VERSION
 
 RUN echo "Installing packages for SAS..." && \
-    yum install -y log4cplus log4cplus-devel python3 python3-libs python3-devel boost readline-devel boost-devel binutils-devel boost-python36 boost-python36-devel gettext which openldap-devel npm nodejs git java-11-openjdk python-twisted-core
+    yum install -y log4cplus log4cplus-devel python3 python3-libs python3-devel boost readline-devel boost-devel binutils-devel boost-python36 boost-python36-devel gettext which openldap-devel git java-11-openjdk python-twisted-core
 
 # see https://www.postgresql.org/download/linux/redhat/ on how to install postgresql-server > 9.2 on centos7
 RUN yum erase -y postgresql postgresql-server postgresql-devel && \
@@ -16,11 +16,15 @@ RUN yum erase -y postgresql postgresql-server postgresql-devel && \
     cd /bin && ln -s /usr/pgsql-9.6/bin/initdb && ln -s /usr/pgsql-9.6/bin/postgres
 ENV PATH /usr/pgsql-9.6/bin:$PATH
 
-RUN pip3 install cython kombu lxml requests pygcn xmljson mysql-connector-python python-dateutil django djangorestframework djangorestframework-xml ldap==1.0.2 flask fabric coverage python-qpid-proton PyGreSQL numpy h5py psycopg2 testing.postgresql Flask-Testing scipy Markdown django-filter python-ldap python-ldap-test ldap3 djangorestframework django-jsonforms django-json-widget django-jsoneditor drf-yasg flex swagger-spec-validator django-auth-ldap mozilla-django-oidc jsonschema comet
+RUN pip3 install cython kombu lxml requests pygcn xmljson mysql-connector-python python-dateutil Django==3.0.9 djangorestframework djangorestframework-xml ldap==1.0.2 flask fabric coverage python-qpid-proton PyGreSQL numpy h5py psycopg2 testing.postgresql Flask-Testing scipy Markdown django-filter python-ldap python-ldap-test ldap3 djangorestframework django-jsonforms django-json-widget django-jsoneditor drf-yasg flex swagger-spec-validator django-auth-ldap mozilla-django-oidc jsonschema comet
 
-RUN npm install -g npx && \
-    npm install -g n && \
-    n stable && \
+# Note: nodejs now comes with npm; do not install the npm package separately, since that one comes from the epel repo and conflicts.
+RUN echo "Installing Nodejs packages..." && \
+    curl -sL https://rpm.nodesource.com/setup_14.x | bash - && \
+    yum install -y nodejs && \
+    npm -v && \
+    node -v && \
     npm install -g serve
+
 USER lofarsys
\ No newline at end of file
diff --git a/Docker/lofar-outputproc/Dockerfile.tmpl b/Docker/lofar-outputproc/Dockerfile.tmpl
index 495fd3f145425e46e060326d852eaa203ffa54fd..6c5ffa7b4a9707b9b2478e47615dacc58efc3b99 100644
--- a/Docker/lofar-outputproc/Dockerfile.tmpl
+++ b/Docker/lofar-outputproc/Dockerfile.tmpl
@@ -51,14 +51,14 @@ RUN export BUILD_PACKAGES="git cmake g++ swig3.0 python3-setuptools python3-dev
 #
 # Run-time dependencies
-RUN apt-get update && apt-get install -y binutils liblog4cplus-1.1-9 libxml2 libboost-thread${BOOST_VERSION}.1 libboost-filesystem${BOOST_VERSION}.1 libboost-date-time${BOOST_VERSION}.1 libpng16-16 libsigc++-2.0-dev libxml++2.6-2v5 libboost-regex${BOOST_VERSION}.1
+RUN apt-get update && apt-get install -y binutils liblog4cplus-1.1-9 libxml2 libboost-thread${BOOST_VERSION}.1 libboost-filesystem${BOOST_VERSION}.1 libboost-date-time${BOOST_VERSION}.1 libpng16-16 libsigc++-2.0-dev libxml++2.6-2v5 libboost-regex${BOOST_VERSION}.1 libreadline${READLINE_VERSION}
 
 # Tell image build information
 ENV LOFAR_BRANCH=${LOFAR_VERSION} \
     LOFAR_BUILDVARIANT=gnucxx11_opt
 
 # Install
-RUN apt-get update && apt-get install -y git cmake g++ gfortran bison flex autogen liblog4cplus-dev libhdf5-dev libboost-dev boost-python${BOOST_VERSION}-dev libxml2-dev pkg-config libpng-dev libfftw3-dev libunittest++-dev libxml++2.6-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev libboost-regex${BOOST_VERSION}-dev binutils-dev libopenblas-dev libcfitsio-dev wcslib-dev libcap2-bin && \
+RUN apt-get update && apt-get install -y git cmake g++ gfortran bison flex autogen liblog4cplus-dev libhdf5-dev libboost-dev boost-python${BOOST_VERSION}-dev libxml2-dev pkg-config libpng-dev libfftw3-dev libunittest++-dev libxml++2.6-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev libboost-regex${BOOST_VERSION}-dev binutils-dev libopenblas-dev libcfitsio-dev wcslib-dev libcap2-bin libreadline-dev && \
     mkdir -p ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && \
     cd ${INSTALLDIR}/lofar && git clone https://git.astron.nl/ro/lofar.git ${INSTALLDIR}/lofar/src && \
     cd ${INSTALLDIR}/lofar/src && git checkout ${LOFAR_VERSION} && \
@@ -66,11 +66,11 @@ RUN apt-get update && apt-get install -y git cmake g++ gfortran bison flex autog
     cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && sed -i '29,31d' include/ApplCommon/PosixTime.h && \
     cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && make -j ${J} && \
     cd ${INSTALLDIR}/lofar/build/${LOFAR_BUILDVARIANT} && make install && \
-    bash -c "mkdir -p /home/${USER}/lofar/var/{log,run}" && \
-    bash -c "ln -sfT /home/${USER}/lofar/var ${INSTALLDIR}/lofar/var" && \
+    bash -c "mkdir -p ${INSTALLDIR}/lofar/var/{log,run}" && \
+    bash -c "chmod a+rwx ${INSTALLDIR}/lofar/var/{log,run}" && \
     bash -c "strip ${INSTALLDIR}/lofar/{bin,sbin,lib64}/* || true" && \
     bash -c "rm -rf ${INSTALLDIR}/lofar/{build,src}" && \
     setcap cap_sys_nice,cap_sys_admin=ep ${INSTALLDIR}/lofar/bin/outputProc && \
-    apt-get purge -y subversion cmake g++ gfortran bison flex autogen liblog4cplus-dev libhdf5-dev libboost-dev libboost-python${BOOST_VERSION}-dev libxml2-dev pkg-config libpng12-dev libfftw3-dev libunittest++-dev libxml++2.6-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev binutils-dev libcfitsio3-dev wcslib-dev libopenblas-dev && \
+    apt-get purge -y subversion cmake g++ gfortran bison flex autogen liblog4cplus-dev libhdf5-dev libboost-dev libboost-python${BOOST_VERSION}-dev libxml2-dev pkg-config libpng12-dev libfftw3-dev libunittest++-dev libxml++2.6-dev libboost-filesystem${BOOST_VERSION}-dev libboost-date-time${BOOST_VERSION}-dev libboost-thread${BOOST_VERSION}-dev binutils-dev libcfitsio3-dev wcslib-dev libopenblas-dev libreadline-dev && \
     apt-get autoremove -y --purge
diff --git a/LCS/PyCommon/lcu_utils.py b/LCS/PyCommon/lcu_utils.py
index 1377d1004ef4c716ab7ab2fd8b4ec062c0b021bc..c0a17b1e811a91f0a176eb2a21d8c804c6662564 100755
--- a/LCS/PyCommon/lcu_utils.py
+++ b/LCS/PyCommon/lcu_utils.py
@@ -126,6 +126,11 @@ def get_current_stations(station_group='today', as_host_names=True):
     :param as_host_names - bool: return the station names as ssh-able hostnames if True (like cs001c, cs002c).
                                  return the station names as parset-like VirtualInstrument.stationList names if False (like CS001, CS002).
     :return: the station names for the given station_group as ssh-able hostnames if as_host_names=True (like cs001c, cs002c) or as parset-like VirtualInstrument.stationList names if as_host_names=False (like CS001, CS002).
     '''
+    # sanitize
+    station_group = station_group.lower().strip()
+    if station_group == 'all':
+        station_group = 'today'
+
     cmd = ['cat', '/opt/operations/bin/stations.txt']
     cmd = wrap_command_in_lcu_head_node_ssh_call(cmd)
     logger.debug('executing cmd: %s', ' '.join(cmd))
@@ -136,7 +141,7 @@ def get_current_stations(station_group='today', as_host_names=True):
         raise LCURuntimeError("Could not fetch stations.txt file. stderr=%s" % (err, ))
 
     station_file_lines = out.splitlines(False)
-    station_group_filter = station_group.strip()+' '
+    station_group_filter = station_group + ' '
     station_group_line = next(l for l in station_file_lines if l.startswith(station_group_filter))
     station_aliases = station_group_line.split(' ')[-1].split(',')
     station_hostnames = []
diff --git a/MAC/Services/CMakeLists.txt b/MAC/Services/CMakeLists.txt
index 9382db18d5d8a7af897dfc20e61de2ffccb8ab61..5a28a358a0b37cc84a27a1bdaa1a8b4c12eeda3d 100644
--- a/MAC/Services/CMakeLists.txt
+++ b/MAC/Services/CMakeLists.txt
@@ -1,11 +1,6 @@
 # $Id$
-
-IF(BUILD_TESTING)
-    lofar_package(MAC_Services 1.0 DEPENDS PyMessaging OTDB_Services pyparameterset Docker ResourceAssignmentService TBBService TMSSClient TMSS)
-ELSE()
-    lofar_package(MAC_Services 1.0 DEPENDS PyMessaging OTDB_Services pyparameterset Docker ResourceAssignmentService TBBService TMSSClient)
-ENDIF(BUILD_TESTING)
+lofar_package(MAC_Services 1.0 DEPENDS PyMessaging OTDB_Services pyparameterset Docker ResourceAssignmentService TBBService TMSSClient)
 
 add_subdirectory(src)
 add_subdirectory(test)
diff --git a/MAC/Services/test/tPipelineControl.py b/MAC/Services/test/tPipelineControl.py
index c4603858fbb871d7fdbe2f877d52f4aa37b942d6..8c2e72031dd310c08fa3d190331d4ab1d205bedf 100644
--- a/MAC/Services/test/tPipelineControl.py
+++ b/MAC/Services/test/tPipelineControl.py
@@ -99,7 +99,7 @@ class MockRAService(ServiceMessageHandler):
                              for x in predecessors}
         self.status = status
 
-    def GetTask(self, id, mom_id, otdb_id, specification_id):
+    def GetTask(self, id, mom_id, otdb_id, specification_id, **kwargs):
         logger.info("***** GetTask(%s) *****", otdb_id)
 
         return {
@@ -113,7 +113,7 @@ class MockRAService(ServiceMessageHandler):
         }
 
     def GetTasks(self, lower_bound, upper_bound, task_ids, task_status, task_type, mom_ids,
-                 otdb_ids, cluster):
+                 otdb_ids, cluster, **kwargs):
         logger.info("***** GetTasks(%s) *****", task_ids)
 
         if task_ids is None:
diff --git a/RTCP/Cobalt/OutputProc/CMakeLists.txt b/RTCP/Cobalt/OutputProc/CMakeLists.txt
index 8f16d1b5f8c4b62a83ab8f62b4862201eeb778ac..16c962e801ae794eb21528d4e8f989ce1efe23ac 100644
--- a/RTCP/Cobalt/OutputProc/CMakeLists.txt
+++ b/RTCP/Cobalt/OutputProc/CMakeLists.txt
@@ -7,6 +7,7 @@ include(LofarFindPackage)
 lofar_find_package(OpenMP REQUIRED)
 lofar_find_package(Boost REQUIRED)
 lofar_find_package(Casacore COMPONENTS casa ms tables REQUIRED)
+lofar_find_package(DAL REQUIRED)
 # Skip explicit lofar_find_package(HDF5 REQUIRED) needed for casacore and DAL.
 # On RHEL/CentOS 7 an explicit find HDF5 returns into OutputProc:
 #   -D_BSD_SOURCE -D_FORTIFY_SOURCE=2 -D_LARGEFILE64_SOURCE -D_LARGEFILE_SOURCE
diff --git a/SAS/ResourceAssignment/Common/lib/specification.py b/SAS/ResourceAssignment/Common/lib/specification.py
index bd5c5aeb29d4e17b17f194084b39841b8ca5bed0..b11b045ec2e335c21153aba477ef1a0976512075 100644
--- a/SAS/ResourceAssignment/Common/lib/specification.py
+++ b/SAS/ResourceAssignment/Common/lib/specification.py
@@ -77,6 +77,7 @@ class Specification:
         self.otdb_id = None    # Task Id in OTDB
         self.mom_id = None     # Task Id in MoM
         self.radb_id = None    # Task Id in RADB
+        self.tmss_id = None    # (Sub)Task Id in TMSS
         self.trigger_id = None # Id of trigger if this was specified in a trigger
         self.type = None       # Task type in RADB
         self.subtype = None    # Task type in RADB
@@ -139,6 +140,7 @@ class Specification:
         result["otdb_id"] = self.otdb_id
         result["mom_id"] = self.mom_id
         result["task_id"] = self.radb_id
+        result["tmss_id"] = self.tmss_id
         result["trigger_id"] = self.trigger_id
         result["status"] = self.status
         result["task_type"] = self.type
@@ -167,13 +169,14 @@ class Specification:
 
         :param input_dict: Serialized version of a Specification and any predecessors.
         """
-        self.otdb_id = input_dict["otdb_id"]
+        self.otdb_id = input_dict.get("otdb_id")
         self.mom_id = input_dict.get("mom_id")
-        self.radb_id = input_dict["task_id"]
+        self.radb_id = input_dict.get("task_id")
+        self.tmss_id = input_dict.get("tmss_id")
         self.trigger_id = input_dict.get("trigger_id")
         self.status = input_dict["status"]
-        self.type = input_dict["task_type"]
-        self.subtype = input_dict.get("task_subtype")
+        self.type = input_dict.get("task_type", input_dict.get("specification", {}).get("ObsSW.Observation.processType"))
+        self.subtype = input_dict.get("task_subtype", input_dict.get("specification", {}).get("ObsSW.Observation.processSubtype"))
         self.starttime = Specification.parse_datetime(input_dict.get("starttime"))
         self.endtime = Specification.parse_datetime(input_dict.get("endtime"))
         self.duration = Specification.parse_timedelta(input_dict.get("duration"))
@@ -980,6 +983,7 @@ class Specification:
         self.radb_id = task["id"] # Should be the same as radb_id, but self.radb_id might not yet be set
         self.mom_id = task["mom_id"]
         self.otdb_id = task["otdb_id"]
+        self.tmss_id = task.get("tmss_id")
         self.status = task["status"]
         self.type = task["type"]
         self.duration = timedelta(seconds = task["duration"])
@@ -1023,13 +1027,13 @@ class Specification:
 
         assignable_task_states = ['approved', 'prescheduled', 'error']
         if self.status in assignable_task_states:
-            logger.info('Task otdb_id=%s with status \'%s\' is assignable' % (self.otdb_id, self.status))
+            logger.info('Task otdb_id=%s tmss_id=%s with status \'%s\' is assignable' % (self.otdb_id, self.tmss_id, self.status))
         else:
             assignable_task_states_str = ', '.join(assignable_task_states)
-            logger.warn('Task otdb_id=%s with status \'%s\' is not assignable. Allowed statuses are %s' %
-                        (self.otdb_id, self.status, assignable_task_states_str))
+            logger.warn('Task otdb_id=%s tmss_id=%s with status \'%s\' is not assignable. Allowed statuses are %s' %
+                        (self.otdb_id, self.tmss_id, self.status, assignable_task_states_str))
 
-            message = "Unsupported status '%s' of task with OTDB ID: %s" % (self.status, self.otdb_id)
+            message = "Unsupported status '%s' of task with otdb_id=%s tmss_id=%s" % (self.status, self.otdb_id, self.tmss_id)
             raise Exception(message) #TODO more specific exception type?
 
     def set_status(self, new_status):
@@ -1063,10 +1067,10 @@ class Specification:
         """
 
         logger.info(
-            'insertSpecification mom_id=%s, otdb_id=%s, status=%s, task_type=%s, start_time=%s, end_time=%s '
-            'cluster=%s' % (self.mom_id, self.otdb_id, self.status, self.type, self.starttime, self.endtime, self.cluster)
+            'insertSpecification mom_id=%s, otdb_id=%s, tmss_id=%s, status=%s, task_type=%s, start_time=%s, end_time=%s '
+            'cluster=%s' % (self.mom_id, self.otdb_id, self.tmss_id, self.status, self.type, self.starttime, self.endtime, self.cluster)
         )
-        result = self.radb.insertOrUpdateSpecificationAndTask(self.mom_id, self.otdb_id, self.status, self.type, self.starttime,
+        result = self.radb.insertOrUpdateSpecificationAndTask(self.mom_id, self.otdb_id, self.tmss_id, self.status, self.type, self.starttime,
                                                               self.endtime, str(self.as_dict()), self.cluster, commit=True) #TODO use internal_dict?
         specification_id = result['specification_id'] # We never seem to need this again
@@ -1079,6 +1083,8 @@ class Specification:
         Links a task to its predecessors in RADB
         """
         #TODO how to keep the predecessors in MoM and in OTDB in sync here? Does it matter?
+        if not self.mom_id:
+            return
 
         predecessor_ids = self.momquery.getPredecessorIds(self.mom_id)
         if str(self.mom_id) not in predecessor_ids or not predecessor_ids[str(self.mom_id)]:
@@ -1129,6 +1135,9 @@ class Specification:
         """
         #FIXME Not sure if this works, as self.successor_ids might not be set outside of here
+        if not self.mom_id:
+            return
+
         successor_ids = self.momquery.getSuccessorIds(self.mom_id)
         if str(self.mom_id) not in successor_ids or not successor_ids[str(self.mom_id)]:
             logger.info('no successors for otdb_id=%s mom_id=%s', self.otdb_id, self.mom_id)
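With the from_dict changes above, a specification may now arrive without otdb_id/task_id (e.g. from TMSS) and fall back to the parset keys nested under 'specification' for its type and subtype. A sketch of a dict that now parses, with hypothetical values; otdbrpc, momrpc and radb stand for the usual collaborators:

    tmss_style_input = {
        "tmss_id": 42,                  # no otdb_id / task_id present
        "status": "prescheduled",
        "specification": {
            "ObsSW.Observation.processType": "Pipeline",
            "ObsSW.Observation.processSubtype": "Averaging Pipeline",
        },
    }

    spec = Specification(otdbrpc, momrpc, radb)
    spec.from_dict(tmss_style_input)
    assert spec.tmss_id == 42 and spec.type == "Pipeline"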
diff --git a/SAS/ResourceAssignment/ResourceAssigner/CMakeLists.txt b/SAS/ResourceAssignment/ResourceAssigner/CMakeLists.txt
index 3f386b214455432b72c24fd2cae47f690acafabd..7ede896d3932949d108e18984a29db61cad392a8 100644
--- a/SAS/ResourceAssignment/ResourceAssigner/CMakeLists.txt
+++ b/SAS/ResourceAssignment/ResourceAssigner/CMakeLists.txt
@@ -1,6 +1,6 @@
 # $Id: CMakeLists.txt 30355 2014-11-04 13:46:05Z loose $
 
-lofar_package(ResourceAssigner 0.1 DEPENDS PyMessaging PyCommon pyparameterset OTDB_Services RACommon ResourceAssignmentService MoMQueryServiceClient ResourceAssignmentEstimator CleanupClient StorageQueryService MAC_Services MessageBus )
+lofar_package(ResourceAssigner 0.1 DEPENDS PyMessaging PyCommon pyparameterset OTDB_Services RACommon ResourceAssignmentService MoMQueryServiceClient ResourceAssignmentEstimator CleanupClient StorageQueryService MAC_Services MessageBus RATaskSpecifiedService)
 
 include(PythonInstall)
 set(USE_PYTHON_COMPILATION Off)
diff --git a/SAS/ResourceAssignment/ResourceAssigner/bin/CMakeLists.txt b/SAS/ResourceAssignment/ResourceAssigner/bin/CMakeLists.txt
index a860d72c8d5105b62dcef737b7bbeaf1456d57c8..6ba46d9d944d009ae027a1326050c3ae3e322f28 100644
--- a/SAS/ResourceAssignment/ResourceAssigner/bin/CMakeLists.txt
+++ b/SAS/ResourceAssignment/ResourceAssigner/bin/CMakeLists.txt
@@ -1,6 +1,7 @@
 # $Id: CMakeLists.txt 32341 2015-08-28 11:59:26Z schaap $
 
 lofar_add_bin_scripts(resourceassigner)
+lofar_add_bin_scripts(ra_test_environment)
 
 # supervisord config files
 lofar_add_sysconf_files(resourceassigner.ini
diff --git a/SAS/ResourceAssignment/ResourceAssigner/bin/ra_test_environment b/SAS/ResourceAssignment/ResourceAssigner/bin/ra_test_environment
new file mode 100755
index 0000000000000000000000000000000000000000..65e7110c5b4278e4a35dbfdb0af84d156bf8eec9
--- /dev/null
+++ b/SAS/ResourceAssignment/ResourceAssigner/bin/ra_test_environment
@@ -0,0 +1,27 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+'''Startup script for the resource assigner test environment'''
+
+import sys
+from lofar.sas.resourceassignment.resourceassigner.test.ra_test_environment import main
+
+if __name__ == '__main__':
+    main()
+
diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/CMakeLists.txt b/SAS/ResourceAssignment/ResourceAssigner/lib/CMakeLists.txt
index a0bdc997103cd89e0b439b214d6ae217e903292e..3d94fc4e5d3be20e76d4df2db12408812ebf7bce 100644
--- a/SAS/ResourceAssignment/ResourceAssigner/lib/CMakeLists.txt
+++ b/SAS/ResourceAssignment/ResourceAssigner/lib/CMakeLists.txt
@@ -3,6 +3,7 @@
 python_install(
     __init__.py
     raservice.py
+    rarpc.py
     resource_assigner.py
     resource_availability_checker.py
     rabuslistener.py
diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/rarpc.py b/SAS/ResourceAssignment/ResourceAssigner/lib/rarpc.py
new file mode 100755
index 0000000000000000000000000000000000000000..75fe6059ed1fc2f9098c774c600d3439e7810960
--- /dev/null
+++ b/SAS/ResourceAssignment/ResourceAssigner/lib/rarpc.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python3
+
+# rarpc.py: RPC client for the ResourceAssigner's "RAService" RPC service
+#
+# Copyright (C) 2015
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it
+# and/or modify it under the terms of the GNU General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be
+# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+#
+# $Id: raservice.py 1580 2015-09-30 14:18:57Z loose $
+
+"""
+RARPC is a thin RPC client for the ResourceAssigner's "RAService" service: it
+submits a specification tree and asks the service to assign resources to it.
+""" + +import logging +logger = logging.getLogger(__name__) + +from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME +from lofar.messaging.rpc import RPCClientContextManagerMixin, RPCClient + + +class RARPC(RPCClientContextManagerMixin): + def __init__(self, rpc_client: RPCClient = None): + """Create an instance of the RARPC using the given RPCClient, + or if None given, to a default RPCClient connecting to the "RAService" service""" + super().__init__() + self._rpc_client = rpc_client or RPCClient(service_name="RAService") + + @staticmethod + def create(exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER): + """Create an RARPC connecting to the given exchange/broker on the default "RAService" service""" + return RARPC(RPCClient(service_name="RAService", exchange=exchange, broker=broker)) + + def do_assignment(self, specification_tree): + return self._rpc_client.execute('do_assignment', specification_tree=specification_tree) + + diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/raservice.py b/SAS/ResourceAssignment/ResourceAssigner/lib/raservice.py index 66709f74dffe1411ff16e8be70de95ad1eee5745..87bd0c28498da6667c44864f5a9f59d403b0cbbe 100755 --- a/SAS/ResourceAssignment/ResourceAssigner/lib/raservice.py +++ b/SAS/ResourceAssignment/ResourceAssigner/lib/raservice.py @@ -31,6 +31,7 @@ import logging from lofar.common import dbcredentials from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME +from lofar.messaging.rpc import RPCService, ServiceMessageHandler from lofar.sas.resourceassignment.rataskspecified.RABusListener import RATaskSpecifiedEventMessageHandler, RATaskSpecifiedBusListener from lofar.sas.resourceassignment.resourceassigner.resource_assigner import ResourceAssigner from lofar.sas.resourceassignment.resourceassigner.schedulechecker import ScheduleChecker @@ -53,11 +54,63 @@ class SpecifiedTaskEventMessageHandler(RATaskSpecifiedEventMessageHandler): logger.info('onTaskSpecified: otdb_id=%s status=%s', otdb_id, specification_tree.get('status', '').lower()) try: - self.assigner.do_assignment(otdb_id, specification_tree) + self.assigner.do_assignment(specification_tree) except Exception as e: logger.error(str(e)) -__all__ = ["SpecifiedTaskEventMessageHandler"] +class ResourceAssignerServiceMessageHandler(ServiceMessageHandler): + def __init__(self, assigner=None): + super().__init__() + self.assigner = assigner + + def do_assignment(self, specification_tree): + return self.assigner.do_assignment(specification_tree) + +class RAService: + def __init__(self, radbcreds: dbcredentials.DBCredentials=None, exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER): + if radbcreds is None: + radbcreds = dbcredentials.DBCredentials().get("RADB") + logger.info("Read default RADB dbcreds from disk: %s" % radbcreds.stringWithHiddenPassword()) + + self.assigner = ResourceAssigner(exchange=exchange, broker=broker, radb_dbcreds=radbcreds) + + # create a buslistener for the Event-Driven paradigm, doing resourceassignment upon receiving a RATaskSpecified-EventMessage + self.rpcservice = RPCService(service_name="RAService", + handler_type=ResourceAssignerServiceMessageHandler, + handler_kwargs={"assigner": self.assigner}, + exchange=exchange, + broker=broker) + + # create a service for the blocking RPC call, doing resourceassignment upon receiving a RequestMessage + self.rataskspecifiedbuslistener = RATaskSpecifiedBusListener(handler_type=SpecifiedTaskEventMessageHandler, + handler_kwargs={"assigner": self.assigner}, + exchange=exchange, + broker=broker) + + def 
diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/raservice.py b/SAS/ResourceAssignment/ResourceAssigner/lib/raservice.py
index 66709f74dffe1411ff16e8be70de95ad1eee5745..87bd0c28498da6667c44864f5a9f59d403b0cbbe 100755
--- a/SAS/ResourceAssignment/ResourceAssigner/lib/raservice.py
+++ b/SAS/ResourceAssignment/ResourceAssigner/lib/raservice.py
@@ -31,6 +31,7 @@ import logging
 
 from lofar.common import dbcredentials
 from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME
+from lofar.messaging.rpc import RPCService, ServiceMessageHandler
 from lofar.sas.resourceassignment.rataskspecified.RABusListener import RATaskSpecifiedEventMessageHandler, RATaskSpecifiedBusListener
 from lofar.sas.resourceassignment.resourceassigner.resource_assigner import ResourceAssigner
 from lofar.sas.resourceassignment.resourceassigner.schedulechecker import ScheduleChecker
@@ -53,11 +54,63 @@ class SpecifiedTaskEventMessageHandler(RATaskSpecifiedEventMessageHandler):
         logger.info('onTaskSpecified: otdb_id=%s status=%s', otdb_id, specification_tree.get('status', '').lower())
 
         try:
-            self.assigner.do_assignment(otdb_id, specification_tree)
+            self.assigner.do_assignment(specification_tree)
         except Exception as e:
             logger.error(str(e))
 
-__all__ = ["SpecifiedTaskEventMessageHandler"]
+class ResourceAssignerServiceMessageHandler(ServiceMessageHandler):
+    def __init__(self, assigner=None):
+        super().__init__()
+        self.assigner = assigner
+
+    def do_assignment(self, specification_tree):
+        return self.assigner.do_assignment(specification_tree)
+
+class RAService:
+    def __init__(self, radbcreds: dbcredentials.DBCredentials=None, exchange: str = DEFAULT_BUSNAME, broker: str = DEFAULT_BROKER):
+        if radbcreds is None:
+            radbcreds = dbcredentials.DBCredentials().get("RADB")
+            logger.info("Read default RADB dbcreds from disk: %s" % radbcreds.stringWithHiddenPassword())
+
+        self.assigner = ResourceAssigner(exchange=exchange, broker=broker, radb_dbcreds=radbcreds)
+
+        # create a service for the blocking RPC call, doing resourceassignment upon receiving a RequestMessage
+        self.rpcservice = RPCService(service_name="RAService",
+                                     handler_type=ResourceAssignerServiceMessageHandler,
+                                     handler_kwargs={"assigner": self.assigner},
+                                     exchange=exchange,
+                                     broker=broker)
+
+        # create a buslistener for the event-driven paradigm, doing resourceassignment upon receiving a RATaskSpecified EventMessage
+        self.rataskspecifiedbuslistener = RATaskSpecifiedBusListener(handler_type=SpecifiedTaskEventMessageHandler,
+                                                                     handler_kwargs={"assigner": self.assigner},
+                                                                     exchange=exchange,
+                                                                     broker=broker)
+
+    def start_listening(self):
+        self.assigner.open()
+        self.rpcservice.start_listening()
+        self.rataskspecifiedbuslistener.start_listening()
+
+    def stop_listening(self):
+        self.rpcservice.stop_listening()
+        self.rataskspecifiedbuslistener.stop_listening()
+        self.assigner.close()
+
+    def __enter__(self):
+        try:
+            self.start_listening()
+            return self
+        except Exception as e:
+            # __exit__ (and hence stop_listening) is not called when an exception is raised in __enter__
+            # so, do our own cleanup (log, stop_listening and re-raise).
+            logger.exception("%s error: %s", self, e)
+            self.stop_listening()
+            raise
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.stop_listening()
 
 
 def main():
@@ -84,21 +137,14 @@ def main():
 
     (options, args) = parser.parse_args()
 
-    radb_dbcreds = dbcredentials.parse_options(options)
+    radbcreds = dbcredentials.parse_options(options)
 
     logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
                         level=logging.DEBUG if options.verbose else logging.INFO)
 
-    with ResourceAssigner(exchange=options.exchange,
-                          broker=options.broker,
-                          radb_dbcreds=radb_dbcreds) as assigner:
-        with RATaskSpecifiedBusListener(handler_type=SpecifiedTaskEventMessageHandler,
-                                        handler_kwargs={"assigner": assigner},
-                                        exchange=options.exchange,
-                                        broker=options.broker):
-            with ScheduleChecker(exchange=options.exchange,
-                                 broker=options.broker):
-                waitForInterrupt()
+    with RAService(radbcreds=radbcreds, exchange=options.exchange, broker=options.broker):
+        with ScheduleChecker(exchange=options.exchange, broker=options.broker):
+            waitForInterrupt()
 
 if __name__ == '__main__':
     main()
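With this change work reaches the assigner along two routes: asynchronously via RATaskSpecified event messages (the bus listener) and synchronously via the "RAService" RPC endpoint; both funnel into the same ResourceAssigner.do_assignment. A minimal embedding sketch, mirroring main() and assuming default RADB credentials and broker:

    from lofar.common.util import waitForInterrupt
    from lofar.sas.resourceassignment.resourceassigner.raservice import RAService

    # One context manager owns the assigner, the RPC service and the bus
    # listener; __enter__/__exit__ wrap start_listening()/stop_listening().
    with RAService():  # falls back to the RADB dbcreds on disk
        waitForInterrupt()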
diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py b/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py
index 113d6ab2198c5930ae1c6b791c3c4b1961816bd2..e97993b95a5533e282a1c2b106dbd514abc9b071 100755
--- a/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py
+++ b/SAS/ResourceAssignment/ResourceAssigner/lib/resource_assigner.py
@@ -129,7 +129,7 @@ class ResourceAssigner(object):
 
         return {rt['name']: rt['id'] for rt in self.radb.getResourceTypes()}
 
-    def do_assignment(self, otdb_id, specification_tree):
+    def do_assignment(self, specification_tree):
         """
         Makes the given task known to RADB and attempts to assign (schedule) its requested resources.
 
@@ -138,13 +138,15 @@ class ResourceAssigner(object):
         with other tasks, hence its status will be set to "conflict" in RADB. If all requested resources are
         successfully assigned, its status will be put to "scheduled" in RADB.
 
-        :param otdb_id: OTDB ID of the main task which resources need to be assigned
         :param specification_tree: the specification tree containing the main task and its resources
 
         :raises an Exception if something unforeseen happened while scheduling
         """
 
-        logger.info('do_assignment: otdb_id=%s specification_tree=%s', otdb_id, specification_tree)
+        otdb_id = specification_tree.get('otdb_id')
+        tmss_id = specification_tree.get('tmss_id')
+
+        logger.info('do_assignment: otdb_id=%s tmss_id=%s specification_tree=%s', otdb_id, tmss_id, specification_tree)
 
         spec = Specification(self.otdbrpc, self.momrpc, self.radb)
         spec.from_dict(specification_tree)
@@ -154,7 +156,7 @@ class ResourceAssigner(object):
         # specification, task and predecessor/successor relations, so approved tasks appear correctly in the web
         # scheduler.
         if spec.status == 'approved': # Only needed to send misc field info (storagemanager) to OTDB
-            logger.info('Task otdb_id=%s is only approved, no resource assignment needed yet' % otdb_id)
+            logger.info('Task otdb_id=%s tmss_id=%s is only approved, no resource assignment needed yet' % (otdb_id, tmss_id))
             self._send_task_status_notification(spec, 'approved')
             return
         #TODO have Specification propagate to the estimator?
@@ -166,6 +168,7 @@ class ResourceAssigner(object):
             # our subscribers
             spec.set_status('scheduled')
             self._send_task_status_notification(spec, 'scheduled')
+            return True
         else:
             # Scheduling of resources for this task failed,
             # check if any of the claims has status conflict,
@@ -185,6 +188,7 @@ class ResourceAssigner(object):
                 # The task is in an unexpected state, so force it to 'error' state and notify our subscribers
                 spec.set_status('error')
                 self._send_task_status_notification(spec, 'error')
+                return False
 
     def _send_task_status_notification(self, spec, new_status):
         """
@@ -199,6 +203,7 @@ class ResourceAssigner(object):
         content = {
             'radb_id': spec.radb_id,
             'otdb_id': spec.otdb_id,
+            'tmss_id': spec.tmss_id,
             'mom_id': spec.mom_id
         }
         subject = 'Task' + new_status[0].upper() + new_status[1:] #TODO this is MAGIC, needs explanation!
@@ -235,7 +240,8 @@ class ResourceAssigner(object):
         validation failed.
         """
 
-        otdb_id = specification_tree['otdb_id']
+        otdb_id = specification_tree.get('otdb_id')
+        tmss_id = specification_tree.get('tmss_id')
 
         estimates = self.rerpc.get_estimated_resources(specification_tree)
         logger.info('Resource Estimator reply = %s', estimates)
@@ -243,7 +249,7 @@ class ResourceAssigner(object):
         if estimates['errors']:
             for error in estimates['errors']:
                 logger.error("Error from Resource Estimator: %s", error)
-            raise ValueError("Error(s) in estimator for otdb_id=%s" % (otdb_id, ))
+            raise ValueError("Error(s) in estimator for otdb_id=%s tmss_id=%s" % (otdb_id, tmss_id))
 
         if any('resource_types' not in est for est in estimates['estimates']):
             raise ValueError("missing 'resource_types' in 'estimates' in estimator results: %s" % estimates)
@@ -323,7 +329,7 @@ class ResourceAssigner(object):
         """
 
         # Only needed for pipeline tasks
-        if spec.type == 'pipeline':
+        if spec.type == 'pipeline' and spec.otdb_id is not None:
             try:
                 du_result = self.sqrpc.getDiskUsageForOTDBId(spec.otdb_id,
                                                              include_scratch_paths=True,
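do_assignment now also reports its outcome: True when all claims succeeded and the task went to 'scheduled', False when it was forced to 'error', and None on the early 'approved' return (and, implicitly, on the 'conflict' path). A sketch of how a caller such as the RPC handler above can interpret that; assigner and spec_tree as elsewhere:

    result = assigner.do_assignment(spec_tree)
    if result is None:
        print("nothing scheduled: task only approved, or left in conflict")
    elif result:
        print("all resources claimed; task status is 'scheduled'")
    else:
        print("assignment failed; task status is 'error'")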
diff --git a/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py b/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py
index 334008e611918f6959bcdd166e50f0da12b6cf99..ea374250236b38524742631147c1c98879f7867b 100644
--- a/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py
+++ b/SAS/ResourceAssignment/ResourceAssigner/lib/schedulers.py
@@ -552,6 +552,10 @@ class PriorityScheduler(StationScheduler):
         logger.debug("my_task_priority, messing around with MoM QS")
         my_momid = self.task["mom_id"]
 
+        if my_momid is None:
+            logger.debug("PriorityScheduler: returning default priority of -1 for non-MoM task %s", self.task.get('otdb_id', self.task.get('tmss_id')))
+            return -1
+
         priority_dict = self.momqueryservice.get_project_priorities_for_objects([my_momid])
         my_priority = priority_dict[my_momid]
 
@@ -676,10 +680,12 @@ class PriorityScheduler(StationScheduler):
         logger.debug("PriorityScheduler: conflicting tasks are %s", conflicting_tasks)
 
         # check which tasks we can kill
-        task_priorities = self.momqueryservice.get_project_priorities_for_objects(conflicting_task_momids)
-        logger.debug("PriorityScheduler: conflicting task priorities are %s", task_priorities)
-        # We can't kill tasks without a mom_id (reservations and such) !
-        kill_task_list = [t for t in conflicting_tasks if t["mom_id"] is not None and task_priorities[t["mom_id"]] < self._my_task_priority()]
+        kill_task_list = []
+        if conflicting_task_momids:
+            task_priorities = self.momqueryservice.get_project_priorities_for_objects(conflicting_task_momids)
+            logger.debug("PriorityScheduler: conflicting task priorities are %s", task_priorities)
+            # We can't kill tasks without a mom_id (reservations and such) !
+            kill_task_list = [t for t in conflicting_tasks if t["mom_id"] is not None and task_priorities[t["mom_id"]] < self._my_task_priority()]
 
         logger.debug("PriorityScheduler: task kill list is %s", kill_task_list)
 
         # update if we're blocked by an earlier task than we know so far
diff --git a/SAS/ResourceAssignment/ResourceAssigner/test/CMakeLists.txt b/SAS/ResourceAssignment/ResourceAssigner/test/CMakeLists.txt
index d027157190e03bbc9dbe5fa71be08049de3aa056..a20e9331a4ed9473feefea7c19a4a5328a1486b9 100644
--- a/SAS/ResourceAssignment/ResourceAssigner/test/CMakeLists.txt
+++ b/SAS/ResourceAssignment/ResourceAssigner/test/CMakeLists.txt
@@ -1,6 +1,11 @@
 # $Id: CMakeLists.txt 32333 2015-08-28 08:15:24Z schaap $
 
 include(LofarCTest)
 
+python_install(
+    ra_test_environment.py
+    DESTINATION lofar/sas/resourceassignment/resourceassigner/test)
+
+
 lofar_add_test(t_resourceassigner)
 lofar_add_test(t_schedulechecker)
 lofar_add_test(t_schedulers)
diff --git a/SAS/ResourceAssignment/ResourceAssigner/test/ra_test_environment.py b/SAS/ResourceAssignment/ResourceAssigner/test/ra_test_environment.py
new file mode 100644
index 0000000000000000000000000000000000000000..5046b4eb97f63c4354418a1352c9e4803c641054
--- /dev/null
+++ b/SAS/ResourceAssignment/ResourceAssigner/test/ra_test_environment.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python3
+
+# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This file is part of the LOFAR software suite.
+# The LOFAR software suite is free software: you can redistribute it and/or
+# modify it under the terms of the GNU General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# The LOFAR software suite is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
+
+# $Id:  $
+
+import os
+import time
+from multiprocessing import Process, Event
+
+import logging
+logger = logging.getLogger(__name__)
+
+from lofar.common.dbcredentials import Credentials, DBCredentials
+from lofar.common.util import find_free_port, waitForInterrupt
+from lofar.messaging.config import DEFAULT_BROKER, DEFAULT_BUSNAME
+from lofar.sas.resourceassignment.database.radb import RADatabase
+from lofar.sas.resourceassignment.database.testing.radb_common_testing import RADBTestDatabaseInstance
+from lofar.sas.resourceassignment.resourceassigner.raservice import RAService
+from lofar.sas.resourceassignment.resourceassignmentservice.service import createService as createRADBService
+from lofar.sas.resourceassignment.resourceassignmentestimator.service import createService as createEstimatorService
+
+class RATestEnvironment:
+    '''Create and run several ResourceAssigner services in an isolated test environment'''
+    def __init__(self,
+                 exchange: str=os.environ.get("RA_EXCHANGE", DEFAULT_BUSNAME), broker: str=os.environ.get("RA_BROKER", DEFAULT_BROKER)):
+        self.radb_test_instance = RADBTestDatabaseInstance()
+        self.radb = self.radb_test_instance.create_database_connection()
+        self.radb_service = createRADBService(dbcreds=self.radb_test_instance.dbcreds, exchange=exchange, broker=broker)
+        self.re_service = createEstimatorService(exchange=exchange, broker=broker)
+        self.ra_service = RAService(radbcreds=self.radb_test_instance.dbcreds, exchange=exchange, broker=broker)
+
+    def start(self):
+        self.radb_test_instance.create()
+        self.radb.connect()
+        self.radb_service.start_listening()
+        self.re_service.start_listening()
+        self.ra_service.start_listening()
+
+    def stop(self):
+        self.radb.disconnect()
+        self.ra_service.stop_listening()
+        self.re_service.stop_listening()
+        self.radb_service.stop_listening()
+        self.radb_test_instance.destroy()
+
+    def __enter__(self):
+        try:
+            self.start()
+        except Exception as e:
+            logger.error(e)
+            self.stop()
+            raise
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.stop()
+
+def main():
+    """instantiate, run and destroy a ResourceAssignment test environment"""
+    from optparse import OptionParser, OptionGroup
+    os.environ['TZ'] = 'UTC'
+
+    parser = OptionParser('%prog [options]',
+                          description='setup/run/teardown a full RA test environment including a fresh and isolated RA database, and resourceassignment services.')
+    group = OptionGroup(parser, 'Messaging options')
+    group.add_option('-b', '--broker', dest='broker', type='string', default=DEFAULT_BROKER, help='Address of the message broker, default: %default')
+    group.add_option('-e', "--exchange", dest="exchange", type="string", default=DEFAULT_BUSNAME, help="Bus or queue where the TMSS messages are published. [default: %default]")
+    parser.add_option_group(group)
+    (options, args) = parser.parse_args()
+
+    logging.basicConfig(format = '%(asctime)s %(levelname)s %(message)s', level = logging.INFO)
+
+    from lofar.sas.resourceassignment.resourceassigner.rarpc import RARPC
+    from datetime import datetime, timedelta
+
+    with RATestEnvironment(exchange=options.exchange, broker=options.broker) as instance:
+        # print some nice info for the user to use the test servers...
+        # use print instead of log for clean lines.
+        for h in logging.root.handlers:
+            h.flush()
+        print()
+        print()
+        print("*****************************************************")
+        print("RADB, and RA-services up and running...")
+        print("*****************************************************")
+        print("RADB Credentials ID: %s" % (instance.radb_test_instance.dbcreds_id, ))
+        print()
+        print("Press Ctrl-C to exit (and remove the test database automatically)")
+        waitForInterrupt()
+
+
+if __name__ == '__main__':
+    main()
\ No newline at end of file
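ra_test_environment.py gives tests and developers a disposable, fully wired RA stack: a fresh RADB plus the RADB, estimator and RA services. A minimal sketch of driving it from a test, assuming a reachable message broker:

    from lofar.sas.resourceassignment.resourceassigner.test.ra_test_environment import RATestEnvironment
    from lofar.sas.resourceassignment.resourceassigner.rarpc import RARPC

    with RATestEnvironment() as env:  # everything is torn down again on exit
        with RARPC.create() as rarpc:
            # hypothetical, heavily trimmed specification tree
            rarpc.do_assignment({"tmss_id": 1, "status": "approved", "specification": {}})
        print(env.radb.getTasks())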
diff --git a/SAS/ResourceAssignment/ResourceAssigner/test/t_resourceassigner.py b/SAS/ResourceAssignment/ResourceAssigner/test/t_resourceassigner.py
index ec65936f4e1dc6276ceac195e6fd88eaf737a54b..983d25b0597acbe298983ad245881a2b12961e16 100755
--- a/SAS/ResourceAssignment/ResourceAssigner/test/t_resourceassigner.py
+++ b/SAS/ResourceAssignment/ResourceAssigner/test/t_resourceassigner.py
@@ -485,10 +485,9 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
 
     @unit_test
     def test_do_assignment_logs_specification(self):
-        self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree)
+        self.resource_assigner.do_assignment(self.specification_tree)
 
-        self.logger_mock.info.assert_any_call('do_assignment: otdb_id=%s specification_tree=%s',
-                                              self.specification_tree['otdb_id'], self.specification_tree)
+        self.logger_mock.info.assert_any_call('do_assignment: otdb_id=%s tmss_id=%s specification_tree=%s', self.specification_tree['otdb_id'], None, self.specification_tree)
 
     @unit_test
     def test_do_assignment_log_non_approved_or_prescheduled_states(self):
@@ -497,7 +496,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         spec_tree = self.non_approved_or_prescheduled_specification_tree
 
         with self.assertRaises(Exception):
-            self.resource_assigner.do_assignment(otdb_id, spec_tree)
+            self.resource_assigner.do_assignment(spec_tree)
 
         assignable_task_states_str = "approved, prescheduled"
         self.logger_mock.warn.assert_any_call(
@@ -511,13 +510,13 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         self.assertIsNone(self.radb.getTask(otdb_id=otdb_id))
 
         self.specification_tree['status'] = 'approved'
-        self.resource_assigner.do_assignment(otdb_id, self.specification_tree)
+        self.resource_assigner.do_assignment(self.specification_tree)
 
         # assure task is known now, and scheduled
         self.assertIsNotNone(self.radb.getTask(otdb_id=otdb_id))
         self.assertEqual('approved', self.radb.getTask(otdb_id=otdb_id)['status'])
 
-        self.logger_mock.info.assert_any_call('Task otdb_id=%s is only approved, no resource assignment needed yet' % otdb_id)
+        self.logger_mock.info.assert_any_call('Task otdb_id=%s tmss_id=None is only approved, no resource assignment needed yet' % otdb_id)
 
     def freeze_time_one_day_in_the_future(self, datetime_mock):
         now = datetime.datetime.utcnow() + datetime.timedelta(days=1)
@@ -543,7 +542,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         self.specification_tree["otdb_id"] = self.otdb_id + 11
         self.specification_tree['status'] = 'prescheduled'
 
-        self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree)
+        self.resource_assigner.do_assignment(self.specification_tree)
 
         task = self.radb.getTask(otdb_id=self.specification_tree["otdb_id"])
         self.assertEqual('error', task['status'])
@@ -553,7 +552,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         self.specification_tree["otdb_id"] = self.no_resources_otdb_id
         self.specification_tree['status'] = 'prescheduled'
 
-        self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree)
+        self.resource_assigner.do_assignment(self.specification_tree)
 
         task = self.radb.getTask(otdb_id=self.specification_tree["otdb_id"])
         self.assertEqual([], self.radb.getResourceClaims(task_ids=task['id']))
@@ -563,7 +562,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         wrong_task_type = "observation"
         self.specification_tree["task_type"] = wrong_task_type
         self.specification_tree['status'] = 'prescheduled'
-        self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree)
+        self.resource_assigner.do_assignment(self.specification_tree)
 
         task = self.radb.getTask(otdb_id=self.specification_tree["otdb_id"])
         self.assertEqual('error', task['status'])
@@ -607,7 +606,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
     @integration_test
     def test_do_assignment_notifies_bus_when_it_was_unable_to_schedule_Conflict(self):
         # prepare: insert a blocking task with a huge claim on storage (directly via the radb, not via the resource_assigner)
-        task_id = self.radb.insertOrUpdateSpecificationAndTask(9876, 9876, 'prescheduled', 'observation',
+        task_id = self.radb.insertOrUpdateSpecificationAndTask(9876, 9876, None, 'prescheduled', 'observation',
                                                                datetime.datetime.utcnow()-datetime.timedelta(days=1),
                                                                datetime.datetime.utcnow()+datetime.timedelta(days=1),
                                                                "", "CEP4")['task_id']
@@ -628,7 +627,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
 
         # now test the resource_assigner.do_assignment. Should not succeed. Task and claims should go to conflict status.
         self.specification_tree['status'] = 'prescheduled'
-        self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree)
+        self.resource_assigner.do_assignment(self.specification_tree)
 
         # check if task is in the radb, and if status is in conflict
         resulting_task = self.radb.getTask(otdb_id=self.specification_tree['otdb_id'])
@@ -636,7 +635,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         self.assertEqual('conflict', resulting_task['status'])
 
         # check if TaskConflict notification was logged and send
-        content = {'radb_id': resulting_task['id'], 'otdb_id': resulting_task['otdb_id'], 'mom_id': resulting_task['mom_id']}
+        content = {'radb_id': resulting_task['id'], 'otdb_id': resulting_task['otdb_id'], 'tmss_id': None, 'mom_id': resulting_task['mom_id']}
         subject = 'TaskConflict'
         self.assertBusNotificationAndLogging(content, subject)
@@ -655,7 +654,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
 
         # now test the resource_assigner.do_assignment. Should not succeed. Task should go to error status.
         self.specification_tree['status'] = 'prescheduled'
-        self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree)
+        self.resource_assigner.do_assignment(self.specification_tree)
 
         # check if task is in the radb, and if status is in error
         resulting_task = self.radb.getTask(otdb_id=self.specification_tree['otdb_id'])
@@ -663,7 +662,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         self.assertEqual('error', resulting_task['status'])
 
         # check if TaskError notification was logged and send
-        content = {'radb_id': resulting_task['id'], 'otdb_id': resulting_task['otdb_id'], 'mom_id': resulting_task['mom_id']}
+        content = {'radb_id': resulting_task['id'], 'otdb_id': resulting_task['otdb_id'], 'tmss_id': None, 'mom_id': resulting_task['mom_id']}
         subject = 'TaskError'
         self.assertBusNotificationAndLogging(content, subject)
@@ -683,7 +682,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         # check if the task assignment results in an error twice (apparently it didn't someday for whatever reason)
         for i in range(2):
             self.specification_tree['status'] = 'prescheduled'
-            self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree)
+            self.resource_assigner.do_assignment(self.specification_tree)
 
             # check if task is in the radb, and if status is in error
             resulting_task = self.radb.getTask(otdb_id=self.specification_tree['otdb_id'])
@@ -691,7 +690,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
             self.assertEqual('error', resulting_task['status'])
 
             # check if TaskError notification was logged and send
-            content = {'radb_id': resulting_task['id'], 'otdb_id': resulting_task['otdb_id'], 'mom_id': resulting_task['mom_id']}
+            content = {'radb_id': resulting_task['id'], 'otdb_id': resulting_task['otdb_id'], 'tmss_id': None, 'mom_id': resulting_task['mom_id']}
             subject = 'TaskError'
             self.assertBusNotificationAndLogging(content, subject)
@@ -702,14 +701,14 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
     @unit_test
     def test_do_assignment_logs_task_data_removal_if_task_is_pipeline(self):
         self.specification_tree['status'] = 'prescheduled'
-        self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree)
+        self.resource_assigner.do_assignment(self.specification_tree)
 
         self.logger_mock.info.assert_any_call("removing data on disk from previous run for otdb_id %s", self.otdb_id)
 
     @unit_test
     def test_do_assignment_removes_task_data_if_task_is_pipeline(self):
         self.specification_tree['status'] = 'prescheduled'
-        self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree)
+        self.resource_assigner.do_assignment(self.specification_tree)
 
         self.curpc_mock.removeTaskData.assert_any_call(self.specification_tree['otdb_id'])
 
@@ -719,7 +718,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         self.curpc_mock.removeTaskData.return_value = {'deleted': False, 'message': message}
 
         self.specification_tree['status'] = 'prescheduled'
-        self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree)
+        self.resource_assigner.do_assignment(self.specification_tree)
 
         self.logger_mock.warning.assert_any_call(
             "could not remove all data on disk from previous run for otdb_id %s: %s", self.otdb_id, message)
@@ -735,14 +734,14 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         self.assertIsNone(self.radb.getTask(otdb_id=otdb_id))
 
         self.specification_tree['status'] = 'prescheduled'
-        self.resource_assigner.do_assignment(otdb_id, self.specification_tree)
+        self.resource_assigner.do_assignment(self.specification_tree)
 
         self.logger_mock.error.assert_any_call("Exception in cleaning up earlier data: %s", exception_str)
 
     @integration_test
     def test_do_assignment_notifies_bus_when_task_is_scheduled(self):
         self.specification_tree['status'] = 'prescheduled'
-        self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree)
+        self.resource_assigner.do_assignment(self.specification_tree)
 
         # check if task is in the radb, and if status is scheduled
         resulting_task = self.radb.getTask(otdb_id=self.specification_tree['otdb_id'])
@@ -750,7 +749,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         self.assertEqual('scheduled', resulting_task['status'])
 
         # check if TaskScheduled notification was logged and send
-        content = {'radb_id': resulting_task['id'], 'otdb_id': resulting_task['otdb_id'], 'mom_id': resulting_task['mom_id']}
+        content = {'radb_id': resulting_task['id'], 'otdb_id': resulting_task['otdb_id'], 'tmss_id': None, 'mom_id': resulting_task['mom_id']}
         subject = 'TaskScheduled'
         self.assertBusNotificationAndLogging(content, subject)
@@ -761,7 +760,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         self.assertIsNone(self.radb.getTask(otdb_id=otdb_id))
 
         self.specification_tree['status'] = 'prescheduled'
-        self.resource_assigner.do_assignment(otdb_id, self.specification_tree)
+        self.resource_assigner.do_assignment(self.specification_tree)
 
         # assure task is known now, and scheduled
         self.assertIsNotNone(self.radb.getTask(otdb_id=otdb_id))
@@ -779,8 +778,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
 
         with self.assertRaisesRegexp(Exception, exception_str):
             self.mom_bug_specification_tree['status'] = 'prescheduled'
-            self.resource_assigner.do_assignment(self.mom_bug_specification_tree['otdb_id'],
-                                                 self.mom_bug_specification_tree)
+            self.resource_assigner.do_assignment(self.mom_bug_specification_tree)
 
         self.logger_mock.error.assert_any_call(exception_str)
 
@@ -790,7 +788,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         self.rerpc_mock.side_effect = Exception(exception_msg)
 
         with self.assertRaisesRegexp(Exception, exception_msg):
-            self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree)
+            self.resource_assigner.do_assignment(self.specification_tree)
 
         self.logger_mock.error.assert_any_call(exception_msg)
 
@@ -800,7 +798,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         self.ra_notification_bus_mock.send.side_effect = Exception(exception_msg)
 
         with self.assertRaisesRegexp(Exception, exception_msg):
-            self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree)
+            self.resource_assigner.do_assignment(self.specification_tree)
 
         self.logger_mock.error.assert_any_call(exception_msg)
 
@@ -810,7 +808,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
         self.momrpc_mock.getPredecessorIds.side_effect = Exception(exception_msg)
 
         with self.assertRaisesRegexp(Exception, exception_msg):
-            self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree)
+            self.resource_assigner.do_assignment(self.specification_tree)
 
         self.logger_mock.error.assert_any_call(exception_msg)
 
@@ -820,7 +818,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase):
self.momrpc_mock.getSuccessorIds.side_effect = Exception(exception_msg) with self.assertRaisesRegexp(Exception, exception_msg): - self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree) + self.resource_assigner.do_assignment(self.specification_tree) self.logger_mock.error.assert_any_call(exception_msg) @@ -844,7 +842,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase): prio_scheduler_mock().allocate_resources.return_value = (False, None) self.specification_tree['status'] = 'prescheduled' - self.resource_assigner.do_assignment(self.specification_tree['otdb_id'], self.specification_tree) + self.resource_assigner.do_assignment(self.specification_tree) basic_scheduler_mock().__enter__.assert_called() basic_scheduler_mock().__exit__.assert_called() @@ -863,7 +861,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase): predecessor_otdb_id = predecessor_spec['otdb_id'] predecessor_spec['status'] = 'prescheduled' predecessor_spec['predecessors'] = [] - self.resource_assigner.do_assignment(predecessor_otdb_id, predecessor_spec) + self.resource_assigner.do_assignment(predecessor_spec) # check if task is in the radb, and if status is scheduled predecessor_task = self.radb.getTask(otdb_id=predecessor_otdb_id) @@ -891,7 +889,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase): # it should be possible to scheduled the successor twice and link it twice to the predecessor. # the second time, it should just be 'cleaned-up' and rescheduled/relinked. for i in range(2): - self.resource_assigner.do_assignment(successor_otdb_id, successor_spec) + self.resource_assigner.do_assignment(successor_spec) # check if task is in the radb, and if status is scheduled successor_task = self.radb.getTask(otdb_id=successor_otdb_id) @@ -911,7 +909,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase): The resource assigner should be able to handle that, or prevent that.''' # prepare: insert a blocking task with a huge claim on storage (directly via the radb, not via the resource_assigner) - task_id = self.radb.insertOrUpdateSpecificationAndTask(9876, 9876, 'approved', 'observation', + task_id = self.radb.insertOrUpdateSpecificationAndTask(9876, 9876, None, 'approved', 'observation', datetime.datetime.utcnow()-datetime.timedelta(days=1), datetime.datetime.utcnow()+datetime.timedelta(days=1), "", "CEP4")['task_id'] @@ -932,7 +930,7 @@ class ResourceAssignerTest(RADBCommonTestMixin, unittest.TestCase): self.assertEqual('active', self.radb.getTask(task_id)['status']) # create a second task (caused by a trigger) - task2_id = self.radb.insertOrUpdateSpecificationAndTask(8765, 8765, 'approved', 'observation', + task2_id = self.radb.insertOrUpdateSpecificationAndTask(8765, 8765, None, 'approved', 'observation', datetime.datetime.utcnow(), datetime.datetime.utcnow()+datetime.timedelta(hours=1), "", "CEP4")['task_id'] diff --git a/SAS/ResourceAssignment/ResourceAssigner/test/t_schedulers.py b/SAS/ResourceAssignment/ResourceAssigner/test/t_schedulers.py index b6d87543c97c90854446df9ed5ad29958d1bb727..4a2ba929b7c594430338881da6e1b099a2e9a8f3 100755 --- a/SAS/ResourceAssignment/ResourceAssigner/test/t_schedulers.py +++ b/SAS/ResourceAssignment/ResourceAssigner/test/t_schedulers.py @@ -86,6 +86,7 @@ class BasicSchedulerTest(SchedulerTest): return self.radb.insertOrUpdateSpecificationAndTask(mom_id=mom_otdb_id, otdb_id=mom_otdb_id, + tmss_id=None, task_status='approved', task_type='observation', starttime=starttime, @@ -506,6 
+507,7 @@ class PrioritySchedulerTest(StationSchedulerTest): def new_task_without_momid(self, otdb_id): return self.radb.insertOrUpdateSpecificationAndTask(mom_id=None, otdb_id=otdb_id, + tmss_id=None, task_status='approved', task_type='observation', starttime=datetime.datetime(2017, 1, 1, 1, 0, 0), diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py index 43a7c7674b9666c27efe62c574796e425ef6e7ff..29358de27d660b822a48c48a705f5dd0ec6ff135 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb.py @@ -134,8 +134,8 @@ class RADatabase(PostgresDatabaseConnection): raise KeyError('No such status_id: %s. Valid values are: %s' % (status_id, ', '.join([x['id'] for x in self.getResourceClaimStatuses()]))) - def getTasksTimeWindow(self, task_ids=None, mom_ids=None, otdb_ids=None): - if len([x for x in [task_ids, mom_ids, otdb_ids] if x != None]) > 1: - raise KeyError("Provide either task_ids or mom_ids or otdb_ids, not multiple kinds.") + def getTasksTimeWindow(self, task_ids=None, mom_ids=None, otdb_ids=None, tmss_ids=None): + if len([x for x in [task_ids, mom_ids, otdb_ids, tmss_ids] if x != None]) > 1: + raise KeyError("Provide either task_ids or mom_ids or otdb_ids or tmss_ids, not multiple kinds.") query = '''SELECT min(starttime) as min_starttime, max(endtime) as max_endtime from resource_allocation.task_view''' @@ -173,6 +173,16 @@ class RADatabase(PostgresDatabaseConnection): elif len(otdb_ids) == 0: #assume a list/enumerable of id's, length 0 return [] + if tmss_ids is not None: + if isinstance(tmss_ids, int): # just a single id + conditions.append('tmss_id = %s') + qargs.append(tmss_ids) + elif len(tmss_ids) > 0: #assume a list/enumerable of id's + conditions.append('tmss_id in %s') + qargs.append(tuple(tmss_ids)) + elif len(tmss_ids) == 0: #assume a list/enumerable of id's, length 0 + return [] + if conditions: query += ' WHERE ' + ' AND '.join(conditions) @@ -184,9 +194,9 @@ class RADatabase(PostgresDatabaseConnection): return result - def getTasks(self, lower_bound=None, upper_bound=None, task_ids=None, task_status=None, task_type=None, mom_ids=None, otdb_ids=None, cluster=None): - if len([x for x in [task_ids, mom_ids, otdb_ids] if x != None]) > 1: - raise KeyError("Provide either task_ids or mom_ids or otdb_ids, not multiple kinds.") + def getTasks(self, lower_bound=None, upper_bound=None, task_ids=None, task_status=None, task_type=None, mom_ids=None, otdb_ids=None, tmss_ids=None, cluster=None): + if len([x for x in [task_ids, mom_ids, otdb_ids, tmss_ids] if x != None]) > 1: + raise KeyError("Provide either task_ids or mom_ids or otdb_ids or tmss_ids, not multiple kinds.") query = '''SELECT * from resource_allocation.task_view''' @@ -231,6 +241,16 @@ class RADatabase(PostgresDatabaseConnection): elif len(otdb_ids) == 0: #assume a list/enumerable of id's, length 0 return [] + if tmss_ids is not None: + if isinstance(tmss_ids, int): # just a single id + conditions.append('tmss_id = %s') + qargs.append(tmss_ids) + elif len(tmss_ids) > 0: #assume a list/enumerable of id's + conditions.append('tmss_id in %s') + qargs.append(tuple(tmss_ids)) + elif len(tmss_ids) == 0: #assume a list/enumerable of id's, length 0 + return [] + task_status, task_type = self._convertTaskTypeAndStatusToIds(task_status, task_type) if task_status is not None: @@ -272,9 +292,9 @@ class RADatabase(PostgresDatabaseConnection): return tasks - def getTask(self, id=None, mom_id=None, otdb_id=None, specification_id=None): - '''get a task for either the given (task)id,
or for the given mom_id, or for the given otdb_id, or for the given specification_id''' - ids = [id, mom_id, otdb_id, specification_id] + def getTask(self, id=None, mom_id=None, otdb_id=None, tmss_id=None, specification_id=None): + '''get a task for either the given (task)id, or for the given mom_id, or for the given otdb_id, or for the given tmss_id, or for the given specification_id''' + ids = [id, mom_id, otdb_id, tmss_id, specification_id] validIds = [x for x in ids if x != None] if len(validIds) != 1: @@ -287,6 +307,8 @@ class RADatabase(PostgresDatabaseConnection): query += '''where tv.mom_id = (%s);''' elif otdb_id is not None: query += '''where tv.otdb_id = (%s);''' + elif tmss_id is not None: + query += '''where tv.tmss_id = (%s);''' elif specification_id is not None: query += '''where tv.specification_id = (%s);''' @@ -327,24 +349,27 @@ class RADatabase(PostgresDatabaseConnection): '''converts task_status and task_type to id's in case one and/or the other are strings''' return self._convertTaskStatusToId(task_status), self._convertTaskTypeToId(task_type) - def insertTask(self, mom_id, otdb_id, task_status, task_type, specification_id, commit=True): + def insertTask(self, mom_id, otdb_id, tmss_id, task_status, task_type, specification_id, commit=True): if isinstance(mom_id, int) and mom_id < 0: mom_id = None if isinstance(otdb_id, int) and otdb_id < 0: otdb_id = None - logger.info('insertTask mom_id=%s, otdb_id=%s, task_status=%s, task_type=%s, specification_id=%s' % - (mom_id, otdb_id, task_status, task_type, specification_id)) + if isinstance(tmss_id, int) and tmss_id < 0: + tmss_id = None + + logger.info('insertTask mom_id=%s, otdb_id=%s, tmss_id=%s, task_status=%s, task_type=%s, specification_id=%s' % + (mom_id, otdb_id, tmss_id, task_status, task_type, specification_id)) task_status, task_type = self._convertTaskTypeAndStatusToIds(task_status, task_type) query = '''LOCK TABLE resource_allocation.resource_claim, resource_allocation.resource_usage, resource_allocation.task IN EXCLUSIVE MODE; '''\ '''INSERT INTO resource_allocation.task - (mom_id, otdb_id, status_id, type_id, specification_id) - VALUES (%s, %s, %s, %s, %s) + (mom_id, otdb_id, tmss_id, status_id, type_id, specification_id) + VALUES (%s, %s, %s, %s, %s, %s) RETURNING id;''' - id = self.executeQuery(query, (mom_id, otdb_id, task_status, task_type, specification_id), fetch=FETCH_ONE).get('id') + id = self.executeQuery(query, (mom_id, otdb_id, tmss_id, task_status, task_type, specification_id), fetch=FETCH_ONE).get('id') if commit: self.commit() return id @@ -377,7 +402,24 @@ class RADatabase(PostgresDatabaseConnection): return self._cursor.rowcount > 0 - def updateTask(self, task_id, mom_id=None, otdb_id=None, task_status=None, task_type=None, specification_id=None, commit=True): + def updateTaskStatusForTMSSId(self, tmss_id, task_status, commit=True): + '''update the status of the task with the given tmss_id; a task_status string is converted to its status id''' + if task_status is not None and isinstance(task_status, str): + #convert task_status string to task_status.id + task_status = self.getTaskStatusId(task_status, True) + + query = '''LOCK TABLE resource_allocation.resource_claim, resource_allocation.resource_usage, resource_allocation.task IN EXCLUSIVE MODE; '''\ + '''UPDATE resource_allocation.task + SET status_id = %s + WHERE resource_allocation.task.tmss_id = %s;''' + + self.executeQuery(query, [task_status, tmss_id]) + if commit: + self.commit() + + return self._cursor.rowcount > 0 + + def updateTask(self, task_id,
mom_id=None, otdb_id=None, tmss_id=None, task_status=None, task_type=None, specification_id=None, commit=True): '''Update the given parameters for the task with given task_id. Inside the database consistency checks are made. When one or more claims of a task are in conflict status, then its task is set to conflict as well, and hence cannot be scheduled. @@ -398,6 +440,10 @@ class RADatabase(PostgresDatabaseConnection): fields.append('otdb_id') values.append(otdb_id) + if tmss_id is not None : + fields.append('tmss_id') + values.append(tmss_id) + if task_status is not None : fields.append('status_id') values.append(task_status) @@ -1567,13 +1613,17 @@ class RADatabase(PostgresDatabaseConnection): else: self.executeQuery('SELECT * from resource_allocation.rebuild_resource_usages_from_claims_for_resource_of_status(%s, %s)', (resource_id, claim_status_id), fetch=FETCH_NONE) - def insertOrUpdateSpecificationAndTask(self, mom_id, otdb_id, task_status, task_type, starttime, endtime, content, cluster, commit=True): + def insertOrUpdateSpecificationAndTask(self, mom_id, otdb_id, tmss_id, task_status, task_type, starttime, endtime, content, cluster, commit=True): ''' Insert a new specification and task in one transaction. Removes resource_claims for existing task with same otdb_id if present in the same transaction. ''' try: - existing_task = self.getTask(otdb_id=otdb_id) + existing_task = None + if otdb_id is not None: + existing_task = self.getTask(otdb_id=otdb_id) + if existing_task is None and tmss_id is not None: + existing_task = self.getTask(tmss_id=tmss_id) if existing_task is None and mom_id is not None: existing_task = self.getTask(mom_id=mom_id) @@ -1584,10 +1634,10 @@ class RADatabase(PostgresDatabaseConnection): taskId = existing_task['id'] self.deleteResourceClaimForTask(existing_task['id'], False) self.updateSpecification(specId, starttime=starttime, endtime=endtime, content=content, cluster=cluster, commit=False) - self.updateTask(taskId, mom_id=mom_id, otdb_id=otdb_id, task_status=task_status, task_type=task_type, commit=False) + self.updateTask(taskId, mom_id=mom_id, otdb_id=otdb_id, tmss_id=tmss_id, task_status=task_status, task_type=task_type, commit=False) else: specId = self.insertSpecification(starttime, endtime, content, cluster, False) - taskId = self.insertTask(mom_id, otdb_id, task_status, task_type, specId, False) + taskId = self.insertTask(mom_id, otdb_id, tmss_id, task_status, task_type, specId, False) if commit: self.commit() diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/create_database.sql b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/create_database.sql index 489b3f6a4d48126a96be4986a18161f92c1f56a5..f2d625e128d4affff62fc1be14f62bece6371bb7 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/create_database.sql +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/radb/sql/create_database.sql @@ -177,6 +177,7 @@ CREATE TABLE resource_allocation.task ( id serial NOT NULL, mom_id integer UNIQUE, otdb_id integer UNIQUE, + tmss_id integer UNIQUE, status_id integer NOT NULL REFERENCES resource_allocation.task_status DEFERRABLE INITIALLY IMMEDIATE, type_id integer NOT NULL REFERENCES resource_allocation.task_type DEFERRABLE INITIALLY IMMEDIATE, specification_id integer NOT NULL REFERENCES resource_allocation.specification ON DELETE CASCADE DEFERRABLE INITIALLY IMMEDIATE, @@ -191,6 +192,9 @@ CREATE INDEX task_mom_id_idx CREATE INDEX task_otdb_id_idx ON resource_allocation.task (otdb_id); +CREATE INDEX
task_tmss_id_idx + ON resource_allocation.task (tmss_id); + CREATE INDEX task_status_id_idx ON resource_allocation.task (status_id); @@ -416,7 +420,7 @@ ALTER TABLE resource_allocation.config -- VIEWS ---------------------------------------------- CREATE OR REPLACE VIEW resource_allocation.task_view AS - SELECT t.id, t.mom_id, t.otdb_id, t.status_id, t.type_id, t.specification_id, + SELECT t.id, t.mom_id, t.otdb_id, t.tmss_id, t.status_id, t.type_id, t.specification_id, ts.name AS status, tt.name AS type, s.starttime, s.endtime, extract(epoch from age(s.endtime, s.starttime)) as duration, s.cluster, (SELECT array_agg(tp.predecessor_id) FROM resource_allocation.task_predecessor tp where tp.task_id=t.id) as predecessor_ids, (SELECT array_agg(tp.task_id) FROM resource_allocation.task_predecessor tp where tp.predecessor_id=t.id) as successor_ids, diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py index 3d27628889007f9e4a19ba2f2be6d4ebec2018af..8ecc6fb4b4ebbb380d168e83e695431d4db9af91 100755 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/radb_common_testing.py @@ -56,7 +56,6 @@ class RADBTestDatabaseInstance(PostgresTestDatabaseInstance): logger.debug("setting up database. applying sql file: %s", sql_path) with open(sql_path) as sql: db.executeQuery(sql.read()) - db.commit() def create_database_connection(self) -> RADatabase: self.radb = RADatabase(self.dbcreds) diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/t_radb_functionality.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/t_radb_functionality.py index 8650940395a9c6ed92b4adf9eadceed98f159b83..c7f35b9e962057b9cb80a6ccef115cba3f539b48 100755 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/t_radb_functionality.py +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/t_radb_functionality.py @@ -139,6 +139,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): task = self.radb.insertOrUpdateSpecificationAndTask(mom_id=mom_id, otdb_id=otdb_id, + tmss_id=None, task_status=self.test_task.task_status, task_type=self.test_task.task_type, starttime=self.test_task.starttime, @@ -434,7 +435,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): """ Verify if radb.insertTask() raises an exception when called with non-existing specification ID """ with self.assertRaises(Exception): - self.radb.insertTask(0, 0, 'conflict', 'observation', 1) + self.radb.insertTask(0, 0, None, 'conflict', 'observation', 1) @integration_test def test_insertTask_with_invalid_id_type_raises_exception(self): @@ -445,7 +446,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): content="", cluster="CEP4") with self.assertRaises(Exception): - self.radb.insertTask('monkey see', 'is monkey do', 'conflict', 'observation', spec_id) + self.radb.insertTask('monkey see', 'is monkey do', None, 'conflict', 'observation', spec_id) @integration_test def test_insertTask_allows_nonexisting_mom_and_otdb_ids(self): @@ -456,7 +457,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): content="", cluster="CEP4") mom_id = otdb_id = -1 - task_id = self.radb.insertTask(mom_id, otdb_id, 'conflict', 'observation', spec_id) + task_id = self.radb.insertTask(mom_id, otdb_id, None, 'conflict', 'observation', spec_id) 
self.assertIsNotNone(task_id) @@ -469,8 +470,8 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): content="", cluster="CEP4") with self.assertRaises(Exception): - self.radb.insertTask(1, 1, 'conflict', 'observation', spec_id) - self.radb.insertTask(1, 2, 'conflict', 'observation', spec_id) + self.radb.insertTask(1, 1, None, 'conflict', 'observation', spec_id) + self.radb.insertTask(1, 2, None, 'conflict', 'observation', spec_id) @integration_test def test_insertTask_duplicate_otdb_ids_fails(self): @@ -481,8 +482,8 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): content="", cluster="CEP4") with self.assertRaises(Exception): - self.radb.insertTask(1, 1, 'conflict', 'observation', spec_id) - self.radb.insertTask(2, 1, 'conflict', 'observation', spec_id) + self.radb.insertTask(1, 1, None, 'conflict', 'observation', spec_id) + self.radb.insertTask(2, 1, None, 'conflict', 'observation', spec_id) @integration_test def test_insertTask_with_invalid_task_status_raises_exception(self): @@ -494,7 +495,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): content="", cluster="CEP4") with self.assertRaises(Exception): - self.radb.insertTask(0, 0, 'willywonka', 'observation', specification_id) + self.radb.insertTask(0, 0, None, 'willywonka', 'observation', specification_id) @integration_test def test_insertTask_with_invalid_task_type_raises_exception(self): @@ -506,7 +507,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): content="", cluster="CEP4") with self.assertRaises(Exception): - self.radb.insertTask(0, 0, 'conflict', 'willywonka', specification_id) + self.radb.insertTask(0, 0, None, 'conflict', 'willywonka', specification_id) @integration_test def test_insertTask_normal_use_succeeds(self): @@ -525,6 +526,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): 'type_id': 0, 'mom_id': 0, 'otdb_id': 0, + 'tmss_id': None, 'blocked_by_ids': [], 'predecessor_ids': [], 'successor_ids': [], @@ -537,7 +539,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): cluster=sample_task['cluster'], content='',) - task_id = self.radb.insertTask(sample_task['mom_id'], sample_task['otdb_id'], sample_task['status'], + task_id = self.radb.insertTask(sample_task['mom_id'], sample_task['otdb_id'], None, sample_task['status'], sample_task['type'], sample_task['specification_id']) sample_task['id'] = task_id task = self.radb.getTask(id=task_id) @@ -940,7 +942,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): now = datetime.utcnow() now -= timedelta(minutes=now.minute, seconds=now.second, microseconds=now.microsecond) # round to full hour - result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', now, now+timedelta(hours=1), + result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', now, now+timedelta(hours=1), 'foo', 'CEP4') self.assertTrue(result['inserted']) spec_id1 = result['specification_id'] @@ -1032,7 +1034,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): logger.info('-- now test with a 2nd task, and test resource availability, conflicts etc. 
--') # another task, fully overlapping with task1 - result = self.radb.insertOrUpdateSpecificationAndTask(1, 1, 'approved', 'observation', now, now+timedelta(hours=1), 'foo', 'CEP4') + result = self.radb.insertOrUpdateSpecificationAndTask(1, 1, None, 'approved', 'observation', now, now+timedelta(hours=1), 'foo', 'CEP4') self.assertTrue(result['inserted']) spec_id2 = result['specification_id'] task_id2 = result['task_id'] @@ -1157,7 +1159,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): task2 = self.radb.getTask(task_id2) # another task, partially overlapping with both task1 & task3 - result = self.radb.insertOrUpdateSpecificationAndTask(2, 2, 'approved', 'observation', + result = self.radb.insertOrUpdateSpecificationAndTask(2, 2, None, 'approved', 'observation', task1['starttime'] + (task1['endtime']-task1['starttime'])/2, task2['starttime'] + (task2['endtime']-task2['starttime'])/2, 'foo', 'CEP4') @@ -1266,7 +1268,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): future = now + timedelta(hours=2) # insert one task, and reuse that for multiple claims - result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', future, future + timedelta(hours=1), + result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', future, future + timedelta(hours=1), 'content', 'CEP4') self.assertTrue(result['inserted']) task_id = result['task_id'] @@ -1464,7 +1466,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): future = now + timedelta(hours=2) #insert one task, and reuse that for multiple overlapping claims - result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', now, now+timedelta(hours=1), + result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', now, now+timedelta(hours=1), 'foo', 'CEP4') self.assertTrue(result['inserted']) task_id = result['task_id'] @@ -1587,7 +1589,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): future = now + timedelta(hours=2) # insert one task, and reuse that for multiple overlapping claims - result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', future, future + timedelta(hours=1), 'first content', + result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', future, future + timedelta(hours=1), 'first content', 'CEP4') self.assertTrue(result['inserted']) task_id = result['task_id'] @@ -1598,7 +1600,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): self.assertEqual('first content', self.radb.getSpecification(task['specification_id'])['content']) # prove that we can re-insert the spec/task, and that the new task is indeed updated - result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', future, future + timedelta(hours=1), 'second content', + result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', future, future + timedelta(hours=1), 'second content', 'CEP4') self.assertTrue(result['inserted']) self.assertEqual(task_id, result['task_id']) # as per 20190916 inserting a task again should not yield a new task id. 
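The tests above pin down the new tmss_id plumbing end to end. A minimal sketch of the intended TMSS-only round trip, not part of the patch itself; it assumes a connected RADatabase instance `radb`, and the tmss_id value 42 is purely illustrative:

    from datetime import datetime, timedelta

    now = datetime.utcnow()
    # insert a task known only to TMSS: no MoM or OTDB id
    result = radb.insertOrUpdateSpecificationAndTask(mom_id=None, otdb_id=None, tmss_id=42,
                                                     task_status='approved', task_type='observation',
                                                     starttime=now, endtime=now + timedelta(hours=1),
                                                     content='', cluster='CEP4')
    assert result['inserted']

    # the task is now reachable through the new tmss_id column...
    task = radb.getTask(tmss_id=42)

    # ...and its status can be updated by tmss_id alone
    radb.updateTaskStatusForTMSSId(42, 'prescheduled')

    # re-inserting the same tmss_id takes the update path, so no new task id,
    # mirroring what the otdb_id based tests above assert
    result2 = radb.insertOrUpdateSpecificationAndTask(mom_id=None, otdb_id=None, tmss_id=42,
                                                      task_status='approved', task_type='observation',
                                                      starttime=now, endtime=now + timedelta(hours=1),
                                                      content='', cluster='CEP4')
    assert result2['task_id'] == result['task_id']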
@@ -1635,7 +1637,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): # prove again that we can re-insert the spec/task (future with claims), and that the new task is indeed inserted and new, # and that the claim(s) and usage(s) were actually deleted (via cascading deletes) - result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', future, future + timedelta(hours=1), 'third content', + result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', future, future + timedelta(hours=1), 'third content', 'CEP4') self.assertTrue(result['inserted']) self.assertEqual(task_id, result['task_id']) # as per 20190916 inserting a task again should not yield a new task id. @@ -1688,7 +1690,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): # and prove again that we can re-insert the spec/task (future with claims and a corrupted usage table), and that the new task is indeed inserted and new, # and that the claim(s) and usage(s) were actually deleted (via cascading deletes) # 2017-08-29: YEAH! the insert fails just like on production. Now we can start making a fix! - result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', future, future + timedelta(hours=1), 'fourth content', + result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', future, future + timedelta(hours=1), 'fourth content', 'CEP4') self.assertTrue(result['inserted']) self.assertEqual(task_id, result['task_id']) # as per 20190916 inserting a task again should not yield a new task id. @@ -1730,7 +1732,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): start = now - timedelta(minutes=now.minute, seconds=now.second, microseconds=now.microsecond) # round to current full hour #insert a task - result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', start, start+timedelta(hours=2), 'foo', 'CEP4') + result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', start, start+timedelta(hours=2), 'foo', 'CEP4') self.assertTrue(result['inserted']) task_id = result['task_id'] @@ -1856,7 +1858,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): for spec in self.radb.getSpecifications(): self.radb.deleteSpecification(spec['id']) # cascades into tasks and claims - result1 = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', start, + result1 = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', start, start + timedelta(hours=2), 'foo', 'CEP4') task1_id = result1['task_id'] @@ -1872,7 +1874,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): # claim same - result2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, 'approved', 'observation', start + timedelta(minutes=5), + result2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, None, 'approved', 'observation', start + timedelta(minutes=5), start + timedelta(hours=2, minutes=5), 'foo', 'CEP4') task2_id = result2['task_id'] @@ -1899,7 +1901,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): for spec in self.radb.getSpecifications(): self.radb.deleteSpecification(spec['id']) # cascades into tasks and claims - result1 = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', start, + result1 = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 
'approved', 'observation', start, start + timedelta(hours=2), 'foo', 'CEP4') task1_id = result1['task_id'] @@ -1915,7 +1917,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): # claim same - result2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, 'approved', 'observation', start + timedelta(minutes=5), + result2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, None, 'approved', 'observation', start + timedelta(minutes=5), start + timedelta(hours=1, minutes=50), 'foo', 'CEP4') task2_id = result2['task_id'] @@ -1942,7 +1944,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): for spec in self.radb.getSpecifications(): self.radb.deleteSpecification(spec['id']) # cascades into tasks and claims - result1 = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', start, + result1 = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', start, start + timedelta(hours=2), 'foo', 'CEP4') task1_id = result1['task_id'] @@ -1958,7 +1960,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): # claim same - result2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, 'approved', 'observation', start + timedelta(minutes=-5), + result2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, None, 'approved', 'observation', start + timedelta(minutes=-5), start + timedelta(hours=1, minutes=55), 'foo', 'CEP4') task2_id = result2['task_id'] @@ -1989,7 +1991,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): for spec in self.radb.getSpecifications(): self.radb.deleteSpecification(spec['id']) # cascades into tasks and claims - result1 = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', start, + result1 = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', start, start + timedelta(hours=2), 'foo', 'CEP4') task1_id = result1['task_id'] @@ -2005,7 +2007,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): # claim same - result2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, 'approved', 'observation', start + timedelta(minutes=-5), + result2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, None, 'approved', 'observation', start + timedelta(minutes=-5), start + timedelta(hours=2, minutes=5), 'foo', 'CEP4') task2_id = result2['task_id'] @@ -2032,7 +2034,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): for spec in self.radb.getSpecifications(): self.radb.deleteSpecification(spec['id']) # cascades into tasks and claims - result1 = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', start, + result1 = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', start, start + timedelta(hours=2), 'foo', 'CEP4') task1_id = result1['task_id'] @@ -2048,7 +2050,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): # claim same - result2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, 'approved', 'observation', start, + result2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, None, 'approved', 'observation', start, start + timedelta(hours=2), 'foo', 'CEP4') task2_id = result2['task_id'] @@ -2075,7 +2077,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): for spec in self.radb.getSpecifications(): self.radb.deleteSpecification(spec['id']) # cascades into tasks and claims - result1 = 
self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', start, + result1 = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', start, start + timedelta(hours=2), 'foo', 'CEP4') task1_id = result1['task_id'] @@ -2091,7 +2093,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): # claim same - result2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, 'approved', 'observation', start + timedelta(hours=3), + result2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, None, 'approved', 'observation', start + timedelta(hours=3), start + timedelta(hours=5), 'foo', 'CEP4') task2_id = result2['task_id'] @@ -2116,7 +2118,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): for spec in self.radb.getSpecifications(): self.radb.deleteSpecification(spec['id']) # cascades into tasks and claims - result1 = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', start + timedelta(hours=3), + result1 = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', start + timedelta(hours=3), start + timedelta(hours=5), 'foo', 'CEP4') task1_id = result1['task_id'] @@ -2132,7 +2134,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): # claim same - result2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, 'approved', 'observation', start, + result2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, None, 'approved', 'observation', start, start + timedelta(hours=2), 'foo', 'CEP4') task2_id = result2['task_id'] @@ -2172,7 +2174,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): resource_max_cap = self.radb.get_resource_claimable_capacity(RESOURCE_ID, base_time, base_time) # insert the 'low prio' spec, task... - spec_task_low = self.radb.insertOrUpdateSpecificationAndTask(1, 1, 'prescheduled', 'observation', + spec_task_low = self.radb.insertOrUpdateSpecificationAndTask(1, 1, None, 'prescheduled', 'observation', base_time + timedelta(minutes=5), base_time + timedelta(minutes=10), 'foo', 'CEP4') task_low_id = spec_task_low['task_id'] @@ -2213,7 +2215,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): # overlapping with the beginning of task_low # so, the dwellscheduler finds task_low in task_high's highway # so, task_low is aborted by the dwellscheduler (later in the code).
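The dwellscheduler scenario described in the comments above rests on plain time-window queries against the radb, and with the tmss_ids filter from this patch the same lookups also work for TMSS tasks. A hedged sketch, reusing `radb`, `base_time` and `timedelta` from the surrounding test module; the TMSS ids are arbitrary:

    # combined scheduling window of a set of TMSS tasks, via the new tmss_ids
    # filter; the query selects min_starttime and max_endtime, as shown above
    window = radb.getTasksTimeWindow(tmss_ids=[42, 43])

    # tasks overlapping task_high's desired slot, the kind of lookup the
    # dwellscheduler performs before deciding to abort a low-prio task
    candidates = radb.getTasks(lower_bound=base_time,
                               upper_bound=base_time + timedelta(minutes=7),
                               task_type='observation')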
- spec_task_high1 = self.radb.insertOrUpdateSpecificationAndTask(2, 2, 'approved', 'observation', + spec_task_high1 = self.radb.insertOrUpdateSpecificationAndTask(2, 2, None, 'approved', 'observation', base_time, base_time + timedelta(minutes=7), 'foo', 'CEP4') task_high1_id = spec_task_high1['task_id'] @@ -2283,7 +2285,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): now = datetime.utcnow() now -= timedelta(minutes=now.minute, seconds=now.second, microseconds=now.microsecond) # round to full hour - result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', + result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', now, now+timedelta(hours=1), 'foo', 'CEP4') self.assertTrue(result['inserted']) self.assertIsNotNone(result['task_id']) @@ -2327,7 +2329,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): now = datetime.utcnow() now -= timedelta(minutes=now.minute, seconds=now.second, microseconds=now.microsecond) # round to full hour - result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', + result = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', now, now+timedelta(hours=1), 'foo', 'CEP4') self.assertTrue(result['inserted']) self.assertIsNotNone(result['task_id']) @@ -2403,7 +2405,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): now = now - timedelta(minutes=now.minute, seconds=now.second, microseconds=now.microsecond) now = now + timedelta(hours=1) - spec_task = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', + spec_task = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', now, now + timedelta(minutes=10), 'foo', 'CEP4') @@ -2477,7 +2479,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): now = datetime.utcnow() - spec_task = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', + spec_task = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', now, now, # tasks can have zero duration 'foo', 'CEP4') @@ -2527,7 +2529,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): resource_max_cap = self.radb.get_resource_claimable_capacity(RESOURCE_ID, base_time, base_time) # insert a first task and full claim on a resource... - spec_task1 = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', + spec_task1 = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', base_time + timedelta(minutes=+0), base_time + timedelta(minutes=+10), 'foo', 'CEP4') self.assertTrue(spec_task1['inserted']) @@ -2544,7 +2546,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): # insert second (partially overlapping) task and claim on same resource, which we expect to get a conflict status # because the first claim already claims the resource fully. 
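The conflict these tests expect follows from the radb's capacity bookkeeping: get_resource_claimable_capacity reports what is still free on a resource over a time window, and granted claims reduce it. An illustrative sketch under the same fixture assumptions as the test above (`radb`, RESOURCE_ID, base_time and the claim sizes come from that test; the behaviour, not the exact values, is the point):

    # with equal bounds, this reports the full claimable capacity of the resource
    free_before = radb.get_resource_claimable_capacity(RESOURCE_ID, base_time, base_time)

    # once task 1 holds a full-size claim on [0, 10) minutes, an overlapping
    # window has nothing left to give, so task 2's equally large claim must
    # land in 'conflict' status rather than 'claimed'
    free_overlap = radb.get_resource_claimable_capacity(RESOURCE_ID,
                                                        base_time + timedelta(minutes=5),
                                                        base_time + timedelta(minutes=10))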
- spec_task2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, 'approved', 'observation', + spec_task2 = self.radb.insertOrUpdateSpecificationAndTask(1, 1, None, 'approved', 'observation', base_time + timedelta(minutes=+5), base_time + timedelta(minutes=+15), 'foo', 'CEP4') self.assertTrue(spec_task2['inserted']) @@ -2600,7 +2602,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): microseconds=base_time.microsecond) # insert a first task and full claim on a resource... - spec_task = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'observation', + spec_task = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'observation', base_time + timedelta(minutes=-20), base_time + timedelta(minutes=-10), 'foo', 'CEP4') self.assertTrue(spec_task['inserted']) @@ -2658,7 +2660,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): RESOURCE_ID = 0 resource_max_cap = int(self.radb.get_resource_claimable_capacity(RESOURCE_ID, now, now)) - task1_id = self.radb.insertOrUpdateSpecificationAndTask(1, 1, 'approved', 'observation', + task1_id = self.radb.insertOrUpdateSpecificationAndTask(1, 1, None, 'approved', 'observation', now+timedelta(hours=1), now + timedelta(hours=2), 'content', 'CEP4')['task_id'] @@ -2704,7 +2706,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): self.radb.getResourceUsages(task1['starttime'], task1['endtime'], RESOURCE_ID)[RESOURCE_ID]['claimed']) # insert second task after the first one (not overlapping) - task2_id = self.radb.insertOrUpdateSpecificationAndTask(2, 2, 'approved', 'observation', + task2_id = self.radb.insertOrUpdateSpecificationAndTask(2, 2, None, 'approved', 'observation', now + timedelta(hours=3), now + timedelta(hours=4), 'content', 'CEP4')['task_id'] @@ -2756,7 +2758,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): now = datetime.utcnow() now -= timedelta(minutes=now.minute, seconds=now.second, microseconds=now.microsecond) # round to full hour for i in [1,2]: - task_id = self.radb.insertOrUpdateSpecificationAndTask(i, i, 'approved', 'observation', + task_id = self.radb.insertOrUpdateSpecificationAndTask(i, i, None, 'approved', 'observation', now+timedelta(hours=1), now + timedelta(hours=2), 'content', 'CEP4')['task_id'] task = self.radb.getTask(task_id) @@ -2839,7 +2841,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): # we need a task.... 
now = datetime.utcnow() now -= timedelta(minutes=now.minute, seconds=now.second, microseconds=now.microsecond) # round to full hour - task_id = self.radb.insertOrUpdateSpecificationAndTask(0, 0, 'approved', 'reservation', + task_id = self.radb.insertOrUpdateSpecificationAndTask(0, 0, None, 'approved', 'reservation', now + timedelta(hours=1), now + timedelta(hours=2), 'content', 'CEP4')['task_id'] @@ -2911,7 +2913,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): self.assertEqual(0, len(usages)) self.assertEqual(0, len(usage_deltas)) - task_id = self.radb.insertOrUpdateSpecificationAndTask(mom_id=0, otdb_id=0, task_status='approved', task_type='observation', + task_id = self.radb.insertOrUpdateSpecificationAndTask(mom_id=0, otdb_id=0, tmss_id=None, task_status='approved', task_type='observation', starttime=now+timedelta(hours=1), endtime=now+timedelta(hours=2), content="", cluster="CEP4")['task_id'] task = self.radb.getTask(task_id) diff --git a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/t_radb_performance.py b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/t_radb_performance.py index 6a1758c71f98fa6916ac25c2aa4235d05124ccda..54e624c715ccc5d0cbd70e4eeee3b6dc07d1f00e 100755 --- a/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/t_radb_performance.py +++ b/SAS/ResourceAssignment/ResourceAssignmentDatabase/tests/t_radb_performance.py @@ -75,7 +75,7 @@ class ResourceAssignmentDatabaseTest(RADBCommonTestMixin, unittest.TestCase): logger.info('starting task and claim scheduling: counter=%s num_claims_per_resource=%s num_claims_to_insert=%s oversubscription_factor=%s', counter, num_claims_per_resource, num_claims_to_insert, oversubscription_factor) - result = self.radb.insertOrUpdateSpecificationAndTask(counter, counter, 'approved', 'observation', + result = self.radb.insertOrUpdateSpecificationAndTask(counter, counter, None, 'approved', 'observation', now+timedelta(hours=3*counter), now + timedelta(hours=3*counter + 1), 'content', 'CEP4') diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/base_resource_estimator.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/base_resource_estimator.py index 40ff888781b77231f053ed6334d22a01527e14a0..e80fca0991dfe0857e2f01d403a123504d772eb2 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/base_resource_estimator.py +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/base_resource_estimator.py @@ -44,7 +44,7 @@ class BaseResourceEstimator(object): """ Check if all required keys needed are available """ logger.debug("required keys: %s" % ', '.join(self.required_keys)) logger.debug("parset keys: %s" % ', '.join(list(parset.keys()))) - missing_keys = set(self.required_keys) - set(parset.keys()) + missing_keys = sorted(list(set(self.required_keys) - set(parset.keys()))) if missing_keys: logger.error("missing keys: %s" % ', '.join(missing_keys)) return False diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/calibration_pipeline.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/calibration_pipeline.py index 1ad38b616acc82b88b51f378e81610b283ac7937..7cc01e1d01e3ef94918470f1bba7040e9380af93 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/calibration_pipeline.py +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/calibration_pipeline.py @@ -39,17 +39,6 @@ class 
CalibrationPipelineResourceEstimator(BasePipelineResourceEstimator): BasePipelineResourceEstimator.__init__(self, name='calibration_pipeline') self.required_keys = ('Observation.startTime', 'Observation.stopTime', - DATAPRODUCTS + 'Input_Correlated.enabled', - DATAPRODUCTS + 'Input_Correlated.identifications', - #DATAPRODUCTS + 'Input_Correlated.storageClusterName', # enable if input bandwidth is also estimated - DATAPRODUCTS + 'Input_InstrumentModel.enabled', - DATAPRODUCTS + 'Input_InstrumentModel.identifications', - DATAPRODUCTS + 'Output_InstrumentModel.enabled', - DATAPRODUCTS + 'Output_InstrumentModel.identifications', - DATAPRODUCTS + 'Output_InstrumentModel.storageClusterName', - DATAPRODUCTS + 'Output_Correlated.enabled', - DATAPRODUCTS + 'Output_Correlated.identifications', - DATAPRODUCTS + 'Output_Correlated.storageClusterName', PIPELINE + 'DPPP.demixer.freqstep', PIPELINE + 'DPPP.demixer.timestep') @@ -146,22 +135,22 @@ class CalibrationPipelineResourceEstimator(BasePipelineResourceEstimator): if reduction_factor < 1: logger.error('freqstep * timestep is not positive: %d' % reduction_factor) result['errors'].append('freqstep * timestep is not positive: %d' % reduction_factor) - if not parset.getBool(DATAPRODUCTS + 'Input_Correlated.enabled') or \ - not parset.getBool(DATAPRODUCTS + 'Output_Correlated.enabled'): + if not parset.getBool(DATAPRODUCTS + 'Input_Correlated.enabled', False) or \ + not parset.getBool(DATAPRODUCTS + 'Output_Correlated.enabled', False): logger.error('Input_Correlated or Output_Correlated is not enabled') result['errors'].append('Input_Correlated or Output_Correlated is not enabled') duration = self._getDuration(parset.getString('Observation.startTime'), parset.getString('Observation.stopTime')) - input_idents_uv = parset.getStringVector(DATAPRODUCTS + 'Input_Correlated.identifications') + input_idents_uv = parset.getStringVector(DATAPRODUCTS + 'Input_Correlated.identifications',[]) input_files_uv = self.get_inputs_from_predecessors(predecessor_estimates, input_idents_uv, 'uv') if not input_files_uv: logger.error('Missing uv dataproducts in predecessor output_files') result['errors'].append('Missing uv dataproducts in predecessor output_files') input_files = input_files_uv - have_im_input = parset.getBool(DATAPRODUCTS + 'Input_InstrumentModel.enabled') + have_im_input = parset.getBool(DATAPRODUCTS + 'Input_InstrumentModel.enabled', False) if have_im_input: input_idents_im = parset.getStringVector(DATAPRODUCTS + 'Input_InstrumentModel.identifications') input_files_im = self.get_inputs_from_predecessors(predecessor_estimates, input_idents_im, 'im') @@ -180,11 +169,11 @@ class CalibrationPipelineResourceEstimator(BasePipelineResourceEstimator): # multiple passes, nr nodes and caching, but for sure also because bandwidth must be tied to *predecessor* storage! 
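The edits above replace hard requirements with defaulted reads: the dataproduct keys were removed from required_keys, and each access now supplies an explicit fallback, so parsets that omit them (as TMSS specifications may) no longer fail the up-front key check. The pattern, reduced to its core, with parset and the DATAPRODUCTS prefix as used in this estimator:

    # keys may be absent, so every read carries a default instead of relying
    # on required_keys to have rejected the parset beforehand
    enabled = parset.getBool(DATAPRODUCTS + 'Input_Correlated.enabled', False)
    idents = parset.getStringVector(DATAPRODUCTS + 'Input_Correlated.identifications', [])
    if not enabled:
        result['errors'].append('Input_Correlated is not enabled')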
#input_cluster_uv = parset.getString(DATAPRODUCTS + 'Input_Correlated.storageClusterName') - output_ident_uv = self._getOutputIdentification( parset.getStringVector(DATAPRODUCTS + 'Output_Correlated.identifications') ) + output_ident_uv = self._getOutputIdentification( parset.getStringVector(DATAPRODUCTS + 'Output_Correlated.identifications', []) ) output_cluster_uv = parset.getString(DATAPRODUCTS + 'Output_Correlated.storageClusterName') - have_im_output = parset.getBool(DATAPRODUCTS + 'Output_InstrumentModel.enabled') + have_im_output = parset.getBool(DATAPRODUCTS + 'Output_InstrumentModel.enabled', False) if have_im_output: - output_ident_im = self._getOutputIdentification( parset.getStringVector(DATAPRODUCTS + 'Output_InstrumentModel.identifications') ) + output_ident_im = self._getOutputIdentification( parset.getStringVector(DATAPRODUCTS + 'Output_InstrumentModel.identifications',[]) ) output_cluster_im = parset.getString(DATAPRODUCTS + 'Output_InstrumentModel.storageClusterName') if output_cluster_uv != output_cluster_im: diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/observation.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/observation.py index 495111b586d398f0dff56271e82a8b02de4a90f4..a46e91122caf1f237e122bafff7463cd7e99305c 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/observation.py +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/resource_estimators/observation.py @@ -45,24 +45,7 @@ class ObservationResourceEstimator(BaseResourceEstimator): 'Observation.Beam[0].subbandList', 'Observation.nrBitsPerSample', 'Observation.VirtualInstrument.stationList', - COBALT + 'Correlator.nrChannelsPerSubband', - COBALT + 'Correlator.integrationTime', - COBALT + 'BeamFormer.flysEye', - COBALT + 'BeamFormer.CoherentStokes.timeIntegrationFactor', - COBALT + 'BeamFormer.IncoherentStokes.timeIntegrationFactor', - 'Observation.VirtualInstrument.stationList', - DATAPRODUCTS + 'Output_Correlated.enabled', - DATAPRODUCTS + 'Output_Correlated.identifications', - DATAPRODUCTS + 'Output_Correlated.storageClusterName', - DATAPRODUCTS + 'Output_CoherentStokes.enabled', - DATAPRODUCTS + 'Output_CoherentStokes.identifications', - DATAPRODUCTS + 'Output_CoherentStokes.storageClusterName', - COBALT + 'BeamFormer.CoherentStokes.which', - DATAPRODUCTS + 'Output_IncoherentStokes.enabled', - DATAPRODUCTS + 'Output_IncoherentStokes.identifications', - DATAPRODUCTS + 'Output_IncoherentStokes.storageClusterName', - COBALT + 'BeamFormer.IncoherentStokes.which' - ) + COBALT + 'Correlator.nrChannelsPerSubband' ) self.asp = AntennaSetsParser() def _calculate(self, parset, predecessor_estimates=[]): @@ -130,21 +113,21 @@ class ObservationResourceEstimator(BaseResourceEstimator): estimates = [] try: - if parset.getBool('Observation.DataProducts.Output_Correlated.enabled'): + if parset.getBool('Observation.DataProducts.Output_Correlated.enabled', False): estimates.extend(self.correlated(parset, duration)) except ValueError as exc: logger.error(exc) errors.append(str(exc)) try: - if parset.getBool('Observation.DataProducts.Output_CoherentStokes.enabled'): + if parset.getBool('Observation.DataProducts.Output_CoherentStokes.enabled', False): estimates.extend(self.coherentstokes(parset, duration)) except ValueError as exc: logger.error(exc) errors.append(str(exc)) try: - if parset.getBool('Observation.DataProducts.Output_IncoherentStokes.enabled'): + if 
parset.getBool('Observation.DataProducts.Output_IncoherentStokes.enabled', False): estimates.extend(self.incoherentstokes(parset, duration)) except ValueError as exc: logger.error(exc) @@ -202,7 +185,7 @@ class ObservationResourceEstimator(BaseResourceEstimator): # Estimates may differ per SAP for CS/IS. Decided to always produce a separate estimate per SAP. # Hence, need to annotate each SAP with the right identifications for pipeline predecessor input filtering. - identifications = parset.getStringVector(DATAPRODUCTS + 'Output_Correlated.identifications') + identifications = parset.getStringVector(DATAPRODUCTS + 'Output_Correlated.identifications', []) sap_idents = self._sap_identifications(identifications, nr_saps) total_files = 0 # sum of all subbands in all digital beams @@ -257,7 +240,7 @@ class ObservationResourceEstimator(BaseResourceEstimator): # Estimates may differ per SAP for CS/IS. Decided to always produce a separate estimate per SAP. # Hence, need to annotate each SAP with the right identifications for pipeline predecessor input filtering. - identifications = parset.getStringVector(DATAPRODUCTS + 'Output_CoherentStokes.identifications') + identifications = parset.getStringVector(DATAPRODUCTS + 'Output_CoherentStokes.identifications', []) sap_idents = self._sap_identifications(identifications, nr_saps) estimates = [] @@ -350,7 +333,7 @@ class ObservationResourceEstimator(BaseResourceEstimator): # Estimates may differ per SAP for CS/IS. Decided to always produce a separate estimate per SAP. # Hence, need to annotate each SAP with the right identifications for pipeline predecessor input filtering. - identifications = parset.getStringVector(DATAPRODUCTS + 'Output_IncoherentStokes.identifications') + identifications = parset.getStringVector(DATAPRODUCTS + 'Output_IncoherentStokes.identifications', []) sap_idents = self._sap_identifications(identifications, nr_saps) estimates = [] diff --git a/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py b/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py index af277dee5943c1c812c93e770a2c592f6906e5c1..399b826974ae0275845fc6f639a66be40dddd980 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py +++ b/SAS/ResourceAssignment/ResourceAssignmentEstimator/service.py @@ -57,40 +57,44 @@ class ResourceEstimatorHandler(ServiceMessageHandler): ##FIXME dirty hack def add_id(self, task_estimate, otdb_id): - estimate_list = task_estimate['estimates'] - for est in estimate_list: - if 'storage' in est['resource_types']: - # We only need to do output files, it will be someone else's input - output_files = est.get('output_files') - if output_files is None: - continue - for dptype in output_files: - for dptype_dict in output_files[dptype]: - dptype_dict['properties'][dptype + '_otdb_id'] = otdb_id - logger.info('add_id: added %s to properties of data type %s' % (otdb_id, dptype)) + if otdb_id is not None: + estimate_list = task_estimate['estimates'] + for est in estimate_list: + if 'storage' in est['resource_types']: + # We only need to do output files, it will be someone else's input + output_files = est.get('output_files') + if output_files is None: + continue + for dptype in output_files: + for dptype_dict in output_files[dptype]: + dptype_dict['properties'][dptype + '_otdb_id'] = otdb_id + logger.info('add_id: added %s to properties of data type %s' % (otdb_id, dptype)) return task_estimate #TODO use something else than .values()[0]['estimates'] ?? 
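A note on the id selection in get_subtree_estimate below: it picks `task_id = otdb_id or tmss_id`, and `or` falls through on any falsy value, so a legitimate otdb_id of 0 (a value the radb tests above use freely) would silently defer to the tmss_id; likewise, `branch.get('otdb_id', branch.get('tmss_id'))` returns None rather than the tmss_id when the otdb_id key is present but None. A None-aware variant, shown only as a sketch:

    otdb_id = specification_tree.get('otdb_id')
    tmss_id = specification_tree.get('tmss_id')
    # treat only a missing/None otdb_id as absent; 0 is a valid id
    task_id = otdb_id if otdb_id is not None else tmss_id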
def get_subtree_estimate(self, specification_tree): ''' Returns a dict { 'estimates': estimates, 'errors': [errors] }. ''' - otdb_id = specification_tree['otdb_id'] + otdb_id = specification_tree.get('otdb_id') + tmss_id = specification_tree.get('tmss_id') + task_id = otdb_id or tmss_id + parset = specification_tree['specification'] - predecessors = specification_tree['predecessors'] + predecessors = specification_tree.get('predecessors', []) # Recursively get estimates for predecessors, which are needed to determine the requested estimates. branch_estimates = {} for branch in predecessors: - branch_otdb_id = branch['otdb_id'] + branch_task_id = branch.get('otdb_id', branch.get('tmss_id')) subtree_estimate = self.get_subtree_estimate(branch) if subtree_estimate['errors']: - logger.warning("Could not estimate %s because predecessor %s has errors" % (otdb_id, branch)) - return {'errors': ["Could not estimate %s because predecessor %s has errors" % (otdb_id, branch)]} + logger.warning("Could not estimate %s because predecessor %s has errors" % (task_id, branch)) + return {'errors': ["Could not estimate %s because predecessor %s has errors" % (task_id, branch)]} - branch_estimates[branch_otdb_id] = subtree_estimate + branch_estimates[branch_task_id] = subtree_estimate - logger.info(("Branch estimates for %s\n" % otdb_id) + pprint.pformat(branch_estimates)) + logger.info(("Branch estimates for %s\n" % task_id) + pprint.pformat(branch_estimates)) # Construct the requested estimates if specification_tree['task_type'] == 'observation': @@ -99,10 +103,10 @@ class ResourceEstimatorHandler(ServiceMessageHandler): return self.add_id(self.reservation.verify_and_estimate(parset), otdb_id) elif specification_tree['task_type'] == 'pipeline': # Averaging pipeline - if specification_tree['task_subtype'] in ['averaging pipeline', 'calibration pipeline']: + if specification_tree.get('task_subtype') in ['averaging pipeline', 'calibration pipeline']: predecessor_estimates = [] - for branch_otdb_id, branch_estimate in list(branch_estimates.items()): - logger.info('Looking at predecessor %s' % branch_otdb_id) + for branch_task_id, branch_estimate in list(branch_estimates.items()): + logger.info('Looking at predecessor %s' % branch_task_id) estimates = branch_estimate['estimates'] for est in estimates: @@ -111,11 +115,11 @@ class ResourceEstimatorHandler(ServiceMessageHandler): has_uv = 'uv' in est['output_files'] has_im = 'im' in est['output_files'] if has_uv and not has_im: # Not a calibrator pipeline - logger.info('found %s as the target of pipeline %s' % (branch_otdb_id, otdb_id)) + logger.info('found %s as the target of pipeline %s' % (branch_task_id, task_id)) predecessor_estimates.extend(estimates) break elif has_im: - logger.info('found %s as the calibrator of pipeline %s' % (branch_otdb_id, otdb_id)) + logger.info('found %s as the calibrator of pipeline %s' % (branch_task_id, task_id)) predecessor_estimates.extend(estimates) break @@ -128,13 +132,13 @@ class ResourceEstimatorHandler(ServiceMessageHandler): predecessor_estimates = list(branch_estimates.values())[0]['estimates'] - if specification_tree['task_subtype'] in ['imaging pipeline', 'imaging pipeline msss']: + if specification_tree.get('task_subtype') in ['imaging pipeline', 'imaging pipeline msss']: return self.add_id(self.imaging_pipeline.verify_and_estimate(parset, predecessor_estimates), otdb_id) - if specification_tree['task_subtype'] in ['long baseline pipeline']: + if specification_tree.get('task_subtype') in ['long baseline pipeline']: 
return self.add_id(self.longbaseline_pipeline.verify_and_estimate(parset, predecessor_estimates), otdb_id) - if specification_tree['task_subtype'] in ['pulsar pipeline']: + if specification_tree.get('task_subtype') in ['pulsar pipeline']: return self.add_id(self.pulsar_pipeline.verify_and_estimate(parset, predecessor_estimates), otdb_id) else: # system tasks? logger.warning("ID %s is not a pipeline, observation or reservation." % otdb_id) diff --git a/SAS/ResourceAssignment/ResourceAssignmentService/rpc.py b/SAS/ResourceAssignment/ResourceAssignmentService/rpc.py index 2e2f67b91c069b9d22fd81f3212106e539b6ba09..cc8781a70f5af60ddda1eb0065f21321c75e3cfe 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentService/rpc.py +++ b/SAS/ResourceAssignment/ResourceAssignmentService/rpc.py @@ -175,13 +175,14 @@ class RADBRPC(RPCClientContextManagerMixin): available_capacity=available_capacity, total_capacity=total_capacity) - def getTask(self, id=None, mom_id=None, otdb_id=None, specification_id=None): - '''get a task for either the given (task)id, or for the given mom_id, or for the given otdb_id, or for the given specification_id''' + def getTask(self, id=None, mom_id=None, otdb_id=None, tmss_id=None, specification_id=None): + '''get a task for either the given (task)id, or for the given mom_id, or for the given otdb_id, or for the given tmss_id, or for the given specification_id''' - return self._rpc_client.execute('GetTask', id=id, mom_id=mom_id, otdb_id=otdb_id, specification_id=specification_id) + return self._rpc_client.execute('GetTask', id=id, mom_id=mom_id, otdb_id=otdb_id, tmss_id=tmss_id, specification_id=specification_id) - def insertTask(self, mom_id, otdb_id, task_status, task_type, specification_id): + def insertTask(self, mom_id, otdb_id, tmss_id, task_status, task_type, specification_id): return self._rpc_client.execute('InsertTask', mom_id=mom_id, otdb_id=otdb_id, + tmss_id=tmss_id, task_status=task_status, task_type=task_type, specification_id=specification_id) @@ -189,11 +190,12 @@ class RADBRPC(RPCClientContextManagerMixin): def deleteTask(self, id): return self._rpc_client.execute('DeleteTask', id=id) - def updateTask(self, task_id, mom_id=None, otdb_id=None, task_status=None, task_type=None, specification_id=None): + def updateTask(self, task_id, mom_id=None, otdb_id=None, tmss_id=None, task_status=None, task_type=None, specification_id=None): return self._rpc_client.execute('UpdateTask', id=task_id, mom_id=mom_id, otdb_id=otdb_id, + tmss_id=tmss_id, task_status=task_status, task_type=task_type, specification_id=specification_id) @@ -203,10 +205,15 @@ class RADBRPC(RPCClientContextManagerMixin): otdb_id=otdb_id, task_status=task_status) - def getTasksTimeWindow(self, task_ids=None, mom_ids=None, otdb_ids=None): - return self._rpc_client.execute('GetTasksTimeWindow', task_ids=task_ids, mom_ids=mom_ids, otdb_ids=otdb_ids) + def updateTaskStatusForTMSSId(self, tmss_id, task_status): + return self._rpc_client.execute('UpdateTaskStatusForTMSSId', + tmss_id=tmss_id, + task_status=task_status) + + def getTasksTimeWindow(self, task_ids=None, mom_ids=None, otdb_ids=None, tmss_ids=None): + return self._rpc_client.execute('GetTasksTimeWindow', task_ids=task_ids, mom_ids=mom_ids, otdb_ids=otdb_ids, tmss_ids=tmss_ids) - def getTasks(self, lower_bound=None, upper_bound=None, task_ids=None, task_status=None, task_type=None, mom_ids=None, otdb_ids=None, cluster=None): + def getTasks(self, lower_bound=None, upper_bound=None, task_ids=None, task_status=None, task_type=None, mom_ids=None, otdb_ids=None, tmss_ids=None, cluster=None): '''getTasks lets you query tasks from the radb with many optional filters.
:param lower_bound: datetime specifies the lower_bound of a time window above which to select tasks :param upper_bound: datetime specifies the upper_bound of a time window below which to select tasks @@ -217,7 +224,7 @@ class RADBRPC(RPCClientContextManagerMixin): :param otdb_ids: int/list/tuple specifies one or more otdb_ids to select + :param tmss_ids: int/list/tuple specifies one or more tmss_ids to select :param cluster: string specifies the cluster to select ''' - return self._rpc_client.execute('GetTasks', lower_bound=lower_bound, upper_bound=upper_bound, task_ids=task_ids, task_status=task_status, task_type=task_type, mom_ids=mom_ids, otdb_ids=otdb_ids, cluster=cluster) + return self._rpc_client.execute('GetTasks', lower_bound=lower_bound, upper_bound=upper_bound, task_ids=task_ids, task_status=task_status, task_type=task_type, mom_ids=mom_ids, otdb_ids=otdb_ids, tmss_ids=tmss_ids, cluster=cluster) def getTaskPredecessorIds(self, id=None): return self._rpc_client.execute('GetTaskPredecessorIds', id=id) @@ -240,10 +247,11 @@ class RADBRPC(RPCClientContextManagerMixin): def getSpecification(self, id): return self._rpc_client.execute('GetSpecification', id=id) - def insertOrUpdateSpecificationAndTask(self, mom_id, otdb_id, task_status, task_type, starttime, endtime, content, cluster): + def insertOrUpdateSpecificationAndTask(self, mom_id, otdb_id, tmss_id, task_status, task_type, starttime, endtime, content, cluster): return self._rpc_client.execute('insertOrUpdateSpecificationAndTask', mom_id=mom_id, otdb_id=otdb_id, + tmss_id=tmss_id, task_status=task_status, task_type=task_type, starttime=starttime, diff --git a/SAS/ResourceAssignment/ResourceAssignmentService/service.py b/SAS/ResourceAssignment/ResourceAssignmentService/service.py index ddfd4fad090f93ddc6a54e1c870c1a2fb7b4da3c..732404dbadbe236fb8668ae75ed32b62e021a6c9 100644 --- a/SAS/ResourceAssignment/ResourceAssignmentService/service.py +++ b/SAS/ResourceAssignment/ResourceAssignmentService/service.py @@ -240,7 +240,8 @@ class RADBServiceMessageHandler(ServiceMessageHandler): logger.info('GetTasksTimeWindow: %s' % dict({k:v for k,v in list(kwargs.items()) if v != None})) return self.radb.getTasksTimeWindow(task_ids=kwargs.get('task_ids'), mom_ids=kwargs.get('mom_ids'), - otdb_ids=kwargs.get('otdb_ids')) + otdb_ids=kwargs.get('otdb_ids'), + tmss_ids=kwargs.get('tmss_ids')) def _getTasks(self, **kwargs): logger.info('GetTasks: %s' % dict({k:v for k,v in list(kwargs.items()) if v != None})) @@ -251,17 +252,19 @@ class RADBServiceMessageHandler(ServiceMessageHandler): task_type=kwargs.get('task_type'), mom_ids=kwargs.get('mom_ids'), otdb_ids=kwargs.get('otdb_ids'), + tmss_ids=kwargs.get('tmss_ids'), cluster=kwargs.get('cluster')) def _getTask(self, **kwargs): logger.info('GetTask: %s' % dict({k:v for k,v in list(kwargs.items()) if v != None})) - task = self.radb.getTask(id=kwargs.get('id'), mom_id=kwargs.get('mom_id'), otdb_id=kwargs.get('otdb_id'), specification_id=kwargs.get('specification_id')) + task = self.radb.getTask(id=kwargs.get('id'), mom_id=kwargs.get('mom_id'), otdb_id=kwargs.get('otdb_id'), tmss_id=kwargs.get('tmss_id'), specification_id=kwargs.get('specification_id')) return task def _insertTask(self, **kwargs): logger.info('InsertTask: %s' % dict({k:v for k,v in list(kwargs.items()) if v != None})) - task_id = self.radb.insertTask(kwargs['mom_id'], - kwargs['otdb_id'], + task_id = self.radb.insertTask(kwargs.get('mom_id'), + kwargs.get('otdb_id'), + kwargs.get('tmss_id'), kwargs.get('status_id', kwargs.get('task_status', 'prepared')), kwargs.get('type_id', kwargs.get('task_type')),
kwargs['specification_id']) @@ -286,6 +289,7 @@ class RADBServiceMessageHandler(ServiceMessageHandler): updated = self.radb.updateTask(id, mom_id=kwargs.get('mom_id'), otdb_id=kwargs.get('otdb_id'), + tmss_id=kwargs.get('tmss_id'), task_status=kwargs.get('status_id', kwargs.get('task_status')), task_type=kwargs.get('type_id', kwargs.get('task_type')), specification_id=kwargs.get('specification_id')) @@ -321,6 +325,7 @@ class RADBServiceMessageHandler(ServiceMessageHandler): logger.info('insertOrUpdateSpecificationAndTask: %s' % dict({k:v for k,v in list(kwargs.items()) if v != None and k != 'content'})) return self.radb.insertOrUpdateSpecificationAndTask(kwargs['mom_id'], kwargs['otdb_id'], + kwargs.get('tmss_id'), kwargs['task_status'], kwargs['task_type'], kwargs.get('starttime'), diff --git a/SAS/Scheduler/src/CMakeLists.txt.build_without_lofar_cmake b/SAS/Scheduler/src/CMakeLists.txt.build_without_lofar_cmake new file mode 100644 index 0000000000000000000000000000000000000000..a78e14a803643e2f92f61bd59783cb1ee9dcd4b3 --- /dev/null +++ b/SAS/Scheduler/src/CMakeLists.txt.build_without_lofar_cmake @@ -0,0 +1,123 @@ +cmake_minimum_required(VERSION 3.13.0 FATAL_ERROR) + +project(Scheduler) + +set(CMAKE_AUTOMOC ON) +set(CMAKE_AUTORCC ON) +set(CMAKE_AUTOUIC ON) + +if(CMAKE_VERSION VERSION_LESS "3.7.0") + set(CMAKE_INCLUDE_CURRENT_DIR ON) +endif() + +find_package(Qt5 COMPONENTS Core Sql Widgets Gui REQUIRED) + +include_directories(AFTER .) + +add_library(LOFAR_Scheduler + Angle.cpp + astrodate.cpp + astrodatetime.cpp + astrotime.cpp + blocksize.cpp + calibrationpipeline.cpp + CheckBox.cpp + ComboBox.cpp + conflictdialog.cpp + Controller.cpp + DataHandler.cpp + DataMonitorConnection.cpp + dataslotdialog.cpp + DataTreeWidgetItem.cpp + DateEdit.cpp + DateTimeEdit.cpp + debug_lofar.cpp + demixingsettings.cpp + DigitalBeam.cpp + digitalbeamdialog.cpp + doublespinbox.cpp + FileUtils.cpp + GraphicCurrentTimeLine.cpp + GraphicResourceScene.cpp + GraphicStationTaskLine.cpp + graphicstoragescene.cpp + GraphicStorageTimeLine.cpp + GraphicTask.cpp + GraphicTimeLine.cpp + imagingpipeline.cpp + LineEdit.cpp + ListWidget.cpp + lofar_utils.cpp + longbaselinepipeline.cpp + neighboursolution.cpp + observation.cpp + OTDBnode.cpp + OTDBtree.cpp + parsettreeviewer.cpp + pipeline.cpp + publishdialog.cpp + pulsarpipeline.cpp + qlofardatamodel.cpp + redistributetasksdialog.cpp + sasconnectdialog.cpp + SASConnection.cpp + sasprogressdialog.cpp + sasstatusdialog.cpp + sasuploaddialog.cpp + Scheduler.cpp + schedulerdatablock.cpp + schedulerdata.cpp + schedulergui.cpp + schedulerLib.cpp + schedulersettings.cpp + schedulesettingsdialog.cpp + scheduletabledelegate.cpp + shifttasksdialog.cpp + signalhandler.cpp + SpinBox.cpp + statehistorydialog.cpp + station.cpp + stationlistwidget.cpp + stationtask.cpp + stationtreewidget.cpp + Storage.cpp + StorageNode.cpp + tablecolumnselectdialog.cpp + tableview.cpp + taskcopydialog.cpp + task.cpp + taskdialog.cpp + taskstorage.cpp + thrashbin.cpp + TiedArrayBeam.cpp + tiedarraybeamdialog.cpp + TimeEdit.cpp + conflictdialog.ui + dataslotdialog.ui + digitalbeamdialog.ui + graphicstoragescene.ui + parsettreeviewer.ui + publishdialog.ui + redistributetasksdialog.ui + sasconnectdialog.ui + sasprogressdialog.ui + sasstatusdialog.ui + sasuploaddialog.ui + schedulergui.ui + schedulesettingsdialog.ui + shifttasksdialog.ui + statehistorydialog.ui + stationlistwidget.ui + stationtreewidget.ui + tablecolumnselectdialog.ui + taskcopydialog.ui + taskdialog.ui + thrashbin.ui + tiedarraybeamdialog.ui 
+ scheduler_resources.qrc +) + +target_link_libraries(LOFAR_Scheduler Qt5::Core Qt5::Sql Qt5::Gui Qt5::Widgets) + +add_executable(scheduler main.cpp) +target_link_libraries(scheduler LOFAR_Scheduler Qt5::Core Qt5::Sql Qt5::Gui Qt5::Widgets) diff --git a/SAS/Scheduler/src/schedulergui.cpp b/SAS/Scheduler/src/schedulergui.cpp index 204200af9f1b19a49f2eca189850c312d1fff255..f13885ee16df483140c5b6ffe2c3a106678b65f4 100644 --- a/SAS/Scheduler/src/schedulergui.cpp +++ b/SAS/Scheduler/src/schedulergui.cpp @@ -16,6 +16,8 @@ #include "qlofardatamodel.h" #include <QTableView> #include <QDesktopWidget> +#include <QDockWidget> +#include <QStatusBar> #include <QLCDNumber> #include <QFileDialog> #include <sstream> diff --git a/SAS/TMSS/CMakeLists.txt b/SAS/TMSS/CMakeLists.txt index 8c0d7575c2b5ab1194e3115d8faca0debdc0586a..23e6616f579313fb8e5eab6c5e172e306c927c33 100644 --- a/SAS/TMSS/CMakeLists.txt +++ b/SAS/TMSS/CMakeLists.txt @@ -1,6 +1,6 @@ -lofar_package(TMSS 0.1 DEPENDS PyCommon pyparameterset PyMessaging) +lofar_package(TMSS 0.1 DEPENDS PyCommon pyparameterset PyMessaging ResourceAssigner TaskPrescheduler) lofar_add_package(TMSSClient client) add_subdirectory(src) diff --git a/SAS/TMSS/client/lib/tmss_http_rest_client.py b/SAS/TMSS/client/lib/tmss_http_rest_client.py index 1752c53477991e7b30244d6b346564e610624939..a2f9534a64ad48df2fd515e69cc6138d8b550dd4 100644 --- a/SAS/TMSS/client/lib/tmss_http_rest_client.py +++ b/SAS/TMSS/client/lib/tmss_http_rest_client.py @@ -157,7 +157,7 @@ class TMSSsession(object): def get_path_as_json_object(self, path: str, params={}) -> object: '''get resource at the given path, interpret it as json, and return it as a native object (usually a dict or a list of dicts)''' - full_url = '%s/%s/' % (self.base_url, path) + full_url = '%s/%s/' % (self.base_url, path.strip('/')) return self.get_url_as_json_object(full_url, params=params) def get_url_as_json_object(self, full_url: str, params={}) -> object: @@ -188,22 +188,34 @@ class TMSSsession(object): raise Exception("Could not get %s - %s %s - %s" % (full_url, response.status_code, responses.get(response.status_code), error_msg)) - def get_subtask_template(self, name: str, version: str=None) -> dict: - '''get the subtask_template as dict for the given name (and version)''' + def _get_template(self, template_type_name: str, name: str, version: str=None) -> dict: + '''get the template of the given type as dict for the given name (and version)''' clauses = {} if name is not None: clauses["name"] = name if version is not None: clauses["version"] = version - result = self.get_path_as_json_object('subtask_template', clauses) + result = self.get_path_as_json_object(template_type_name, clauses) if isinstance(result, list): if len(result) > 1: - raise ValueError("Found more then one SubtaskTemplate for clauses: %s" % (clauses,)) + raise ValueError("Found more than one %s for clauses: %s" % (template_type_name, clauses)) elif if len(result) == 1: return result[0] return None return result + def get_schedulingunit_template(self, name: str, version: str=None) -> dict: + '''get the schedulingunit_template as dict for the given name (and version)''' + return self._get_template('scheduling_unit_template', name, version) + + def get_task_template(self, name: str, version: str=None) -> dict: + '''get the task_template as dict for the given name (and version)''' + return self._get_template('task_template', name, version) + + def get_subtask_template(self, name: str, version: str=None) -> dict: + '''get the subtask_template as dict for the given
name (and version)''' + return self._get_template('subtask_template', name, version) + def get_subtask_output_dataproducts(self, subtask_id: int) -> []: '''get the output dataproducts of the subtask with the given subtask_id''' return self.get_path_as_json_object('subtask/%s/output_dataproducts' % subtask_id) @@ -219,6 +231,11 @@ class TMSSsession(object): return result.content.decode('utf-8') raise Exception("Could not specify observation for task %s.\nResponse: %s" % (task_id, result)) + def create_blueprints_and_subtasks_from_scheduling_unit_draft(self, scheduling_unit_draft_id: int) -> {}: + """create a scheduling_unit_blueprint, its specified taskblueprints and subtasks for the given scheduling_unit_draft_id. + returns the created scheduling_unit_blueprint upon success, or raises.""" + return self.get_path_as_json_object('scheduling_unit_draft/%s/create_blueprints_and_subtasks' % scheduling_unit_draft_id) + def schedule_subtask(self, subtask_id: int) -> {}: """schedule the subtask for the given subtask_id. returns the scheduled subtask upon success, or raises.""" diff --git a/SAS/TMSS/frontend/CMakeLists.txt b/SAS/TMSS/frontend/CMakeLists.txt index b4a09541f6dd24d668847a68334265259315bcef..cb74bd697d19246e5b3467bc7fa6f874498791c8 100644 --- a/SAS/TMSS/frontend/CMakeLists.txt +++ b/SAS/TMSS/frontend/CMakeLists.txt @@ -1 +1,2 @@ -add_subdirectory(frontend_poc) \ No newline at end of file +# add_subdirectory(frontend_poc) +add_subdirectory(tmss_webapp) diff --git a/SAS/TMSS/frontend/tmss_webapp/.env b/SAS/TMSS/frontend/tmss_webapp/.env new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/SAS/TMSS/frontend/tmss_webapp/CMakeLists.txt b/SAS/TMSS/frontend/tmss_webapp/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..b20842bcefd3a4313c19ad5e7001a2f9b9175aac --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/CMakeLists.txt @@ -0,0 +1,2 @@ +include(NPMInstall) +npm_install(package.json PUBLIC public SOURCE src DESTINATION ${PYTHON_INSTALL_DIR}/lofar/sas/frontend/tmss_webapp/build) diff --git a/SAS/TMSS/frontend/tmss_webapp/package.json b/SAS/TMSS/frontend/tmss_webapp/package.json index 00d256ce0460bbbcf7b2f6b2d928181091d09f2c..b8a53534056c2171aa2bddc4e885529473f5ef16 100644 --- a/SAS/TMSS/frontend/tmss_webapp/package.json +++ b/SAS/TMSS/frontend/tmss_webapp/package.json @@ -45,7 +45,7 @@ "test": "react-scripts test", "eject": "react-scripts eject" }, - "proxy": "http://127.0.0.1:8008", + "proxy": "http://127.0.0.1:8008/", "eslintConfig": { "extends": "react-app" }, diff --git a/SAS/TMSS/frontend/tmss_webapp/src/App.css b/SAS/TMSS/frontend/tmss_webapp/src/App.css index 6d1f1131b5899107f7cd9aa8889252455cd26e0a..766fff47baad6747a35c03a164125f8d181f5956 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/App.css +++ b/SAS/TMSS/frontend/tmss_webapp/src/App.css @@ -45,6 +45,10 @@ h3, .h3 { font-size: 1.25rem; } +h5, .h5 { + font-size: 15px !important; +} + a{ margin-bottom: 10px; } @@ -86,6 +90,20 @@ p { margin-bottom: 5px; } +.p-field { + margin-bottom: 0.5rem; +} + +.p-inputtext { + padding-top: 0.25em !important; + padding-bottom: 0.25em !important; + padding-left: 0.25em !important; +} + +.act-btn-grp { + margin-top: 20px; +} + .task-list { padding-inline-start: 0px; } diff --git a/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js b/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js index 6d8f5516ebaed9f3790fffaaeb7759868db37748..797dc51aaaf15841d816294fb8e3d443ba334502 100644 ---
a/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/components/ViewTable.js @@ -198,7 +198,9 @@ function Table({ columns, data, defaultheader, optionalheader }) { <tr {...row.getRowProps()}> {row.cells.map(cell => { if(cell.column.id !== 'actionpath') - return <td {...cell.getCellProps()} >{cell.render('Cell')}</td> + return <td {...cell.getCellProps()} >{cell.render('Cell')}</td> + else + return ""; })} </tr> ) diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/ResourceInputList.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/ResourceInputList.js index aa608dabcab94707ddd8f5e515c550a4afbe8efd..c1d9019421ff16c5570e3371e8ff338342b404f7 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/ResourceInputList.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/ResourceInputList.js @@ -36,7 +36,7 @@ export class ResourceInputList extends Component { <> {this.props.list.length>0 && this.props.list.map((item, index) => ( <React.Fragment key={index+10}> - <label key={'label1-'+ index} className="col-lg-3 col-md-3 col-sm-12">{item.name}</label> + <label key={'label1-'+ index} className="col-lg-2 col-md-2 col-sm-12">{item.name}</label> <div key={'div1-'+ index} className="col-lg-3 col-md-3 col-sm-12"> <InputNumber key={'item1-'+ index} id={'item1-'+ index} name={'item1-'+ index} suffix={` ${this.props.unitMap[item.quantity_value]?this.props.unitMap[item.quantity_value].display:''}`} @@ -49,6 +49,7 @@ export class ResourceInputList extends Component { <button className="p-link" data-testid={`${item.name}-btn`} onClick={(e) => this.removeInput(item.name)}> <i className="fa fa-trash pi-error"></i></button> </div> + <div className="col-lg-1 col-md-1 col-sm-12"></div> </React.Fragment> ))} </> diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/create.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/create.js index fc07f034438730eef9d48d1691e7f8d219cafa03..e8df560a75cc56560d2eb39adc94a404697e259d 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/create.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/create.js @@ -18,6 +18,7 @@ import AppLoader from '../../layout/components/AppLoader'; import CycleService from '../../services/cycle.service'; import ProjectService from '../../services/project.service'; import UnitConverter from '../../utils/unit.converter'; +import UIConstants from '../../utils/ui.constants'; /** * Component to create a new Project @@ -61,7 +62,7 @@ export class ProjectCreate extends Component { this.projectResourceDefaults = {}; // Default values for default resources this.resourceUnitMap = UnitConverter.resourceUnitMap; // Resource unit conversion factor and constraints this.cycleOptionTemplate = this.cycleOptionTemplate.bind(this); // Template for cycle multiselect - this.tooltipOptions = {position: 'left', event: 'hover', className:"p-tooltip-custom"}; + this.tooltipOptions = UIConstants.tooltipOptions; this.setProjectQuotaDefaults = this.setProjectQuotaDefaults.bind(this); this.setProjectParams = this.setProjectParams.bind(this); @@ -349,13 +350,13 @@ export class ProjectCreate extends Component { <div className="p-fluid"> <div className="p-field p-grid" style={{display: 'none'}}> <label htmlFor="projectId" className="col-lg-2 col-md-2 col-sm-12">URL </label> - <div className="col-lg-4 col-md-4 col-sm-12"> + <div className="col-lg-3 col-md-3 col-sm-12"> <input id="projectId" data-testid="projectId" value={this.state.project.url} /> </div> </div> <div className="p-field 
p-grid"> <label htmlFor="projectName" className="col-lg-2 col-md-2 col-sm-12">Name <span style={{color:'red'}}>*</span></label> - <div className="col-lg-4 col-md-4 col-sm-12"> + <div className="col-lg-3 col-md-3 col-sm-12"> <InputText className={this.state.errors.name ?'input-error':''} id="projectName" data-testid="name" tooltip="Enter name of the project" tooltipOptions={this.tooltipOptions} maxLength="128" value={this.state.project.name} @@ -365,8 +366,9 @@ export class ProjectCreate extends Component { {this.state.errors.name ? this.state.errors.name : "Max 128 characters"} </label> </div> + <div className="col-lg-1 col-md-1 col-sm-12"></div> <label htmlFor="description" className="col-lg-2 col-md-2 col-sm-12">Description <span style={{color:'red'}}>*</span></label> - <div className="col-lg-4 col-md-4 col-sm-12"> + <div className="col-lg-3 col-md-3 col-sm-12"> <InputTextarea className={this.state.errors.description ?'input-error':''} rows={3} cols={30} tooltip="Short description of the project" tooltipOptions={this.tooltipOptions} maxLength="128" data-testid="description" value={this.state.project.description} @@ -379,9 +381,9 @@ export class ProjectCreate extends Component { </div> <div className="p-field p-grid"> <label htmlFor="triggerPriority" className="col-lg-2 col-md-2 col-sm-12">Trigger Priority </label> - <div className="col-lg-4 col-md-4 col-sm-12" data-testid="trig_prio"> + <div className="col-lg-3 col-md-3 col-sm-12" data-testid="trig_prio"> <InputNumber inputId="trig_prio" name="trig_prio" value={this.state.project.trigger_priority} - tooltip="Priority of this project w.r.t. triggers" tooltipOptions={this.tooltipOptions} + tooltip="Priority of this project with respect to triggers" tooltipOptions={this.tooltipOptions} mode="decimal" showButtons min={0} max={1001} step={10} useGrouping={false} onChange={(e) => this.setProjectParams('trigger_priority', e.value)} onBlur={(e) => this.setProjectParams('trigger_priority', e.target.value, 'NUMBER')} /> @@ -390,8 +392,9 @@ export class ProjectCreate extends Component { {this.state.errors.trigger_priority ? this.state.errors.trigger_priority : ""} </label> </div> + <div className="col-lg-1 col-md-1 col-sm-12"></div> <label htmlFor="trigger" className="col-lg-2 col-md-2 col-sm-12">Allows Trigger Submission</label> - <div className="col-lg-4 col-md-4 col-sm-12" data-testid="trigger"> + <div className="col-lg-3 col-md-3 col-sm-12" data-testid="trigger"> <Checkbox inputId="trigger" role="trigger" tooltip="Is this project allowed to supply observation requests on the fly, possibly interrupting currently running observations (responsive telescope)?" 
tooltipOptions={this.tooltipOptions} @@ -400,7 +403,7 @@ export class ProjectCreate extends Component { </div> <div className="p-field p-grid"> <label htmlFor="projCat" className="col-lg-2 col-md-2 col-sm-12">Project Category </label> - <div className="col-lg-4 col-md-4 col-sm-12" data-testid="projCat" > + <div className="col-lg-3 col-md-3 col-sm-12" data-testid="projCat" > <Dropdown inputId="projCat" optionLabel="value" optionValue="url" tooltip="Project Category" tooltipOptions={this.tooltipOptions} value={this.state.project.project_category} @@ -408,8 +411,9 @@ export class ProjectCreate extends Component { onChange={(e) => {this.setProjectParams('project_category', e.value)}} placeholder="Select Project Category" /> </div> + <div className="col-lg-1 col-md-1 col-sm-12"></div> <label htmlFor="periodCategory" className="col-lg-2 col-md-2 col-sm-12">Period Category</label> - <div className="col-lg-4 col-md-4 col-sm-12"> + <div className="col-lg-3 col-md-3 col-sm-12"> <Dropdown data-testid="period-cat" id="period-cat" optionLabel="value" optionValue="url" tooltip="Period Category" tooltipOptions={this.tooltipOptions} value={this.state.project.period_category} @@ -420,7 +424,7 @@ export class ProjectCreate extends Component { </div> <div className="p-field p-grid"> <label htmlFor="triggerPriority" className="col-lg-2 col-md-2 col-sm-12">Cycle(s)</label> - <div className="col-lg-4 col-md-4 col-sm-12"> + <div className="col-lg-3 col-md-3 col-sm-12"> <MultiSelect data-testid="cycle" id="cycle" optionLabel="name" optionValue="url" filter={true} tooltip="Cycle(s) to which this project belongs" tooltipOptions={this.tooltipOptions} value={this.state.project.cycles} @@ -429,10 +433,11 @@ export class ProjectCreate extends Component { /> </div> + <div className="col-lg-1 col-md-1 col-sm-12"></div> <label htmlFor="projRank" className="col-lg-2 col-md-2 col-sm-12">Project Rank <span style={{color:'red'}}>*</span></label> - <div className="col-lg-4 col-md-4 col-sm-12" data-testid="proj-rank" > + <div className="col-lg-3 col-md-3 col-sm-12" data-testid="proj-rank" > <InputNumber inputId="proj-rank" name="rank" data-testid="rank" value={this.state.project.priority_rank} - tooltip="Priority of this project w.r.t. other projects. Projects can interrupt observations of lower-priority projects." + tooltip="Priority of this project with respect to other projects. Projects can interrupt observations of lower-priority projects." 
tooltipOptions={this.tooltipOptions} mode="decimal" showButtons min={0} max={100} onChange={(e) => this.setProjectParams('priority_rank', e.value)} @@ -446,18 +451,20 @@ export class ProjectCreate extends Component { {this.defaultResourcesEnabled && this.state.resourceList && <div className="p-fluid"> <div className="p-field p-grid"> - <div className="col-lg-3 col-md-3 col-sm-112"> + <div className="col-lg-2 col-md-2 col-sm-12"> <h5 data-testid="resource_alloc">Resource Allocations</h5> </div> <div className="col-lg-3 col-md-3 col-sm-10"> <Dropdown optionLabel="name" optionValue="name" + tooltip="Resources to be allotted for the project" + tooltipOptions={this.tooltipOptions} value={this.state.newResource} options={this.state.resourceList} onChange={(e) => {this.setState({'newResource': e.value})}} placeholder="Add Resources" /> </div> <div className="col-lg-2 col-md-2 col-sm-2"> - <Button label="" className="p-button-primary" icon="pi pi-plus" onClick={this.addNewResource} data-testid="add_res_btn" /> + <Button label="" className="p-button-primary" icon="pi pi-plus" onClick={this.addNewResource} disabled={!this.state.newResource} data-testid="add_res_btn" /> </div> </div> <div className="p-field p-grid resource-input-grid"> @@ -469,7 +476,7 @@ export class ProjectCreate extends Component { } </div> </div> - <div className="p-grid p-justify-start"> + <div className="p-grid p-justify-start act-btn-grp"> <div className="col-lg-1 col-md-2 col-sm-6"> <Button label="Save" className="p-button-primary" id="save-btn" data-testid="save-btn" icon="pi pi-check" onClick={this.saveProject} disabled={!this.state.validForm} /> </div> diff --git a/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/edit.js b/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/edit.js index 54672a71a6834f7fb9be9875e28f95ecd065979b..78b443a5c2d1eb457b3806a0cc4fe89e3fd2ab99 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/edit.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/routes/Project/edit.js @@ -18,6 +18,7 @@ import AppLoader from '../../layout/components/AppLoader'; import CycleService from '../../services/cycle.service'; import ProjectService from '../../services/project.service'; import UnitConverter from '../../utils/unit.converter'; +import UIConstants from '../../utils/ui.constants'; export class ProjectEdit extends Component { constructor(props) { @@ -57,6 +58,7 @@ export class ProjectEdit extends Component { {name:'LOFAR Support hours'} ]; this.projectResourceDefaults = {}; this.resourceUnitMap = UnitConverter.resourceUnitMap; + this.tooltipOptions = UIConstants.tooltipOptions; this.getProjectDetails = this.getProjectDetails.bind(this); this.cycleOptionTemplate = this.cycleOptionTemplate.bind(this); @@ -156,8 +158,9 @@ export class ProjectEdit extends Component { if (this.state.newResource) { let resourceList = this.state.resourceList; const newResource = _.remove(resourceList, {'name': this.state.newResource}); - let resources = this.state.resources; + let resources = this.state.resources?this.state.resources:[]; resources.push(newResource[0]); + console.log(resources); this.setState({resources: resources, resourceList: resourceList, newResource: null}); } } @@ -372,30 +375,34 @@ export class ProjectEdit extends Component { <div className="p-fluid"> <div className="p-field p-grid"> <label htmlFor="projectName" className="col-lg-2 col-md-2 col-sm-12">Name <span style={{color:'red'}}>*</span></label> - <div className="col-lg-4 col-md-4 col-sm-12"> + <div className="col-lg-3 col-md-3 col-sm-12"> <InputText
className={this.state.errors.name ?'input-error':''} id="projectName" data-testid="name" + tooltip="Enter name of the project" tooltipOptions={this.tooltipOptions} maxLength="128" value={this.state.project.name} onChange={(e) => this.setProjectParams('name', e.target.value)} onBlur={(e) => this.setProjectParams('name', e.target.value)}/> - <label className="error"> - {this.state.errors.name ? this.state.errors.name : ""} + <label className={this.state.errors.name?"error":"info"}> + {this.state.errors.name ? this.state.errors.name : "Max 128 characters"} </label> </div> + <div className="col-lg-1 col-md-1 col-sm-12"></div> <label htmlFor="description" className="col-lg-2 col-md-2 col-sm-12">Description <span style={{color:'red'}}>*</span></label> - <div className="col-lg-4 col-md-4 col-sm-12"> + <div className="col-lg-3 col-md-3 col-sm-12"> <InputTextarea className={this.state.errors.description ?'input-error':''} rows={3} cols={30} + tooltip="Short description of the project" tooltipOptions={this.tooltipOptions} maxLength="128" data-testid="description" value={this.state.project.description} onChange={(e) => this.setProjectParams('description', e.target.value)} onBlur={(e) => this.setProjectParams('description', e.target.value)}/> - <label className="error"> - {this.state.errors.description ? this.state.errors.description : ""} + <label className={this.state.errors.description ?"error":"info"}> + {this.state.errors.description ? this.state.errors.description : "Max 255 characters"} </label> </div> </div> <div className="p-field p-grid"> <label htmlFor="triggerPriority" className="col-lg-2 col-md-2 col-sm-12">Trigger Priority </label> - <div className="col-lg-4 col-md-4 col-sm-12"> + <div className="col-lg-3 col-md-3 col-sm-12"> <InputNumber inputId="trig_prio" name="trig_prio" className={this.state.errors.name ?'input-error':''} + tooltip="Priority of this project with respect to triggers" tooltipOptions={this.tooltipOptions} value={this.state.project.trigger_priority} showButtons min={0} max={1001} step={10} useGrouping={false} onChange={(e) => this.setProjectParams('trigger_priority', e.value)} @@ -404,23 +411,30 @@ export class ProjectEdit extends Component { {this.state.errors.trigger_priority ? this.state.errors.trigger_priority : ""} </label> </div> + <div className="col-lg-1 col-md-1 col-sm-12"></div> <label htmlFor="trigger" className="col-lg-2 col-md-2 col-sm-12">Allows Trigger Submission</label> - <div className="col-lg-4 col-md-4 col-sm-12"> - <Checkbox inputId="trigger" role="trigger" checked={this.state.project.can_trigger} onChange={e => this.setProjectParams('can_trigger', e.target.checked)}></Checkbox> + <div className="col-lg-3 col-md-3 col-sm-12"> + <Checkbox inputId="trigger" role="trigger" + tooltip="Is this project allowed to supply observation requests on the fly, possibly interrupting currently running observations (responsive telescope)?" 
+ tooltipOptions={this.tooltipOptions} + checked={this.state.project.can_trigger} onChange={e => this.setProjectParams('can_trigger', e.target.checked)}></Checkbox> </div> </div> <div className="p-field p-grid"> <label htmlFor="projCategory" className="col-lg-2 col-md-2 col-sm-12">Project Category </label> - <div className="col-lg-4 col-md-4 col-sm-12"> + <div className="col-lg-3 col-md-3 col-sm-12"> <Dropdown inputId="projCat" optionLabel="value" optionValue="url" + tooltip="Project Category" tooltipOptions={this.tooltipOptions} value={this.state.project.project_category} options={this.state.projectCategories} onChange={(e) => {this.setProjectParams('project_category', e.value)}} placeholder="Select Project Category" /> </div> + <div className="col-lg-1 col-md-1 col-sm-12"></div> <label htmlFor="periodCategory" className="col-lg-2 col-md-2 col-sm-12">Period Category</label> - <div className="col-lg-4 col-md-4 col-sm-12"> + <div className="col-lg-3 col-md-3 col-sm-12"> <Dropdown data-testid="period-cat" id="period-cat" optionLabel="value" optionValue="url" + tooltip="Period Category" tooltipOptions={this.tooltipOptions} value={this.state.project.period_category} options={this.state.periodCategories} onChange={(e) => {this.setProjectParams('period_category',e.value)}} @@ -429,17 +443,21 @@ export class ProjectEdit extends Component { </div> <div className="p-field p-grid"> <label htmlFor="triggerPriority" className="col-lg-2 col-md-2 col-sm-12">Cycle(s)</label> - <div className="col-lg-4 col-md-4 col-sm-12"> + <div className="col-lg-3 col-md-3 col-sm-12"> <MultiSelect data-testid="cycle" id="cycle" optionLabel="name" optionValue="url" filter={true} + tooltip="Cycle(s) to which this project belongs" tooltipOptions={this.tooltipOptions} value={this.state.project.cycles} options={this.state.cycles} onChange={(e) => {this.setProjectParams('cycles',e.value)}} /> </div> + <div className="col-lg-1 col-md-1 col-sm-12"></div> <label htmlFor="projRank" className="col-lg-2 col-md-2 col-sm-12">Project Rank <span style={{color:'red'}}>*</span></label> - <div className="col-lg-4 col-md-4 col-sm-12"> + <div className="col-lg-3 col-md-3 col-sm-12"> <InputNumber inputId="proj-rank" name="rank" data-testid="rank" value={this.state.project.priority_rank} + tooltip="Priority of this project with respect to other projects. Projects can interrupt observations of lower-priority projects." 
+ tooltipOptions={this.tooltipOptions} mode="decimal" showButtons min={0} max={100} onChange={(e) => this.setProjectParams('priority_rank', e.value)} onBlur={(e) => this.setProjectParams('priority_rank', e.target.value, 'NUMBER')} /> @@ -451,32 +469,34 @@ export class ProjectEdit extends Component { {this.state.resourceList && <div className="p-fluid"> <div className="p-field p-grid"> - <div className="col-lg-3 col-md-3 col-sm-112"> + <div className="col-lg-2 col-md-2 col-sm-12"> <h5>Resource Allocations:</h5> </div> <div className="col-lg-3 col-md-3 col-sm-10"> <Dropdown optionLabel="name" optionValue="name" + tooltip="Resources to be allotted for the project" + tooltipOptions={this.tooltipOptions} value={this.state.newResource} options={_.sortBy(this.state.resourceList, ['name'])} onChange={(e) => {this.setState({'newResource': e.value})}} placeholder="Add Resources" /> </div> <div className="col-lg-2 col-md-2 col-sm-2"> - <Button label="" className="p-button-primary" icon="pi pi-plus" onClick={this.addNewResource} data-testid="add_res_btn" /> + <Button label="" className="p-button-primary" icon="pi pi-plus" onClick={this.addNewResource} disabled={!this.state.newResource} data-testid="add_res_btn" /> </div> </div> - {_.keys(this.state.projectQuota).length>0 && + {/* {_.keys(this.state.projectQuota).length>0 && */} <div className="p-field p-grid resource-input-grid"> <ResourceInputList list={this.state.resources} unitMap={this.resourceUnitMap} projectQuota={this.state.projectQuota} callback={this.setProjectQuotaParams} removeInputCallback={this.removeResource} /> </div> - } + {/* } */} </div> } </div> </div> - <div className="p-grid p-justify-start"> + <div className="p-grid p-justify-start act-btn-grp"> <div className="p-col-1"> <Button label="Save" className="p-button-primary" id="save-btn" data-testid="save-btn" icon="pi pi-check" onClick={this.saveProject} disabled={!this.state.validForm} /> </div> diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/cycle.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/cycle.service.js index ab00c074294f4e14a288a46bcb0a6c07f56f0ec4..722f46262f8d16cf3d3f8eccda6266027b67432c 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/services/cycle.service.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/services/cycle.service.js @@ -43,7 +43,29 @@ const CycleService = { }); return res; - } + }, + getCycle: async function(id) { + try { + const url = `/api/cycle/${id}`; + const response = await axios.get(url); + return response.data.results; + } catch (error) { + console.error(error); + } + }, + // To be removed + getAllCycle: async function (){ + let res = []; + await axios.get('/api/cycle/') + .then(response => { + res= response; + }).catch(function(error) { + console.error('[cycle.services.getAllCycle]',error); + }); + + return res; + }, + } export default CycleService; diff --git a/SAS/TMSS/frontend/tmss_webapp/src/services/project.service.js b/SAS/TMSS/frontend/tmss_webapp/src/services/project.service.js index 36bc7a17016df8168cc36f7167a20d0c38e1aee2..18598cc1c90e304931d4f4e55c1a4c39a4d6c69e 100644 --- a/SAS/TMSS/frontend/tmss_webapp/src/services/project.service.js +++ b/SAS/TMSS/frontend/tmss_webapp/src/services/project.service.js @@ -183,7 +183,8 @@ const ProjectService = { project['actionpath']= '/project/view'; projects[index] = project; } - }) + return pro; + }); } }); results.projects = projects; diff --git a/SAS/TMSS/frontend/tmss_webapp/src/utils/ui.constants.js b/SAS/TMSS/frontend/tmss_webapp/src/utils/ui.constants.js new file mode 100644 index
0000000000000000000000000000000000000000..732079588cf68c3938c6dc9efb8e86d029b3bb98 --- /dev/null +++ b/SAS/TMSS/frontend/tmss_webapp/src/utils/ui.constants.js @@ -0,0 +1,5 @@ +const UIConstants = { + tooltipOptions: {position: 'left', event: 'hover', className:"p-tooltip-custom"} +} + +export default UIConstants; \ No newline at end of file diff --git a/SAS/TMSS/src/remakemigrations.py b/SAS/TMSS/src/remakemigrations.py index 03fdec4cec3f94aa2345d79cb6c91d279b51395c..ed3475278af7c5c1fee2d8232bbe0250630e3131 100755 --- a/SAS/TMSS/src/remakemigrations.py +++ b/SAS/TMSS/src/remakemigrations.py @@ -76,9 +76,12 @@ class Migration(migrations.Migration): # Start SubTask id with 2 000 000 to avoid overlap with 'old' (test/production) OTDB operations = [ migrations.RunSQL('ALTER SEQUENCE tmssapp_SubTask_id_seq RESTART WITH 2000000;'), migrations.RunPython(populate_choices), + migrations.RunPython(populate_settings), migrations.RunPython(populate_misc), migrations.RunPython(populate_lofar_json_schemas), - migrations.RunPython(populate_settings)] + migrations.RunPython(populate_resources), + migrations.RunPython(populate_cycles), + migrations.RunPython(populate_projects) ] """ diff --git a/SAS/TMSS/src/tmss/exceptions.py b/SAS/TMSS/src/tmss/exceptions.py index ccdf97362bd9d4e8f9c899f162458948c502ee22..a320dbd527a5a58a0d7274836beb66f9f5387c1c 100644 --- a/SAS/TMSS/src/tmss/exceptions.py +++ b/SAS/TMSS/src/tmss/exceptions.py @@ -8,6 +8,9 @@ class SchemaValidationException(TMSSException): class ConversionException(TMSSException): pass +class BlueprintCreationException(ConversionException): + pass + class SubtaskCreationException(ConversionException): pass diff --git a/SAS/TMSS/src/tmss/settings.py b/SAS/TMSS/src/tmss/settings.py index 9596a203b8ae2b802f444de31994db4501dcfbb4..6ad450c3e6b4f378fe1c3ea88e6a494f58471c8d 100644 --- a/SAS/TMSS/src/tmss/settings.py +++ b/SAS/TMSS/src/tmss/settings.py @@ -14,6 +14,11 @@ import os import logging from lofar.common import dbcredentials, isDevelopmentEnvironment, isTestEnvironment +# remove basic handler, and allow TMSS/Django to do more complicated logging setup +for h in logging.root.handlers[:]: + logging.root.removeHandler(h) + h.close() + logger = logging.getLogger(__name__) LOGGING = { @@ -128,7 +133,7 @@ ROOT_URLCONF = 'lofar.sas.tmss.tmss.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [BASE_DIR, os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR, '../frontend','frontend_poc')], + 'DIRS': [BASE_DIR, os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR, '../frontend','tmss_webapp')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ @@ -141,6 +146,10 @@ TEMPLATES = [ }, ] +STATICFILES_DIRS = [ + os.path.join(BASE_DIR, '../frontend','tmss_webapp/build/static') +] + WSGI_APPLICATION = 'lofar.sas.tmss.tmss.wsgi.application' # Database @@ -323,4 +332,4 @@ SWAGGER_SETTINGS = { } }, -} \ No newline at end of file +} diff --git a/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py b/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py index 5ed5940cd2b6bca37ce160c2613ce634632a87ab..29ea31b133d257425ec9804adeb7ce165ea0d478 100644 --- a/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py +++ b/SAS/TMSS/src/tmss/tmssapp/adapters/parset.py @@ -24,7 +24,7 @@ from lofar.common.json_utils import add_defaults_to_json_object_for_schema from lofar.sas.tmss.tmss.exceptions import * from datetime import datetime -def _convert_to_parset_for_observationcontrol_schema(subtask: models.Subtask) -> parameterset: +def 
_convert_to_parset_dict_for_observationcontrol_schema(subtask: models.Subtask) -> dict: # make sure the spec is complete (including all non-filled in properties with default) spec = add_defaults_to_json_object_for_schema(subtask.specifications_doc, subtask.specifications_template.schema) @@ -43,8 +43,9 @@ def _convert_to_parset_for_observationcontrol_schema(subtask: models.Subtask) -> parset["Observation.ObsID"] = subtask.pk parset["Observation.momID"] = 0 # Needed by MACScheduler parset["Observation.otdbID"] = 0 # Needed by MACScheduler; should/can this be the same as subtask.pk? + parset["Observation.tmssID"] = subtask.pk parset["Observation.processType"] = subtask.specifications_template.type.value.capitalize() - parset["Observation.processSubtype"] = "Beam Observation" # TODO: where to derive the processSubtype from? + parset["Observation.processSubtype"] = "Beam Observation" parset["Observation.Campaign.name"] = subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project.name parset["Observation.startTime"] = formatDatetime(subtask.start_time) if isinstance(subtask.start_time, datetime) else subtask.start_time parset["Observation.stopTime"] = formatDatetime(subtask.stop_time) if isinstance(subtask.stop_time, datetime) else subtask.stop_time @@ -54,8 +55,8 @@ def _convert_to_parset_for_observationcontrol_schema(subtask: models.Subtask) -> parset["Observation.antennaArray"] = "HBA" if "HBA" in spec["stations"]["antenna_set"] else "LBA" # maybe not mandatory? parset["Observation.antennaSet"] = spec["stations"]["antenna_set"] parset["Observation.bandFilter"] = spec["stations"]["filter"] - parset["Observation.sampleClock"] = 200 # why is this not part of the schema? for example as a required setting with a single allowed value. - parset["Observation.nrBitsPerSample"] = 8 # why is this not part of the schema? for example as a required setting with a single allowed value. + parset["Observation.sampleClock"] = 200 # fixed value, no other values are supported + parset["Observation.nrBitsPerSample"] = 8 # fixed value, no other values are supported. parset["Observation.strategy"] = "default" # maybe not mandatory? digi_beams = spec['stations']['digital_pointings'] @@ -105,10 +106,12 @@ def _convert_to_parset_for_observationcontrol_schema(subtask: models.Subtask) -> # TODO: do not use SubtaskOutput.objects.filter but make subtask.subtask_outputs work subtask_outputs = list(models.SubtaskOutput.objects.filter(subtask_id=subtask.id)) # TODO don't we have to append to dataproducts here and then fill in the combined list in the end? - for subtask_output in subtask_outputs: + for output_nr, subtask_output in enumerate(subtask_outputs): dataproducts = list(models.Dataproduct.objects.filter(producer_id=subtask_output.id)) parset["Observation.DataProducts.Output_Correlated.filenames"] = "[%s]" % ','.join(dp.filename for dp in dataproducts) parset["Observation.DataProducts.Output_Correlated.locations"] = "[%s]" % ','.join("%s:%s" % (subtask.cluster.name, dp.directory) for dp in dataproducts) + # mimic MoM placeholder thingy (the resource assigner parses this) + parset["Observation.DataProducts.Output_Correlated.identifications"] = "[TMSS_subtask_%s.SAP%03d]" % (subtask.id, output_nr) # various additional 'Control' settings which seem to be needed for MAC parset["prefix"] = "LOFAR." 
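Aside: the Observation.DataProducts.*.identifications entries introduced above mimic the MoM placeholder convention, "[TMSS_subtask_<id>.SAP<nnn>]", which the resource assigner parses to link the output dataproducts of a producing subtask to the inputs of its successors. Below is a minimal sketch of composing and parsing such an identification string; the helper names and the regex are illustrative assumptions, not the actual resource assigner code.

import re

# Illustrative only: mirrors the "TMSS_subtask_<id>.SAP<nnn>" format used in the parset keys above.
def make_identification(subtask_id: int, sap_nr: int) -> str:
    return "TMSS_subtask_%s.SAP%03d" % (subtask_id, sap_nr)

_ID_PATTERN = re.compile(r"TMSS_subtask_(?P<subtask_id>\d+)\.SAP(?P<sap_nr>\d{3})")

def parse_identification(identification: str) -> dict:
    # Inverse of make_identification; rejects strings that do not follow the TMSS format.
    match = _ID_PATTERN.fullmatch(identification)
    if match is None:
        raise ValueError("not a TMSS identification: %s" % identification)
    return {"subtask_id": int(match.group("subtask_id")), "sap_nr": int(match.group("sap_nr"))}

assert parse_identification(make_identification(42, 1)) == {"subtask_id": 42, "sap_nr": 1}

The zero-padded SAP number keeps the identifications sortable and matches the "%03d" formatting used where the parset is generated.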
@@ -133,13 +136,10 @@ def _convert_to_parset_for_observationcontrol_schema(subtask: models.Subtask) -> parset[prefix+"ObservationControl.StationControl.aartfaacPiggybackAllowed"] = False parset[prefix+"ObservationControl.StationControl.tbbPiggybackAllowed"] = False - - # convert dict to real parameterset, and return it - parset = parameterset(parset) return parset -def _convert_to_parset_for_pipelinecontrol_schema(subtask: models.Subtask) -> parameterset: +def _convert_to_parset_dict_for_pipelinecontrol_schema(subtask: models.Subtask) -> dict: # see https://support.astron.nl/confluence/pages/viewpage.action?spaceKey=TMSS&title=UC1+JSON # make sure the spec is complete (including all non-filled in properties with default) @@ -159,153 +159,159 @@ def _convert_to_parset_for_pipelinecontrol_schema(subtask: models.Subtask) -> pa # General parset["prefix"] = "LOFAR." - parset["ObsSW.Observation.processType"] = "Pipeline" - parset["ObsSW.Observation.ObservationControl.PythonControl.pythonProgram"] = "preprocessing_pipeline.py" - parset["ObsSW.Observation.ObservationControl.PythonControl.softwareVersion"] = "" - parset["ObsSW.Observation.Campaign.name"] = subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project.name - parset["ObsSW.Observation.Scheduler.taskName"] = subtask.task_blueprint.name - parset["ObsSW.Observation.Scheduler.predecessors"] = [] - parset["ObsSW.Observation.Cluster.ProcessingCluster.clusterName"] = subtask.cluster.name - parset["ObsSW.Observation.Cluster.ProcessingCluster.clusterPartition"] = 'cpu' - parset["ObsSW.Observation.Cluster.ProcessingCluster.numberOfTasks"] = 110 # TODO: compute numberOfTasks - parset["ObsSW.Observation.Cluster.ProcessingCluster.numberOfCoresPerTask"] = 2 # TODO: compute numberOfCoresPerTask + parset["Observation.processType"] = "Pipeline" + parset["Observation.processSubtype"] = "Averaging Pipeline" + parset["Observation.ObservationControl.PythonControl.pythonProgram"] = "preprocessing_pipeline.py" + parset["Observation.ObservationControl.PythonControl.softwareVersion"] = "" + parset["Observation.Campaign.name"] = subtask.task_blueprint.scheduling_unit_blueprint.draft.scheduling_set.project.name + parset["Observation.Scheduler.taskName"] = subtask.task_blueprint.name + parset["Observation.Scheduler.predecessors"] = [] + parset["Observation.Cluster.ProcessingCluster.clusterName"] = subtask.cluster.name + parset["Observation.Cluster.ProcessingCluster.clusterPartition"] = 'cpu' + parset["Observation.Cluster.ProcessingCluster.numberOfTasks"] = 110 # deprecated (fixed value) to be completely removed in parset with 'JDM-patch' soon + parset["Observation.Cluster.ProcessingCluster.numberOfCoresPerTask"] = 2 # deprecated (fixed value) to be completely removed in parset with 'JDM-patch' soon # DPPP steps dppp_steps = [] if "preflagger0" in spec: dppp_steps.append('preflagger[0]') - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].chan"] = "[%s]" % spec["preflagger0"]["channels"] - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].abstime"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].azimuth"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].baseline"] = "" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].blrange"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].corrtype"] = "" -
parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].count.path"] = "-" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].count.save"] = "false" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].elevation"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].expr"] = "" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].freqrange"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].lst"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].reltime"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].timeofday"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].timeslot"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[0].type"] = "preflagger" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].chan"] = "[%s]" % spec["preflagger0"]["channels"] + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].abstime"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].azimuth"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].baseline"] = "" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].blrange"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].corrtype"] = "" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].count.path"] = "-" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].count.save"] = "false" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].elevation"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].expr"] = "" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].freqrange"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].lst"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].reltime"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].timeofday"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].timeslot"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[0].type"] = "preflagger" if 'preflagger1' in spec: dppp_steps.append('preflagger[1]') - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].corrtype"] = spec["preflagger1"]["corrtype"] - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].abstime"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].azimuth"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].baseline"] = "" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].blrange"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].chan"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].count.path"] = "-" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].count.save"] = "false" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].elevation"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].expr"] = "" - 
parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].freqrange"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].lst"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].reltime"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].timeofday"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].timeslot"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.preflagger[1].type"] = "preflagger" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].corrtype"] = spec["preflagger1"]["corrtype"] + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].abstime"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].azimuth"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].baseline"] = "" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].blrange"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].chan"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].count.path"] = "-" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].count.save"] = "false" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].elevation"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].expr"] = "" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].freqrange"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].lst"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].reltime"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].timeofday"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].timeslot"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.preflagger[1].type"] = "preflagger" if 'aoflagger' in spec: dppp_steps.append('aoflagger') - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.strategy"] = spec["aoflagger"]["strategy"] - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.autocorr"] = "F" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.count.path"] = "-" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.count.save"] = "FALSE" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.keepstatistics"] = "T" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.memorymax"] = "10" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.memoryperc"] = "0" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.overlapmax"] = "0" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.overlapperc"] = "0" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.pedantic"] = "F" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.pulsar"] = "F" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.timewindow"] = "0" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.aoflagger.type"] = "aoflagger" + parset["Observation.ObservationControl.PythonControl.DPPP.aoflagger.strategy"] = spec["aoflagger"]["strategy"] + 
parset["Observation.ObservationControl.PythonControl.DPPP.aoflagger.autocorr"] = "F" + parset["Observation.ObservationControl.PythonControl.DPPP.aoflagger.count.path"] = "-" + parset["Observation.ObservationControl.PythonControl.DPPP.aoflagger.count.save"] = "FALSE" + parset["Observation.ObservationControl.PythonControl.DPPP.aoflagger.keepstatistics"] = "T" + parset["Observation.ObservationControl.PythonControl.DPPP.aoflagger.memorymax"] = "10" + parset["Observation.ObservationControl.PythonControl.DPPP.aoflagger.memoryperc"] = "0" + parset["Observation.ObservationControl.PythonControl.DPPP.aoflagger.overlapmax"] = "0" + parset["Observation.ObservationControl.PythonControl.DPPP.aoflagger.overlapperc"] = "0" + parset["Observation.ObservationControl.PythonControl.DPPP.aoflagger.pedantic"] = "F" + parset["Observation.ObservationControl.PythonControl.DPPP.aoflagger.pulsar"] = "F" + parset["Observation.ObservationControl.PythonControl.DPPP.aoflagger.timewindow"] = "0" + parset["Observation.ObservationControl.PythonControl.DPPP.aoflagger.type"] = "aoflagger" if "demixer" in spec: dppp_steps.append('demixer') - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.baseline"] = spec["demixer"]["baselines"] - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.demixfreqstep"] = spec["demixer"]["demix_frequency_steps"] - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.demixtimestep"] = spec["demixer"]["demix_time_steps"] - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.freqstep"] = spec["demixer"]["frequency_steps"] - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.ignoretarget"] = spec["demixer"]["ignore_target"] - parset["ObsSW.Observation.ObservationControl.PythonControl.PreProcessing.demix_always"] = spec["demixer"]["demix_always"] - parset["ObsSW.Observation.ObservationControl.PythonControl.PreProcessing.demix_if_needed"] = spec["demixer"]["demix_if_needed"] - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.blrange"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.corrtype"] = "cross" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.elevationcutoff"] = "0.0deg" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.instrumentmodel"] = "instrument" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.modelsources"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.ntimechunk"] = "0" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.othersources"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.skymodel"] = "sky" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.subtractsources"] = "" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.targetsource"] = "" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.demixer.type"] = "demixer" - - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.steps"] = "[%s]" % ",".join(dppp_steps) - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msout.storagemanager.name"] = spec["storagemanager"] # todo: needs to be emptystring when standard/basic/non-dysco? 
+ parset["Observation.ObservationControl.PythonControl.DPPP.demixer.baseline"] = spec["demixer"]["baselines"] + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.demixfreqstep"] = spec["demixer"]["demix_frequency_steps"] + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.demixtimestep"] = spec["demixer"]["demix_time_steps"] + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.freqstep"] = spec["demixer"]["frequency_steps"] + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.timestep"] = spec["demixer"]["time_steps"] + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.ignoretarget"] = spec["demixer"]["ignore_target"] + parset["Observation.ObservationControl.PythonControl.PreProcessing.demix_always"] = spec["demixer"]["demix_always"] + parset["Observation.ObservationControl.PythonControl.PreProcessing.demix_if_needed"] = spec["demixer"]["demix_if_needed"] + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.blrange"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.corrtype"] = "cross" + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.elevationcutoff"] = "0.0deg" + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.instrumentmodel"] = "instrument" + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.modelsources"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.ntimechunk"] = "0" + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.othersources"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.skymodel"] = "sky" + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.subtractsources"] = "" + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.targetsource"] = "" + parset["Observation.ObservationControl.PythonControl.DPPP.demixer.type"] = "demixer" + + parset["Observation.ObservationControl.PythonControl.DPPP.steps"] = "[%s]" % ",".join(dppp_steps) + parset["Observation.ObservationControl.PythonControl.DPPP.msout.storagemanager.name"] = spec["storagemanager"] # Dataproducts - parset["ObsSW.Observation.DataProducts.Input_Correlated.enabled"] = "true" + parset["Observation.DataProducts.Input_Correlated.enabled"] = "true" in_dataproducts = [] - for subtask_input in subtask.inputs.all(): + for input_nr, subtask_input in enumerate(subtask.inputs.all()): in_dataproducts = subtask_input.dataproducts.all() - parset["ObsSW.Observation.DataProducts.Input_Correlated.filenames"] = "[%s]" % ",".join([dp.filename for dp in in_dataproducts]) - parset["ObsSW.Observation.DataProducts.Input_Correlated.locations"] = "[%s]" % ",".join(["%s:%s" % (subtask.cluster.name, dp.directory) for dp in in_dataproducts]) + parset["Observation.DataProducts.Input_Correlated.filenames"] = "[%s]" % ",".join([dp.filename for dp in in_dataproducts]) + parset["Observation.DataProducts.Input_Correlated.locations"] = "[%s]" % ",".join(["%s:%s" % (subtask.cluster.name, dp.directory) for dp in in_dataproducts]) + # mimic MoM placeholder thingy (the resource assigner parses this) + # should be expanded with SAPS and datatypes + parset["Observation.DataProducts.Input_Correlated.identifications"] = "[TMSS_subtask_%s.SAP%03d]" % (subtask_input.producer.subtask.id, input_nr) - parset["ObsSW.Observation.DataProducts.Input_Correlated.skip"] = "[%s]" % ",".join(['0']*len(in_dataproducts)) + parset["Observation.DataProducts.Input_Correlated.skip"] = "[%s]" % 
",".join(['0']*len(in_dataproducts)) # TODO: do not use SubtaskOutput.objects.filter but make subtask.subtask_outputs work subtask_outputs = list(models.SubtaskOutput.objects.filter(subtask_id=subtask.id)) out_dataproducts = [] for subtask_output in subtask_outputs: out_dataproducts = list(models.Dataproduct.objects.filter(producer_id=subtask_output.id)) - parset["ObsSW.Observation.DataProducts.Output_Correlated.enabled"] = "true" - parset["ObsSW.Observation.DataProducts.Output_Correlated.filenames"] = "[%s]" % ",".join([dp.filename for dp in out_dataproducts]) - parset["ObsSW.Observation.DataProducts.Output_Correlated.locations"] = "[%s]" % ",".join(["%s:%s" % (subtask.cluster.name, dp.directory) for dp in out_dataproducts]) - parset["ObsSW.Observation.DataProducts.Output_Correlated.skip"] = "[%s]" % ",".join(['0']*len(out_dataproducts)) + + parset["Observation.DataProducts.Output_Correlated.enabled"] = "true" + parset["Observation.DataProducts.Output_Correlated.filenames"] = "[%s]" % ",".join([dp.filename for dp in out_dataproducts]) + parset["Observation.DataProducts.Output_Correlated.locations"] = "[%s]" % ",".join(["%s:%s" % (subtask.cluster.name, dp.directory) for dp in out_dataproducts]) + parset["Observation.DataProducts.Output_Correlated.skip"] = "[%s]" % ",".join(['0']*len(out_dataproducts)) + parset["Observation.DataProducts.Output_Correlated.identifications"] = "[TMSS_subtask_%s.SAP%03d]" % (subtask.id, 0) + parset["Observation.DataProducts.Output_Correlated.storageClusterName"] = subtask.cluster.name # Other - parset["ObsSW.Observation.ObservationControl.PythonControl.PreProcessing.SkyModel"] = "Ateam_LBA_CC" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.checkparset"] = "-1" - - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.autoweight"] = "true" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.band"] = "-1" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.baseline"] = "" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.blrange"] = "[]" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.corrtype"] = "" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.datacolumn"] = "DATA" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.forceautoweight"] = "false" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.missingdata"] = "false" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.nchan"] = "nchan" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.orderms"] = "false" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.sort"] = "false" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.startchan"] = "0" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msin.useflag"] = "true" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msout.overwrite"] = "false" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msout.tilenchan"] = "8" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msout.tilesize"] = "4096" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msout.vdsdir"] = "A" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.msout.writefullresflag"] = "true" - - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.showprogress"] = "F" - parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.showtimings"] = "F" - 
parset["ObsSW.Observation.ObservationControl.PythonControl.DPPP.uselogger"] = "T" - - # convert dict to real parameterset, and return it - parset = parameterset(parset) + parset["Observation.ObservationControl.PythonControl.PreProcessing.SkyModel"] = "Ateam_LBA_CC" + parset["Observation.ObservationControl.PythonControl.DPPP.checkparset"] = "-1" + + parset["Observation.ObservationControl.PythonControl.DPPP.msin.autoweight"] = "true" + parset["Observation.ObservationControl.PythonControl.DPPP.msin.band"] = "-1" + parset["Observation.ObservationControl.PythonControl.DPPP.msin.baseline"] = "" + parset["Observation.ObservationControl.PythonControl.DPPP.msin.blrange"] = "[]" + parset["Observation.ObservationControl.PythonControl.DPPP.msin.corrtype"] = "" + parset["Observation.ObservationControl.PythonControl.DPPP.msin.datacolumn"] = "DATA" + parset["Observation.ObservationControl.PythonControl.DPPP.msin.forceautoweight"] = "false" + parset["Observation.ObservationControl.PythonControl.DPPP.msin.missingdata"] = "false" + parset["Observation.ObservationControl.PythonControl.DPPP.msin.nchan"] = "nchan" + parset["Observation.ObservationControl.PythonControl.DPPP.msin.orderms"] = "false" + parset["Observation.ObservationControl.PythonControl.DPPP.msin.sort"] = "false" + parset["Observation.ObservationControl.PythonControl.DPPP.msin.startchan"] = "0" + parset["Observation.ObservationControl.PythonControl.DPPP.msin.useflag"] = "true" + parset["Observation.ObservationControl.PythonControl.DPPP.msout.overwrite"] = "false" + parset["Observation.ObservationControl.PythonControl.DPPP.msout.tilenchan"] = "8" + parset["Observation.ObservationControl.PythonControl.DPPP.msout.tilesize"] = "4096" + parset["Observation.ObservationControl.PythonControl.DPPP.msout.vdsdir"] = "A" + parset["Observation.ObservationControl.PythonControl.DPPP.msout.writefullresflag"] = "true" + + parset["Observation.ObservationControl.PythonControl.DPPP.showprogress"] = "F" + parset["Observation.ObservationControl.PythonControl.DPPP.showtimings"] = "F" + parset["Observation.ObservationControl.PythonControl.DPPP.uselogger"] = "T" + return parset # dict to store conversion methods based on subtask.specifications_template.name -_convertors = {'observationcontrol schema': _convert_to_parset_for_observationcontrol_schema, - 'pipelinecontrol schema': _convert_to_parset_for_pipelinecontrol_schema } +_convertors = {'observationcontrol schema': _convert_to_parset_dict_for_observationcontrol_schema, + 'pipelinecontrol schema': _convert_to_parset_dict_for_pipelinecontrol_schema } def convert_to_parset(subtask: models.Subtask) -> parameterset: @@ -313,6 +319,13 @@ def convert_to_parset(subtask: models.Subtask) -> parameterset: Convert the specifications in the subtask to a LOFAR parset for MAC/COBALT :raises ConversionException if no proper conversion is available. ''' + return parameterset(convert_to_parset_dict(subtask)) + +def convert_to_parset_dict(subtask: models.Subtask) -> dict: + ''' + Convert the specifications in the subtask to a LOFAR parset dict with typed values for MAC/COBALT + :raises ConversionException if no proper conversion is available. 
+ ''' try: convertor = _convertors[subtask.specifications_template.name] except KeyError: diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py index fb7a36f795f4e9887d5876230d0a2d76a2a1f725..b04ecd0b93b65653a7426a081eb419c03ddef895 100644 --- a/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py +++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0001_initial.py @@ -1,4 +1,4 @@ -# Generated by Django 3.0.7 on 2020-06-22 14:33 +# Generated by Django 2.2.12 on 2020-08-04 12:35 from django.conf import settings import django.contrib.postgres.fields @@ -76,15 +76,18 @@ class Migration(migrations.Migration): ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)), ('start', models.DateTimeField(help_text='Moment at which the cycle starts, that is, when its projects can run.')), ('stop', models.DateTimeField(help_text='Moment at which the cycle officially ends.')), - ('number', models.IntegerField(help_text='Cycle number.')), - ('standard_hours', models.IntegerField(help_text='Number of offered hours for standard observations.')), - ('expert_hours', models.IntegerField(help_text='Number of offered hours for expert observations.')), - ('filler_hours', models.IntegerField(help_text='Number of offered hours for filler observations.')), ], options={ 'abstract': False, }, ), + migrations.CreateModel( + name='CycleQuota', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('value', models.FloatField(help_text='Resource Quota value')), + ], + ), migrations.CreateModel( name='Dataformat', fields=[ @@ -308,7 +311,7 @@ class Migration(migrations.Migration): ], ), migrations.CreateModel( - name='Placement', + name='PeriodCategory', fields=[ ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)), ], @@ -324,7 +327,8 @@ class Migration(migrations.Migration): ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)), - ('priority', models.IntegerField(default=0, help_text='Priority of this project w.r.t. other projects. Projects can interrupt observations of lower-priority projects.')), + ('priority_rank', models.FloatField(help_text='Priority of this project w.r.t. other projects. Projects can interrupt observations of lower-priority projects.')), + ('trigger_priority', models.IntegerField(default=1000, help_text='Priority of this project w.r.t. triggers.')), ('can_trigger', models.BooleanField(default=False, help_text='True if this project is allowed to supply observation requests on the fly, possibly interrupting currently running observations (responsive telescope).')), ('private_data', models.BooleanField(default=True, help_text='True if data of this project is sensitive. 
Sensitive data is not made public.')), ('expert', models.BooleanField(default=False, help_text='Expert projects put more responsibility on the PI.')), @@ -334,6 +338,15 @@ class Migration(migrations.Migration): 'abstract': False, }, ), + migrations.CreateModel( + name='ProjectCategory', + fields=[ + ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)), + ], + options={ + 'abstract': False, + }, + ), migrations.CreateModel( name='ProjectQuota', fields=[ @@ -342,20 +355,16 @@ class Migration(migrations.Migration): ], ), migrations.CreateModel( - name='ResourceType', + name='Quantity', fields=[ - ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), - ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), - ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('description', models.CharField(help_text='A longer description of this object.', max_length=255)), - ('name', models.CharField(help_text='Human-readable name of this object.', max_length=128, primary_key=True, serialize=False)), + ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)), ], options={ 'abstract': False, }, ), migrations.CreateModel( - name='ResourceUnit', + name='ResourceType', fields=[ ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), @@ -385,6 +394,15 @@ class Migration(migrations.Migration): 'abstract': False, }, ), + migrations.CreateModel( + name='SchedulingRelationPlacement', + fields=[ + ('value', models.CharField(max_length=128, primary_key=True, serialize=False, unique=True)), + ], + options={ + 'abstract': False, + }, + ), migrations.CreateModel( name='SchedulingSet', fields=[ @@ -639,7 +657,7 @@ class Migration(migrations.Migration): ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('time_offset', models.FloatField(help_text='Time Offset between first and second Task Blueprint')), + ('time_offset', models.IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.')), ], options={ 'abstract': False, @@ -652,7 +670,7 @@ class Migration(migrations.Migration): ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=128), blank=True, default=list, help_text='User-defined search keywords for object.', size=8)), ('created_at', models.DateTimeField(auto_now_add=True, help_text='Moment of object creation.')), ('updated_at', models.DateTimeField(auto_now=True, help_text='Moment of last object update.')), - ('time_offset', models.FloatField(help_text='Time Offset between first and second Task Draft')), + ('time_offset', models.IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.')), ], options={ 'abstract': False, @@ -691,33 +709,33 @@ class 
Migration(migrations.Migration):
         ),
         migrations.AddField(
             model_name='taskschedulingrelationdraft',
-            name='placement',
-            field=models.ForeignKey(help_text='Placement of first and second Task Draft', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Placement'),
+            name='first',
+            field=models.ForeignKey(help_text='First Task Draft to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='first_to_connect', to='tmssapp.TaskDraft'),
         ),
         migrations.AddField(
             model_name='taskschedulingrelationdraft',
-            name='first',
-            field=models.ForeignKey(help_text='Name of first Task Draft', on_delete=django.db.models.deletion.CASCADE, related_name='scheduling_relation_first', to='tmssapp.TaskDraft'),
+            name='placement',
+            field=models.ForeignKey(help_text='Task scheduling relation placement.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingRelationPlacement'),
         ),
         migrations.AddField(
             model_name='taskschedulingrelationdraft',
             name='second',
-            field=models.ForeignKey(help_text='Name of second Task Draft.', on_delete=django.db.models.deletion.CASCADE, related_name='scheduling_relation_second', to='tmssapp.TaskDraft'),
+            field=models.ForeignKey(help_text='Second Task Draft to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_to_connect', to='tmssapp.TaskDraft'),
         ),
         migrations.AddField(
             model_name='taskschedulingrelationblueprint',
-            name='placement',
-            field=models.ForeignKey(help_text='Placement of first and second Task Blueprint', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Placement'),
+            name='first',
+            field=models.ForeignKey(help_text='First Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='first_to_connect', to='tmssapp.TaskBlueprint'),
         ),
         migrations.AddField(
             model_name='taskschedulingrelationblueprint',
-            name='first',
-            field=models.ForeignKey(help_text='Name of first Task Blueprint', on_delete=django.db.models.deletion.CASCADE, related_name='scheduling_relation_first', to='tmssapp.TaskBlueprint'),
+            name='placement',
+            field=models.ForeignKey(default='after', help_text='Task scheduling relation placement.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.SchedulingRelationPlacement'),
         ),
         migrations.AddField(
             model_name='taskschedulingrelationblueprint',
             name='second',
-            field=models.ForeignKey(help_text='Name of second Task Blueprint.', on_delete=django.db.models.deletion.CASCADE, related_name='scheduling_relation_second', to='tmssapp.TaskBlueprint'),
+            field=models.ForeignKey(help_text='Second Task Blueprint to connect.', on_delete=django.db.models.deletion.CASCADE, related_name='second_to_connect', to='tmssapp.TaskBlueprint'),
         ),
         migrations.AddConstraint(
             model_name='taskrelationselectiontemplate',
@@ -984,23 +1002,33 @@ class Migration(migrations.Migration):
         ),
         migrations.AddField(
             model_name='resourcetype',
-            name='resource_unit',
-            field=models.ForeignKey(help_text='Unit of current resource.', on_delete=django.db.models.deletion.PROTECT, related_name='resource_types', to='tmssapp.ResourceUnit'),
+            name='quantity',
+            field=models.ForeignKey(help_text='The quantity of this resource type.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.Quantity'),
         ),
         migrations.AddField(
             model_name='projectquota',
             name='project',
-            field=models.ForeignKey(help_text='Project to wich this quota belongs.', on_delete=django.db.models.deletion.PROTECT, related_name='project_quota', to='tmssapp.Project'),
+            field=models.ForeignKey(help_text='Project to which this quota belongs.',
on_delete=django.db.models.deletion.PROTECT, related_name='quota', to='tmssapp.Project'), ), migrations.AddField( model_name='projectquota', name='resource_type', - field=models.ForeignKey(help_text='Resource type.', on_delete=django.db.models.deletion.PROTECT, related_name='resource_type', to='tmssapp.ResourceType'), + field=models.ForeignKey(help_text='Resource type.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ResourceType'), ), migrations.AddField( model_name='project', - name='cycle', - field=models.ForeignKey(help_text='Cycle(s) to which this project belongs (NULLable).', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='projects', to='tmssapp.Cycle'), + name='cycles', + field=models.ManyToManyField(blank=True, help_text='Cycles to which this project belongs (NULLable).', related_name='projects', to='tmssapp.Cycle'), + ), + migrations.AddField( + model_name='project', + name='period_category', + field=models.ForeignKey(help_text='Period category.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.PeriodCategory'), + ), + migrations.AddField( + model_name='project', + name='project_category', + field=models.ForeignKey(help_text='Project category.', null=True, on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ProjectCategory'), ), migrations.AddConstraint( model_name='generatortemplate', @@ -1094,6 +1122,16 @@ class Migration(migrations.Migration): name='specifications_template', field=models.ForeignKey(help_text='Schema used for specifications_doc.', on_delete=django.db.models.deletion.CASCADE, to='tmssapp.DataproductSpecificationsTemplate'), ), + migrations.AddField( + model_name='cyclequota', + name='cycle', + field=models.ForeignKey(help_text='Cycle to which these quota apply.', on_delete=django.db.models.deletion.PROTECT, related_name='quota', to='tmssapp.Cycle'), + ), + migrations.AddField( + model_name='cyclequota', + name='resource_type', + field=models.ForeignKey(help_text='Resource type.', on_delete=django.db.models.deletion.PROTECT, to='tmssapp.ResourceType'), + ), migrations.AddField( model_name='antennaset', name='station_type', diff --git a/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py b/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py index a4fcecd5e9c23454c0158ee93ae88e3d3ecb3321..042d87b37a76af6f3dab5b706b252873af6c1846 100644 --- a/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py +++ b/SAS/TMSS/src/tmss/tmssapp/migrations/0002_populate.py @@ -17,6 +17,9 @@ class Migration(migrations.Migration): # Start SubTask id with 2 000 000 to avoid overlap with 'old' (test/production) OTDB operations = [ migrations.RunSQL('ALTER SEQUENCE tmssapp_SubTask_id_seq RESTART WITH 2000000;'), migrations.RunPython(populate_choices), + migrations.RunPython(populate_settings), migrations.RunPython(populate_misc), migrations.RunPython(populate_lofar_json_schemas), - migrations.RunPython(populate_settings)] + migrations.RunPython(populate_resources), + migrations.RunPython(populate_cycles), + migrations.RunPython(populate_projects) ] diff --git a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py index e869c80cc92bf69d09bc0ac0b90442c2714070aa..0868f0e846e9baed309f476e779da82336ddf0b6 100644 --- a/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/models/scheduling.py @@ -107,9 +107,7 @@ class SubtaskTemplate(Template): realtime = BooleanField(default=False) class Meta: - # TODO: move up to the abstract base class and replace with django 3.0 
UniqueConstraint(... name='%*class)s_unique_name_version)
-        constraints = [UniqueConstraint(fields=['name', 'version'], name='SubtaskTemplate_unique_name_version')]
-
+        pass
 
 class DefaultSubtaskTemplate(BasicCommon):
     name = CharField(max_length=128, unique=True)
@@ -118,8 +116,7 @@ class DefaultSubtaskTemplate(BasicCommon):
 
 class DataproductSpecificationsTemplate(Template):
     class Meta:
-        # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%*class)s_unique_name_version)
-        constraints = [UniqueConstraint(fields=['name', 'version'], name='DataproductSpecificationsTemplate_unique_name_version')]
+        pass
 
 
 class DefaultDataproductSpecificationsTemplate(BasicCommon):
@@ -129,8 +126,7 @@ class DefaultDataproductSpecificationsTemplate(BasicCommon):
 
 class DataproductFeedbackTemplate(Template):
     class Meta:
-        # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%*class)s_unique_name_version)
-        constraints = [UniqueConstraint(fields=['name', 'version'], name='DataproductFeedbackTemplate_unique_name_version')]
+        pass
 
 
 # todo: do we need to specify a default?
diff --git a/SAS/TMSS/src/tmss/tmssapp/models/specification.py b/SAS/TMSS/src/tmss/tmssapp/models/specification.py
index fb390765da02b1657990503d769df3133c106e73..371259c1f3809a458e454a7ea580fee842f0b59c 100644
--- a/SAS/TMSS/src/tmss/tmssapp/models/specification.py
+++ b/SAS/TMSS/src/tmss/tmssapp/models/specification.py
@@ -7,8 +7,11 @@
 from django.contrib.postgres.fields import ArrayField, JSONField
 from django.contrib.postgres.indexes import GinIndex
 from enum import Enum
 from django.db.models.expressions import RawSQL
-
+from django.db.models.deletion import ProtectedError
 from lofar.sas.tmss.tmss.tmssapp.validation import validate_json_against_schema
+from django.core.exceptions import ValidationError
+from rest_framework import status
+import datetime
 
 #
 # Common
@@ -131,6 +134,15 @@ class CopyReason(AbstractChoice):
         REPEATED = "repeated"
 
 
+class SchedulingRelationPlacement(AbstractChoice):
+    """Defines the model and predefined list of possible Placements for Task Scheduling Relation.
+    The items in the Choices class below are automagically populated into the database via a data migration."""
+    class Choices(Enum):
+        AFTER = "after"
+        BEFORE = "before"
+        PARALLEL = "parallel"
+
+
 class Flag(AbstractChoice):
     """Defines the model and predefined list of possible Flags to be used in Setting.
     The items in the Choices class below are automagically populated into the database via a data migration."""
@@ -138,13 +150,41 @@
         AUTOSCHEDULE = "allow_scheduling_observations"
 
 
-class Placement(AbstractChoice):
-    """Defines the model and predefined list of possible Placements for Task Scheduling Relation.
-    The items in the Choices class below are automagically populated into the database via a data migration."""
+
+class Quantity(AbstractChoice):
+    """Defines the model and predefined list of possible quantities to be used in a.o. ProjectQuota and CycleQuota.
+    Please note that, by agreement, all values of a certain quantity are stored in SI units.
+    For example: a duration has the quantity "time" which is stored in "seconds".
+    We only store the quantity-type in the database, because we know the correct SI units for each quantity, so there is no need to store the unit as well.
+ The items in the Choices class below are automagically populated into the database via a data migration.""" + class Choices(Enum): - BEFORE = "before" - AFTER = "after" - PARALLEL = "parallel" + # these are the basic quantities that we currently use. More can be added if needed. + TIME = "time" + BYTES = "bytes" + NUMBER = "number" + + +class PeriodCategory(AbstractChoice): + """Defines the model and predefined list of possible period categories to be used in Project. + The items in the Choices class below are automagically populated into the database via a data migration.""" + + class Choices(Enum): + SINGLE_CYCLE = "single_cycle" + LONG_TERM = "long_term" + UNBOUNDED = "unbounded" + + +class ProjectCategory(AbstractChoice): + """Defines the model and predefined list of possible project categories to be used in Project. + The items in the Choices class below are automagically populated into the database via a data migration.""" + + class Choices(Enum): + REGULAR = "regular" + USER_SHARED_SUPPORT = "user_shared_support" + COMMISSIONING = "commissioning" + DDT = "ddt" + TEST = "test" # concrete models @@ -159,7 +199,7 @@ class TaskConnectorType(BasicCommon): datatype = ForeignKey('Datatype', null=False, on_delete=PROTECT) dataformats = ManyToManyField('Dataformat', blank=True) output_of = ForeignKey("TaskTemplate", related_name='output_connector_types', on_delete=CASCADE) - input_of = ForeignKey("TaskTemplate", related_name='inpput_connector_types', on_delete=CASCADE) + input_of = ForeignKey("TaskTemplate", related_name='input_connector_types', on_delete=CASCADE) # @@ -174,8 +214,7 @@ class Template(NamedCommon): class Meta: abstract = True - # TODO: remove all <class>_unique_name_version UniqueConstraint's from the subclasses and replace by this line below when we start using django 3.0 - # constraints = [UniqueConstraint(fields=['name', 'version'], name='%(class)s_unique_name_version')] + constraints = [UniqueConstraint(fields=['name', 'version'], name='%(class)s_unique_name_version')] # concrete models @@ -183,8 +222,7 @@ class GeneratorTemplate(Template): create_function = CharField(max_length=128, help_text='Python function to call to execute the generator.') class Meta: - # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%*class)s_unique_name_version) - constraints = [UniqueConstraint(fields=['name', 'version'], name='GeneratorTemplate_unique_name_version')] + pass class DefaultGeneratorTemplate(BasicCommon): @@ -194,8 +232,7 @@ class DefaultGeneratorTemplate(BasicCommon): class SchedulingUnitTemplate(Template): class Meta: - # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%*class)s_unique_name_version) - constraints = [UniqueConstraint(fields=['name', 'version'], name='SchedulingUnitTemplate_unique_name_version')] + pass class DefaultSchedulingUnitTemplate(BasicCommon): @@ -207,8 +244,7 @@ class TaskTemplate(Template): validation_code_js = CharField(max_length=128, help_text='JavaScript code for additional (complex) validation.') class Meta: - # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... 
name='%*class)s_unique_name_version) - constraints = [UniqueConstraint(fields=['name', 'version'], name='TaskTemplate_unique_name_version')] + pass class DefaultTaskTemplate(BasicCommon): name = CharField(max_length=128, unique=True) @@ -217,9 +253,7 @@ class DefaultTaskTemplate(BasicCommon): class TaskRelationSelectionTemplate(Template): class Meta: - # TODO: move up to the abstract base class and replace with django 3.0 UniqueConstraint(... name='%*class)s_unique_name_version) - constraints = [UniqueConstraint(fields=['name', 'version'], name='TaskRelationSelectionTemplate_unique_name_version')] - + pass class DefaultTaskRelationSelectionTemplate(BasicCommon): name = CharField(max_length=128, unique=True) @@ -233,35 +267,76 @@ class DefaultTaskRelationSelectionTemplate(BasicCommon): class Cycle(NamedCommonPK): start = DateTimeField(help_text='Moment at which the cycle starts, that is, when its projects can run.') stop = DateTimeField(help_text='Moment at which the cycle officially ends.') - number = IntegerField(help_text='Cycle number.') - standard_hours = IntegerField(help_text='Number of offered hours for standard observations.') - expert_hours = IntegerField(help_text='Number of offered hours for expert observations.') - filler_hours = IntegerField(help_text='Number of offered hours for filler observations.') + + @property + def duration(self) -> datetime.timedelta: + '''the duration of the cycle (stop-start date)''' + return self.stop - self.start + + # Project to Cycle references are ManyToMany now, so cannot be protected via on_delete on db level. + # Instead, explicitly check whether there are projects linked to the Cycle and prevent delete + # todo: verify correct behavior + # - Implemented: Cycles cannot be deleted if they have projects assigned to them + # - Also possible: Projects need at least one cycle + def delete(self, *args, **kwargs): + if len(self.projects.all()) > 0: + raise ProtectedError(protected_objects=self.projects.all(), msg='This Cycle is referenced by %s project(s) and cannot be deleted.' % len(self.projects.all())) + else: + super().delete(*args, **kwargs) + + +class CycleQuota(Model): + cycle = ForeignKey('Cycle', related_name="quota", on_delete=PROTECT, help_text='Cycle to which these quota apply.') + value = FloatField(help_text='Resource Quota value') + resource_type = ForeignKey('ResourceType', on_delete=PROTECT, help_text='Resource type.') class Project(NamedCommonPK): - # cycle is protected since we have to manually decide to clean up projects with a cycle or keep them without cycle - cycle = ForeignKey('Cycle', related_name='projects', on_delete=PROTECT, null=True, help_text='Cycle(s) to which this project belongs (NULLable).') - priority = IntegerField(default=0, help_text='Priority of this project w.r.t. other projects. Projects can interrupt observations of lower-priority projects.') # todo: define a value for the default priority + cycles = ManyToManyField('Cycle', blank=True, related_name='projects', help_text='Cycles to which this project belongs (NULLable).') + priority_rank = FloatField(null=False, help_text='Priority of this project w.r.t. other projects. Projects can interrupt observations of lower-priority projects.') # todo: add if needed: validators=[MinValueValidator(0.0), MaxValueValidator(1.0)] + trigger_priority = IntegerField(default=1000, help_text='Priority of this project w.r.t. triggers.') # todo: verify meaning and add to help_text: "Triggers with higher priority than this threshold can interrupt observations of projects." 
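
Because the Project-Cycle link is now a ManyToManyField, the database can no longer PROTECT a Cycle against deletion the way the old ForeignKey did, which is why the Cycle.delete() override above guards it in Python. A minimal, self-contained sketch of that pattern (illustrative only, not the TMSS code itself; it uses the cheaper queryset .exists()/.count() calls instead of len(...all())):

    from django.db import models
    from django.db.models.deletion import ProtectedError

    class Cycle(models.Model):
        name = models.CharField(max_length=128, primary_key=True)

        def delete(self, *args, **kwargs):
            # refuse deletion while any project still references this cycle
            if self.projects.exists():
                raise ProtectedError(
                    msg='This Cycle is referenced by %d project(s) and cannot be deleted.' % self.projects.count(),
                    protected_objects=set(self.projects.all()))
            super().delete(*args, **kwargs)

    class Project(models.Model):
        name = models.CharField(max_length=128, primary_key=True)
        cycles = models.ManyToManyField(Cycle, blank=True, related_name='projects')
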
can_trigger = BooleanField(default=False, help_text='True if this project is allowed to supply observation requests on the fly, possibly interrupting currently running observations (responsive telescope).')
     private_data = BooleanField(default=True, help_text='True if data of this project is sensitive. Sensitive data is not made public.')
     expert = BooleanField(default=False, help_text='Expert projects put more responsibility on the PI.')
     filler = BooleanField(default=False, help_text='Use this project to fill up idle telescope time.')
+    project_category = ForeignKey('ProjectCategory', null=True, on_delete=PROTECT, help_text='Project category.')
+    period_category = ForeignKey('PeriodCategory', null=True, on_delete=PROTECT, help_text='Period category.')
+
+    # JK, 29/07/20 - after discussion with Sander, it turns out that the ticket TMSS-277 was a misunderstanding.
+    # 'default' does not refer to 'default values' that are supposed to be filled in by the backend.
+    # It was meant to be 'resource_types displayed in the frontend by default', where the other resource_types are
+    # optionally added to the set of quota. These can then be customized in the frontend and are created by the
+    # frontend in the backend, but no quota are intended to be added automatically. So nothing is really needed in
+    # the backend for this (apart from the set of predefined resource_types).
+    # There was some open question on whether there may be a required subset of quota that have to be enforced. So
+    # I'll leave this in for now, until that question is cleared up.
+    #
+    # # also create default project quotas when projects are created
+    # def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
+    #     creating = self._state.adding  # True on create, False on update
+    #     super().save(force_insert, force_update, using, update_fields)
+    #     if creating:
+    #         # todo: review these defaults for being sensible
+    #         ProjectQuota.objects.create(resource_type=ResourceType.objects.get(name="lta_storage"), value=1024^4, project=self)
+    #         ProjectQuota.objects.create(resource_type=ResourceType.objects.get(name="cep_storage"), value=1024^4, project=self)
+    #         ProjectQuota.objects.create(resource_type=ResourceType.objects.get(name="cep_processing_time"), value=60*60*24, project=self)
+    #         ProjectQuota.objects.create(resource_type=ResourceType.objects.get(name="lofar_observing_time"), value=60*60*24, project=self)
+    #         ProjectQuota.objects.create(resource_type=ResourceType.objects.get(name="lofar_observing_time_prio_a"), value=60*60*12, project=self)
+    #         ProjectQuota.objects.create(resource_type=ResourceType.objects.get(name="lofar_observing_time_prio_b"), value=60*60*12, project=self)
+    #         ProjectQuota.objects.create(resource_type=ResourceType.objects.get(name="support_time"), value=60*60*6, project=self)
+    #         ProjectQuota.objects.create(resource_type=ResourceType.objects.get(name="number_of_triggers"), value=42, project=self)
 
 
 class ProjectQuota(Model):
-    project = ForeignKey('Project', related_name="project_quota", on_delete=PROTECT, help_text='Project to wich this quota belongs.') # protected to avoid accidents
+    project = ForeignKey('Project', related_name="quota", on_delete=PROTECT, help_text='Project to which this quota belongs.') # protected to avoid accidents
     value = FloatField(help_text='Resource Quota value')
-    resource_type = ForeignKey('ResourceType', related_name="resource_type", on_delete=PROTECT, help_text='Resource type.') # protected to avoid accidents
+    resource_type = ForeignKey('ResourceType',
on_delete=PROTECT, help_text='Resource type.') # protected to avoid accidents class ResourceType(NamedCommonPK): - resource_unit = ForeignKey('ResourceUnit', related_name="resource_types", on_delete=PROTECT, help_text='Unit of current resource.') + quantity = ForeignKey('Quantity', null=False, on_delete=PROTECT, help_text='The quantity of this resource type.') -class ResourceUnit(NamedCommonPK): - pass - class SchedulingSet(NamedCommon): generator_doc = JSONField(null=True, help_text='Parameters for the generator (NULLable).') generator_template = ForeignKey('GeneratorTemplate', on_delete=SET_NULL, null=True, help_text='Generator for the scheduling units in this set (NULLable).') @@ -281,7 +356,7 @@ class SchedulingUnitDraft(NamedCommon): copy_reason = ForeignKey('CopyReason', null=True, on_delete=PROTECT, help_text='Reason why source was copied (NULLable).') generator_instance_doc = JSONField(null=True, help_text='Parameter value that generated this run draft (NULLable).') scheduling_set = ForeignKey('SchedulingSet', related_name='scheduling_unit_drafts', on_delete=CASCADE, help_text='Set to which this scheduling unit draft belongs.') - requirements_template = ForeignKey('SchedulingUnitTemplate', on_delete=CASCADE, help_text='Schema used for requirements_doc.') # todo: 'schema'? + requirements_template = ForeignKey('SchedulingUnitTemplate', on_delete=CASCADE, help_text='Schema used for requirements_doc.') def save(self, force_insert=False, force_update=False, using=None, update_fields=None): if self.requirements_doc and self.requirements_template_id and self.requirements_template.schema: @@ -289,6 +364,32 @@ class SchedulingUnitDraft(NamedCommon): super().save(force_insert, force_update, using, update_fields) + @property + def duration(self) -> datetime.timedelta: + '''return the overall duration of all tasks of this scheduling unit + ''' + return self.relative_stop_time - self.relative_start_time + + @property + def relative_start_time(self) -> datetime.timedelta: + '''return the earliest relative start time of all tasks of this scheduling unit + ''' + task_drafts = list(self.task_drafts.all()) + if task_drafts: + return min(task_drafts, key=lambda x: x.relative_start_time).relative_start_time + else: + return datetime.timedelta(seconds=0) + + @property + def relative_stop_time(self) -> datetime.timedelta: + '''return the latest relative stop time of all tasks of this scheduling unit + ''' + task_drafts = list(self.task_drafts.all()) + if task_drafts: + return max(task_drafts, key=lambda x: x.relative_stop_time).relative_stop_time + else: + return datetime.timedelta(seconds=0) + class SchedulingUnitBlueprint(NamedCommon): requirements_doc = JSONField(help_text='Scheduling and/or quality requirements for this scheduling unit (IMMUTABLE).') @@ -302,6 +403,55 @@ class SchedulingUnitBlueprint(NamedCommon): super().save(force_insert, force_update, using, update_fields) + @property + def duration(self) -> datetime.timedelta: + '''return the overall duration of all tasks of this scheduling unit + ''' + if self.start_time is None or self.stop_time is None: + return self.relative_stop_time - self.relative_start_time + else: + return self.stop_time - self.start_time # <- todo: do we ever want this? 
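
The draft- and blueprint-level duration properties above all follow the same aggregation rule: a scheduling unit spans from the earliest relative start time to the latest relative stop time of its tasks, falling back to a zero timedelta when there are no tasks yet. A small, plain-Python sketch of that rule (the TaskStub class below is a hypothetical stand-in for the Django task models):

    import datetime
    from typing import Iterable

    class TaskStub:
        def __init__(self, start: datetime.timedelta, stop: datetime.timedelta):
            self.relative_start_time = start
            self.relative_stop_time = stop

    def relative_start_time(tasks: Iterable[TaskStub]) -> datetime.timedelta:
        tasks = list(tasks)
        # earliest start over all tasks, zero when the unit is still empty
        return min((t.relative_start_time for t in tasks), default=datetime.timedelta(seconds=0))

    def relative_stop_time(tasks: Iterable[TaskStub]) -> datetime.timedelta:
        tasks = list(tasks)
        # latest stop over all tasks, zero when the unit is still empty
        return max((t.relative_stop_time for t in tasks), default=datetime.timedelta(seconds=0))

    def duration(tasks) -> datetime.timedelta:
        return relative_stop_time(tasks) - relative_start_time(tasks)

    # e.g. one task from 0s-600s and one from 660s-1260s -> overall duration 1260s
    tasks = [TaskStub(datetime.timedelta(0), datetime.timedelta(seconds=600)),
             TaskStub(datetime.timedelta(seconds=660), datetime.timedelta(seconds=1260))]
    assert duration(tasks) == datetime.timedelta(seconds=1260)
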
+ + @property + def relative_start_time(self) -> datetime.timedelta: + '''return the earliest relative start time of all tasks of this scheduling unit + ''' + task_blueprints = list(self.task_blueprints.all()) + if task_blueprints: + return min(task_blueprints, key=lambda x: x.relative_start_time).relative_start_time + else: + return datetime.timedelta(seconds=0) + + @property + def relative_stop_time(self) -> datetime.timedelta: + '''return the latest relative stop time of all tasks of this scheduling unit + ''' + task_blueprints = list(self.task_blueprints.all()) + if task_blueprints: + return max(task_blueprints, key=lambda x: x.relative_stop_time).relative_stop_time + else: + return datetime.timedelta(seconds=0) + + @property + def start_time(self) -> datetime or None: + '''return the earliest start time of all tasks of this scheduling unit + ''' + tasks_with_start_time = list(filter(lambda x: x.start_time is not None, self.task_blueprints.all())) + if tasks_with_start_time: + return min(tasks_with_start_time, key=lambda x: x.start_time).start_time + else: + return None + + @property + def stop_time(self) -> datetime or None: + '''return the latest stop time of all tasks of this scheduling unit + ''' + tasks_with_stop_time = list(filter(lambda x: x.stop_time is not None, self.task_blueprints.all())) + if tasks_with_stop_time: + return max(tasks_with_stop_time, key=lambda x: x.stop_time).stop_time + else: + return None + class TaskDraft(NamedCommon): specifications_doc = JSONField(help_text='Specifications for this task.') @@ -336,6 +486,88 @@ class TaskDraft(NamedCommon): "INNER JOIN tmssapp_taskrelationdraft as task_rel on task_rel.producer_id = successor_task.id\n" "WHERE task_rel.consumer_id = %s", params=[self.id])) + @property + def duration(self) -> datetime.timedelta: + '''returns the overall duration of this task + ''' + return self.relative_stop_time - self.relative_start_time + + @property + def relative_start_time(self) -> datetime.timedelta: + '''return the earliest relative start time of all subtasks of this task + ''' + scheduling_relations = list(self.first_to_connect.all()) + list(self.second_to_connect.all()) + for scheduling_relation in scheduling_relations: + if scheduling_relation.first.id == self.id and scheduling_relation.placement.value == "after": + previous_related_task_draft = TaskDraft.objects.get(id=scheduling_relation.second.id) + time_offset = scheduling_relation.time_offset + # todo: max of several relations + if previous_related_task_draft.relative_stop_time: + return previous_related_task_draft.relative_stop_time + datetime.timedelta(seconds=time_offset) + + if scheduling_relation.second.id == self.id and scheduling_relation.placement.value == "before": + previous_related_task_draft = TaskDraft.objects.get(id=scheduling_relation.first.id) + time_offset = scheduling_relation.time_offset + # todo: max of several relations + if previous_related_task_draft.relative_stop_time: + return previous_related_task_draft.relative_stop_time + datetime.timedelta(seconds=time_offset) + return datetime.timedelta(seconds=0) + + @property + def relative_stop_time(self) -> datetime.timedelta: + '''return the latest relative stop time of all subtasks of this task + ''' + # todo: when it was added, check if self.specifications_template.type.value == TaskType.Choices.OBSERVATION.value: + try: + duration = self.specifications_doc["duration"] + return self.relative_start_time + datetime.timedelta(seconds=duration) + except: + pass + return self.relative_start_time + + + # JK, 
28/07/20: After discussion with Sander, we probably only want the + # - duration on the scheduling_unit draft (based on relative start/stop times) + # - duration plus relative start/stop on the task draft. + # This provides an estimate of what is currently planned out in the draft, but does not confuse with timestamps of actual start/stop of the blueprints. + # Only on the blueprints, we also aggregate start_stop times as they are in the system + # I'll leave these code bits here for now, until we made up our minds about this, but this can probably be removed + # + # @property + # def duration(self) -> datetime.timedelta: + # '''returns the overall duration in seconds of all blueprints of this task + # # todo: is this the wanted behavior? Do you want to consider all the blueprints created from your draft or do you want to preview a new blueprint? + # ''' + # if self.start_time is None or self.stop_time is None: + # # todo: calculate? + # return None + # else: + # return self.stop_time - self.start_time + # + # @property + # def start_time(self) -> datetime or None: + # '''return the earliest start time of all blueprints of this task + # # todo: is this the wanted behavior? Do you want to consider all the blueprints created from your draft or do you want to preview a new blueprint? + # ''' + # blueprints_with_start_time = list(filter(lambda x: x.start_time is not None, self.task_blueprints.all())) + # if blueprints_with_start_time: + # return min(blueprints_with_start_time, key=lambda x: x.start_time).start_time + # else: + # # todo: calculate? + # return None + # + # @property + # def stop_time(self) -> datetime or None: + # '''return the latest stop time of all blueprints of this task + # # todo: is this the wanted behavior? Do you want to consider all the blueprints created from your draft or do you want to preview a new blueprint? + # ''' + # blueprints_with_stop_time = list(filter(lambda x: x.stop_time is not None, self.task_blueprints.all())) + # if blueprints_with_stop_time: + # return max(blueprints_with_stop_time, key=lambda x: x.stop_time).stop_time + # else: + # # todo: calculate? 
+    #         return None
+
 
 
 class TaskBlueprint(NamedCommon):
     specifications_doc = JSONField(help_text='Schedulings for this task (IMMUTABLE).')
@@ -370,10 +602,72 @@ class TaskBlueprint(NamedCommon):
                                                          "INNER JOIN tmssapp_taskrelationblueprint as task_rel on task_rel.producer_id = predecessor_task.id\n"
                                                          "WHERE task_rel.consumer_id = %s", params=[self.id]))
 
+    @property
+    def duration(self) -> datetime.timedelta:
+        '''return the overall duration of this task
+        '''
+        if self.start_time is None or self.stop_time is None:
+            return self.relative_stop_time - self.relative_start_time
+        else:
+            return self.stop_time - self.start_time
+
+    @property
+    def relative_start_time(self) -> datetime.timedelta:
+        '''return the earliest relative start time of all subtasks of this task
+        '''
+        scheduling_relations = list(self.first_to_connect.all()) + list(self.second_to_connect.all())
+        for scheduling_relation in scheduling_relations:
+            if scheduling_relation.first.id == self.id and scheduling_relation.placement.value == "after":
+                previous_related_task_blueprint = TaskBlueprint.objects.get(id=scheduling_relation.second.id)
+                time_offset = scheduling_relation.time_offset
+                # todo: max of several relations
+                if previous_related_task_blueprint.relative_stop_time:
+                    return previous_related_task_blueprint.relative_stop_time + datetime.timedelta(seconds=time_offset)
+
+            if scheduling_relation.second.id == self.id and scheduling_relation.placement.value == "before":
+                previous_related_task_blueprint = TaskBlueprint.objects.get(id=scheduling_relation.first.id)
+                time_offset = scheduling_relation.time_offset
+                # todo: max of several relations
+                if previous_related_task_blueprint.relative_stop_time:
+                    return previous_related_task_blueprint.relative_stop_time + datetime.timedelta(seconds=time_offset)
+        return datetime.timedelta(seconds=0)
+
+    @property
+    def relative_stop_time(self) -> datetime.timedelta:
+        '''return the latest relative stop time of all subtasks of this task
+        '''
+        # todo: when it was added, check if subtask.specifications_template.type.value == TaskType.Choices.OBSERVATION.value:
+        try:
+            duration = self.specifications_doc["duration"]
+            return self.relative_start_time + datetime.timedelta(seconds=duration)
+        except:
+            pass
+        return self.relative_start_time
+
+    @property
+    def start_time(self) -> datetime or None:
+        '''return the earliest start time of all subtasks of this task
+        '''
+        subtasks_with_start_time = list(filter(lambda x: x.start_time is not None, self.subtasks.all()))
+        if subtasks_with_start_time:
+            return min(subtasks_with_start_time, key=lambda x: x.start_time).start_time
+        else:
+            return None
+
+    @property
+    def stop_time(self) -> datetime or None:
+        '''return the latest stop time of all subtasks of this task
+        '''
+        subtasks_with_stop_time = list(filter(lambda x: x.stop_time is not None, self.subtasks.all()))
+        if subtasks_with_stop_time:
+            return max(subtasks_with_stop_time, key=lambda x: x.stop_time).stop_time
+        else:
+            return None
+
 
 class TaskRelationDraft(BasicCommon):
     selection_doc = JSONField(help_text='Filter for selecting dataproducts from the output role.')
-    selection_template = ForeignKey('TaskRelationSelectionTemplate', on_delete=CASCADE, help_text='Schema used for selection_doc.') # todo: 'schema'?
+    selection_template = ForeignKey('TaskRelationSelectionTemplate', on_delete=CASCADE, help_text='Schema used for selection_doc.')
     dataformat = ForeignKey('Dataformat', null=False, on_delete=PROTECT, help_text='Selected data format to use.
One of (MS, HDF5).') # caveat: it might look like producer has an incorrect related_name='consumed_by'. But it really is correct, depends on the way you look at it
@@ -412,15 +706,30 @@ class TaskRelationBlueprint(BasicCommon):
         super().save(force_insert, force_update, using, update_fields)
 
 
+class TaskSchedulingRelationBlueprint(BasicCommon):
+    first = ForeignKey('TaskBlueprint', related_name='first_to_connect', on_delete=CASCADE, help_text='First Task Blueprint to connect.')
+    second = ForeignKey('TaskBlueprint', related_name='second_to_connect', on_delete=CASCADE, help_text='Second Task Blueprint to connect.')
+    placement = ForeignKey('SchedulingRelationPlacement', null=False, default="after", on_delete=PROTECT, help_text='Task scheduling relation placement.')
+    time_offset = IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.')
+
+    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
+        if self.first == self.second:
+            raise ValidationError("First and Second Task Blueprint must be different.")
+        if (str(self.placement) == SchedulingRelationPlacement.Choices.BEFORE.value or str(self.placement) == SchedulingRelationPlacement.Choices.AFTER.value) and self.time_offset < 0:
+            raise ValidationError("Time_offset must be >= 0")
+        super().save(force_insert, force_update, using, update_fields)
+
+
 class TaskSchedulingRelationDraft(BasicCommon):
-    placement = ForeignKey('Placement', null=False, on_delete=PROTECT, help_text='Placement of first and second Task Draft')
-    time_offset = FloatField(help_text='Time Offset between first and second Task Draft')
-    first = ForeignKey('TaskDraft', related_name='scheduling_relation_first', on_delete=CASCADE, help_text='Name of first Task Draft')
-    second = ForeignKey('TaskDraft', related_name='scheduling_relation_second', on_delete=CASCADE, help_text='Name of second Task Draft.')
+    first = ForeignKey('TaskDraft', related_name='first_to_connect', on_delete=CASCADE, help_text='First Task Draft to connect.')
+    second = ForeignKey('TaskDraft', related_name='second_to_connect', on_delete=CASCADE, help_text='Second Task Draft to connect.')
+    placement = ForeignKey('SchedulingRelationPlacement', null=False, on_delete=PROTECT, help_text='Task scheduling relation placement.')
+    time_offset = IntegerField(default=60, help_text='Time offset of start of second task with respect to start of first task.')
 
+    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
+        if self.first == self.second:
+            raise ValidationError("First and Second Task Draft must be different.")
+        if (str(self.placement) == SchedulingRelationPlacement.Choices.BEFORE.value or str(self.placement) == SchedulingRelationPlacement.Choices.AFTER.value) and self.time_offset < 0:
+            raise ValidationError("Time_offset must be >= 0")
+        super().save(force_insert, force_update, using, update_fields)
 
-class TaskSchedulingRelationBlueprint(BasicCommon):
-    placement = ForeignKey('Placement', null=False, on_delete=PROTECT, help_text='Placement of first and second Task Blueprint')
-    time_offset = FloatField(help_text='Time Offset between first and second Task Blueprint')
-    first = ForeignKey('TaskBlueprint', related_name='scheduling_relation_first', on_delete=CASCADE, help_text='Name of first Task Blueprint')
-    second = ForeignKey('TaskBlueprint', related_name='scheduling_relation_second', on_delete=CASCADE, help_text='Name of second Task Blueprint.')
diff --git a/SAS/TMSS/src/tmss/tmssapp/populate.py
b/SAS/TMSS/src/tmss/tmssapp/populate.py index 6f231a7191f2ed9be2276fb6bf9f6af7766e3b41..63d8987efe87941bb8a5ae105bc85747ead67727 100644 --- a/SAS/TMSS/src/tmss/tmssapp/populate.py +++ b/SAS/TMSS/src/tmss/tmssapp/populate.py @@ -19,7 +19,8 @@ logger = logging.getLogger(__name__) import json import os -from lofar.sas.tmss.tmss.tmssapp.subtasks import * +from datetime import datetime, timezone +from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.tmss.tmssapp.models.specification import * from lofar.sas.tmss.tmss.tmssapp.models.scheduling import * from lofar.common.json_utils import * @@ -34,8 +35,9 @@ def populate_choices(apps, schema_editor): each 'choice'type in Role, Datatype, Dataformat, CopyReason :return: None ''' - for choice_class in [Role, Datatype, Dataformat, CopyReason, Placement, Flag, - SubtaskState, SubtaskType, StationType, Algorithm, ScheduleMethod]: + for choice_class in [Role, Datatype, Dataformat, CopyReason, + SubtaskState, SubtaskType, StationType, Algorithm, ScheduleMethod, SchedulingRelationPlacement, + Flag, ProjectCategory, PeriodCategory, Quantity]: choice_class.objects.bulk_create([choice_class(value=x.value) for x in choice_class.Choices]) def populate_settings(apps, schema_editor): @@ -60,109 +62,197 @@ def populate_lofar_json_schemas(apps, schema_editor): _populate_qa_files_subtask_template() _populate_qa_plots_subtask_template() - #_populate_task_draft_example() # Should be removed - _populate_test_scheduling_set() - -def _populate_test_scheduling_set(): +def populate_test_data(): """ Create a Test Schedule Set to be able to refer to when Scheduling Unit Draft is created from a scheduling unit json :return: """ try: - from lofar.sas.tmss.tmss.tmssapp import models - from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingSet_test_data + # only add (with expensive setup time) example data when developing/testing and we're not unittesting if isTestEnvironment() or isDevelopmentEnvironment(): - scheduling_set_data = SchedulingSet_test_data(name="Test Scheduling Set UC1") - models.SchedulingSet.objects.create(**scheduling_set_data) + from lofar.sas.tmss.tmss.exceptions import TMSSException + from lofar.sas.tmss.test.tmss_test_data_django_models import SchedulingSet_test_data, SchedulingUnitDraft_test_data + from lofar.sas.tmss.tmss.tmssapp.tasks import create_task_blueprints_and_subtasks_from_scheduling_unit_draft, create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_draft + + # create a Test Scheduling Set UC1 under project TMSS-Commissioning + tmss_project = models.Project.objects.get(name="TMSS-Commissioning") + for set_nr in range(3): + scheduling_set_data = SchedulingSet_test_data(name="Test Scheduling Set UC1 example %s" % (set_nr,), project=tmss_project) + scheduling_set = models.SchedulingSet.objects.create(**scheduling_set_data) + scheduling_set.tags = ["TEST"] + scheduling_set.save() + + for unit_nr in range(3): + # construct a scheduling_unit_doc, i.e.: a specification of interrelated tasks which conforms the scheduling unit schema + # by default, this scheduling_unit_doc holds no tasks, so lets setup the UC1 sequence of tasks here, and add it to the scheduling_unit_doc + scheduling_unit_template = models.SchedulingUnitTemplate.objects.get(name="scheduling unit schema") + scheduling_unit_doc = get_default_json_object_for_schema(scheduling_unit_template.schema) + + # create and add a calibrator task spec + # Change autoselect to False (or provide tile_beam pointings for Target Observation) to avoid 
Exception + json_schema_calibrator = get_default_json_object_for_schema(models.TaskTemplate.objects.get(name="calibrator schema").schema) + json_schema_calibrator['autoselect'] = False + scheduling_unit_doc['tasks'].append({"name": "Calibrator Observation 1", + "description": "Calibrator Observation for UC1 HBA scheduling unit", + "specifications_doc": json_schema_calibrator, + "specifications_template": "calibrator schema"}) + + # create and add a calibrator preprocessing spec + scheduling_unit_doc['tasks'].append({"name": "Pipeline Calibrator1", + "description": "Preprocessing Pipeline for Calibrator Observation 1", + "specifications_doc": get_default_json_object_for_schema(models.TaskTemplate.objects.get(name="preprocessing schema").schema), + "specifications_template": "preprocessing schema"}) + + # create and add a target obs spec + scheduling_unit_doc['tasks'].append({"name": "Target Observation", + "description": "Target Observation for UC1 HBA scheduling unit", + "specifications_doc": get_default_json_object_for_schema(models.TaskTemplate.objects.get(name="observation schema").schema), + "specifications_template": "observation schema"}) + + # create and add a target pipeline spec for sap0 + scheduling_unit_doc['tasks'].append({"name": "Preprocessing Pipeline SAP0", + "description": "Preprocessing Pipeline for Target Observation SAP0", + "specifications_doc": get_default_json_object_for_schema(models.TaskTemplate.objects.get(name="preprocessing schema").schema), + "specifications_template": "preprocessing schema"}) + + # create and add a target pipeline spec for sap1 + scheduling_unit_doc['tasks'].append({"name": "Preprocessing Pipeline SAP1", + "description": "Preprocessing Pipeline for Target Observation SAP1", + "specifications_doc": get_default_json_object_for_schema(models.TaskTemplate.objects.get(name="preprocessing schema").schema), + "specifications_template": "preprocessing schema"}) + + # create and add a calibrator task spec + scheduling_unit_doc['tasks'].append({"name": "Calibrator Observation 2", + "description": "Calibrator Observation for UC1 HBA scheduling unit", + "specifications_doc": json_schema_calibrator, + "specifications_template": "calibrator schema"}) + + # create and add a calibrator preprocessing spec + scheduling_unit_doc['tasks'].append({"name": "Pipeline Calibrator2", + "description": "Preprocessing Pipeline for Calibrator Observation 2", + "specifications_doc": get_default_json_object_for_schema(models.TaskTemplate.objects.get(name="preprocessing schema").schema), + "specifications_template": "preprocessing schema"}) + + # ----- end of tasks + + # setup task_scheduling_relations between Target and Calibrator observations + scheduling_unit_doc['task_scheduling_relations'].append({"first": "Calibrator Observation 1", + "second": "Target Observation", + "placement": "before", + "time_offset": 60 }) + scheduling_unit_doc['task_scheduling_relations'].append({"first": "Calibrator Observation 2", + "second": "Target Observation", + "placement": "after", + "time_offset": 60 }) + + # ----- end of task_scheduling_relations + + #TODO: check various input/output datatypes and roles for each task_relation + scheduling_unit_doc['task_relations'].append({"producer": "Calibrator Observation 1", + "consumer": "Pipeline Calibrator1", + "tags": [], + "input": { "role": "input", "datatype": "visibilities" }, + "output": { "role": "correlator", "datatype": "visibilities" }, + "dataformat": "MeasurementSet", + "selection_doc": {}, + "selection_template": "All" }) + + 
scheduling_unit_doc['task_relations'].append({"producer": "Calibrator Observation 2", + "consumer": "Pipeline Calibrator2", + "tags": [], + "input": { "role": "input", "datatype": "visibilities" }, + "output": { "role": "correlator", "datatype": "visibilities" }, + "dataformat": "MeasurementSet", + "selection_doc": {}, + "selection_template": "All" }) + + scheduling_unit_doc['task_relations'].append({"producer": "Target Observation", + "consumer": "Preprocessing Pipeline SAP0", + "tags": [], + "input": { "role": "input", "datatype": "visibilities" }, + "output": { "role": "correlator", "datatype": "visibilities" }, + "dataformat": "MeasurementSet", + "selection_doc": {"sap": [0]}, + "selection_template": "SAP" }) + + scheduling_unit_doc['task_relations'].append({"producer": "Target Observation", + "consumer": "Preprocessing Pipeline SAP1", + "tags": [], + "input": { "role": "input", "datatype": "visibilities" }, + "output": { "role": "correlator", "datatype": "visibilities" }, + "dataformat": "MeasurementSet", + "selection_doc": {"sap": [1]}, + "selection_template": "SAP" }) + + # finally... add the scheduling_unit_doc to a new SchedulingUnitDraft instance, and were ready to use it! + scheduling_unit_data = SchedulingUnitDraft_test_data(name="Test Scheduling Unit UC1 example %s.%s" % (set_nr, unit_nr), scheduling_set=scheduling_set, + template=scheduling_unit_template, requirements_doc=scheduling_unit_doc) + scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(**scheduling_unit_data) + + try: + if set_nr==0 and unit_nr==0: + create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + else: + create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + except TMSSException as e: + logger.error(e) except ImportError: pass -def _populate_task_draft_example(): - """ - Create a Task Draft 'Target Observation' - :return: - """ - try: - from datetime import datetime, timezone - from lofar.sas.tmss.tmss.tmssapp import models - from lofar.sas.tmss.test.tmss_test_data_django_models import TaskDraft_test_data - - if isTestEnvironment() or isDevelopmentEnvironment(): - for nr in range(0, 18): - models.Cycle.objects.create(name="Cycle %s" % nr, +def populate_cycles(apps, schema_editor): + for nr in range(0, 18): + cycle = models.Cycle.objects.create(name="Cycle %02d" % nr, description="Lofar Cycle %s" % nr, start=datetime(2013+nr//2, 6 if nr%2==0 else 11, 1, 0, 0, 0, 0, tzinfo=timezone.utc), - stop=datetime(2013+(nr+1)//2, 6 if nr%2==1 else 11, 1, 0, 0, 0, 0, tzinfo=timezone.utc), - number=nr, - standard_hours=0, # TODO: fill in cycle hours - expert_hours=0, - filler_hours=0) - - tmss_project = models.Project.objects.create(cycle=models.Cycle.objects.get(number=14), - name="TMSS-Commissioning", - description="Project for all TMSS tests and commissioning", - priority=1, - can_trigger=False, - private_data=True, - expert=True, - filler=False) - - scheduling_set = models.SchedulingSet.objects.create(name="UC1 test set", - description="UC1 test set", - project=tmss_project) - - requirements_template = models.SchedulingUnitTemplate.objects.create(name="UC1 test scheduling unit template", - description="UC1 test scheduling unit template", - version="0.1", - schema={}) - - scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(name="UC1 test scheduling unit", - description="UC1 test scheduling unit", - scheduling_set=scheduling_set, - requirements_template=requirements_template, - 
requirements_doc=get_default_json_object_for_schema(requirements_template.schema)) - - obs_task_template = models.TaskTemplate.objects.get(name='correlator schema') - task_draft_data = TaskDraft_test_data(name="Test Target Observation", specifications_template=obs_task_template, scheduling_unit_draft=scheduling_unit_draft) - obs_task_draft = models.TaskDraft.objects.create(**task_draft_data) - - pipeline_task_template = models.TaskTemplate.objects.get(name='preprocessing schema') - task_draft_data = TaskDraft_test_data(name="Test PreProcessingPipeline Task", - specifications_template=pipeline_task_template, - scheduling_unit_draft=obs_task_draft.scheduling_unit_draft) - pipeline_task_draft = models.TaskDraft.objects.create(**task_draft_data) - - # connect them - connector_type = models.TaskConnectorType.objects.first() # TODO: get the correct connectortype instead of the first - selection_template = models.TaskRelationSelectionTemplate.objects.get(name="SAP") - selection_doc = get_default_json_object_for_schema(selection_template.schema) - selection_doc['sap'] = [0] - - task_relation_data = {"tags": [], - "dataformat": models.Dataformat.objects.get(value='MeasurementSet'), - "producer": obs_task_draft, - "consumer": pipeline_task_draft, - "input_role": connector_type, - "output_role": connector_type, - "selection_doc": selection_doc, - "selection_template": selection_template } - models.TaskRelationDraft.objects.create(**task_relation_data) - - except ImportError: - pass + stop=datetime(2013+(nr+1)//2, 6 if nr%2==1 else 11, 1, 0, 0, 0, 0, tzinfo=timezone.utc)) + models.CycleQuota.objects.create(cycle=cycle, + resource_type=ResourceType.objects.get(name="observing_time"), + value=0.8*cycle.duration.total_seconds()) # rough guess. 80% of total time available for observing + models.CycleQuota.objects.create(cycle=cycle, + resource_type=ResourceType.objects.get(name="cep_processing_time"), + value=0.8*cycle.duration.total_seconds()) + models.CycleQuota.objects.create(cycle=cycle, + resource_type=ResourceType.objects.get(name="lta_storage"), + value=0) # needs to be filled in by user (SOS) + models.CycleQuota.objects.create(cycle=cycle, + resource_type=ResourceType.objects.get(name="support_time"), + value=0) # needs to be filled in by user (SOS) + models.CycleQuota.objects.create(cycle=cycle, + resource_type=ResourceType.objects.get(name="observing_time_commissioning"), + value=0.05*cycle.duration.total_seconds()) # rough guess. 
5% of total time available for observing + models.CycleQuota.objects.create(cycle=cycle, + resource_type=ResourceType.objects.get(name="observing_time_prio_a"), + value=0) # needs to be filled in by user (SOS) + models.CycleQuota.objects.create(cycle=cycle, + resource_type=ResourceType.objects.get(name="observing_time_prio_b"), + value=0) # needs to be filled in by user (SOS) + + +def populate_projects(apps, schema_editor): + tmss_project = models.Project.objects.create(name="TMSS-Commissioning", + description="Project for all TMSS tests and commissioning", + priority_rank=1.0, + can_trigger=False, + private_data=True, + expert=True, + filler=False) + tmss_project.cycles.set([models.Cycle.objects.get(name="Cycle 14")]) def populate_resources(apps, schema_editor): - ru_bytes = ResourceUnit.objects.create(name="bytes", description="Bytes") - ru_hours = ResourceUnit.objects.create(name="hours", description="duration in hours") - - ResourceType.objects.create(name="lta_storage", description="Amount of storage in LTA", resource_unit=ru_bytes) - ResourceType.objects.create(name="cep_storage", description="Amount of storage at CEP processing cluster", resource_unit=ru_bytes) - ResourceType.objects.create(name="cep_processing_hours", description="Number of processing hours for CEP processing cluster", resource_unit=ru_hours) + ResourceType.objects.create(name="lta_storage", description="Amount of storage in the LTA (in bytes)", quantity=Quantity.objects.get(value=Quantity.Choices.BYTES.value)) + ResourceType.objects.create(name="cep_storage", description="Amount of storage on the CEP processing cluster (in bytes)", quantity=Quantity.objects.get(value=Quantity.Choices.BYTES.value)) + ResourceType.objects.create(name="cep_processing_time", description="Processing time on the CEP processing cluster (in seconds)", quantity=Quantity.objects.get(value=Quantity.Choices.TIME.value)) + ResourceType.objects.create(name="observing_time", description="Observing time (in seconds)", quantity=Quantity.objects.get(value=Quantity.Choices.TIME.value)) + ResourceType.objects.create(name="observing_time_prio_a", description="Observing time with priority A (in seconds)", quantity=Quantity.objects.get(value=Quantity.Choices.TIME.value)) + ResourceType.objects.create(name="observing_time_prio_b", description="Observing time with priority B (in seconds)", quantity=Quantity.objects.get(value=Quantity.Choices.TIME.value)) + ResourceType.objects.create(name="observing_time_commissioning", description="Observing time for Commissioning/DDT (in seconds)", quantity=Quantity.objects.get(value=Quantity.Choices.TIME.value)) + ResourceType.objects.create(name="support_time", description="Support time by human (in seconds)", quantity=Quantity.objects.get(value=Quantity.Choices.TIME.value)) + ResourceType.objects.create(name="number_of_triggers", description="Number of trigger events (as integer)", quantity=Quantity.objects.get(value=Quantity.Choices.NUMBER.value)) def populate_misc(apps, schema_editor): diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling-unit.json b/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling-unit.json index ee006e603ecb695ed3d87e9c10a49929c3b4599b..ba879a079db4ee21158f0aa6363bc14e41ea5f29 100644 --- a/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling-unit.json +++ b/SAS/TMSS/src/tmss/tmssapp/schemas/scheduling-unit.json @@ -29,23 +29,28 @@ "type": "array", "additionalItems": false, "uniqueItems": true, + "default": [], "items": { "type": "object", "title": "Task", "additionalProperties": false, + "default": {}, 
"properties": { "name": { "type": "string", - "title": "Name (unique)" + "title": "Name (unique)", + "default": "Default Task" }, "description": { "type": "string", - "title": "Description" + "title": "Description", + "default": "" }, "tags": { "type": "array", "addtionalItems": false, "uniqueItems": true, + "default": [], "items": { "type": "string", "title": "Tag" @@ -54,11 +59,13 @@ "specifications_doc": { "type": "object", "title": "Specifications", - "addtionalProperties": true + "addtionalProperties": false, + "default": {} }, "specifications_template": { "type": "string", - "title": "Name of Template for Specifications" + "title": "Name of Template for Specifications", + "default": "" } }, "required": [ @@ -73,10 +80,12 @@ "type": "array", "additionalItems": false, "uniqueItems": true, + "default": [], "items": { "type": "object", "title": "Task Relation", "additionalProperties": false, + "default": {}, "properties": { "producer": { "type": "string", @@ -90,6 +99,7 @@ "type": "array", "addtionalItems": false, "uniqueItems": true, + "default": [], "items": { "type": "string", "title": "Tag" @@ -97,11 +107,13 @@ }, "input": { "title": "Input I/O Connector", - "$ref": "#/definitions/task_connector" + "$ref": "#/definitions/task_connector", + "default": {} }, "output": { "title": "Output I/O Connector", - "$ref": "#/definitions/task_connector" + "$ref": "#/definitions/task_connector", + "default": {} }, "dataformat": { "type": "string", @@ -131,6 +143,7 @@ "type": "array", "additionalItems": false, "uniqueItems": true, + "default": [], "items": { "type": "object", "title": "Task Scheduling Relation", diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/task-calibrator-addon.json b/SAS/TMSS/src/tmss/tmssapp/schemas/task-calibrator-addon.json index 997ee4d4c86f05cb23f545547eecf2485825b433..a1642f634d20f905c7dbca91b0ad078c27c0479b 100644 --- a/SAS/TMSS/src/tmss/tmssapp/schemas/task-calibrator-addon.json +++ b/SAS/TMSS/src/tmss/tmssapp/schemas/task-calibrator-addon.json @@ -31,19 +31,19 @@ "angle1": { "type": "number", "title": "Angle 1", - "description": "First angle (f.e. RA)", + "description": "First angle [rad] (e.g. RA)", "default": 0 }, "angle2": { "type": "number", "title": "Angle 2", - "description": "Second angle (f.e. DEC)", + "description": "Second angle [rad] (e.g. DEC)", "default": 0 }, "angle3": { "type": "number", "title": "Angle 3", - "description": "Third angle (f.e. N in LMN)", + "description": "Third angle [rad] (e.g. 
N in LMN)", "default": 0 } }, @@ -71,10 +71,11 @@ "pointing": { "title": "Digital pointing", "description": "Manually selected calibrator", - "$ref": "#/definitions/pointing" + "$ref": "#/definitions/pointing", + "default": {} } }, "required": [ - "autoselect" + "autoselect", "duration", "pointing" ] } \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/tmssapp/schemas/task-observation-with-stations.json b/SAS/TMSS/src/tmss/tmssapp/schemas/task-observation-with-stations.json index d1e1fd20c1eec0d688d6d54e81fcf42ca8406b02..278ccb2a816bc645290dcafbddcf5d9d83eece79 100644 --- a/SAS/TMSS/src/tmss/tmssapp/schemas/task-observation-with-stations.json +++ b/SAS/TMSS/src/tmss/tmssapp/schemas/task-observation-with-stations.json @@ -57,7 +57,7 @@ "properties": { "stations": { "title": "Station list", - "default": [{}], + "default": ["CS001"], "oneOf": [ { "type": "array", diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py index fb9acf45af5d7e3a7e7c64f8600a263be8793451..e5d0a521a9eb121b8ea254199de128188a9f2d10 100644 --- a/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/scheduling.py @@ -10,7 +10,7 @@ from .. import models from .specification import RelationalHyperlinkedModelSerializer from .widgets import JSONEditorField -class SubtaskStateSerializer(serializers.ModelSerializer): +class SubtaskStateSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.SubtaskState fields = '__all__' @@ -22,25 +22,25 @@ class SubtaskStateLogSerializer(RelationalHyperlinkedModelSerializer): fields = '__all__' -class SubtaskTypeSerializer(serializers.ModelSerializer): +class SubtaskTypeSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.SubtaskType fields = '__all__' -class StationTypeSerializer(serializers.ModelSerializer): +class StationTypeSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.StationType fields = '__all__' -class AlgorithmSerializer(serializers.ModelSerializer): +class AlgorithmSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.Algorithm fields = '__all__' -class ScheduleMethodSerializer(serializers.ModelSerializer): +class ScheduleMethodSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.ScheduleMethod fields = '__all__' diff --git a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py index a8b74f8dbfd3d7c83eb8c395b483a49c5892de5e..f034bc0acc0376ba2725c25c28f04e5f4ce56d90 100644 --- a/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/serializers/specification.py @@ -16,9 +16,7 @@ class RelationalHyperlinkedModelSerializer(serializers.HyperlinkedModelSerialize def get_field_names(self, declared_fields, info): field_names = super().get_field_names(declared_fields, info) - - # always add the primay key as well, cause it makes parsing in the client so much easier (no 'id' extraction from urls) - field_names.append(info.pk.name) + field_names.remove(self.url_field_name) # is added later, see return statement if getattr(self.Meta, 'extra_fields', None): field_names += self.Meta.extra_fields @@ -38,8 +36,8 @@ class RelationalHyperlinkedModelSerializer(serializers.HyperlinkedModelSerialize if rel.related_model._meta.pk.name in self._accepted_pk_names and name in field_names] - # return them sorted alphabetically - return sorted(field_names + choice_field_names + forward_related_field_names + reverse_related_field_names)
+ # return them sorted alphabetically, with id and url first so it's easy to identify and 'click' them + return [info.pk.name, self.url_field_name] + sorted(field_names + choice_field_names + forward_related_field_names + reverse_related_field_names) def build_field(self, field_name, info, model_class, nested_depth): '''override of super.build_field to handle 'choice' fields''' @@ -68,6 +66,14 @@ class RelationalHyperlinkedModelSerializer(serializers.HyperlinkedModelSerialize 'read_only':True} +class FloatDurationField(serializers.FloatField): + + # Turn a timedelta into a float representation in seconds. + # (Timedeltas are otherwise by default turned into a string representation) + def to_representation(self, value): + return value.total_seconds() + + # This is required for keeping a user reference as ForeignKey in other models # (I think so that the HyperlinkedModelSerializer can generate a URI) class UserSerializer(serializers.Serializer): @@ -135,6 +141,11 @@ class RoleSerializer(serializers.ModelSerializer): model = models.Role fields = '__all__' +class SchedulingRelationPlacementSerializer(serializers.ModelSerializer): + class Meta: + model = models.SchedulingRelationPlacement + fields = '__all__' + class DatatypeSerializer(serializers.ModelSerializer): class Meta: @@ -142,13 +153,19 @@ class DatatypeSerializer(serializers.ModelSerializer): fields = '__all__' -class DataformatSerializer(serializers.ModelSerializer): +class DataformatSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.Dataformat fields = '__all__' -class CopyReasonSerializer(serializers.ModelSerializer): +class QuantitySerializer(RelationalHyperlinkedModelSerializer): + class Meta: + model = models.Quantity + fields = '__all__' + + +class CopyReasonSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.CopyReason fields = '__all__' @@ -161,11 +178,18 @@ class TaskConnectorTypeSerializer(RelationalHyperlinkedModelSerializer): class CycleSerializer(RelationalHyperlinkedModelSerializer): + duration = FloatDurationField(read_only=True, help_text="Duration of the cycle [seconds]") + class Meta: model = models.Cycle fields = '__all__' - extra_fields = ['projects', 'name'] + extra_fields = ['projects', 'name', 'duration', 'quota'] +class CycleQuotaSerializer(RelationalHyperlinkedModelSerializer): + class Meta: + model = models.CycleQuota + fields = '__all__' + extra_fields = ['resource_type'] class ProjectSerializer(RelationalHyperlinkedModelSerializer): # scheduling_sets = serializers.PrimaryKeyRelatedField(source='scheduling_sets', read_only=True, many=True) @@ -173,7 +197,7 @@ class ProjectSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.Project fields = '__all__' - extra_fields = ['name','project_quota'] #, 'scheduling_sets'] + extra_fields = ['name','quota'] #, 'scheduling_sets'] class ProjectQuotaSerializer(RelationalHyperlinkedModelSerializer): @@ -183,13 +207,6 @@ class ProjectQuotaSerializer(RelationalHyperlinkedModelSerializer): extra_fields = ['resource_type'] -class ResourceUnitSerializer(RelationalHyperlinkedModelSerializer): - class Meta: - model = models.ResourceUnit - fields = '__all__' - extra_fields = ['name'] - - class ResourceTypeSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.ResourceType @@ -197,18 +214,30 @@ extra_fields = ['name'] -class 
FlagSerializer(serializers.ModelSerializer): +class FlagSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.Flag fields = '__all__' -class SettingSerializer(serializers.ModelSerializer): +class SettingSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.Setting fields = '__all__' +class ProjectCategorySerializer(RelationalHyperlinkedModelSerializer): + class Meta: + model = models.ProjectCategory + fields = '__all__' + + +class PeriodCategorySerializer(RelationalHyperlinkedModelSerializer): + class Meta: + model = models.PeriodCategory + fields = '__all__' + + class SchedulingSetSerializer(RelationalHyperlinkedModelSerializer): # Create a JSON editor form to replace the simple text field based on the schema in the template that this @@ -228,6 +257,8 @@ class SchedulingSetSerializer(RelationalHyperlinkedModelSerializer): class SchedulingUnitDraftSerializer(RelationalHyperlinkedModelSerializer): + duration = FloatDurationField(required=False) + # Create a JSON editor form to replace the simple text field based on the schema in the template that this # draft refers to. If that fails, the JSONField remains a standard text input. def __init__(self, *args, **kwargs): @@ -240,11 +271,13 @@ class SchedulingUnitDraftSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.SchedulingUnitDraft fields = '__all__' - extra_fields = ['scheduling_unit_blueprints', 'task_drafts'] + extra_fields = ['scheduling_unit_blueprints', 'task_drafts', 'duration'] class SchedulingUnitBlueprintSerializer(RelationalHyperlinkedModelSerializer): + duration = FloatDurationField(required=False) + # Create a JSON editor form to replace the simple text field based on the schema in the template that this # draft refers to. If that fails, the JSONField remains a standard text input. def __init__(self, *args, **kwargs): @@ -257,11 +290,15 @@ class SchedulingUnitBlueprintSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.SchedulingUnitBlueprint fields = '__all__' - extra_fields = ['task_blueprints'] + extra_fields = ['task_blueprints', 'duration', 'start_time', 'stop_time'] class TaskDraftSerializer(RelationalHyperlinkedModelSerializer): + duration = FloatDurationField(required=False) + relative_start_time = FloatDurationField(required=False) + relative_stop_time = FloatDurationField(required=False) + # Create a JSON editor form to replace the simple text field based on the schema in the template that this # draft refers to. If that fails, the JSONField remains a standard text input. def __init__(self, *args, **kwargs): @@ -274,11 +311,15 @@ class TaskDraftSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.TaskDraft fields = '__all__' - extra_fields = ['task_blueprints', 'produced_by', 'consumed_by', 'scheduling_relation_first', 'scheduling_relation_second'] + extra_fields = ['task_blueprints', 'produced_by', 'consumed_by', 'first_to_connect', 'second_to_connect', 'duration', 'relative_start_time', 'relative_stop_time'] class TaskBlueprintSerializer(RelationalHyperlinkedModelSerializer): + duration = FloatDurationField(required=False) + relative_start_time = FloatDurationField(required=False) + relative_stop_time = FloatDurationField(required=False) + # Create a JSON editor form to replace the simple text field based on the schema in the template that this # draft refers to. If that fails, the JSONField remains a standard text input. 
def __init__(self, *args, **kwargs): @@ -291,7 +332,7 @@ class TaskBlueprintSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.TaskBlueprint fields = '__all__' - extra_fields = ['subtasks', 'produced_by', 'consumed_by', 'scheduling_relation_first', 'scheduling_relation_second'] + extra_fields = ['subtasks', 'produced_by', 'consumed_by', 'first_to_connect', 'second_to_connect', 'duration', 'start_time', 'stop_time', 'relative_start_time', 'relative_stop_time'] class TaskRelationDraftSerializer(RelationalHyperlinkedModelSerializer): @@ -327,19 +368,17 @@ class TaskRelationBlueprintSerializer(RelationalHyperlinkedModelSerializer): fields = '__all__' -class PlacementSerializer(serializers.ModelSerializer): - class Meta: - model = models.Placement - fields = '__all__' - - -class TaskSchedulingRelationDraftSerializer(serializers.HyperlinkedModelSerializer): +class TaskSchedulingRelationDraftSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.TaskSchedulingRelationDraft fields = '__all__' -class TaskSchedulingRelationBlueprintSerializer(serializers.HyperlinkedModelSerializer): +class TaskSchedulingRelationBlueprintSerializer(RelationalHyperlinkedModelSerializer): class Meta: model = models.TaskSchedulingRelationBlueprint fields = '__all__' + + + + diff --git a/SAS/TMSS/src/tmss/tmssapp/subtasks.py b/SAS/TMSS/src/tmss/tmssapp/subtasks.py index 8cb12d96d0d99434c65dc7ae5d406e1ea6ef8698..110a3c609b5e3bfc99f8f9439b5b04c793ee4e1c 100644 --- a/SAS/TMSS/src/tmss/tmssapp/subtasks.py +++ b/SAS/TMSS/src/tmss/tmssapp/subtasks.py @@ -7,6 +7,7 @@ from collections.abc import Iterable from lofar.common.datetimeutils import formatDatetime from lofar.common import isProductionEnvironment from lofar.common.json_utils import add_defaults_to_json_object_for_schema, get_default_json_object_for_schema +from lofar.common.lcu_utils import get_current_stations from lofar.sas.tmss.tmss.exceptions import SubtaskCreationException, SubtaskSchedulingException @@ -14,6 +15,10 @@ from datetime import datetime, timedelta from lofar.common.datetimeutils import parseDatetime from lofar.common.json_utils import add_defaults_to_json_object_for_schema from lofar.sas.tmss.tmss.tmssapp.models import * +from lofar.sas.resourceassignment.resourceassigner.rarpc import RARPC +from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC +from lofar.sas.tmss.tmss.tmssapp.adapters.parset import convert_to_parset_dict +from lofar.sas.resourceassignment.taskprescheduler.cobaltblocksize import CorrelatorSettings, BlockConstraints, BlockSize # ==== various create* methods to convert/create a TaskBlueprint into one or more Subtasks ==== @@ -29,20 +34,171 @@ def create_subtasks_from_task_blueprint(task_blueprint: TaskBlueprint) -> [Subta # fixed mapping from template name to generator functions which create the list of subtask(s) for this task_blueprint generators_mapping = {'observation schema': [create_observation_control_subtask_from_task_blueprint, - create_qafile_subtask_from_task_blueprint, - create_qaplots_subtask_from_task_blueprint], + create_qafile_subtask_from_task_blueprint, + create_qaplots_subtask_from_task_blueprint], 'preprocessing schema': [create_preprocessing_subtask_from_task_blueprint]} + generators_mapping['calibrator schema'] = generators_mapping['observation schema'] template_name = task_blueprint.specifications_template.name if template_name in generators_mapping: generators = generators_mapping[template_name] - subtasks = [generator(task_blueprint) for 
generator in generators] - subtasks = [s for s in subtasks if s is not None] + subtasks = [] + for generator in generators: + try: + subtask = generator(task_blueprint) + if subtask is not None: + subtasks.append(subtask) + except SubtaskCreationException as e: + logger.error(e) return subtasks else: + logger.error('Cannot create subtasks for task id=%s because no generator exists for its schema name=%s' % (task_blueprint.pk, template_name)) raise SubtaskCreationException('Cannot create subtasks for task id=%s because no generator exists for its schema name=%s' % (task_blueprint.pk, template_name)) +def create_observation_subtask_specifications_from_observation_task_blueprint(task_blueprint: TaskBlueprint) -> (dict, SubtaskTemplate): + """ + Create a valid observation subtask specification ('observationcontrol schema' SubtaskTemplate schema) based on the task_blueprint's settings + """ + + # check if task_blueprint has an observation-like specification + if task_blueprint.specifications_template.name.lower() not in ['observation schema', 'calibrator schema']: + raise SubtaskCreationException("Cannot create observation subtask specifications from task_blueprint id=%s with template name='%s'" % ( + task_blueprint.id, task_blueprint.specifications_template.name)) + + # start with an observation subtask specification with all the defaults and the right structure according to the schema + subtask_template = SubtaskTemplate.objects.get(name='observationcontrol schema') + subtask_spec = get_default_json_object_for_schema(subtask_template.schema) + + # wipe the default pointings, these should come from the task_spec + subtask_spec['stations']['analog_pointing'] = {} + subtask_spec['stations']['digital_pointings'] = [] + + # now go over the settings in the task_spec and 'copy'/'convert' them to the subtask_spec + task_spec = task_blueprint.specifications_doc + + # The calibrator has a minimal calibration-specific specification subset. + # The rest of its specs are 'shared' with the target observation. + # So... copy the calibrator specs first, then loop over the shared target/calibrator specs... 
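A hypothetical illustration of that sharing, with made-up values, before the actual branch below: the calibrator task itself only carries its calibration-specific keys, while station, filter and correlator settings are taken over from the related target observation.

# Illustrative (hypothetical) specification docs; the real ones come from the task templates:
calibrator_task_spec = {"autoselect": False,
                        "pointing": {"direction_type": "J2000", "angle1": 0.43, "angle2": 0.58},
                        "duration": 600}
target_task_spec = {"antenna_set": "HBA_DUAL_INNER",
                    "filter": "HBA_110_190",
                    "stations": ["CS001", "CS002"]}
# The calibrator branch takes the analog/digital pointings from calibrator_task_spec and then
# rebinds task_spec to target_task_spec, so antenna_set/filter/stations follow the target.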
+ if 'calibrator' in task_blueprint.specifications_template.name.lower(): + # Calibrator requires related Target Task Observation for some specifications + target_task_blueprint = get_related_target_observation_task_blueprint(task_blueprint) + if target_task_blueprint is None: + raise SubtaskCreationException("Cannot create calibrator observation subtask specifications from task_blueprint id=%s with template name='%s' because no related target observation task_blueprint is found" % ( + task_blueprint.id, task_blueprint.specifications_template.name)) + target_task_spec = target_task_blueprint.specifications_doc + + if task_spec.get('autoselect', True): + logger.info("auto-selecting calibrator target based on elevation of target observation...") + # Get related Target Observation Task + if "tile_beam" in target_task_spec: + subtask_spec['stations']['analog_pointing'] = { + "direction_type": target_task_spec["tile_beam"]["direction_type"], + "angle1": target_task_spec["tile_beam"]["angle1"], + "angle2": target_task_spec["tile_beam"]["angle2"]} + else: + raise SubtaskCreationException("Cannot determine the pointing specification from task_blueprint " + "id=%s in auto-select mode, because the related target observation " + "task_blueprint id=%s has no tile beam pointing defined" % ( + task_blueprint.id, target_task_blueprint.id)) + else: + subtask_spec['stations']['analog_pointing'] = {"direction_type": task_spec["pointing"]["direction_type"], + "angle1": task_spec["pointing"]["angle1"], + "angle2": task_spec["pointing"]["angle2"]} + + # for the calibrator, the digital pointing is equal to the analog pointing + subtask_spec['stations']['digital_pointings'] = [ {'name': 'calibrator', # there is no name for the calibrator pointing in the task spec + 'subbands': list(range(0,488)), # there are no subbands for the calibrator pointing in the task spec + 'pointing': subtask_spec['stations']['analog_pointing'] } ] + # Use the Task Specification of the Target Observation + task_spec = target_task_spec + logger.info("Using station and correlator settings for calibrator observation task_blueprint id=%s from target observation task_blueprint id=%s", + task_blueprint.id, target_task_blueprint.id) + + subtask_spec['stations']["antenna_set"] = task_spec["antenna_set"] + subtask_spec['stations']["filter"] = task_spec["filter"] + + if "stations" in task_spec: + if "group" in task_spec["stations"][0]: + try: + # retrieve stations in group from RADB virtual instrument + station_group_name = task_spec["stations"][0]["group"] + subtask_spec['stations']['station_list'] = get_stations_in_group(station_group_name) + except Exception as e: + raise SubtaskCreationException("Could not determine stations in group '%s' for task_blueprint id=%s. 
Error: %s" % ( + station_group_name, task_blueprint.id, e)) + else: + subtask_spec['stations']['station_list'] = task_spec["stations"] + + if 'calibrator' not in task_blueprint.specifications_template.name.lower(): + # copy/convert the analog/digital_pointings only for non-calibrator observations (the calibrator has its own pointing) + for sap in task_spec.get("SAPs", []): + subtask_spec['stations']['digital_pointings'].append( + {"name": sap["name"], + "pointing": {"direction_type": sap["digital_pointing"]["direction_type"], + "angle1": sap["digital_pointing"]["angle1"], + "angle2": sap["digital_pointing"]["angle2"]}, + "subbands": sap["subbands"] + }) + + if "tile_beam" in task_spec: + subtask_spec['stations']['analog_pointing'] = { "direction_type": task_spec["tile_beam"]["direction_type"], + "angle1": task_spec["tile_beam"]["angle1"], + "angle2": task_spec["tile_beam"]["angle2"] } + + if "correlator" in task_spec: + corr = CorrelatorSettings() + corr.nrChannelsPerSubband = task_spec["correlator"]["channels_per_subband"] + corr.integrationTime = task_spec["correlator"]["integration_time"] + calculator = BlockSize(constraints=BlockConstraints(correlatorSettings=corr)) + subtask_spec["COBALT"]["correlator"]["blocks_per_integration"] = calculator.nrBlocks + subtask_spec["COBALT"]["correlator"]["integrations_per_block"] = calculator.nrSubblocks + + # make sure that the subtask_spec is valid according to the schema + validate_json_against_schema(subtask_spec, subtask_template.schema) + + return subtask_spec, subtask_template + + +def get_stations_in_group(station_group_name: str) -> []: + '''Get a list of station names in the given station_group. + A lookup is performed in the RADB, in the virtual instrument table''' + with RADBRPC.create() as radbrpc: + resource_group_memberships = radbrpc.getResourceGroupMemberships()['groups'] + station_resource_group = next(rg for rg in resource_group_memberships.values() + if (rg['resource_group_type'] == 'station_group' or rg['resource_group_type'] == 'virtual') and rg['resource_group_name'] == station_group_name) + station_names = set(resource_group_memberships[rg_id]['resource_group_name'] for rg_id in station_resource_group['child_ids'] + if resource_group_memberships[rg_id]['resource_group_type'] == 'station') + + # HACK, RS408 should be removed from the RADB + if 'RS408' in station_names: + station_names.remove('RS408') + + return sorted(list(station_names)) + + +def get_related_target_observation_task_blueprint(calibrator_task_blueprint: TaskBlueprint) -> TaskBlueprint: + """ + get the related target observation task_blueprint for the given calibrator task_blueprint + if nothing found return None + """ + if 'calibrator' not in calibrator_task_blueprint.specifications_template.name.lower(): + raise ValueError("Cannot get a related target observation task_blueprint for non-calibrator task_blueprint id=%s template_name='%s'", + calibrator_task_blueprint.id, calibrator_task_blueprint.specifications_template.name) + + try: + return next(relation.second for relation in TaskSchedulingRelationBlueprint.objects.filter(first=calibrator_task_blueprint).all() + if relation.second is not None and relation.second.specifications_template.name.lower() == 'observation schema') + except StopIteration: + try: + return next(relation.first for relation in TaskSchedulingRelationBlueprint.objects.filter(second=calibrator_task_blueprint).all() + if relation.first is not None and relation.first.specifications_template.name.lower() == 'observation schema') + except 
StopIteration: + logger.info("No related target observation task_blueprint found for calibrator observation task_blueprint id=%d", calibrator_task_blueprint.id) + + return None + + def create_observation_control_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> Subtask: """ Create an observation control subtask . @@ -53,27 +209,7 @@ def create_observation_control_subtask_from_task_blueprint(task_blueprint: TaskB check_prerequities_for_subtask_creation(task_blueprint) # step 1: create subtask in defining state - subtask_template = SubtaskTemplate.objects.get(name='observationcontrol schema') - # This is some 'extra' specification to add to subtask ... where should it comes from, - # currently not defined in task ? - extra_specifications_doc = { - "stations": {"station_list": ["RS106", "RS205"], - "antenna_set": "HBA_DUAL_INNER", - "filter": "HBA_110_190", - "analog_pointing": {"direction_type": "J2000", - "angle1": 0.4262457643630986, - "angle2": 0.5787463318245085}, - "digital_pointings": [{"name": "3C48", - "pointing": {"direction_type": "J2000", - "angle1": 0.4262457643630986, - "angle1": 0.4262457643630986, - "angle2": 0.5787463318245085}, - "subbands": list(range(0, 244)) - }] - } - } - - specifications_doc = add_defaults_to_json_object_for_schema(extra_specifications_doc, subtask_template.schema) + specifications_doc, subtask_template = create_observation_subtask_specifications_from_observation_task_blueprint(task_blueprint) cluster_name = task_blueprint.specifications_doc.get("storage_cluster", "CEP4") subtask_data = { "start_time": None, "stop_time": None, @@ -125,7 +261,7 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask) raise ValueError("Cannot create %s subtask for subtask id=%d because it is not DEFINED" % ( SubtaskType.Choices.QA_FILES.value, observation_subtask.pk)) - obs_task_spec = observation_subtask.task_blueprint.specifications_doc + obs_task_spec = get_observation_task_specification_with_check_for_calibrator(observation_subtask) obs_task_qafile_spec = obs_task_spec.get("QA", {}).get("file_conversion", {}) if not obs_task_qafile_spec.get("enabled", False): @@ -166,13 +302,15 @@ def create_qafile_subtask_from_observation_subtask(observation_subtask: Subtask) # done, now return the subtask, and allow the system to wait for the predecessors to be finished before we schedule this qa_file_subtask return qafile_subtask + def create_qaplots_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> Subtask: + qafile_subtasks = [st for st in task_blueprint.subtasks.all() if st.specifications_template.type.value == SubtaskType.Choices.QA_FILES.value] if qafile_subtasks: qafile_subtask = qafile_subtasks[0] # TODO: decide what to do when there are multiple qafile subtasks? return create_qaplots_subtask_from_qafile_subtask(qafile_subtask) else: - raise SubtaskCreationException('Cannot create QA plotting subtask for task id=%s because QA file conversion subtask exists.' % (task_blueprint.pk, )) + raise SubtaskCreationException('Cannot create QA plotting subtask for task id=%s because no predecessor QA file conversion subtask exists.' 
% (task_blueprint.pk, )) def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subtask: @@ -188,7 +326,7 @@ def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subta SubtaskType.Choices.QA_PLOTS.value, qafile_subtask.pk, qafile_subtask.specifications_template.type, SubtaskType.Choices.QA_FILES.value)) - obs_task_spec = qafile_subtask.task_blueprint.specifications_doc + obs_task_spec = get_observation_task_specification_with_check_for_calibrator(qafile_subtask) obs_task_qaplots_spec = obs_task_spec.get("QA", {}).get("plots", {}) if not obs_task_qaplots_spec.get("enabled", False): @@ -229,6 +367,7 @@ def create_qaplots_subtask_from_qafile_subtask(qafile_subtask: Subtask) -> Subta # done, now return the subtask, and allow the system to wait for the predecessors to be finished before we schedule this qaplots_subtask return qaplots_subtask + def create_preprocessing_subtask_from_task_blueprint(task_blueprint: TaskBlueprint) -> Subtask: ''' Create a subtask to for the preprocessing pipeline. This method implements "Instantiate subtasks" step from the "Specification Flow" @@ -236,16 +375,18 @@ def create_preprocessing_subtask_from_task_blueprint(task_blueprint: TaskBluepri ''' # step 0: check pre-requisites check_prerequities_for_subtask_creation(task_blueprint) - # TODO: go more elegant lookup of predecessor observation task - observation_predecessor_tasks = [t for t in task_blueprint.predecessors.all() if any(st for st in t.subtasks.all() if st.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value)] + observation_predecessor_tasks = [t for t in task_blueprint.predecessors.all() if any(st for st in t.subtasks.all() + if st.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value)] if not observation_predecessor_tasks: - raise SubtaskCreationException("Cannot create a subtask for task_blueprint id=%s because it is not connected to an observation predecessor (sub)task." % task_blueprint.pk) + raise SubtaskCreationException("Cannot create a subtask for task_blueprint id=%s because it is not connected " + "to an observation predecessor (sub)task." % task_blueprint.pk) # step 1: create subtask in defining state, with filled-in subtask_template subtask_template = SubtaskTemplate.objects.get(name='pipelinecontrol schema') default_subtask_specs = get_default_json_object_for_schema(subtask_template.schema) subtask_specs = _generate_subtask_specs_from_preprocessing_task_specs(task_blueprint.specifications_doc, default_subtask_specs) + cluster_name = task_blueprint.specifications_doc.get("storage_cluster", "CEP4") subtask_data = { "start_time": None, "stop_time": None, "state": SubtaskState.objects.get(value=SubtaskState.Choices.DEFINING.value), @@ -254,7 +395,7 @@ def create_preprocessing_subtask_from_task_blueprint(task_blueprint: TaskBluepri "specifications_doc": subtask_specs, "priority": 1, "schedule_method": ScheduleMethod.objects.get(value=ScheduleMethod.Choices.DYNAMIC.value), - "cluster": Cluster.objects.get(name="CEP4") } + "cluster": Cluster.objects.get(name=cluster_name) } subtask = Subtask.objects.create(**subtask_data) # step 2: create and link subtask input/output @@ -298,7 +439,8 @@ def schedule_subtask(subtask: Subtask) -> Subtask: if subtask.specifications_template.type.value == SubtaskType.Choices.QA_PLOTS.value: return schedule_qaplots_subtask(subtask) - raise SubtaskSchedulingException("Cannot schedule subtask id=%d because there is no schedule-method known for this subtasktype=%s." 
% (subtask.pk, subtask.specifications_template.type.value)) + raise SubtaskSchedulingException("Cannot schedule subtask id=%d because there is no schedule-method known for this subtasktype=%s." % + (subtask.pk, subtask.specifications_template.type.value)) except Exception as e: try: # set the subtask to state 'ERROR'... @@ -317,7 +459,8 @@ for predecessor in subtask.predecessors.all(): if predecessor.state.value != SubtaskState.Choices.FINISHED.value: - raise SubtaskSchedulingException("Cannot schedule subtask id=%d because its predecessor id=%s in not FINISHED but state=%s" % (subtask.pk, predecessor.pk, predecessor.state.value)) + raise SubtaskSchedulingException("Cannot schedule subtask id=%d because its predecessor id=%s is not FINISHED but state=%s" + % (subtask.pk, predecessor.pk, predecessor.state.value)) # check if settings allow scheduling observations setting = Setting.objects.get(name='allow_scheduling_observations') @@ -327,6 +470,38 @@ return True +def _assign_resources(subtask: Subtask): + if subtask.state.value != SubtaskState.Choices.SCHEDULING.value: + raise SubtaskSchedulingException("Cannot assign resources for subtask id=%d because it is not in SCHEDULING state. " + "Current state=%s" % (subtask.pk, subtask.state.value)) + + def create_ra_specification(_subtask): + parset_dict = convert_to_parset_dict(_subtask) + return { 'tmss_id': _subtask.id, + 'task_type': _subtask.specifications_template.type.value.lower(), + 'task_subtype': parset_dict.get("Observation.processSubtype","").lower(), + 'status': 'prescheduled', + 'starttime': _subtask.start_time, + 'endtime': _subtask.stop_time, + 'cluster': _subtask.cluster.name, + 'station_requirements': [], + 'specification': parset_dict } + + ra_spec = create_ra_specification(subtask) + ra_spec['predecessors'] = [] + for pred in subtask.predecessors.all(): + try: + ra_spec['predecessors'].append(create_ra_specification(pred)) + except: + pass + + with RARPC.create() as rarpc: + assigned = rarpc.do_assignment(ra_spec) + + if not assigned: + raise SubtaskSchedulingException("Cannot schedule subtask id=%d because the required resources are not (fully) available." % (subtask.pk, )) + + def schedule_qafile_subtask(qafile_subtask: Subtask): ''' Schedule the given qafile_subtask (which converts the observation output to a QA h5 file) This method should typically be called upon the event of the observation_subtask being finished. @@ -424,6 +599,63 @@ return qaplots_subtask +# todo: this can probably go when we switch to the new start time calculation in the model properties (which is based on this logic) +def get_previous_related_task_blueprint_with_time_offset(task_blueprint): + """ + Retrieve the previous related blueprint task object (if any) + if nothing found return None, 0. 
+ :param task_blueprint: + :return: previous_related_task_blueprint, + time_offset (in seconds) + """ + logger.info("get_previous_related_task_blueprint_with_time_offset %s (id=%s)", task_blueprint.name, task_blueprint.pk) + previous_related_task_blueprint = None + time_offset = 0 + + scheduling_relations = list(task_blueprint.first_to_connect.all()) + list(task_blueprint.second_to_connect.all()) + for scheduling_relation in scheduling_relations: + if scheduling_relation.first.id == task_blueprint.id and scheduling_relation.placement.value == "after": + previous_related_task_blueprint = TaskBlueprint.objects.get(id=scheduling_relation.second.id) + time_offset = scheduling_relation.time_offset + + if scheduling_relation.second.id == task_blueprint.id and scheduling_relation.placement.value == "before": + previous_related_task_blueprint = TaskBlueprint.objects.get(id=scheduling_relation.first.id) + time_offset = scheduling_relation.time_offset + + return previous_related_task_blueprint, time_offset + +# todo: maybe this can now be replaced by subtask.relative_start_time +def calculate_start_time(observation_subtask: Subtask): + """ + Calculate the start time of an observation subtask. It should calculate the start time in case of a 'C-T-C train' + The start time of an observation depends on the start_time+duration and offset time of the previous observation + and so its scheduling relations should be known. + If there is no previous observation, the 'default' start time is two minutes from now + This is for demo purposes and will be replaced by dynamic scheduling in the future + Note that the method is not yet robust when the previous start time is unknown. Parallel observations are + not supported yet either + :param observation_subtask: + :return: start_time (utc time) + """ + previous_related_task_blueprint, time_offset = get_previous_related_task_blueprint_with_time_offset(observation_subtask.task_blueprint) + if previous_related_task_blueprint is None: + # This is the first observation so take start time 2 minutes from now + now = datetime.utcnow() + next_start_time = now + timedelta(minutes=+2, seconds=-now.second, microseconds=-now.microsecond) + else: + # Get the duration of last/previous observation + duration_in_sec = previous_related_task_blueprint.specifications_doc["duration"] + logger.info("Duration of previous observation '%s' (id=%s) is %d seconds", + previous_related_task_blueprint.name, previous_related_task_blueprint.pk, duration_in_sec) + # Get the previous observation subtask; there should be exactly one + lst_previous_subtasks_obs = [st for st in previous_related_task_blueprint.subtasks.all() if st.specifications_template.type.value == SubtaskType.Choices.OBSERVATION.value] + previous_subtask_obs = lst_previous_subtasks_obs[0] + logger.info("The previous observation subtask is id=%s", previous_subtask_obs.pk) + if previous_subtask_obs.start_time is None: + logger.info("Oops, the previous start time is unknown, so the next start time cannot be calculated reliably") + next_start_time = previous_subtask_obs.start_time + timedelta(seconds=duration_in_sec+time_offset) + return next_start_time + def schedule_observation_subtask(observation_subtask: Subtask): ''' Schedule the given observation_subtask @@ -446,23 +678,21 @@ # step 1a: check start/stop times if observation_subtask.start_time is None: - now = datetime.utcnow() - next_start_time = now + timedelta(minutes=+2, seconds=-now.second, microseconds=-now.microsecond) + next_start_time = 
calculate_start_time(observation_subtask) logger.info("observation id=%s has no starttime. assigned default: %s", observation_subtask.pk, formatDatetime(next_start_time)) observation_subtask.start_time = next_start_time if observation_subtask.stop_time is None: - stop_time = observation_subtask.start_time + timedelta(minutes=+1) + duration_in_sec = observation_subtask.task_blueprint.specifications_doc["duration"] + logger.info("Duration of observation id=%s is %d seconds", observation_subtask.pk, duration_in_sec) + stop_time = observation_subtask.start_time + timedelta(seconds=duration_in_sec) logger.info("observation id=%s has no stop_time. assigned default: %s", observation_subtask.pk, formatDatetime(stop_time)) observation_subtask.stop_time = stop_time # step 2: define input dataproducts # TODO: are there any observations that take input dataproducts? - # step 3: resource assigner - # TODO: implement. Can be skipped for now. - - # step 4: create output dataproducts, and link these to the output + # step 3: create output dataproducts, and link these to the output specifications_doc = observation_subtask.specifications_doc dataproduct_specifications_template = DataproductSpecificationsTemplate.objects.get(name="SAP") # todo: should this be derived from the task relation specification template? dataproduct_feedback_template = DataproductFeedbackTemplate.objects.get(name="Empty") @@ -480,6 +710,9 @@ def schedule_observation_subtask(observation_subtask: Subtask): feedback_doc="", feedback_template=dataproduct_feedback_template) + # step 4: resource assigner (if possible) + _assign_resources(observation_subtask) + # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it) observation_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value) observation_subtask.save() @@ -505,7 +738,22 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask): pipeline_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULING.value) pipeline_subtask.save() + # step 1a: check start/stop times + # not very relevant for tmss/dynamic scheduling, but the resource assigner demands it. + if pipeline_subtask.start_time is None: + now = datetime.utcnow() + logger.info("pipeline id=%s has no starttime. assigned default: %s", pipeline_subtask.pk, formatDatetime(now)) + pipeline_subtask.start_time = now + + if pipeline_subtask.stop_time is None: + stop_time = pipeline_subtask.start_time + timedelta(hours=+1) + logger.info("pipeline id=%s has no stop_time. assigned default: %s", pipeline_subtask.pk, formatDatetime(stop_time)) + pipeline_subtask.stop_time = stop_time + # step 2: link input dataproducts + if pipeline_subtask.inputs.count() == 0: + raise SubtaskSchedulingException("Cannot schedule subtask id=%d type=%s because it has no input(s)" % (pipeline_subtask.pk, + pipeline_subtask.specifications_template.type)) # TODO: use existing and reasonable selection and specification templates for output when we have those, for now, use "Empty" dataproduct_specifications_template = DataproductSpecificationsTemplate.objects.get(name="Empty") @@ -522,10 +770,7 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask): # select subtask output the new dataproducts will be linked to pipeline_subtask_output = pipeline_subtask.outputs.first() # TODO: if we have several, how to map input to output? - # step 3: resource assigner - # TODO: implement. Can be skipped for now. 
- - # step 4: create output dataproducts, and link these to the output + # step 3: create output dataproducts, and link these to the output # TODO: create them from the spec, instead of "copying" the input filename output_dps = [] for input_dp in pipeline_subtask_input.dataproducts.all(): @@ -547,6 +792,9 @@ def schedule_pipeline_subtask(pipeline_subtask: Subtask): pipeline_subtask_output.dataproducts.set(output_dps) + # step 4: resource assigner (if possible) + _assign_resources(pipeline_subtask) + # step 5: set state to SCHEDULED (resulting in the qaservice to pick this subtask up and run it) pipeline_subtask.state = SubtaskState.objects.get(value=SubtaskState.Choices.SCHEDULED.value) pipeline_subtask.save() @@ -661,3 +909,23 @@ def specifications_doc_meets_selection_doc(specifications_doc, selection_doc): logger.debug("specs %s matches selection %s: %s" % (specifications_doc, selection_doc, meets_criteria)) return meets_criteria + + +def get_observation_task_specification_with_check_for_calibrator(subtask): + """ + Retrieve the observation task blueprint specifications_doc from the given subtask object + If the Task is a calibrator then the related Target Observation specification should be returned + :param: subtask object + :return: task_spec: the specifications_doc of the blue print task which is allways a target observation + """ + if 'calibrator' in subtask.task_blueprint.specifications_template.name.lower(): + # Calibrator requires related Target Task Observation for some specifications + target_task_blueprint = get_related_target_observation_task_blueprint(subtask.task_blueprint) + if target_task_blueprint is None: + raise SubtaskCreationException("Cannot retrieve specifications for subtask id=%d because no related target observation is found " % subtask.pk) + task_spec = target_task_blueprint.specifications_doc + logger.info("Using specifications for calibrator observation (id=%s) from target observation task_blueprint id=%s", + subtask.task_blueprint.id, target_task_blueprint.id) + else: + task_spec = subtask.task_blueprint.specifications_doc + return task_spec diff --git a/SAS/TMSS/src/tmss/tmssapp/tasks.py b/SAS/TMSS/src/tmss/tmssapp/tasks.py index 2da600a24184f98c8158ee97768f0825e62c319c..67b821e6d26b061032862045684e3d216b3c623e 100644 --- a/SAS/TMSS/src/tmss/tmssapp/tasks.py +++ b/SAS/TMSS/src/tmss/tmssapp/tasks.py @@ -1,5 +1,6 @@ +from lofar.sas.tmss.tmss.exceptions import * from lofar.sas.tmss.tmss.tmssapp import models -from lofar.sas.tmss.tmss.tmssapp.models.specification import TaskBlueprint, SchedulingUnitBlueprint, TaskDraft, Placement +from lofar.sas.tmss.tmss.tmssapp.models.specification import TaskBlueprint, SchedulingUnitBlueprint, TaskDraft, SchedulingRelationPlacement from lofar.sas.tmss.tmss.tmssapp.subtasks import create_and_schedule_subtasks_from_task_blueprint from lofar.sas.tmss.tmss.tmssapp.models.specification import TaskBlueprint, SchedulingUnitBlueprint from lofar.sas.tmss.tmss.tmssapp.subtasks import create_and_schedule_subtasks_from_task_blueprint, \ @@ -15,9 +16,8 @@ def create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_ Create a SchedulingUnitBlueprint from the SchedulingUnitDraft :raises Exception if instantiate fails. 
""" - logger.debug("create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft.id=%s)", scheduling_unit_draft.pk) + logger.debug("create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft.id=%s name='%s')", scheduling_unit_draft.pk, scheduling_unit_draft.name) - # TODO: copy/fill-in the properties from the draft to the blueprint scheduling_unit_blueprint = SchedulingUnitBlueprint.objects.create( name="%s (SchedulingUnitBlueprint)" % (scheduling_unit_draft.name,), description="%s (SchedulingUnitBlueprint)" % (scheduling_unit_draft.description or "<no description>",), @@ -26,76 +26,98 @@ def create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_ draft=scheduling_unit_draft, requirements_template=scheduling_unit_draft.requirements_template) - logger.info("create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft.id=%s) created scheduling_unit_blueprint id=%s", scheduling_unit_draft.pk, scheduling_unit_blueprint.pk) + logger.info("create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft.id=%s name='%s') created scheduling_unit_blueprint id=%s name='%s'", + scheduling_unit_draft.pk, scheduling_unit_draft.name, scheduling_unit_blueprint.pk, scheduling_unit_blueprint.name) return scheduling_unit_blueprint -def create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> [TaskDraft]: +def create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> models.SchedulingUnitDraft: """ - Generic create-method for tasks draft. Calls the appropriate create method based on the scheduling_unit_blueprint - specifications_template name. + Create all defined task_drafts in the scheduling_unit_draft's requirements_doc, connect them, and return the updated scheduling_unit_draft. 
""" - list_created_task_object = [] - - try: - list_tasks = scheduling_unit_draft.requirements_doc["tasks"] - logger.info("create_task_drafts_from_scheduling_unit_draft with scheduling_unit_draft.id=%s, nbr_tasks=%d" % - (scheduling_unit_draft.pk, len(list_tasks))) - except KeyError: - logger.info("create_task_drafts_from_scheduling_unit_draft -> NO tasks to process from requirements_doc") - list_tasks = [] - - for task in list_tasks: - task_template_name = task["specifications_template"] - logger.info("task name is '%s', task_template_name '%s'" % (task["name"], task_template_name)) + logger.debug("create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft.id=%s, name='%s') ...", scheduling_unit_draft.pk, scheduling_unit_draft.name) + + if len(scheduling_unit_draft.requirements_doc.get("tasks",[])) == 0: + raise BlueprintCreationException("create_task_drafts_from_scheduling_unit_draft: scheduling_unit_draft.id=%s has no tasks defined in its requirements_doc" % (scheduling_unit_draft.pk,)) + + for task_definition in scheduling_unit_draft.requirements_doc["tasks"]: + task_template_name = task_definition["specifications_template"] task_template = models.TaskTemplate.objects.get(name=task_template_name) - task_draft = models.TaskDraft.objects.create( - name=task["name"], - description=task["description"], - tags=task["tags"], - specifications_doc=task["specifications_doc"], - copy_reason=models.CopyReason.objects.get(value='template'), - copies=None, - scheduling_unit_draft=scheduling_unit_draft, - specifications_template=task_template) - logger.info("task draft with id %s created successfully" % task_draft.id) - list_created_task_object.append(task_draft) - - # Now create task relation - try: - list_task_relations = scheduling_unit_draft.requirements_doc["task_relations"] - logger.info("create_task_drafts_from_scheduling_unit_draft, nbr of task relations=%d" % len(list_task_relations)) - except KeyError: - logger.info("create_task_drafts_from_scheduling_unit_draft -> NO task relations to process from requirements_doc") - list_task_relations = [] - for task_relation in list_task_relations: - task_rel_obj = models.TaskRelationDraft.objects.create( - tags=task_relation["tags"], - selection_doc=task_relation["selection_doc"], - dataformat=models.Dataformat.objects.get(value=task_relation["dataformat"]), - producer=models.TaskDraft.objects.get(name=task_relation["producer"]), - consumer=models.TaskDraft.objects.get(name=task_relation["consumer"]), - input_role=models.TaskConnectorType.objects.get(role=task_relation["input"]["role"]), - output_role=models.TaskConnectorType.objects.get(role=task_relation["output"]["role"]), - selection_template=models.TaskRelationSelectionTemplate.objects.get(name=task_relation["selection_template"])) - logger.info("task relation draft object with id %s created successfully" % task_rel_obj.id) + + if scheduling_unit_draft.task_drafts.filter(name=task_definition["name"], specifications_template=task_template).count() > 0: + logger.debug("skipping creation of task draft because it is already in the scheduling_unit... task_name='%s', task_template_name='%s'", task_definition["name"], task_template_name) + continue + + logger.debug("creating task draft... 
task_name='%s', task_template_name='%s'", task_definition["name"], task_template_name) + + task_draft = models.TaskDraft.objects.create(name=task_definition["name"], + description=task_definition.get("description",""), + tags=task_definition.get("tags",[]), + specifications_doc=task_definition["specifications_doc"], + copy_reason=models.CopyReason.objects.get(value='template'), + copies=None, + scheduling_unit_draft=scheduling_unit_draft, + specifications_template=task_template) + + logger.info("created task draft id=%s task_name='%s', task_template_name='%s'", task_draft.pk, task_definition["name"], task_template_name) + + # Now create task relations + for task_relation_definition in scheduling_unit_draft.requirements_doc["task_relations"]: + producer_task_draft = scheduling_unit_draft.task_drafts.get(name=task_relation_definition["producer"]) + consumer_task_draft = scheduling_unit_draft.task_drafts.get(name=task_relation_definition["consumer"]) + dataformat = models.Dataformat.objects.get(value=task_relation_definition["dataformat"]) + input_role = models.TaskConnectorType.objects.get(role=task_relation_definition["input"]["role"], datatype=task_relation_definition["input"]["datatype"]) + output_role = models.TaskConnectorType.objects.get(role=task_relation_definition["output"]["role"], datatype=task_relation_definition["output"]["datatype"]) + selection_template = models.TaskRelationSelectionTemplate.objects.get(name=task_relation_definition["selection_template"]) + + if models.TaskRelationDraft.objects.filter(producer=producer_task_draft, + consumer=consumer_task_draft, + dataformat=dataformat, + input_role=input_role, + output_role=output_role, + selection_template=selection_template, + selection_doc=task_relation_definition["selection_doc"]).count() > 0: + logger.debug("skipping creation of task_relation between task draft '%s' and '%s' because it is already in the scheduling_unit...", task_relation_definition["producer"], task_relation_definition["consumer"]) + continue + + task_relation = models.TaskRelationDraft.objects.create(tags=task_relation_definition.get("tags",[]), + selection_doc=task_relation_definition["selection_doc"], + dataformat=dataformat, + producer=producer_task_draft, + consumer=consumer_task_draft, + input_role=input_role, + output_role=output_role, + selection_template=selection_template) + logger.info("created task_relation id=%s between task draft id=%s name='%s' and id=%s name='%s'", + task_relation.pk, producer_task_draft.id, producer_task_draft.name, consumer_task_draft.id, consumer_task_draft.name) + # task_scheduling_relation - try: - list_task_scheduling_relations = scheduling_unit_draft.requirements_doc["task_scheduling_relations"] - logger.info("create_task_drafts_from_scheduling_unit_draft, nbr of task scheduling relations=%d" % len(list_task_scheduling_relations)) - except KeyError: - logger.info("create_task_drafts_from_scheduling_unit_draft -> NO tasks scheduling relations to process from requirements_doc") - list_task_scheduling_relations = [] - for task_scheduling_relation in list_task_scheduling_relations: - task_rel_sch_obj = models.TaskSchedulingRelationDraft.objects.create( - placement=models.Placement.objects.get(value=task_scheduling_relation["placement"]), - time_offset=task_scheduling_relation["time_offset"], - first=models.TaskDraft.objects.get(name=task_scheduling_relation["first"]), - second=models.TaskDraft.objects.get(name=task_scheduling_relation["second"])) - logger.info("task scheduling relation draft object with id %s created 
successfully" % task_rel_sch_obj.id) - - return list_created_task_object + for task_scheduling_relation_definition in scheduling_unit_draft.requirements_doc["task_scheduling_relations"]: + placement = models.SchedulingRelationPlacement.objects.get(value=task_scheduling_relation_definition["placement"]) + time_offset = task_scheduling_relation_definition["time_offset"] + first_task_draft = scheduling_unit_draft.task_drafts.get(name=task_scheduling_relation_definition["first"]) + second_task_draft = scheduling_unit_draft.task_drafts.get(name=task_scheduling_relation_definition["second"]) + + if models.TaskSchedulingRelationDraft.objects.filter(placement=placement, + time_offset=time_offset, + first=first_task_draft, + second=second_task_draft).count() > 0: + logger.debug("skipping creation of task_scheduling_relation between task draft '%s' and '%s' because it is already in the scheduling_unit...", + task_scheduling_relation_definition["first"], task_scheduling_relation_definition["second"]) + continue + + task_scheduling_relation = models.TaskSchedulingRelationDraft.objects.create(placement=placement, + time_offset=time_offset, + first=first_task_draft, + second=second_task_draft) + logger.info("created task_scheduling_relation id=%s between task draft id=%s name='%s' and id=%s name='%s", + task_scheduling_relation.pk, first_task_draft.id, first_task_draft.name, second_task_draft.id, second_task_draft.name) + + logger.info("create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft.id=%s, name='%s') ... done", scheduling_unit_draft.pk, scheduling_unit_draft.name) + + scheduling_unit_draft.refresh_from_db() + return scheduling_unit_draft def create_task_blueprint_from_task_draft(task_draft: models.TaskDraft) -> models.TaskBlueprint: @@ -103,31 +125,28 @@ def create_task_blueprint_from_task_draft(task_draft: models.TaskDraft) -> model Create a task_blueprint from the task_draft :raises Exception if instantiate fails. 
""" - logger.debug("Create Task Blueprint from Task Draft (id=%s)", task_draft.pk) + logger.debug("creating task_blueprint from task_draft id=%s", task_draft.pk) # get or create a scheduling_unit_blueprint from the scheduling_unit_draft scheduling_unit_blueprint = task_draft.scheduling_unit_draft.scheduling_unit_blueprints.last() if scheduling_unit_blueprint is None: scheduling_unit_blueprint = create_scheduling_unit_blueprint_from_scheduling_unit_draft(task_draft.scheduling_unit_draft) - description_str = "Task Blueprint " + task_draft.description - name_str = "Task Blueprint of " + task_draft.name task_blueprint = TaskBlueprint.objects.create( - description=description_str, - name=name_str, + description=task_draft.description, + name=task_draft.name, do_cancel=False, draft=task_draft, scheduling_unit_blueprint=scheduling_unit_blueprint, specifications_doc=task_draft.specifications_doc, - specifications_template=task_draft.specifications_template - ) + specifications_template=task_draft.specifications_template) - logger.info("Task Blueprint (id=%s) created from Task Draft (id=%s) created task_blueprint id=%s", task_blueprint.pk, task_draft.pk) + logger.info("created task_blueprint id=%s from task_draft id=%s", task_blueprint.pk, task_draft.pk) # now that we have a task_blueprint, its time to refresh the task_draft so we get the non-cached fields task_draft.refresh_from_db() - # loop over consumers/producers, and 'copy'' the TaskRelationBlueprint from the TaskRelationDraft + # loop over consumers/producers, and 'copy' the TaskRelationBlueprint from the TaskRelationDraft # this is only possible if both 'ends' of the task_relation are converted to a TaskBlueprint # so, when converting two TaskDrafts (for example an observation and a pipeline), then for the conversion # of the first TaskDraft->TaskBlueprint no relation is setup, @@ -149,13 +168,37 @@ def create_task_blueprint_from_task_draft(task_draft: models.TaskDraft) -> model selection_doc=task_relation_draft.selection_doc, selection_template=task_relation_draft.selection_template, dataformat=task_relation_draft.dataformat) - logger.info("Task Blueprint (id=%s) connected to Task Blueprint (id=%s) via Task Relation Blueprint (id=%s)", - task_blueprint.pk, producing_task_blueprint.pk, task_relation_blueprint.pk) + logger.info("created task_relation_blueprint id=%s which connects task_blueprints producer_id=%s and consumer_id=%s", + task_relation_blueprint.pk, producing_task_blueprint.pk, consuming_task_blueprint.pk,) + + # Do the same 'trick' for Task Scheduling Relation Draft to Blueprint + task_draft_scheduling_relations = list(task_draft.first_to_connect.all()) + list(task_draft.second_to_connect.all()) + for task_scheduling_relation_draft in task_draft_scheduling_relations: + for first_task_blueprint in task_scheduling_relation_draft.first.task_blueprints.all(): + for second_task_blueprint in task_scheduling_relation_draft.second.task_blueprints.all(): + try: + # do nothing if task_scheduling_relation_blueprint already exists... + models.TaskSchedulingRelationBlueprint.objects.get(first_id=first_task_blueprint.id, + second_id=second_task_blueprint.id) + except models.TaskSchedulingRelationBlueprint.DoesNotExist: + # ...'else' create it. 
+                        task_scheduling_relation_blueprint = models.TaskSchedulingRelationBlueprint.objects.create(first=first_task_blueprint,
+                                                                                                                   second=second_task_blueprint,
+                                                                                                                   time_offset=task_scheduling_relation_draft.time_offset,
+                                                                                                                   placement=task_scheduling_relation_draft.placement)
+                        logger.info("created task_scheduling_relation_blueprint id=%s which connects task_blueprints first_id=%s and second_id=%s, placement=%s time_offset=%s[sec]",
+                                    task_scheduling_relation_blueprint.pk, first_task_blueprint.pk, second_task_blueprint.pk, task_scheduling_relation_draft.placement, task_scheduling_relation_draft.time_offset)
+
     return task_blueprint


 def create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> models.SchedulingUnitBlueprint:
     '''Convenience method: Create the scheduling_unit_blueprint, then create its child task_blueprint(s), then create the task_blueprint's subtasks'''
+
+    # make sure the task_drafts are created first, so the task_blueprints can be created from the scheduling_unit_blueprint in the next step
+    # already known task_drafts and/or relations are skipped automagically
+    create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft)
+
     scheduling_unit_blueprint = create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft)
     return create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint)
@@ -181,13 +224,19 @@ def create_task_blueprint_and_subtasks_and_schedule_subtasks_from_task_draft(tas
     task_blueprint.refresh_from_db()
     return task_blueprint

+
 def create_task_blueprints_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint:
     '''Convenience method: Create the scheduling_unit_blueprint's task_blueprint(s)'''
+
+    # make sure the task_drafts are created first, so the task_blueprints can be created from the scheduling_unit_blueprint in the next step
+    # already known task_drafts and/or relations are skipped automagically
+    create_task_drafts_from_scheduling_unit_draft(scheduling_unit_blueprint.draft)
+
     task_drafts = list(scheduling_unit_blueprint.draft.task_drafts.all())

     # sort them in 'data-flow'-order,
-    # because successors can depend on predecessors, so the first tbp's need to be subtask'd first.
-    task_drafts.sort(key=cmp_to_key(lambda tbp_a, tbp_b: -1 if tbp_a in tbp_b.predecessors else 1 if tbp_b in tbp_a.predecessors else 0))
+    # because successors can depend on predecessors, so the first taskdrafts need to be blueprinted first (see the standalone sketch below).
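A standalone toy illustration of this comparator-based ordering (plain Python, no TMSS models; the dict below stands in for the predecessors relation):

    from functools import cmp_to_key

    # 'pipeline' consumes what 'observation' produces, so it lists it as predecessor
    predecessors = {'observation': [], 'pipeline': ['observation']}
    drafts = ['pipeline', 'observation']
    drafts.sort(key=cmp_to_key(lambda a, b: -1 if a in predecessors[b]
                                            else 1 if b in predecessors[a] else 0))
    print(drafts)  # ['observation', 'pipeline'] -> producers get blueprinted first

Note that such a pairwise comparator only constrains drafts that reference each other directly; it is not a full topological sort, which suffices here because producer/consumer relations are direct.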
+ task_drafts.sort(key=cmp_to_key(lambda taskdraft_a, taskdraft_b: -1 if taskdraft_a in taskdraft_b.predecessors else 1 if taskdraft_b in taskdraft_a.predecessors else 0)) # convert task_draft(s) to task_blueprint(s) for task_draft in task_drafts: @@ -197,6 +246,7 @@ def create_task_blueprints_from_scheduling_unit_blueprint(scheduling_unit_bluepr scheduling_unit_blueprint.refresh_from_db() return scheduling_unit_blueprint + def create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint: '''Convenience method: Create the scheduling_unit_blueprint's task_blueprint(s), then create each task_blueprint's subtasks''' scheduling_unit_blueprint = create_task_blueprints_from_scheduling_unit_blueprint(scheduling_unit_blueprint) @@ -214,6 +264,7 @@ def create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(schedulin scheduling_unit_blueprint.refresh_from_db() return scheduling_unit_blueprint + def create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint: '''Convenience method: Create the scheduling_unit_blueprint's task_blueprint(s), then create the task_blueprint's subtasks, and schedule the ones that are not dependend on predecessors''' scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint) diff --git a/SAS/TMSS/src/tmss/tmssapp/views.py b/SAS/TMSS/src/tmss/tmssapp/views.py index bfb670fd87bbbd28b9addf622ef51b2a6b8c5385..4614c940953d2a277b00cf1eb0589ef6efb1edd5 100644 --- a/SAS/TMSS/src/tmss/tmssapp/views.py +++ b/SAS/TMSS/src/tmss/tmssapp/views.py @@ -25,7 +25,7 @@ def subtask_parset(request, subtask_pk:int): return HttpResponse(str(parset), content_type='text/plain') def index(request): - return render(request, os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), '../../frontend','frontend_poc/build/index.html')) + return render(request, os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), '../../frontend','tmss_webapp/build/index.html')) #return render(request, "../../../frontend/frontend_poc/build/index.html") def task_specify_observation(request, pk=None): diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py index 359342b1c9c1204773860aeb795cd46f95002b88..e1ce15a72abef5353e52cc263c75a473b66fa4b9 100644 --- a/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/scheduling.py @@ -68,22 +68,18 @@ class SubtaskTypeViewSet(LOFARViewSet): queryset = models.SubtaskType.objects.all() serializer_class = serializers.SubtaskTypeSerializer - class StationTypeViewSet(LOFARViewSet): queryset = models.StationType.objects.all() serializer_class = serializers.StationTypeSerializer - class AlgorithmViewSet(LOFARViewSet): queryset = models.Algorithm.objects.all() serializer_class = serializers.AlgorithmSerializer - class ScheduleMethodViewSet(LOFARViewSet): queryset = models.ScheduleMethod.objects.all() serializer_class = serializers.ScheduleMethodSerializer - class SubtaskTemplateFilter(filters.FilterSet): class Meta: model = models.SubtaskTemplate diff --git a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py index dd6345ddb9436664dc9e2faa295924d809674a8e..aa83ab8741ce9925bea917e6023c8af8bf8d3204 100644 --- 
a/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py +++ b/SAS/TMSS/src/tmss/tmssapp/viewsets/specification.py @@ -5,6 +5,7 @@ This file contains the viewsets (based on the elsewhere defined data models and from django.shortcuts import get_object_or_404 from django.http import JsonResponse from django.contrib.auth.models import User +from django_filters import rest_framework as filters from rest_framework.viewsets import ReadOnlyModelViewSet from rest_framework import status from rest_framework.response import Response @@ -49,20 +50,45 @@ class DefaultGeneratorTemplateViewSet(LOFARViewSet): queryset = models.DefaultGeneratorTemplate.objects.all() serializer_class = serializers.DefaultGeneratorTemplateSerializer +class SchedulingUnitTemplateFilter(filters.FilterSet): + class Meta: + model = models.SchedulingUnitTemplate + fields = { + 'name': ['exact'], + 'version': ['lt', 'gt', 'exact'] + } class SchedulingUnitTemplateViewSet(LOFARViewSet): queryset = models.SchedulingUnitTemplate.objects.all() serializer_class = serializers.SchedulingUnitTemplateSerializer + filter_class = SchedulingUnitTemplateFilter + + @swagger_auto_schema(responses={200: 'JSON object with all the defaults from the schema filled in', + 403: 'forbidden'}, + operation_description="Get a JSON object with all the defaults from the schema filled in.") + @action(methods=['get'], detail=True) + def default_specification(self, request, pk=None): + schedulingunit_template = get_object_or_404(models.SchedulingUnitTemplate, pk=pk) + spec = get_default_json_object_for_schema(schedulingunit_template.schema) + return JsonResponse(spec) class DefaultSchedulingUnitTemplateViewSet(LOFARViewSet): queryset = models.DefaultSchedulingUnitTemplate.objects.all() serializer_class = serializers.DefaultSchedulingUnitTemplateSerializer +class TaskTemplateFilter(filters.FilterSet): + class Meta: + model = models.TaskTemplate + fields = { + 'name': ['exact'], + 'version': ['lt', 'gt', 'exact'] + } class TaskTemplateViewSet(LOFARViewSet): queryset = models.TaskTemplate.objects.all() serializer_class = serializers.TaskTemplateSerializer + filter_class = TaskTemplateFilter @swagger_auto_schema(responses={200: 'The schema as a JSON object', 403: 'forbidden'}, @@ -102,6 +128,11 @@ class RoleViewSet(LOFARViewSet): serializer_class = serializers.RoleSerializer +class SchedulingRelationPlacement(LOFARViewSet): + queryset = models.SchedulingRelationPlacement.objects.all() + serializer_class = serializers.SchedulingRelationPlacementSerializer + + class DatatypeViewSet(LOFARViewSet): queryset = models.Datatype.objects.all() serializer_class = serializers.DatatypeSerializer @@ -116,9 +147,6 @@ class CopyReasonViewSet(LOFARViewSet): queryset = models.CopyReason.objects.all() serializer_class = serializers.CopyReasonSerializer -class ResourceUnitViewSet(LOFARViewSet): - queryset = models.ResourceUnit.objects.all() - serializer_class = serializers.ResourceUnitSerializer class TaskConnectorTypeViewSet(LOFARViewSet): queryset = models.TaskConnectorType.objects.all() @@ -129,7 +157,22 @@ class TaskConnectorTypeViewSet(LOFARViewSet): class CycleViewSet(LOFARViewSet): queryset = models.Cycle.objects.all() serializer_class = serializers.CycleSerializer - ordering = ['number'] + ordering = ['start'] + + +class CycleQuotaViewSet(LOFARViewSet): + queryset = models.CycleQuota.objects.all() + serializer_class = serializers.CycleQuotaSerializer + + def get_queryset(self): + queryset = models.CycleQuota.objects.all() + + # query by project + project = 
self.request.query_params.get('project', None) + if project is not None: + return queryset.filter(project=project) + + return queryset class ProjectViewSet(LOFARViewSet): @@ -137,6 +180,16 @@ class ProjectViewSet(LOFARViewSet): serializer_class = serializers.ProjectSerializer ordering = ['name'] + def get_queryset(self): + queryset = models.Project.objects.all() + + # query by cycle + cycle = self.request.query_params.get('cycle', None) + if cycle is not None: + return queryset.filter(cycles__name=cycle) + + return queryset + class ProjectNestedViewSet(LOFARNestedViewSet): queryset = models.Project.objects.all() @@ -149,9 +202,20 @@ class ProjectNestedViewSet(LOFARNestedViewSet): else: return models.Project.objects.all() + class ProjectQuotaViewSet(LOFARViewSet): queryset = models.ProjectQuota.objects.all() serializer_class = serializers.ProjectQuotaSerializer + + def get_queryset(self): + queryset = models.ProjectQuota.objects.all() + + # query by project + project = self.request.query_params.get('project', None) + if project is not None: + return queryset.filter(project=project) + + return queryset class ResourceTypeViewSet(LOFARViewSet): @@ -174,6 +238,21 @@ class SettingViewSet(LOFARViewSet): serializer_class = serializers.SettingSerializer +class QuantityViewSet(LOFARViewSet): + queryset = models.Quantity.objects.all() + serializer_class = serializers.QuantitySerializer + + +class PeriodCategoryViewSet(LOFARViewSet): + queryset = models.PeriodCategory.objects.all() + serializer_class = serializers.PeriodCategorySerializer + + +class ProjectCategoryViewSet(LOFARViewSet): + queryset = models.ProjectCategory.objects.all() + serializer_class = serializers.ProjectCategorySerializer + + class SchedulingUnitDraftViewSet(LOFARViewSet): queryset = models.SchedulingUnitDraft.objects.all() serializer_class = serializers.SchedulingUnitDraftSerializer @@ -213,12 +292,11 @@ class SchedulingUnitDraftViewSet(LOFARViewSet): return Response(serializers.SchedulingUnitBlueprintSerializer(scheduling_unit_blueprint, context={'request':request}).data, status=status.HTTP_201_CREATED, headers={'Location': scheduling_unit_blueprint_path}) - @swagger_auto_schema(responses={201: 'The Created SchedulingUnitBlueprint, see Location in Response header', 403: 'forbidden'}, operation_description="Carve this SchedulingUnitDraft and its TaskDraft(s) in stone, and make blueprint(s) out of it and create their subtask(s)") - @action(methods=['get'], detail=True, url_name="create_blueprints", name="Create Blueprints-Tree") - def create_blueprints(self, request, pk=None): + @action(methods=['get'], detail=True, url_name="create_blueprints_and_subtasks", name="Create Blueprints-Tree") + def create_blueprints_and_subtasks(self, request, pk=None): scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, pk=pk) scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) @@ -233,11 +311,11 @@ class SchedulingUnitDraftViewSet(LOFARViewSet): headers={'Location': scheduling_unit_blueprint_path}) - @swagger_auto_schema(responses={201: 'The Created Task Draft, see Location in Response header', + @swagger_auto_schema(responses={201: 'The updated scheduling_unit_draft with references to its created task_drafts', 403: 'forbidden'}, operation_description="Create Task Drafts from SchedulingUnitDraft.") @action(methods=['get'], detail=True, url_name="create_task_drafts", name="Create Task Drafts from Requirement doc") - def create_tasks_draft(self, request, pk=None): + def 
create_task_drafts(self, request, pk=None): scheduling_unit_draft = get_object_or_404(models.SchedulingUnitDraft, pk=pk) create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft) @@ -503,6 +581,15 @@ class TaskRelationBlueprintViewSet(LOFARViewSet): serializer_class = serializers.TaskRelationBlueprintSerializer +class TaskSchedulingRelationBlueprintViewSet(LOFARViewSet): + queryset = models.TaskSchedulingRelationBlueprint.objects.all() + serializer_class = serializers.TaskSchedulingRelationBlueprintSerializer + + +class TaskSchedulingRelationDraftViewSet(LOFARViewSet): + queryset = models.TaskSchedulingRelationDraft.objects.all() + serializer_class = serializers.TaskSchedulingRelationDraftSerializer + class TaskRelationBlueprintNestedViewSet(LOFARNestedViewSet): queryset = models.TaskRelationBlueprint.objects.all() @@ -518,16 +605,3 @@ class TaskRelationBlueprintNestedViewSet(LOFARNestedViewSet): return task_relation_draft.related_task_relation_blueprint.all() else: return models.TaskRelationBlueprint.objects.all() - -class PlacementViewSet(LOFARViewSet): - queryset = models.Placement.objects.all() - serializer_class = serializers.PlacementSerializer - -class TaskSchedulingRelationBlueprintViewSet(LOFARViewSet): - queryset = models.TaskSchedulingRelationBlueprint.objects.all() - serializer_class = serializers.TaskSchedulingRelationBlueprintSerializer - - -class TaskSchedulingRelationDraftViewSet(LOFARViewSet): - queryset = models.TaskSchedulingRelationDraft.objects.all() - serializer_class = serializers.TaskSchedulingRelationDraftSerializer \ No newline at end of file diff --git a/SAS/TMSS/src/tmss/urls.py b/SAS/TMSS/src/tmss/urls.py index dab404e878a836069bc028da096c5af56e08c6d1..5064ac639fb42ff2780eaa20fe8c6f44645008a8 100644 --- a/SAS/TMSS/src/tmss/urls.py +++ b/SAS/TMSS/src/tmss/urls.py @@ -19,7 +19,7 @@ from django.contrib import admin from django.contrib.auth.views import LoginView, LogoutView from django.urls import path, re_path from django.conf.urls import url, include -from django.views.generic.base import TemplateView +from django.views.generic.base import TemplateView, RedirectView from collections import OrderedDict from rest_framework import routers, permissions @@ -75,7 +75,14 @@ class TMSSAPIRootView(routers.APIRootView): return response -router = routers.DefaultRouter() +class OptionalSlashRouter(routers.DefaultRouter): + + def __init__(self): + super(routers.DefaultRouter, self).__init__() + self.trailing_slash = '/?' 
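For context: DRF's SimpleRouter interpolates trailing_slash into every generated route regex (e.g. r'^{prefix}{trailing_slash}$'), so the '/?' fragment makes the slash optional rather than literal. A minimal self-contained sketch of the same idea, assuming djangorestframework's stock routers:

    from rest_framework import routers

    class OptionalSlashSimpleRouter(routers.SimpleRouter):
        def __init__(self):
            super().__init__()
            self.trailing_slash = '/?'   # regex fragment, not a literal slash

With such a router both /api/cycle and /api/cycle/ resolve to the same viewset.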
+ + +router = OptionalSlashRouter() router.APIRootView = TMSSAPIRootView router.register(r'tags', viewsets.TagsViewSet) @@ -86,8 +93,10 @@ router.register(r'role', viewsets.RoleViewSet) router.register(r'datatype', viewsets.DatatypeViewSet) router.register(r'dataformat', viewsets.DataformatViewSet) router.register(r'copy_reason', viewsets.CopyReasonViewSet) -router.register(r'placement', viewsets.PlacementViewSet) router.register(r'flag', viewsets.FlagViewSet) +router.register(r'period_category', viewsets.PeriodCategoryViewSet) +router.register(r'project_category', viewsets.ProjectCategoryViewSet) +router.register(r'quantity', viewsets.QuantityViewSet) # templates router.register(r'generator_template', viewsets.GeneratorTemplateViewSet) @@ -102,8 +111,8 @@ router.register(r'default_task_relation_selection_template', viewsets.DefaultTas # instances router.register(r'cycle', viewsets.CycleViewSet) +router.register(r'cycle_quota', viewsets.CycleQuotaViewSet) router.register(r'project', viewsets.ProjectViewSet) -router.register(r'resource_unit', viewsets.ResourceUnitViewSet) router.register(r'resource_type', viewsets.ResourceTypeViewSet) router.register(r'project_quota', viewsets.ProjectQuotaViewSet) router.register(r'setting', viewsets.SettingViewSet) @@ -119,7 +128,7 @@ router.register(r'task_scheduling_relation_draft', viewsets.TaskSchedulingRelati router.register(r'task_scheduling_relation_blueprint', viewsets.TaskSchedulingRelationBlueprintViewSet) # nested -router.register(r'cycle/(?P<cycle_id>[\w\-]+)/project', viewsets.ProjectNestedViewSet) +router.register(r'cycle/(?P<cycle_id>[\w\- ]+)/project', viewsets.ProjectNestedViewSet) router.register(r'scheduling_set/(?P<scheduling_set_id>\d+)/scheduling_unit_draft', viewsets.SchedulingUnitDraftNestedViewSet) router.register(r'scheduling_unit_draft/(?P<scheduling_unit_draft_id>\d+)/scheduling_unit_blueprint', viewsets.SchedulingUnitBlueprintNestedViewSet) router.register(r'scheduling_unit_draft/(?P<scheduling_unit_draft_id>\d+)/task_draft', viewsets.TaskDraftNestedViewSet) @@ -137,6 +146,7 @@ router.register(r'subtask_type', viewsets.SubtaskTypeViewSet) router.register(r'station_type', viewsets.StationTypeViewSet) router.register(r'algorithm', viewsets.AlgorithmViewSet) router.register(r'schedule_method', viewsets.ScheduleMethodViewSet) +router.register(r'scheduling_relation_placement', viewsets.SchedulingRelationPlacement) # templates router.register(r'subtask_template', viewsets.SubtaskTemplateViewSet) @@ -171,6 +181,4 @@ frontend_urlpatterns = [ # prefix everything for proxy #urlpatterns = [url(r'^api/', include(urlpatterns)), url(r'^oidc/', include('mozilla_django_oidc.urls')),] -urlpatterns = [url(r'^api/', include(urlpatterns)), url(r'^oidc/', include('mozilla_django_oidc.urls')), url(r'^frontend/', include(frontend_urlpatterns)) , url(r'^frontend/(?:.*)/?$', include(frontend_urlpatterns)),] - - +urlpatterns = [url(r'^api$', RedirectView.as_view(url='/api/')), url(r'^api/', include(urlpatterns)), url(r'^oidc$', RedirectView.as_view(url='/oidc/')), url(r'^oidc/', include('mozilla_django_oidc.urls')), url(r'^.*', include(frontend_urlpatterns)),] diff --git a/SAS/TMSS/test/CMakeLists.txt b/SAS/TMSS/test/CMakeLists.txt index fed919cff07977327addec9f2aeca6a243665c0a..0df3c7d58b658d2358bc94157165a40a77e8ef9f 100644 --- a/SAS/TMSS/test/CMakeLists.txt +++ b/SAS/TMSS/test/CMakeLists.txt @@ -31,6 +31,7 @@ if(BUILD_TESTING) lofar_add_test(t_subtasks) lofar_add_test(t_parset_adapter) lofar_add_test(t_tasks) + lofar_add_test(t_scheduling) # To get 
ctest running file(COPY testdata DESTINATION ${CMAKE_CURRENT_BINARY_DIR}) diff --git a/SAS/TMSS/test/t_scheduling.py b/SAS/TMSS/test/t_scheduling.py new file mode 100755 index 0000000000000000000000000000000000000000..f4de89666fd4bb05b82009ccc46d11fd578cc769 --- /dev/null +++ b/SAS/TMSS/test/t_scheduling.py @@ -0,0 +1,290 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2018 ASTRON (Netherlands Institute for Radio Astronomy) +# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands +# +# This file is part of the LOFAR software suite. +# The LOFAR software suite is free software: you can redistribute it and/or +# modify it under the terms of the GNU General Public License as published +# by the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# The LOFAR software suite is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. + +# $Id: $ + +import os +import unittest + +import logging +logger = logging.getLogger(__name__) +logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) + +from lofar.common.test_utils import skip_integration_tests +if skip_integration_tests(): + exit(3) + +# before we import any django modules the DJANGO_SETTINGS_MODULE, TMSS_LDAPCREDENTIALS and TMSS_DBCREDENTIALS need to be known/set. +# import and start an isolated RATestEnvironment and TMSSTestEnvironment (with fresh database and attached django and ldap server on free ports) +# this automagically sets the required DJANGO_SETTINGS_MODULE, TMSS_LDAPCREDENTIALS and TMSS_DBCREDENTIALS envvars. +from lofar.sas.resourceassignment.resourceassigner.test.ra_test_environment import RATestEnvironment +from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment + +ra_test_env = RATestEnvironment() +tmss_test_env = TMSSTestEnvironment() +try: + ra_test_env.start() + tmss_test_env.start() +except: + ra_test_env.stop() + tmss_test_env.stop() + exit(1) + +# tell unittest to stop (and automagically cleanup) the test database once all testing is done. 
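For reference: unittest looks these module-level hooks up by name, running setUpModule() once before the first test in the module and tearDownModule() once after the last one (the actual definition follows below). A tiny standalone sketch of the protocol:

    import unittest

    def setUpModule():        # runs once, before any test in this module
        print("start shared services")

    def tearDownModule():     # runs once, after all tests in this module
        print("stop shared services")

    class ExampleTest(unittest.TestCase):
        def test_noop(self):
            self.assertTrue(True)

    if __name__ == '__main__':
        unittest.main()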
+def tearDownModule(): + tmss_test_env.stop() + ra_test_env.stop() + + +from lofar.sas.tmss.test.tmss_test_data_django_models import * + +# import and setup rest test data creator +from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator +test_data_creator = TMSSRESTTestDataCreator(tmss_test_env.django_server.url, (tmss_test_env.ldap_server.dbcreds.user, tmss_test_env.ldap_server.dbcreds.password)) +from datetime import datetime, timedelta +from lofar.sas.resourceassignment.resourceassigner.rarpc import RARPC +from lofar.sas.tmss.tmss.tmssapp import models +from lofar.sas.tmss.tmss.tmssapp.subtasks import * + + +def create_subtask_object_for_testing(subtask_type_value, subtask_state_value): + """ + Helper function to create a subtask object for testing with given subtask value and subtask state value + as string (no object) + For these testcases 'pipelinecontrol schema' and 'observationcontrol schema' is relevant + """ + subtask_template_obj = models.SubtaskTemplate.objects.get(name="%scontrol schema" % subtask_type_value) + subtask_state_obj = models.SubtaskState.objects.get(value=subtask_state_value) + subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj) + return models.Subtask.objects.create(**subtask_data) + + + +class SchedulingTest(unittest.TestCase): + def setUp(self): + # clean all specs/tasks/claims in RADB (cascading delete) + for spec in ra_test_env.radb.getSpecifications(): + ra_test_env.radb.deleteSpecification(spec['id']) + + def test_schedule_observation_subtask_with_enough_resources_available(self): + with tmss_test_env.create_tmss_client() as client: + subtask_template = client.get_subtask_template("observationcontrol schema") + spec = get_default_json_object_for_schema(subtask_template['schema']) + spec['stations']['digital_pointings'][0]['subbands'] = [0] + cluster_url = client.get_path_as_json_object('/cluster/1')['url'] + + subtask_data = test_data_creator.Subtask(specifications_template_url=subtask_template['url'], + specifications_doc=spec, + cluster_url=cluster_url) + subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/') + subtask_id = subtask['id'] + test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/') + + client.set_subtask_status(subtask_id, 'defined') + subtask = client.schedule_subtask(subtask_id) + + self.assertEqual('scheduled', subtask['state_value']) + self.assertEqual('scheduled', ra_test_env.radb.getTask(tmss_id=subtask_id)['status']) + + def test_schedule_observation_subtask_with_blocking_reservations(self): + + # create a reservation on station CS001 + with RARPC.create() as rarpc: + ra_spec = { 'task_type': 'reservation', + 'task_subtype': 'maintenance', + 'status': 'prescheduled', + 'starttime': datetime.utcnow()-timedelta(hours=1), + 'endtime': datetime.utcnow() + timedelta(hours=1), + 'cluster': None, + 'specification': {} } + inner_spec = { 'Observation.VirtualInstrument.stationList': ['CS001'], + 'Observation.startTime': '2020-01-08 06:30:00', + 'Observation.endTime': '2021-07-08 06:30:00' } + ra_spec['specification'] = inner_spec + assigned = rarpc.do_assignment(ra_spec) + self.assertTrue(assigned) + + with tmss_test_env.create_tmss_client() as client: + subtask_template = client.get_subtask_template("observationcontrol schema") + spec = get_default_json_object_for_schema(subtask_template['schema']) + spec['stations']['digital_pointings'][0]['subbands'] = [0] + cluster_url = 
client.get_path_as_json_object('/cluster/1')['url'] + + subtask_data = test_data_creator.Subtask(specifications_template_url=subtask_template['url'], + specifications_doc=spec, + cluster_url=cluster_url) + subtask = test_data_creator.post_data_and_get_response_as_json_object(subtask_data, '/subtask/') + subtask_id = subtask['id'] + test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=subtask['url']), '/subtask_output/') + + client.set_subtask_status(subtask_id, 'defined') + + with self.assertRaises(Exception): + client.schedule_subtask(subtask_id) + + subtask = client.get_subtask(subtask_id) + self.assertEqual('error', subtask['state_value']) + self.assertEqual('conflict', ra_test_env.radb.getTask(tmss_id=subtask_id)['status']) + + def test_schedule_pipeline_subtask_with_enough_resources_available(self): + with tmss_test_env.create_tmss_client() as client: + cluster_url = client.get_path_as_json_object('/cluster/1')['url'] + + # setup: first create an observation, so the pipeline can have input. + obs_subtask_template = client.get_subtask_template("observationcontrol schema") + obs_spec = get_default_json_object_for_schema(obs_subtask_template['schema']) + obs_spec['stations']['digital_pointings'][0]['subbands'] = [0] + + obs_subtask_data = test_data_creator.Subtask(specifications_template_url=obs_subtask_template['url'], + specifications_doc=obs_spec, + cluster_url=cluster_url) + obs_subtask = test_data_creator.post_data_and_get_response_as_json_object(obs_subtask_data, '/subtask/') + obs_subtask_output_url = test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=obs_subtask['url']), '/subtask_output/') + test_data_creator.post_data_and_get_url(test_data_creator.Dataproduct(filename="L%s_SB000.MS"%obs_subtask['id'], + subtask_output_url=obs_subtask_output_url), '/dataproduct/') + + # now create the pipeline... 
+ pipe_subtask_template = client.get_subtask_template("pipelinecontrol schema") + pipe_spec = get_default_json_object_for_schema(pipe_subtask_template['schema']) + + pipe_subtask_data = test_data_creator.Subtask(specifications_template_url=pipe_subtask_template['url'], + specifications_doc=pipe_spec, + task_blueprint_url=obs_subtask['task_blueprint'], + cluster_url=cluster_url) + pipe_subtask = test_data_creator.post_data_and_get_response_as_json_object(pipe_subtask_data, '/subtask/') + + # ...and connect it to the observation + test_data_creator.post_data_and_get_url(test_data_creator.SubtaskInput(subtask_url=pipe_subtask['url'], subtask_output_url=obs_subtask_output_url), '/subtask_input/') + test_data_creator.post_data_and_get_url(test_data_creator.SubtaskOutput(subtask_url=pipe_subtask['url']), '/subtask_output/') + + for predecessor in client.get_subtask_predecessors(pipe_subtask['id']): + client.set_subtask_status(predecessor['id'], 'finished') + + client.set_subtask_status(pipe_subtask['id'], 'defined') + subtask = client.schedule_subtask(pipe_subtask['id']) + + self.assertEqual('scheduled', subtask['state_value']) + self.assertEqual('scheduled', ra_test_env.radb.getTask(tmss_id=pipe_subtask['id'])['status']) + + + def test_schedule_schedulingunit_enough_resources_available(self): + '''similar test as test_schedule_pipeline_subtask_with_enough_resources_available, but now created from a scheduling_unit''' + with tmss_test_env.create_tmss_client() as client: + scheduling_unit_template = client.get_schedulingunit_template("scheduling unit schema") + scheduling_unit_doc = get_default_json_object_for_schema(scheduling_unit_template['schema']) + + # define an observation without QA + obs_task = get_default_json_object_for_schema(client.get_task_template(name="observation schema")['schema']) + obs_task['QA']['plots']['enabled'] = False + obs_task['QA']['file_conversion']['enabled'] = False + obs_task['SAPs'][0]['subbands'] = [0,1] + scheduling_unit_doc['tasks'].append({"name": "Observation", + "specifications_doc": obs_task, + "specifications_template": "observation schema"}) + + # define a pipeline + scheduling_unit_doc['tasks'].append({"name": "Pipeline", + "specifications_doc": get_default_json_object_for_schema(client.get_task_template(name="preprocessing schema")['schema']), + "specifications_template": "preprocessing schema"}) + + # connect obs to pipeline + scheduling_unit_doc['task_relations'].append({"producer": "Observation", + "consumer": "Pipeline", + "input": { "role": "input", "datatype": "visibilities" }, + "output": { "role": "correlator", "datatype": "visibilities" }, + "dataformat": "MeasurementSet", + "selection_doc": {}, + "selection_template": "All" }) + + # submit + scheduling_unit_draft_data = test_data_creator.SchedulingUnitDraft(template_url=scheduling_unit_template['url'], + requirements_doc=scheduling_unit_doc) + scheduling_unit_draft = test_data_creator.post_data_and_get_response_as_json_object(scheduling_unit_draft_data, '/scheduling_unit_draft/') + + # create the whole blueprints tree... 
+ scheduling_unit_blueprint = client.create_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft['id']) + + # fetch the created task_blueprints + task_blueprints = [client.get_url_as_json_object(task_blueprint_url) for task_blueprint_url in scheduling_unit_blueprint['task_blueprints']] + self.assertEqual(2, len(task_blueprints)) + # and make sure they are ordered correctly + if "Pipeline" in task_blueprints[0]['name']: + task_blueprints.reverse() + + for task_blueprint in task_blueprints: + self.assertEqual(1, len(task_blueprint['subtasks'])) + subtask = client.get_url_as_json_object(task_blueprint['subtasks'][0]) + + client.set_subtask_status(subtask['id'], 'defined') + subtask = client.schedule_subtask(subtask['id']) + + self.assertEqual('scheduled', subtask['state_value']) + self.assertEqual('scheduled', ra_test_env.radb.getTask(tmss_id=subtask['id'])['status']) + + client.set_subtask_status(subtask['id'], 'finished') + + +class SubtaskInputOutputTest(unittest.TestCase): + """ + Subtask Input and Output test + These testcases are located in the t_scheduling module, because during scheduling the output + dataproducts are assigned + """ + + def setUp(self) -> None: + # make sure we're allowed to schedule + setting = Setting.objects.get(name='allow_scheduling_observations') + setting.value = True + setting.save() + + def test_schedule_pipeline_subtask_filters_predecessor_output_dataproducts_for_input(self): + # setup: + # create observation subtask and outputs and dataproducts + obs_st = create_subtask_object_for_testing('observation', 'finished') + obs_out1 = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=obs_st)) + obs_out2 = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=obs_st)) + + # create connected pipeline subtask and inputs, specify input filtering + pipe_st = create_subtask_object_for_testing('pipeline', 'defined') + pipe_out = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=pipe_st)) # required by scheduling function + pipe_in1 = models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=pipe_st, producer=obs_out1, selection_doc={'sap': [0]})) + pipe_in2 = models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=pipe_st, producer=obs_out2, selection_doc={'sap': [1]})) + + # create obs output dataproducts with specs we can filter on + dp1_1 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out1, specifications_doc={'sap': [0]})) + dp1_2 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out1, specifications_doc={'sap': [1]})) + dp1_3 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out1, specifications_doc={'sap': [0]})) + + dp2_1 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out2, specifications_doc={'sap': [0]})) + dp2_2 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out2, specifications_doc={'sap': [1]})) + + # uncomment when RA scheduler works + # # trigger: + # # schedule pipeline, which should attach the correct subset of dataproducts to the pipeline inputs + # schedule_pipeline_subtask(pipe_st) + # + # # assert: + # # check correct input filtering + # self.assertEqual(set(pipe_in1.dataproducts.all()), {dp1_1, dp1_3}) + # self.assertEqual(set(pipe_in2.dataproducts.all()), {dp2_2}) + + +if __name__ == "__main__": + os.environ['TZ'] = 'UTC' + unittest.main() diff --git a/SAS/TMSS/test/t_scheduling.run b/SAS/TMSS/test/t_scheduling.run new file mode 
100755 index 0000000000000000000000000000000000000000..24c89483cc69078d2616426a1bc5beb22fa577f9 --- /dev/null +++ b/SAS/TMSS/test/t_scheduling.run @@ -0,0 +1,6 @@ +#!/bin/bash + +# Run the unit test +source python-coverage.sh +python_coverage_test "*tmss*" t_scheduling.py + diff --git a/SAS/TMSS/test/t_scheduling.sh b/SAS/TMSS/test/t_scheduling.sh new file mode 100755 index 0000000000000000000000000000000000000000..63ecb0c21ef0f6ae6d8ccc6d0c4d88d826a4189d --- /dev/null +++ b/SAS/TMSS/test/t_scheduling.sh @@ -0,0 +1,3 @@ +#!/bin/sh + +./runctest.sh t_scheduling \ No newline at end of file diff --git a/SAS/TMSS/test/t_subtasks.py b/SAS/TMSS/test/t_subtasks.py index b7da9da760dd1638a93a2f70658ebc12697e6067..2421ab66d1b5817adb87df5c902b637cbf500007 100755 --- a/SAS/TMSS/test/t_subtasks.py +++ b/SAS/TMSS/test/t_subtasks.py @@ -41,57 +41,88 @@ from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.tmss.tmssapp.subtasks import * -# The following methods should be tested -# check_prerequities_for_subtask_creation -# create_subtasks_from_task_blueprint -# create_observation_control_subtask_from_task_blueprint -# create_qafile_subtask_from_task_blueprint -# create_qafile_subtask_from_observation_subtask -# create_qaplots_subtask_from_task_blueprint -# create_qaplots_subtask_from_qafile_subtask -# create_preprocessing_subtask_from_task_blueprint -# -# schedule_subtask -# check_prerequities_for_scheduling -# schedule_qafile_subtask -# schedule_qaplots_subtask -# schedule_observation_subtask -# schedule_pipeline_subtask -# -# create_and_schedule_subtasks_from_task_blueprint - - - - +def create_subtask_object_for_testing(subtask_type_value, subtask_state_value): + """ + Helper function to create a subtask object for testing with given subtask value and subtask state value + as string (no object) + """ + template_type = models.SubtaskType.objects.get(value=subtask_type_value) + subtask_template_obj = create_subtask_template_for_testing(template_type) + subtask_state_obj = models.SubtaskState.objects.get(value=subtask_state_value) + subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj) + return models.Subtask.objects.create(**subtask_data) + + +def create_subtask_template_for_testing(template_type: object): + """ + Helper function + :param template_type: + :return: + """ + subtask_template_data = SubtaskTemplate_test_data() + subtask_template_data['type'] = template_type + return models.SubtaskTemplate.objects.create(**subtask_template_data) + + +def create_task_blueprint_object_for_testing(task_template_name="observation schema", QA_enabled=False): + """ + Helper function to create a task blueprint object for testing with given task template name value + as string (no object) + :param task_template_name: (Optional) name of schema observation schema is target observation + :param QA_enabled: (Optional) QA plots and file_conversion + :return: task_blueprint_obj: Created Task Blueprint object + """ + task_template = models.TaskTemplate.objects.get(name=task_template_name) + task_spec = get_default_json_object_for_schema(task_template.schema) + if 'QA' in task_spec: + task_spec["QA"]['plots']['enabled'] = QA_enabled + task_spec["QA"]['file_conversion']['enabled'] = QA_enabled + task_draft_data = TaskDraft_test_data(specifications_template=task_template, specifications_doc=task_spec) + task_draft_obj = models.TaskDraft.objects.create(**task_draft_data) + + task_name = "BlueprintTask with %s" % task_template_name + task_blueprint_data = 
TaskBlueprint_test_data(name=task_name, task_draft=task_draft_obj) + task_blueprint_obj = models.TaskBlueprint.objects.create(**task_blueprint_data) + return task_blueprint_obj + + +def create_relation_task_blueprint_object_for_testing(blueprint_task_producer, blueprint_task_consumer): + """ + Helper function to create a task relation blueprint object for testing for given task objects consumer and producer + :param blueprint_task_producer: Blueprint task of producer, typical an observation + :param blueprint_task_consumer: Blueprint task of consumer, typical a preprocessing pipeline + :return: task_relation_obj: Created Task Relation Blueprint object + """ + task_relation_data = TaskRelationBlueprint_test_data(blueprint_task_producer, blueprint_task_consumer) + task_relation_obj = models.TaskRelationBlueprint.objects.create(**task_relation_data) + return task_relation_obj + + +def create_scheduling_relation_task_blueprint_for_testing(first_task_blueprint, second_task_blueprint): + """ + Helper function to create a task blueprint relation object between two task blueprint (calibrator and target observation) + :param first_task_blueprint: + :param second_task_blueprint: + :return: task_relation_blueprint_obj: Created Task Relation Blueprint object + """ + task_scheduling_rel_obj = models.TaskSchedulingRelationBlueprint.objects.create( + tags=[], + first=first_task_blueprint, + second=second_task_blueprint, + placement=models.SchedulingRelationPlacement.objects.get(value='before'), + time_offset=60) + return task_scheduling_rel_obj class SubTasksCreationFromSubTask(unittest.TestCase): - @staticmethod - def create_subtask_object(subtask_type_value, subtask_state_value): - """ - Helper function to create a subtask object for testing with given subtask value and subtask state value - as string (no object) - """ - template_type = models.SubtaskType.objects.get(value=subtask_type_value) - subtask_template_obj = SubTasksCreationFromSubTask.create_subtask_template(template_type) - subtask_state_obj = models.SubtaskState.objects.get(value=subtask_state_value) - subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj) - return models.Subtask.objects.create(**subtask_data) - - @staticmethod - def create_subtask_template(template_type: object): - subtask_template_data = SubtaskTemplate_test_data() - subtask_template_data['type'] = template_type - return models.SubtaskTemplate.objects.create(**subtask_template_data) - def test_create_qafile_subtask_from_observation_subtask_failed(self): """ Test if creation of subtask qafile failed due to wrong state or wrong type of the predecessor subtask Correct state should be 'defined' and correct type should be 'observation' (for this test of course it is not) """ - subtasks = [self.create_subtask_object("pipeline", "defined"), - self.create_subtask_object("observation", "defining"), - self.create_subtask_object("observation", "defining") ] + subtasks = [create_subtask_object_for_testing("pipeline", "defined"), + create_subtask_object_for_testing("observation", "defining"), + create_subtask_object_for_testing("observation", "defining") ] for subtask in subtasks: with self.assertRaises(ValueError): create_qafile_subtask_from_observation_subtask(subtask) @@ -99,11 +130,10 @@ class SubTasksCreationFromSubTask(unittest.TestCase): def test_create_qafile_subtask_from_observation_subtask_succeed(self): """ Test if creation of subtask qafile succeed - Check if the created subtask has correct subtask state and value (TODO) + Subtask 
object is None because QA file conversion is by default not enabled!!!! """ - predecessor_subtask = self.create_subtask_object("observation", "defined") + predecessor_subtask = create_subtask_object_for_testing("observation", "defined") subtask = create_qafile_subtask_from_observation_subtask(predecessor_subtask) - # subtask object is None because QA file conversion is by default not enabled!!!! self.assertEqual(None, subtask) def test_create_qaplots_subtask_from_qafile_subtask_failed(self): @@ -111,9 +141,9 @@ class SubTasksCreationFromSubTask(unittest.TestCase): Test if creation of subtask qaplots failed due to wrong state or wrong type of the predecessor subtask Correct type should be 'qa_files' (for this test of course it is not) """ - subtasks = [self.create_subtask_object("pipeline", "defined"), - self.create_subtask_object("observation", "defining"), - self.create_subtask_object("observation", "defining") ] + subtasks = [create_subtask_object_for_testing("pipeline", "defined"), + create_subtask_object_for_testing("observation", "defining"), + create_subtask_object_for_testing("observation", "defining") ] for subtask in subtasks: with self.assertRaises(ValueError): create_qaplots_subtask_from_qafile_subtask(subtask) @@ -121,47 +151,21 @@ class SubTasksCreationFromSubTask(unittest.TestCase): def test_create_qaplots_subtask_from_qafile_subtask_succeed(self): """ Test if creation of subtask qaplots succeed - Check if the created subtask has correct subtask state and value (TODO) + Subtask object is None because QA plots is by default not enabled!!!! """ - predecessor_subtask = self.create_subtask_object("qa_files", "defined") + predecessor_subtask = create_subtask_object_for_testing("qa_files", "defined") subtask = create_qaplots_subtask_from_qafile_subtask(predecessor_subtask) - # subtask object is None because QA plots is by default not enabled!!!! self.assertEqual(None, subtask) class SubTasksCreationFromTaskBluePrint(unittest.TestCase): - @staticmethod - def create_task_blueprint_object(task_template_name="observation schema", QA_enabled=False): - """ - Helper function to create a task blueprint object for testing with given task template name value - as string (no object) - """ - task_blueprint_data = TaskBlueprint_test_data() - task_blueprint_obj = models.TaskBlueprint.objects.create(**task_blueprint_data) - task_blueprint_obj.specifications_template.name = task_template_name - task_blueprint_obj.specifications_doc = { - "QA": { - "plots": { - "enabled": QA_enabled, - "autocorrelation": True, - "crosscorrelation": True - }, - "file_conversion": { - "enabled": QA_enabled, - "nr_of_subbands": -1, - "nr_of_timestamps": 256 - } - } - } - return task_blueprint_obj - def test_create_sequence_of_subtask_from_task_blueprint(self): """ Create multiple subtasks from a task blueprint, executed in correct order. 
No exception should occur, check name, type and state of the subtask
        """
-        task_blueprint = self.create_task_blueprint_object()
+        task_blueprint = create_task_blueprint_object_for_testing()
         subtask = create_observation_control_subtask_from_task_blueprint(task_blueprint)
         self.assertEqual("defined", str(subtask.state))
@@ -178,7 +182,6 @@ class SubTasksCreationFromTaskBluePrint(unittest.TestCase):
         with self.assertRaises(SubtaskCreationException):
             subtask = create_qaplots_subtask_from_task_blueprint(task_blueprint)

-        # subtask = create_preprocessing_subtask_from_task_blueprint(task_blueprint)

     def test_create_sequence_of_subtask_from_task_blueprint_with_QA_enabled(self):
         """
@@ -186,9 +189,9 @@ class SubTasksCreationFromTaskBluePrint(unittest.TestCase):
         QA plots and QA file conversion enabled
         No exception should occur, check name, type and state of the subtasks
         """
-        # Enable QA plot and QA conversion
-        task_blueprint = self.create_task_blueprint_object(QA_enabled=True)
-        task_blueprint_preprocessing = self.create_task_blueprint_object("preprocessing schema")
+        # Create an observation task with QA plots and QA file conversion enabled
+        task_blueprint = create_task_blueprint_object_for_testing(QA_enabled=True)
+        task_blueprint_preprocessing = create_task_blueprint_object_for_testing("preprocessing schema")

         subtask = create_observation_control_subtask_from_task_blueprint(task_blueprint)
         self.assertEqual("defined", str(subtask.state))
@@ -204,59 +207,70 @@ class SubTasksCreationFromTaskBluePrint(unittest.TestCase):
         self.assertEqual("defined", str(subtask.state))
         self.assertEqual("QA plots", str(subtask.specifications_template.name))
         self.assertEqual("qa_plots", str(subtask.specifications_template.type))
-        # TODO: check why next call failed?
-        #subtask = create_preprocessing_subtask_from_task_blueprint(task_blueprint_preprocessing)
-        #self.assertEqual("defined", str(subtask.state))
+        # The next call will fail due to the missing task relation
+        with self.assertRaises(SubtaskCreationException):
+            create_preprocessing_subtask_from_task_blueprint(task_blueprint_preprocessing)
+        # Create that relation and check again
+        create_relation_task_blueprint_object_for_testing(task_blueprint, task_blueprint_preprocessing)
+        subtask = create_preprocessing_subtask_from_task_blueprint(task_blueprint_preprocessing)
+        self.assertEqual("defined", str(subtask.state))
+        self.assertEqual("pipelinecontrol schema", str(subtask.specifications_template.name))
+        self.assertEqual("pipeline", str(subtask.specifications_template.type))
+
+    def test_create_subtasks_from_task_blueprint_succeed(self):
+        """
+        """
+        task_blueprint = create_task_blueprint_object_for_testing(QA_enabled=True)
+        subtasks = create_subtasks_from_task_blueprint(task_blueprint)
+        self.assertEqual(3, len(subtasks))
+
+class SubTasksCreationFromTaskBluePrintCalibrator(unittest.TestCase):

-    def test_create_subtasks_from_task_blueprint_failure_on_schema(self):
+    def test_create_sequence_of_subtask_from_task_blueprint_calibrator_failure(self):
         """
-        Test creation failure due to unknown schema (no correlator or preprocessing schema)
-        Check exception
-        "SubtaskCreationException: Cannot create subtasks for task id=1 because no generator exists for its schema name=unknown schema"
+        Create multiple subtasks from a task blueprint when the task is a calibrator
+        Check that an exception occurs due to the missing related target observation
         """
-        task_blueprint = self.create_task_blueprint_object("unknown schema")
+        task_blueprint = create_task_blueprint_object_for_testing(task_template_name="calibrator schema")
         with self.assertRaises(SubtaskCreationException):
-            create_subtasks_from_task_blueprint(task_blueprint)
+            create_observation_control_subtask_from_task_blueprint(task_blueprint)

-    def test_create_subtasks_from_task_blueprint_succeed(self):
+    def test_create_sequence_of_subtask_from_task_blueprint_calibrator(self):
         """
+        Create multiple subtasks from a task blueprint when the task is a calibrator and is related to the task blueprint
+        of a target observation
+        Check that an exception occurs due to the missing pointing setting in the target observation,
+        since the calibrator default is AutoSelect=True
+        Check NO exception when AutoSelect=False
         """
-        task_blueprint = self.create_task_blueprint_object(QA_enabled=True)
-        subtasks = create_subtasks_from_task_blueprint(task_blueprint)
-        self.assertEqual(3, len(subtasks))
+        cal_task_blueprint = create_task_blueprint_object_for_testing(task_template_name="calibrator schema")
+        target_task_blueprint = create_task_blueprint_object_for_testing()
+        create_scheduling_relation_task_blueprint_for_testing(cal_task_blueprint, target_task_blueprint)
+
+        with self.assertRaises(SubtaskCreationException):
+            create_observation_control_subtask_from_task_blueprint(cal_task_blueprint)

-# TODO Test the Schedule calls
+        cal_task_blueprint.specifications_doc['autoselect'] = False
+        cal_task_blueprint.specifications_doc['pointing']['angle1'] = 11.11
+        cal_task_blueprint.specifications_doc['pointing']['angle2'] = 22.22
+        subtask = create_observation_control_subtask_from_task_blueprint(cal_task_blueprint)
+        self.assertEqual("defined", str(subtask.state))
+        self.assertEqual("observationcontrol schema", str(subtask.specifications_template.name))
+        self.assertEqual("observation", str(subtask.specifications_template.type))
+        self.assertEqual('J2000', subtask.specifications_doc['stations']['analog_pointing']['direction_type'])
+        self.assertEqual(11.11, subtask.specifications_doc['stations']['analog_pointing']['angle1'])
+        self.assertEqual(22.22, subtask.specifications_doc['stations']['analog_pointing']['angle2'])

 class SubtaskInputSelectionFilteringTest(unittest.TestCase):

-    # todo: merge in tests from TMSS-207 and deduplicate staticmethods
-
     def setUp(self) -> None:
         # make sure we're allowed to schedule
         setting = Setting.objects.get(name='allow_scheduling_observations')
         setting.value = True
         setting.save()

-    @staticmethod
-    def create_subtask_object(subtask_type_value, subtask_state_value):
-        """
-        Helper function to create a subtask object for testing with given subtask value and subtask state value
-        as string (no object)
-        """
-        template_type = models.SubtaskType.objects.get(value=subtask_type_value)
-        subtask_template_obj = SubtaskInputSelectionFilteringTest.create_subtask_template(template_type)
-        subtask_state_obj = models.SubtaskState.objects.get(value=subtask_state_value)
-        subtask_data = Subtask_test_data(subtask_template=subtask_template_obj, state=subtask_state_obj)
-        return models.Subtask.objects.create(**subtask_data)
-
-    @staticmethod
-    def create_subtask_template(template_type: object):
-        subtask_template_data = SubtaskTemplate_test_data()
-        subtask_template_data['type'] = template_type
-        return models.SubtaskTemplate.objects.create(**subtask_template_data)
-
     def test_specifications_doc_meets_selection_doc_returns_true_on_empty_filter(self):
         specs = {}
         selection = {}
@@ -294,37 +308,6 @@ class SubtaskInputSelectionFilteringTest(unittest.TestCase):
         selection = {'sap': [0], 'is_relevant': True}
self.assertFalse(specifications_doc_meets_selection_doc(specs, selection)) - def test_schedule_pipeline_subtask_filters_predecessor_output_dataproducts_for_input(self): - - # setup: - # create observation subtask and outputs and dataproducts - obs_st = self.create_subtask_object('observation', 'finished') - obs_out1 = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=obs_st)) - obs_out2 = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=obs_st)) - - # create connected pipeline subtask and inputs, specify input filtering - pipe_st = self.create_subtask_object('pipeline', 'defined') - pipe_out = models.SubtaskOutput.objects.create(**SubtaskOutput_test_data(subtask=pipe_st)) # required by scheduling function - pipe_in1 = models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=pipe_st, producer=obs_out1, selection_doc={'sap': [0]})) - pipe_in2 = models.SubtaskInput.objects.create(**SubtaskInput_test_data(subtask=pipe_st, producer=obs_out2, selection_doc={'sap': [1]})) - - # create obs output dataproducts with specs we can filter on - dp1_1 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out1, specifications_doc={'sap': [0]})) - dp1_2 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out1, specifications_doc={'sap': [1]})) - dp1_3 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out1, specifications_doc={'sap': [0]})) - - dp2_1 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out2, specifications_doc={'sap': [0]})) - dp2_2 = models.Dataproduct.objects.create(**Dataproduct_test_data(producer=obs_out2, specifications_doc={'sap': [1]})) - - # trigger: - # schedule pipeline, which should attach the correct subset of dataproducts to the pipeline inputs - schedule_pipeline_subtask(pipe_st) - - # assert: - # check correct input filtering - self.assertEqual(set(pipe_in1.dataproducts.all()), {dp1_1, dp1_3}) - self.assertEqual(set(pipe_in2.dataproducts.all()), {dp2_2}) - class SettingTest(unittest.TestCase): @@ -332,11 +315,12 @@ class SettingTest(unittest.TestCase): setting = Setting.objects.get(name='allow_scheduling_observations') setting.value = False setting.save() - obs_st = SubtaskInputSelectionFilteringTest.create_subtask_object('observation', 'defined') + obs_st = create_subtask_object_for_testing('observation', 'defined') with self.assertRaises(SubtaskSchedulingException): schedule_observation_subtask(obs_st) + if __name__ == "__main__": os.environ['TZ'] = 'UTC' unittest.main() diff --git a/SAS/TMSS/test/t_tasks.py b/SAS/TMSS/test/t_tasks.py index 5005382e6c831e41cbd0f90d9c48afc99cce33de..d9f6c1b2a79eb78f03173fa38006b2c197bfde26 100755 --- a/SAS/TMSS/test/t_tasks.py +++ b/SAS/TMSS/test/t_tasks.py @@ -27,16 +27,32 @@ import logging logger = logging.getLogger(__name__) logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO) -# Do Mandatory setup step: -# use setup/teardown magic for tmss test database, ldap server and django server -# (ignore pycharm unused import statement, python unittests does use at RunTime the tmss_test_environment_unittest_setup module) -from lofar.sas.tmss.test.tmss_test_environment_unittest_setup import * +# before we import any django modules the DJANGO_SETTINGS_MODULE, TMSS_LDAPCREDENTIALS and TMSS_DBCREDENTIALS need to be known/set. 
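Django resolves DJANGO_SETTINGS_MODULE the first time django.conf.settings is touched, which is why the environment really must be populated before any model import. A generic sketch of that ordering constraint (plain Django, hypothetical settings path):

    import os
    # must be set before importing anything that pulls in django.conf.settings
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myproject.settings')  # hypothetical module

    import django
    django.setup()  # after this, importing models is safe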
+# import and start an isolated RATestEnvironment and TMSSTestEnvironment (with fresh database and attached django and ldap server on free ports) +# this automagically sets the required DJANGO_SETTINGS_MODULE, TMSS_LDAPCREDENTIALS and TMSS_DBCREDENTIALS envvars. +from lofar.sas.resourceassignment.resourceassigner.test.ra_test_environment import RATestEnvironment +from lofar.sas.tmss.test.test_utils import TMSSTestEnvironment + +ra_test_env = RATestEnvironment() +tmss_test_env = TMSSTestEnvironment() +try: + ra_test_env.start() + tmss_test_env.start() +except: + ra_test_env.stop() + tmss_test_env.stop() + exit(1) + +# tell unittest to stop (and automagically cleanup) the test database once all testing is done. +def tearDownModule(): + tmss_test_env.stop() + ra_test_env.stop() from lofar.sas.tmss.test.tmss_test_data_django_models import * # import and setup rest test data creator from lofar.sas.tmss.test.tmss_test_data_rest import TMSSRESTTestDataCreator -rest_data_creator = TMSSRESTTestDataCreator(BASE_URL, AUTH) +rest_data_creator = TMSSRESTTestDataCreator(tmss_test_env.django_server.url, (tmss_test_env.ldap_server.dbcreds.user, tmss_test_env.ldap_server.dbcreds.password)) from lofar.sas.tmss.tmss.tmssapp import models from lofar.sas.tmss.tmss.tmssapp.tasks import * @@ -47,14 +63,16 @@ class CreationFromSchedulingUnitDraft(unittest.TestCase): From scheduling_unit_draft should test: 1. create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> models.SchedulingUnitBlueprint: 6. create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> [TaskDraft]: - 3. create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> models.SchedulingUnitBlueprint: + 3. 
create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft: models.SchedulingUnitDraft) -> models.SchedulingUnitBlueprint: """ @staticmethod def create_scheduling_unit_draft_object(scheduling_unit_draft_name, requirements_doc=None): """ Helper function to create a scheduling unit object for testing """ - scheduling_unit_draft_data = SchedulingUnitDraft_test_data(name=scheduling_unit_draft_name) + scheduling_unit_draft_data = SchedulingUnitDraft_test_data(name=scheduling_unit_draft_name, + requirements_doc=requirements_doc, + template=models.SchedulingUnitTemplate.objects.get(name="scheduling unit schema")) draft_obj = models.SchedulingUnitDraft.objects.create(**scheduling_unit_draft_data) return draft_obj @@ -66,13 +84,9 @@ class CreationFromSchedulingUnitDraft(unittest.TestCase): """ scheduling_unit_draft = self.create_scheduling_unit_draft_object("Test Scheduling Unit 1") - res_scheduling_unit_draft = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_draft/' + str(scheduling_unit_draft.id), 200) scheduling_unit_blueprint = create_scheduling_unit_blueprint_from_scheduling_unit_draft(scheduling_unit_draft) self.assertEqual(scheduling_unit_draft.name, scheduling_unit_blueprint.draft.name) - res_scheduling_unit_blueprint = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_blueprint/' + str(scheduling_unit_blueprint.id), 200) - self.assertEqual(res_scheduling_unit_blueprint['requirements_template'], res_scheduling_unit_draft['requirements_template']) - self.assertEqual(res_scheduling_unit_blueprint['requirements_doc'], res_scheduling_unit_draft['requirements_doc']) - self.assertEqual(0, len(res_scheduling_unit_blueprint['task_blueprints'])) + self.assertEqual(0, len(scheduling_unit_blueprint.task_blueprints.all())) def test_create_task_drafts_from_scheduling_unit_draft(self): """ @@ -80,18 +94,19 @@ class CreationFromSchedulingUnitDraft(unittest.TestCase): Check if NO tasks are created Check with REST-call if NO tasks are created """ - scheduling_unit_draft = self.create_scheduling_unit_draft_object("Test Scheduling Unit 2") + scheduling_unit_draft = self.create_scheduling_unit_draft_object("Test Scheduling Unit 2", requirements_doc={'tasks': []}) + with self.assertRaises(BlueprintCreationException): + create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft) - res_scheduling_unit_draft = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_draft/' + str(scheduling_unit_draft.id), 200) - list_tasks = create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft) - self.assertEqual(0, len(list_tasks)) - self.assertEqual(0, len(res_scheduling_unit_draft['task_drafts'])) + scheduling_unit_draft.refresh_from_db() + task_drafts = scheduling_unit_draft.task_drafts.all() + self.assertEqual(0, len(task_drafts)) def test_create_task_drafts_from_scheduling_unit_draft_with_UC1_requirements(self): """ Create Scheduling Unit Draft with requirements_doc (read from file) + Create Task Drafts (only) Check if tasks (7) are created - Check with REST-call if tasks are created """ working_dir = os.path.dirname(os.path.abspath(__file__)) with open(os.path.join(working_dir, "testdata/example_UC1_scheduling_unit.json")) as json_file: @@ -106,67 +122,106 @@ class CreationFromSchedulingUnitDraft(unittest.TestCase): copies=None, scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data())) - res_scheduling_unit_draft =
GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_draft/' + str(scheduling_unit_draft.id), 200) - list_tasks = create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft) - self.assertEqual(7, len(list_tasks)) - # TODO: check why rest api is not updated? self.assertEqual(7, len(res_scheduling_unit_draft['task_drafts'])) + create_task_drafts_from_scheduling_unit_draft(scheduling_unit_draft) - def test_create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_draft(self): + scheduling_unit_draft.refresh_from_db() + task_drafts = scheduling_unit_draft.task_drafts.all() + self.assertEqual(7, len(task_drafts)) + + def test_create_task_blueprints_and_subtasks_from_scheduling_unit_draft_with_UC1_requirements(self): + """ + Create Scheduling Unit Draft with requirements_doc (read from file) + Create Task Blueprints and Subtasks + Check if tasks (7) are created: + Calibration 1 : 1 Observation and 1 Pipeline task + Target Observation: 1 Observation and 2 Pipeline tasks + Calibration 2 : 1 Observation and 1 Pipeline task + Check if subtasks (13) are created: + Every Observation Task: 3 subtasks (1 control, 2 QA) + Every Pipeline Task: 1 subtask (1 control) + makes 3x3 + 4x1 = 9 + 4 = 13 + """ + working_dir = os.path.dirname(os.path.abspath(__file__)) + with open(os.path.join(working_dir, "testdata/example_UC1_scheduling_unit.json")) as json_file: + json_requirements_doc = json.loads(json_file.read()) + + scheduling_unit_draft = models.SchedulingUnitDraft.objects.create( + name="Test Scheduling Unit UC1", + requirements_doc=json_requirements_doc, + requirements_template=models.SchedulingUnitTemplate.objects.get(name="scheduling unit schema"), + copy_reason=models.CopyReason.objects.get(value='template'), + generator_instance_doc="para", + copies=None, + scheduling_set=models.SchedulingSet.objects.create(**SchedulingSet_test_data())) + + create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + + scheduling_unit_draft.refresh_from_db() + task_drafts = scheduling_unit_draft.task_drafts.all() + self.assertEqual(7, len(task_drafts)) + + scheduling_unit_blueprints = scheduling_unit_draft.scheduling_unit_blueprints.all() + self.assertEqual(1, len(scheduling_unit_blueprints)) + + scheduling_unit_blueprint = scheduling_unit_blueprints[0] + task_blueprints = scheduling_unit_blueprint.task_blueprints.all() + self.assertEqual(7, len(task_blueprints)) + total_subtasks = 0 + for task_blueprint in task_blueprints: + total_subtasks += task_blueprint.subtasks.count() + self.assertEqual(13, total_subtasks)
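A quick consistency check of the counts promised in the docstring above (UC1 = calibrator / target / calibrator, so 3 observation tasks plus 4 pipeline tasks):

tasks = {'observation': 3, 'pipeline': 4}
subtasks_per_task = {'observation': 3,  # 1 control + 2 QA
                     'pipeline': 1}     # 1 control
assert sum(tasks.values()) == 7
assert sum(n * subtasks_per_task[kind] for kind, n in tasks.items()) == 13  # 3*3 + 4*1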
+ def test_create_task_blueprints_and_subtasks_from_scheduling_unit_draft(self): """ Create Scheduling Unit Draft Check if the name draft (specified) is equal to name blueprint (created) Check with REST-call if NO tasks are created """ - scheduling_unit_draft = self.create_scheduling_unit_draft_object("Test Scheduling Unit 3") + scheduling_unit_draft = self.create_scheduling_unit_draft_object("Test Scheduling Unit 3", {'tasks': []}) - res_scheduling_unit_draft = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_draft/' + str(scheduling_unit_draft.id), 200) - scheduling_unit_blueprint = create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) - self.assertEqual(scheduling_unit_draft.name, scheduling_unit_blueprint.draft.name) - res_scheduling_unit_blueprint = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_blueprint/' + str(scheduling_unit_blueprint.id), 200) - self.assertEqual(res_scheduling_unit_blueprint['requirements_template'], res_scheduling_unit_draft['requirements_template']) - self.assertEqual(res_scheduling_unit_blueprint['requirements_doc'], res_scheduling_unit_draft['requirements_doc']) - self.assertEqual(0, len(res_scheduling_unit_blueprint['task_blueprints'])) + with self.assertRaises(BlueprintCreationException): + create_task_blueprints_and_subtasks_from_scheduling_unit_draft(scheduling_unit_draft) + + self.assertEqual(0, len(scheduling_unit_draft.scheduling_unit_blueprints.all())) class CreationFromSchedulingUnitBluePrint(unittest.TestCase): """ From scheduling_unit_blueprint should test: - 5. create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint: + 5. create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint: models.SchedulingUnitBlueprint) -> models.SchedulingUnitBlueprint: """ - def test_create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_blueprint(self): + def test_create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(self): """ Create Scheduling Unit BluePrint - Check with REST-call if NO tasks are created + Check that NO tasks are created; an Exception is raised because the requirements_doc of the + scheduling_unit (draft) has no tasks defined (it is an empty list) """ scheduling_unit_blueprint_data = SchedulingUnitBlueprint_test_data(name="Test Scheduling Unit BluePrint") scheduling_unit_blueprint = models.SchedulingUnitBlueprint.objects.create(**scheduling_unit_blueprint_data) - scheduling_unit_blueprint_after_creation = create_task_blueprints_and_subtasks_and_schedule_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint) - res_scheduling_unit_blueprint = GET_and_assert_equal_expected_code(self, BASE_URL + '/scheduling_unit_blueprint/' + str(scheduling_unit_blueprint.id), 200) - self.assertEqual(0, len(res_scheduling_unit_blueprint['task_blueprints'])) + with self.assertRaises(BlueprintCreationException): + scheduling_unit_blueprint = create_task_blueprints_and_subtasks_from_scheduling_unit_blueprint(scheduling_unit_blueprint) + + self.assertEqual(0, scheduling_unit_blueprint.task_blueprints.count()) class CreationFromTaskDraft(unittest.TestCase): """ From task draft should test: 2. create_task_blueprint_from_task_draft(task_draft: models.TaskDraft) -> models.TaskBlueprint: - 5. create_task_blueprint_and_subtasks_and_schedule_subtasks_from_task_draft(task_draft: models.TaskDraft) -> models.TaskBlueprint: + 5. create_task_blueprint_and_subtasks_from_task_draft(task_draft: models.TaskDraft) -> models.TaskBlueprint: """ @staticmethod def create_task_object(task_draft_name): """ Helper function to create a task object for testing - TODO change schema to observation schema, correlator schema wil be removed - using the observation schema results in jsonschema.exceptions.ValidationError: 'stations' is a required property - so somehow its does not fill in the required fields ??
""" obs_task_template = models.TaskTemplate.objects.get(name='observation schema') task_draft_data = TaskDraft_test_data(name=task_draft_name, specifications_template=obs_task_template) models.TaskDraft.objects.create(**task_draft_data) - - def test_create_task_blueprint_and_subtasks_and_schedule_subtasks(self): + def test_create_task_blueprint_and_subtasks(self): """ Create task draft Check if the name draft (specified) is equal to name blueprint (created) @@ -174,18 +229,12 @@ class CreationFromTaskDraft(unittest.TestCase): """ self.create_task_object("Test Target Observation 1") - task_draft = models.TaskDraft.objects.get(name="Test Target Observation 1") - rest_task_draft = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_draft/' + str(task_draft.id), 200) - task_blueprint = create_task_blueprint_and_subtasks_and_schedule_subtasks_from_task_draft(task_draft) + task_blueprint = create_task_blueprint_and_subtasks_from_task_draft(task_draft) self.assertEqual(task_draft.name, task_blueprint.draft.name) - rest_task_blueprint = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_blueprint/' + str(task_blueprint.id), 200) - self.assertEqual(3, len(rest_task_blueprint['subtasks'])) - self.assertEqual(rest_task_blueprint['specifications_template'], rest_task_draft['specifications_template']) - for subtask_url in rest_task_blueprint['subtasks']: - res_subtask = GET_and_assert_equal_expected_code(self, subtask_url, 200) - state_value = GET_and_assert_equal_expected_code(self, res_subtask['state'], 200)['value'] - # TODO not all scheduled??? self.assertEqual(state_value, "defined") + self.assertEqual(3, task_blueprint.subtasks.count()) + for subtask in task_blueprint.subtasks.all(): + subtask.state.value == 'defined' def test_create_task_blueprint(self): """ @@ -196,12 +245,9 @@ class CreationFromTaskDraft(unittest.TestCase): self.create_task_object("Test Target Observation 2") task_draft = models.TaskDraft.objects.get(name="Test Target Observation 2") - res_task_draft = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_draft/' + str(task_draft.id), 200) task_blueprint = create_task_blueprint_from_task_draft(task_draft) self.assertEqual(task_draft.name, task_blueprint.draft.name) - res_task_blueprint = GET_and_assert_equal_expected_code(self, BASE_URL + '/task_blueprint/' + str(task_blueprint.id), 200) - self.assertEqual(0, len(res_task_blueprint['subtasks'])) - self.assertEqual(res_task_blueprint['specifications_template'], res_task_draft['specifications_template']) + self.assertEqual(0, task_blueprint.subtasks.count()) if __name__ == "__main__": diff --git a/SAS/TMSS/test/t_tmss_test_database.py b/SAS/TMSS/test/t_tmss_test_database.py index 708dcd2f4724181214093099fbc1cf96a3f883b5..2155893ec67d3da46a163cb57f800883408bfe02 100755 --- a/SAS/TMSS/test/t_tmss_test_database.py +++ b/SAS/TMSS/test/t_tmss_test_database.py @@ -51,8 +51,8 @@ class TMSSPostgresTestMixinTestCase(TMSSPostgresTestMixin, unittest.TestCase): now = datetime.utcnow() - db.executeQuery('''INSERT INTO tmssapp_cycle VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);''', - qargs=([], now, now, "my_description", "my_name", now, now, 0, 1, 2, 3)) + db.executeQuery('''INSERT INTO tmssapp_cycle VALUES (%s, %s, %s, %s, %s, %s, %s);''', + qargs=([], now, now, "my_description", "my_name", now, now)) self.assertEqual(cycle_count+1, db.executeQuery("SELECT COUNT(*) FROM tmssapp_cycle;", fetch=FETCH_ONE)['count']) diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py 
diff --git a/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py b/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py index 132b434cc424099d19172cd131f9e27184b6bb81..ec07eacc9f05774a3491beb36498369a819c9843 100755 --- a/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py +++ b/SAS/TMSS/test/t_tmssapp_scheduling_REST_API.py @@ -22,10 +22,10 @@ # This functional test talks to the API like a regular user would. # It is supposed to cover all REST http methods for all ViewSets. -# todo: I am still a bit under the impression that we re-test Django functionality that we can expect to just work -# todo: with some of these tests. On the other hand a lot of these provide us a nice basis for differentiating out -# todo: behavior in a controlled way. -# todo: We should probably also fully test behavior wrt mandatory and nullable fields. +# I am still a bit under the impression that we re-test Django functionality that we can expect to just work +# with some of these tests. On the other hand a lot of these provide us a nice basis for differentiating out +# behavior in a controlled way. +# We should probably also fully test behavior wrt mandatory and nullable fields. from datetime import datetime, timedelta import unittest @@ -92,7 +92,7 @@ class SubtaskTemplateTestCase(unittest.TestCase): url = r_dict['url'] GET_OK_and_assert_equal_expected_response(self, url, st_test_data) - test_patch = {"type": BASE_URL + '/subtask_type/inspection/', + test_patch = {"type": BASE_URL + '/subtask_type/inspection', "version": 'v6.28318530718', "schema": {"mykey": "my better value"}, } @@ -120,7 +120,7 @@ class SubtaskTemplateTestCase(unittest.TestCase): # create dependency that is safe to delete (enums are not populated / re-established between tests) type_data = {'value': 'kickme'} POST_and_assert_expected_response(self, BASE_URL + '/subtask_type/', type_data, 201, type_data) - type_url = BASE_URL + '/subtask_type/kickme/' + type_url = BASE_URL + '/subtask_type/kickme' # POST new item and verify test_data = dict(st_test_data) @@ -370,7 +370,7 @@ class SubtaskTestCase(unittest.TestCase): # create dependency that is safe to delete (enums are not populated / re-established between tests) state_data = {'value': 'kickme'} POST_and_assert_expected_response(self, BASE_URL + '/subtask_state/', state_data, 201, state_data) - state_url = BASE_URL + '/subtask_state/kickme/' + state_url = BASE_URL + '/subtask_state/kickme' # POST new item and verify test_data = dict(st_test_data) @@ -473,7 +473,7 @@ class SubtaskTestCase(unittest.TestCase): GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_state_log/?subtask=' + identifier, {"count": 1}) # PATCH item with state update and verify log record is created - test_patch = {"state": BASE_URL + "/subtask_state/finishing/"} + test_patch = {"state": BASE_URL + "/subtask_state/finishing"} PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/subtask_state_log/?subtask=' + identifier, {"count": 2}) @@ -553,7 +553,7 @@ class DataproductTestCase(unittest.TestCase): # create dependency that is safe to delete (enums are not populated / re-established between tests) dataformat_data = {'value': 'kickme'} POST_and_assert_expected_response(self, BASE_URL + '/dataformat/', dataformat_data, 201, dataformat_data) - dataformat_url = BASE_URL + '/dataformat/kickme/' + dataformat_url = BASE_URL + '/dataformat/kickme' # POST new item and verify test_data = dict(dp_test_data) @@ -934,7 +934,7 @@ class AntennaSetTestCase(unittest.TestCase):
GET_OK_and_assert_equal_expected_response(self, url, antennaset_test_data) test_patch = {"rcus": [11, 12, 13, 14, 15], - "station_type": BASE_URL + '/station_type/remote/'} + "station_type": BASE_URL + '/station_type/remote'} # PATCH item and verify PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) @@ -959,7 +959,7 @@ class AntennaSetTestCase(unittest.TestCase): # create dependency that is safe to delete (enums are not populated / re-established between tests) dataformat_data = {'value': 'kickme'} POST_and_assert_expected_response(self, BASE_URL + '/station_type/', dataformat_data, 201, dataformat_data) - dataformat_url = BASE_URL + '/station_type/kickme/' + dataformat_url = BASE_URL + '/station_type/kickme' # POST new item and verify test_data = dict(antennaset_test_data) @@ -1362,7 +1362,7 @@ class DataproductHashTestCase(unittest.TestCase): url = r_dict['url'] GET_OK_and_assert_equal_expected_response(self, url, dph_test_data) - test_patch = {"algorithm": BASE_URL + '/algorithm/aes256/', + test_patch = {"algorithm": BASE_URL + '/algorithm/aes256', "hash": 'bender-was-here'} # PATCH item and verify diff --git a/SAS/TMSS/test/t_tmssapp_specification_REST_API.py b/SAS/TMSS/test/t_tmssapp_specification_REST_API.py index 0e76ecaf27259a099b5a0eede8431b9ae312b4ad..5ddac0817fcb77daac42f81de665c4b0850e2059 100755 --- a/SAS/TMSS/test/t_tmssapp_specification_REST_API.py +++ b/SAS/TMSS/test/t_tmssapp_specification_REST_API.py @@ -22,10 +22,10 @@ # This functional test talks to the API like a regular user would. # It is supposed to cover all REST http methods for all ViewSets. -# todo: I am still a bit under the impression that we re-test Django functionality that we can expect to just work -# todo: with some of these tests. On the other hand a lot of these provide us a nice basis for differentiating out -# todo: behavior in a controlled way. -# todo: We should probably also fully test behavior wrt mandatory and nullable fields. +# I am still a bit under the impression that we re-test Django functionality that we can expect to just work +# with some of these tests. On the other hand a lot of these provide us a nice basis for differentiating out +# behavior in a controlled way. +# We should probably also fully test behavior wrt mandatory and nullable fields. 
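The assert-helpers these functional tests lean on (POST_and_assert_expected_response, GET_OK_and_assert_equal_expected_response, and friends) live in the shared TMSS test utilities and are not part of this diff. A rough sketch of the pattern they presumably follow, with the name and subset-comparison behavior assumed rather than taken from the source:

import requests

def get_ok_and_assert_contains(test, url, expected: dict):
    # Fetch the resource, expect HTTP 200, and check that every expected
    # key/value pair is present in the JSON payload (a subset match).
    r = requests.get(url, auth=AUTH)  # AUTH/BASE_URL as used throughout this file
    test.assertEqual(200, r.status_code)
    payload = r.json()
    for key, value in expected.items():
        test.assertEqual(value, payload[key])
    return payload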
from datetime import datetime import unittest @@ -125,8 +125,8 @@ class GeneratorTemplateTestCase(unittest.TestCase): test_data_2 = GeneratorTemplate_test_data("test_generator_template_2") id1 = models.GeneratorTemplate.objects.create(**test_data_1).id id2 = models.GeneratorTemplate.objects.create(**test_data_2).id - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/generator_template/' + str(id1), test_data_1) - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/generator_template/' + str(id2), test_data_2) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/generator_template/' + str(id1) + '/', test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/generator_template/' + str(id2) + '/', test_data_2) class SchedulingUnitTemplateTestCase(unittest.TestCase): @@ -197,8 +197,8 @@ class SchedulingUnitTemplateTestCase(unittest.TestCase): test_data_2 = SchedulingUnitTemplate_test_data("scheduling_unit_template_2") id1 = models.SchedulingUnitTemplate.objects.create(**test_data_1).id id2 = models.SchedulingUnitTemplate.objects.create(**test_data_2).id - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_template/' + str(id1), test_data_1) - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_template/' + str(id2), test_data_2) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_template/' + str(id1) + '/', test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_unit_template/' + str(id2) + '/', test_data_2) class TaskTemplateTestCase(unittest.TestCase): @@ -266,8 +266,8 @@ class TaskTemplateTestCase(unittest.TestCase): test_data_2 = TaskTemplate_test_data("task_template_2") id1 = models.TaskTemplate.objects.create(**test_data_1).id id2 = models.TaskTemplate.objects.create(**test_data_2).id - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_template/' + str(id1), test_data_1) - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_template/' + str(id2), test_data_2) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_template/' + str(id1) + '/', test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_template/' + str(id2) + '/', test_data_2) class TaskRelationSelectionTemplateTestCase(unittest.TestCase): @@ -339,8 +339,8 @@ class TaskRelationSelectionTemplateTestCase(unittest.TestCase): test_data_2 = TaskRelationSelectionTemplate_test_data("task_relation_selection_template_2") id1 = models.TaskRelationSelectionTemplate.objects.create(**test_data_1).id id2 = models.TaskRelationSelectionTemplate.objects.create(**test_data_2).id - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_relation_selection_template/' + str(id1), test_data_1) - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_relation_selection_template/' + str(id2), test_data_2) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_relation_selection_template/' + str(id1) + '/', test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_relation_selection_template/' + str(id2) + '/', test_data_2) class TaskConnectorTestCase(unittest.TestCase): @@ -440,9 +440,9 @@ class TaskConnectorTestCase(unittest.TestCase): url = r_dict['url'] GET_OK_and_assert_equal_expected_response(self, url, tc_test_data) - test_patch = {"role": BASE_URL + '/role/calibrator/', - "dataformats": [BASE_URL + '/dataformat/Beamformed/', - BASE_URL + 
'/dataformat/MeasurementSet/']} + test_patch = {"role": BASE_URL + '/role/calibrator', + "dataformats": [BASE_URL + '/dataformat/Beamformed', + BASE_URL + '/dataformat/MeasurementSet']} # PATCH item and verify PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) @@ -491,8 +491,8 @@ class TaskConnectorTestCase(unittest.TestCase): test_data_2 = TaskConnectorType_test_data() id1 = models.TaskConnectorType.objects.create(**test_data_1).id id2 = models.TaskConnectorType.objects.create(**test_data_2).id - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_connector_type/' + str(id1), test_data_1) - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_connector_type/' + str(id2), test_data_2) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_connector_type/' + str(id1) + '/', test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_connector_type/' + str(id2) + '/', test_data_2) class DefaultTemplates(unittest.TestCase): @@ -676,9 +676,10 @@ class CycleTestCase(unittest.TestCase): def test_GET_cycle_list_shows_entry(self): test_data_1 = Cycle_test_data() # uuid makes name unique - test_data_1["number"] = 32000 # cycles are ordered by number, so make this the largest numberm and hence the latest cycle + test_data_1["start"] = datetime(2999, 1, 1) # cycles are ordered by start, so make this the latest date and hence the latest cycle models.Cycle.objects.create(**test_data_1) nbr_results = models.Cycle.objects.count() + test_data_1["start"] = test_data_1["start"].isoformat() GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/cycle/', test_data_1, nbr_results) def test_GET_cycle_view_returns_correct_entry(self): @@ -687,8 +688,8 @@ class CycleTestCase(unittest.TestCase): test_data_2 = Cycle_test_data() id1 = models.Cycle.objects.create(**test_data_1).name # name is pk id2 = models.Cycle.objects.create(**test_data_2).name # name is pk - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/cycle/' + str(id1), test_data_1) - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/cycle/' + str(id2), test_data_2) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/cycle/' + str(id1) + '/', test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/cycle/' + str(id2) + '/', test_data_2) def test_cycle_contains_list_of_releated_projects(self): @@ -698,12 +699,12 @@ class CycleTestCase(unittest.TestCase): cycle = models.Cycle.objects.create(**cycle_test_data_1) project1 = models.Project.objects.create(**project_test_data_1) - project1.cycle = cycle + project1.cycles.set([cycle]) project1.save() project2 = models.Project.objects.create(**project_test_data_2) - project2.cycle = cycle + project2.cycles.set([cycle]) project2.save() - response_data = GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/cycle/' + cycle.name, cycle_test_data_1) + response_data = GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/cycle/' + cycle.name + '/', cycle_test_data_1) assertUrlList(self, response_data['projects'], [project1, project2]) @@ -720,9 +721,11 @@ class ProjectTestCase(unittest.TestCase): project_test_data = test_data_creator.Project() # POST and GET a new item and assert correctness - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, project_test_data) + expected = project_test_data.copy() + expected.pop('quota') # exclude quota from comparison, because these get auto-generated + r_dict = 
POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, expected) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, project_test_data) + GET_OK_and_assert_equal_expected_response(self, url, expected) def test_project_PUT_invalid_raises_error(self): PUT_and_assert_expected_response(self, BASE_URL + '/project/9876789876/', test_data_creator.Project(), 404, {}) @@ -731,40 +734,52 @@ class ProjectTestCase(unittest.TestCase): project_test_data = test_data_creator.Project() # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, project_test_data) + expected = project_test_data.copy() + expected.pop('quota') # exclude quota from comparison, because these get auto-generated + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, expected) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, project_test_data) + GET_OK_and_assert_equal_expected_response(self, url, expected) # PUT new values, verify test_data = dict(test_data_creator.Project("other description")) test_data['name'] = project_test_data['name'] # since name is PK, need to keep that unchanged - PUT_and_assert_expected_response(self, url, test_data, 200, test_data) - GET_OK_and_assert_equal_expected_response(self, url, test_data) + expected = test_data.copy() + expected.pop('quota') # exclude quota from comparison, because these get auto-generated + PUT_and_assert_expected_response(self, url, test_data, 200, expected) + GET_OK_and_assert_equal_expected_response(self, url, expected) def test_project_PATCH(self): project_test_data = test_data_creator.Project() # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, project_test_data) + expected = project_test_data.copy() + expected.pop('quota') # exclude quota from comparison, because these get auto-generated + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, expected) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, project_test_data) + GET_OK_and_assert_equal_expected_response(self, url, expected) - test_patch = {"priority": 500, + test_patch = {"priority_rank": 1.0, "tags": ["SUPERIMPORTANT"]} # PATCH item and verify PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) - expected_data = dict(project_test_data) - expected_data.update(test_patch) - GET_OK_and_assert_equal_expected_response(self, url, expected_data) + expected.update(test_patch) + GET_OK_and_assert_equal_expected_response(self, url, expected) def test_project_DELETE(self): project_test_data = test_data_creator.Project() # POST new item, verify - r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, project_test_data) + expected = project_test_data.copy() + expected.pop('quota') # exclude quota from comparison, because these get auto-generated + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, expected) url = r_dict['url'] - GET_OK_and_assert_equal_expected_response(self, url, project_test_data) + GET_OK_and_assert_equal_expected_response(self, url, expected) + + # DELETE related auto-generated quota first + quotas = r_dict['quota'] + for quota in quotas: + DELETE_and_assert_gone(self, quota) # DELETE and check it's gone DELETE_and_assert_gone(self, url) @@ -775,14 +790,16 @@ class 
ProjectTestCase(unittest.TestCase): cycle_test_data = test_data_creator.Cycle() cycle_url = POST_and_assert_expected_response(self, BASE_URL + '/cycle/', cycle_test_data, 201, cycle_test_data)['url'] test_data = dict(test_data_creator.Project()) - test_data['cycle'] = cycle_url - url = POST_and_assert_expected_response(self, BASE_URL + '/project/', test_data, 201, test_data)['url'] + test_data['cycles'] = [cycle_url] + expected = test_data.copy() + expected.pop('quota') # exclude quota from comparison, because these get auto-generated + url = POST_and_assert_expected_response(self, BASE_URL + '/project/', test_data, 201, expected)['url'] # verify - GET_OK_and_assert_equal_expected_response(self, url, test_data) + GET_OK_and_assert_equal_expected_response(self, url, expected) # add project reference to cycle test data (we make Django add that to the cycle in serializer) - cycle_test_data['projects'] = [url] # add the + cycle_test_data['projects'] = [url] # Try to DELETE dependency, verify that was not successful # Unfortunately we don't get a nice error in json, but a Django debug page on error 500... @@ -805,25 +822,26 @@ class ProjectTestCase(unittest.TestCase): test_data_2 = Project_test_data() id1 = models.Project.objects.create(**test_data_1).name # name is pk id2 = models.Project.objects.create(**test_data_2).name - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/project/' + str(id1), test_data_1) - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/project/' + str(id2), test_data_2) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/project/' + str(id1) + '/', test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/project/' + str(id2) + '/', test_data_2) def test_nested_projects_are_filtered_according_to_cycle(self): - cycle_1 = models.Cycle.objects.create(**Cycle_test_data()) test_data_1 = dict(Project_test_data()) # uuid makes project unique - test_data_1['cycle'] = cycle_1 project_1 = models.Project.objects.create(**test_data_1) - + + cycle_1 = models.Cycle.objects.create(**Cycle_test_data()) + project_1.cycles.set([cycle_1]) + # assert the returned list contains related items, A list of length 1 is retrieved GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/cycle/%s/project/' % cycle_1.name, test_data_1, 1) - + class ResourceTypeTestCase(unittest.TestCase): def test_resource_type_list_apiformat(self): r = requests.get(BASE_URL + '/resource_type/?format=api', auth=AUTH) self.assertEqual(r.status_code, 200) self.assertTrue("Resource Type List" in r.content.decode('utf8')) - + def test_resource_type_GET_nonexistant_raises_error(self): GET_and_assert_equal_expected_code(self, BASE_URL + '/resource_type/1234321/', 404) @@ -839,7 +857,7 @@ class ProjectQuotaTestCase(unittest.TestCase): r = requests.get(BASE_URL + '/project_quota/?format=api', auth=AUTH) self.assertEqual(r.status_code, 200) self.assertTrue("Project Quota List" in r.content.decode('utf8')) - + def test_project_quota_GET_nonexistant_raises_error(self): GET_and_assert_equal_expected_code(self, BASE_URL + '/project_quota/1234321/', 404) @@ -906,8 +924,10 @@ class ProjectQuotaTestCase(unittest.TestCase): # POST new item with dependencies project_test_data = test_data_creator.Project() - project_url = POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, project_test_data)['url'] - + expected = project_test_data.copy() + expected.pop('quota') # exclude quota from comparison, because these get auto-generated + project_url 
= POST_and_assert_expected_response(self, BASE_URL + '/project/', project_test_data, 201, expected)['url'] + project_quota_test_data = dict(test_data_creator.ProjectQuota(project_url=project_url)) project_quota_url = POST_and_assert_expected_response(self, BASE_URL + '/project_quota/', project_quota_test_data, 201, project_quota_test_data)['url'] @@ -1033,8 +1053,8 @@ class SchedulingSetTestCase(unittest.TestCase): test_data_2 = SchedulingSet_test_data() id1 = models.SchedulingSet.objects.create(**test_data_1).id id2 = models.SchedulingSet.objects.create(**test_data_2).id - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_set/' + str(id1), test_data_1) - GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_set/' + str(id2), test_data_2) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_set/' + str(id1) + '/', test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_set/' + str(id2) + '/', test_data_2) def test_SchedulingSet_contains_list_of_releated_SchedulingUnitDraft(self): @@ -1046,7 +1066,7 @@ class SchedulingSetTestCase(unittest.TestCase): scheduling_unit_draft_2 = models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data("scheduler draft one")) scheduling_unit_draft_2.scheduling_set = scheduling_set scheduling_unit_draft_2.save() - response_data = GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_set/%d' % scheduling_set.id, test_data_1) + response_data = GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/scheduling_set/%d/' % scheduling_set.id, test_data_1) assertUrlList(self, response_data['scheduling_unit_drafts'], [scheduling_unit_draft_1, scheduling_unit_draft_2]) @@ -1186,7 +1206,7 @@ class SchedulingUnitDraftTestCase(unittest.TestCase): # assert the returned list contains related items, A list of length 1 is retrieved GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_set/%s/scheduling_unit_draft/' % scheduling_set_1.id, test_data_1, 1) - + def test_SchedulingUnitDraft_contains_list_of_related_SchedulingUnitBlueprint(self): @@ -1590,7 +1610,7 @@ class TaskRelationDraftTestCase(unittest.TestCase): test_data_1 = dict(test_data_1) test_data_1['producer'] = task_draft_1 task_relation_draft_1 = models.TaskRelationDraft.objects.create(**test_data_1) - + # assert the returned list contains related items, A list of length 1 is retrieved GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_draft/%s/task_relation_draft/' % task_draft_1.id, test_data_1, 1) # assert an existing related producer is returned @@ -1725,7 +1745,7 @@ class SchedulingUnitBlueprintTestCase(unittest.TestCase): # assert the returned list contains related items, A list of length 1 is retrieved GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/scheduling_unit_draft/%s/scheduling_unit_blueprint/' % scheduling_unit_draft_1.id, test_data_1, 1) - + class TaskBlueprintTestCase(unittest.TestCase): @classmethod @@ -2192,7 +2212,7 @@ class TaskRelationBlueprintTestCase(unittest.TestCase): test_data_1 = dict(test_data_1) test_data_1['draft'] = task_relation_draft_1 task_relation_blueprint_1 = models.TaskRelationBlueprint.objects.create(**test_data_1) - + # assert the returned list contains related items, A list of length 1 is retrieved GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_relation_draft/%s/task_relation_blueprint/' % task_relation_draft_1.id, test_data_1, 1) @@ -2215,7 +2235,295 @@ class 
TaskRelationBlueprintTestCase(unittest.TestCase): GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_blueprint/%s/task_relation_blueprint/' % task_blueprint_1.id, test_data_1, 1) # assert the returned list contains related consumer GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_blueprint/%s/task_relation_blueprint/' % task_blueprint_2.id, test_data_2, 1) - + + +class TaskSchedulingRelationBlueprintTestCase(unittest.TestCase): + + @classmethod + def setUpClass(cls) -> None: + cls.first_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/') + cls.second_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskBlueprint(), '/task_blueprint/') + + def test_task_scheduling_relation_blueprint_list_apiformat(self): + r = requests.get(BASE_URL + '/task_scheduling_relation_blueprint/?format=api', auth=AUTH) + self.assertEqual(r.status_code, 200) + self.assertTrue("Task Scheduling Relation Blueprint List" in r.content.decode('utf8')) + + def test_task_scheduling_relation_blueprint_GET_nonexistant_raises_error(self): + GET_and_assert_equal_expected_code(self, BASE_URL + '/task_scheduling_relation_blueprint/1234321/', 404) + + def test_task_scheduling_relation_blueprint_POST_and_GET(self): + tsrb_test_data = test_data_creator.TaskSchedulingRelationBlueprint(first_url=None, second_url=None, placement="after") + # POST and GET a new item and assert correctness + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_blueprint/', tsrb_test_data, 201, tsrb_test_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, tsrb_test_data) + + def test_task_scheduling_relation_blueprint_PUT_invalid_raises_error(self): + tsrb_test_data = test_data_creator.TaskSchedulingRelationBlueprint(first_url=None, second_url=None, placement="after") + PUT_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_blueprint/9876789876/', tsrb_test_data, 404, {}) + + def test_task_scheduling_relation_blueprint_PUT(self): + tsrb_test_data1 = test_data_creator.TaskSchedulingRelationBlueprint(first_url=None, second_url=None, placement="after") + tsrb_test_data2 = test_data_creator.TaskSchedulingRelationBlueprint(first_url=None, second_url=None, placement="after") + + # POST new item, verify + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_blueprint/', tsrb_test_data1, 201, tsrb_test_data1) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, tsrb_test_data1) + + # PUT new values, verify + PUT_and_assert_expected_response(self, url, tsrb_test_data2, 200, tsrb_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, tsrb_test_data2) + + def test_task_scheduling_relation_blueprint_PATCH(self): + tsrb_test_data = test_data_creator.TaskSchedulingRelationBlueprint(first_url=None, second_url=None, placement="after") + # POST new item, verify + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_blueprint/', tsrb_test_data, 201, tsrb_test_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, tsrb_test_data) + + test_patch = {"time_offset": 20} + + # PATCH item and verify + PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) + expected_data = dict(tsrb_test_data) + expected_data.update(test_patch) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) + + def 
test_task_scheduling_relation_blueprint_DELETE(self): + tsrb_test_data = test_data_creator.TaskSchedulingRelationBlueprint(first_url=None, second_url=None, placement="after") + # POST new item, verify + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_blueprint/', tsrb_test_data, 201, tsrb_test_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, tsrb_test_data) + + # DELETE and check it's gone + DELETE_and_assert_gone(self, url) + + def test_task_scheduling_relation_blueprint_prevents_missing_time_offset(self): + tsrb_test_data = test_data_creator.TaskSchedulingRelationBlueprint(first_url=None, second_url=None, placement="after") + # test data + test_data = dict(tsrb_test_data) + test_data['time_offset'] = None + + # POST invalid data and assert response + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_blueprint/', test_data, 400, {}) + self.assertTrue('This field may not be null' in str(r_dict['time_offset'])) + + def test_task_scheduling_relation_blueprint_prevents_missing_time_first(self): + tsrb_test_data = test_data_creator.TaskSchedulingRelationBlueprint(first_url=None, second_url=None, placement="after") + + # test data + test_data = dict(tsrb_test_data) + test_data['first'] = None + + # POST invalid data and assert response + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_blueprint/', test_data, 400, {}) + self.assertTrue('This field may not be null' in str(r_dict['first'])) + + def test_task_scheduling_relation_blueprint_prevents_missing_second(self): + tsrb_test_data = test_data_creator.TaskSchedulingRelationBlueprint(first_url=None, second_url=None, placement="after") + + # test data + test_data = dict(tsrb_test_data) + test_data['second'] = None + + # POST invalid data and assert response + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_blueprint/', test_data, 400, {}) + self.assertTrue('This field may not be null' in str(r_dict['second'])) + + def test_task_scheduling_relation_blueprint_prevents_missing_placement(self): + tsrb_test_data = test_data_creator.TaskSchedulingRelationBlueprint(first_url=None, second_url=None, placement="after") + + # test data + test_data = dict(tsrb_test_data) + test_data['placement'] = None + + # POST invalid data and assert response + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_blueprint/', test_data, 400, {}) + self.assertTrue('This field may not be null' in str(r_dict['placement'])) + + def test_task_scheduling_relation_blueprint_CASCADE_behavior_on_task_blueprint_deleted(self): + # Create test data + tsrb_test_data = test_data_creator.TaskSchedulingRelationBlueprint(first_url=None, second_url=None, placement="after") + + # POST new item + url = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_blueprint/', tsrb_test_data, 201, tsrb_test_data)['url'] + + # verify + GET_OK_and_assert_equal_expected_response(self, url, tsrb_test_data) + + # Get the URL of the first task blueprint + test_data = dict(tsrb_test_data) + task_blueprint_url = test_data['first'] + + # DELETE dependency + DELETE_and_assert_gone(self, task_blueprint_url) + + # assert + GET_and_assert_equal_expected_code(self, url, 404)
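The 404 asserted in the CASCADE test only works if the relation row disappears together with its task blueprint, which implies cascading deletes at the model level. A plausible model-side sketch, with the field names taken from the test data but the on_delete choices and related_names assumed rather than taken from this change:

from django.db import models

class TaskSchedulingRelationBlueprint(models.Model):
    # Deleting either related TaskBlueprint removes this relation as well,
    # which the CASCADE test above observes as a 404 on the relation URL.
    first = models.ForeignKey('TaskBlueprint', related_name='first_scheduling_relation',
                              on_delete=models.CASCADE)
    second = models.ForeignKey('TaskBlueprint', related_name='second_scheduling_relation',
                               on_delete=models.CASCADE)
    placement = models.ForeignKey('SchedulingRelationPlacement', on_delete=models.PROTECT)
    time_offset = models.IntegerField(default=60)  # seconds; the PATCH tests update this to 20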
+ def test_GET_TaskSchedulingRelationBlueprint_list_view_shows_entry(self): + + test_data_1 = TaskSchedulingRelationBlueprint_test_data() + models.TaskSchedulingRelationBlueprint.objects.create(**test_data_1) + nbr_results = models.TaskSchedulingRelationBlueprint.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_scheduling_relation_blueprint/', test_data_1, nbr_results) + + def test_GET_TaskSchedulingRelationBlueprint_view_returns_correct_entry(self): + + # setup + test_data_1 = TaskSchedulingRelationBlueprint_test_data() + test_data_2 = TaskSchedulingRelationBlueprint_test_data() + id1 = models.TaskSchedulingRelationBlueprint.objects.create(**test_data_1).id + id2 = models.TaskSchedulingRelationBlueprint.objects.create(**test_data_2).id + # assert + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_scheduling_relation_blueprint/%s/' % id1, test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_scheduling_relation_blueprint/%s/' % id2, test_data_2) + + +class TaskSchedulingRelationDraftTestCase(unittest.TestCase): + + @classmethod + def setUpClass(cls) -> None: + cls.first_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/') + cls.second_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/') + + def test_task_scheduling_relation_draft_list_apiformat(self): + r = requests.get(BASE_URL + '/task_scheduling_relation_draft/?format=api', auth=AUTH) + self.assertEqual(r.status_code, 200) + self.assertTrue("Task Scheduling Relation Draft List" in r.content.decode('utf8')) + + def test_task_scheduling_relation_draft_GET_nonexistant_raises_error(self): + GET_and_assert_equal_expected_code(self, BASE_URL + '/task_scheduling_relation_draft/1234321/', 404) + + def test_task_scheduling_relation_draft_POST_and_GET(self): + tsrd_test_data = test_data_creator.TaskSchedulingRelationDraft( first_url=None, second_url=None, placement="after") + # POST and GET a new item and assert correctness + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_draft/', tsrd_test_data, 201, tsrd_test_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, tsrd_test_data) + + def test_task_scheduling_relation_draft_PUT_invalid_raises_error(self): + tsrd_test_data = test_data_creator.TaskSchedulingRelationDraft( first_url=None, second_url=None, placement="after") + PUT_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_draft/9876789876/', tsrd_test_data, 404, {}) + + def test_task_scheduling_relation_draft_PUT(self): + tsrd_test_data = test_data_creator.TaskSchedulingRelationDraft( first_url=None, second_url=None, placement="after") + tsrd_test_data2 = test_data_creator.TaskSchedulingRelationDraft( first_url=None, second_url=None, placement="after") + + # POST new item, verify + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_draft/', tsrd_test_data, 201, tsrd_test_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, tsrd_test_data) + + # PUT new values, verify + PUT_and_assert_expected_response(self, url, tsrd_test_data2, 200, tsrd_test_data2) + GET_OK_and_assert_equal_expected_response(self, url, tsrd_test_data2) + + def test_task_scheduling_relation_draft_PATCH(self): + tsrd_test_data = test_data_creator.TaskSchedulingRelationDraft(first_url=None, second_url=None, placement="after") + # POST new item, verify + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_draft/', tsrd_test_data, 201, tsrd_test_data) + url = 
r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, tsrd_test_data) + + test_patch = {"time_offset": 20} + + # PATCH item and verify + PATCH_and_assert_expected_response(self, url, test_patch, 200, test_patch) + expected_data = dict(tsrd_test_data) + expected_data.update(test_patch) + GET_OK_and_assert_equal_expected_response(self, url, expected_data) + + def test_task_scheduling_relation_draft_DELETE(self): + tsrd_test_data = test_data_creator.TaskSchedulingRelationDraft( first_url=None, second_url=None, placement="after") + # POST new item, verify + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_draft/', tsrd_test_data, 201, tsrd_test_data) + url = r_dict['url'] + GET_OK_and_assert_equal_expected_response(self, url, tsrd_test_data) + + # DELETE and check it's gone + DELETE_and_assert_gone(self, url) + + def test_task_scheduling_relation_draft_prevents_missing_time_offset(self): + tsrd_test_data = test_data_creator.TaskSchedulingRelationDraft( first_url=None, second_url=None, placement="after") + # test data + test_data = dict(tsrd_test_data) + test_data['time_offset'] = None + + # POST invalid data and assert response + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_draft/', test_data, 400, {}) + self.assertTrue('This field may not be null' in str(r_dict['time_offset'])) + + def test_task_scheduling_relation_draft_prevents_missing_time_first(self): + tsrd_test_data = test_data_creator.TaskSchedulingRelationDraft( first_url=None, second_url=None, placement="after") + + # test data + test_data = dict(tsrd_test_data) + test_data['first'] = None + + # POST invalid data and assert response + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_draft/', test_data, 400, {}) + self.assertTrue('This field may not be null' in str(r_dict['first'])) + + def test_task_scheduling_relation_draft_prevents_missing_second(self): + tsrd_test_data = test_data_creator.TaskSchedulingRelationDraft( first_url=None, second_url=None, placement="after") + + # test data + test_data = dict(tsrd_test_data) + test_data['second'] = None + + # POST invalid data and assert response + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_draft/', test_data, 400, {}) + self.assertTrue('This field may not be null' in str(r_dict['second'])) + + def test_task_scheduling_relation_draft_prevents_missing_placement(self): + tsrd_test_data = test_data_creator.TaskSchedulingRelationDraft( first_url=None, second_url=None, placement="after") + + # test data + test_data = dict(tsrd_test_data) + test_data['placement'] = None + + # POST invalid data and assert response + r_dict = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_draft/', test_data, 400, {}) + self.assertTrue('This field may not be null' in str(r_dict['placement'])) + + def test_task_scheduling_relation_draft_CASCADE_behavior_on_task_draft_deleted(self): + task_draft_url = test_data_creator.post_data_and_get_url(test_data_creator.TaskDraft(), '/task_draft/') + tsrd_test_data = test_data_creator.TaskSchedulingRelationDraft( first_url=task_draft_url, second_url=None, placement="after") + + # POST new item + url = POST_and_assert_expected_response(self, BASE_URL + '/task_scheduling_relation_draft/', tsrd_test_data, 201, tsrd_test_data)['url'] + + # verify + GET_OK_and_assert_equal_expected_response(self, url, tsrd_test_data) + + # DELETE dependency + DELETE_and_assert_gone(self, 
task_draft_url) + + # assert + GET_and_assert_equal_expected_code(self, url, 404) + + def test_GET_TaskSchedulingRelationDraft_list_view_shows_entry(self): + + test_data_1 = TaskSchedulingRelationDraft_test_data() + models.TaskSchedulingRelationDraft.objects.create(**test_data_1) + nbr_results = models.TaskSchedulingRelationDraft.objects.count() + GET_and_assert_in_expected_response_result_list(self, BASE_URL + '/task_scheduling_relation_draft/', test_data_1, nbr_results) + + def test_GET_TaskSchedulingRelationDraft_view_returns_correct_entry(self): + + # setup + test_data_1 = TaskSchedulingRelationDraft_test_data() + test_data_2 = TaskSchedulingRelationDraft_test_data() + id1 = models.TaskSchedulingRelationDraft.objects.create(**test_data_1).id + id2 = models.TaskSchedulingRelationDraft.objects.create(**test_data_2).id + # assert + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_scheduling_relation_draft/%s/' % id1, test_data_1) + GET_OK_and_assert_equal_expected_response(self, BASE_URL + '/task_scheduling_relation_draft/%s/' % id2, test_data_2) + if __name__ == "__main__": logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s', diff --git a/SAS/TMSS/test/t_tmssapp_specification_permissions.py b/SAS/TMSS/test/t_tmssapp_specification_permissions.py index 0e8ebd686bd17a53a0746993d73ec7e4127604d6..ad0576a81665b650b63245a3a2f5faff396299fd 100755 --- a/SAS/TMSS/test/t_tmssapp_specification_permissions.py +++ b/SAS/TMSS/test/t_tmssapp_specification_permissions.py @@ -42,7 +42,7 @@ class CyclePermissionTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.test_data_creator = TMSSRESTTestDataCreator(BASE_URL, requests.auth.HTTPBasicAuth('paulus', 'pauluspass')) - response = requests.get(cls.test_data_creator.django_api_url, auth=cls.test_data_creator.auth) + response = requests.get(cls.test_data_creator.django_api_url + '/', auth=cls.test_data_creator.auth) cls.support_group = Group.objects.create(name='support') cls.support_group.permissions.add(Permission.objects.get(codename='add_cycle')) diff --git a/SAS/TMSS/test/test_utils.py b/SAS/TMSS/test/test_utils.py index 88a250084b484b67931a20a822bac8b38655462b..c97fcab50e588742e1aa2705e9542e8d463e7fd7 100644 --- a/SAS/TMSS/test/test_utils.py +++ b/SAS/TMSS/test/test_utils.py @@ -35,6 +35,8 @@ from lofar.sas.tmss.tmss.exceptions import TMSSException from lofar.messaging.config import DEFAULT_BROKER, DEFAULT_BUSNAME from lofar.common.testing.dbcredentials import TemporaryCredentials from lofar.sas.tmss.client.tmss_http_rest_client import TMSSsession +from lofar.sas.resourceassignment.resourceassigner.test.ra_test_environment import RATestEnvironment + def assertDataWithUrls(self, data, expected): """ @@ -134,12 +136,12 @@ class TMSSDjangoServerInstance(): @property def url(self): ''':returns the http url to the django server''' - return "http://%s/api" % self.address + return "http://%s/api/" % self.address @property def oidc_url(self): ''':returns the http url to the django server''' - return "http://%s/oidc" % self.address + return "http://%s/oidc/" % self.address @property def database_dbcreds_id(self) -> str: @@ -261,6 +263,10 @@ class TMSSTestEnvironment: broker=broker) self.client_credentials = TemporaryCredentials(user=self.ldap_server.dbcreds.user, password=self.ldap_server.dbcreds.password) + # Check for correct Django version, should be at least 3.0 + if django.VERSION[0] < 3: + print("\nWARNING: YOU ARE USING DJANGO VERSION '%s', WHICH WILL NOT SUPPORT ALL FEATURES IN TMSS!\n" % + 
django.get_version()) def start(self): self.ldap_server.start() @@ -348,31 +354,36 @@ def main_test_environment(): logging.basicConfig(format = '%(asctime)s %(levelname)s %(message)s', level = logging.INFO) - with TMSSTestEnvironment(host=options.host, preferred_django_port=options.port, exchange=options.exchange, broker=options.broker) as instance: - # print some nice info for the user to use the test servers... - # use print instead of log for clean lines. - for h in logging.root.handlers: - h.flush() - print() - print() - print("*****************************************************") - print("Test-TMSS database, LDAP and Django up and running...") - print("*****************************************************") - print("DB Credentials ID: %s" % (instance.database.dbcreds_id, )) - print("LDAP Credentials ID: %s" % (instance.django_server.ldap_dbcreds_id, )) - print("TMSS Client Credentials ID: %s" % (instance.client_credentials.dbcreds_id, )) - print("Django URL: %s" % (instance.django_server.url)) - print() - print("Example cmdlines to run tmss or tmss_manage_django:") - print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss" % (instance.database.dbcreds_id, instance.django_server.ldap_dbcreds_id)) - print("TMSS_DBCREDENTIALS=%s TMSS_LDAPCREDENTIALS=%s tmss_manage_django" % (instance.database.dbcreds_id, instance.django_server.ldap_dbcreds_id)) - print() - print("Example cmdline to run tmss client call:") - print("TMSS_CLIENT_DBCREDENTIALS=%s tmss_set_subtask_state <id> <state>" % (instance.client_credentials.dbcreds_id, )) - print() - print("Press Ctrl-C to exit (and remove the test database and django server automatically)") - waitForInterrupt() + with RATestEnvironment(exchange=options.exchange, broker=options.broker): + with TMSSTestEnvironment(host=options.host, preferred_django_port=options.port, exchange=options.exchange, broker=options.broker) as instance: + + from lofar.sas.tmss.tmss.tmssapp.populate import populate_test_data + populate_test_data() + + # print some nice info for the user to use the test servers... + # use print instead of log for clean lines. 
diff --git a/SAS/TMSS/test/tmss_test_data_django_models.py b/SAS/TMSS/test/tmss_test_data_django_models.py
index 2a4e2a0b8644cc7c0b53f90d7d46b19818ce2270..21ee23b0d2e0330a9a7660f91a2e8e3c72b9f66a 100644
--- a/SAS/TMSS/test/tmss_test_data_django_models.py
+++ b/SAS/TMSS/test/tmss_test_data_django_models.py
@@ -93,36 +93,26 @@ def Cycle_test_data() -> dict:
             "description": "",
             "tags": [],
             "start": datetime.utcnow().isoformat(),
-            "stop": datetime.utcnow().isoformat(),
-            "number": 1,
-            "standard_hours": 2,
-            "expert_hours": 3,
-            "filler_hours": 4}
+            "stop": datetime.utcnow().isoformat()}
 
 def Project_test_data() -> dict:
-    return { "cycle": models.Cycle.objects.create(**Cycle_test_data()),
+    return { #"cycles": [models.Cycle.objects.create(**Cycle_test_data())], # ManyToMany, use set()
              "name": 'my_project_' + str(uuid.uuid4()),
              "description": 'my description ' + str(uuid.uuid4()),
              "tags": [],
-             "priority": 1,
+             "priority_rank": 1.0,
+             "trigger_priority": 1000,
              "can_trigger": False,
              "private_data": True,
              "expert": True,
              "filler": False}
 
-def ResourceUnit_test_data() -> dict:
-    return {
-        "tags": [],
-        "description": 'my description ' + str(uuid.uuid4()),
-        "name": 'my_resource_unit_' + str(uuid.uuid4()),
-    }
-
 def ResourceType_test_data() -> dict:
     return {
         "tags": [],
         "description": 'my description ' + str(uuid.uuid4()),
-        "resource_unit": models.ResourceUnit.objects.create(**ResourceUnit_test_data()),
         "name": 'my_resource_type_' + str(uuid.uuid4()),
+        "quantity": models.Quantity.objects.get(value=models.Quantity.Choices.NUMBER.value)
     }
 
 def ProjectQuota_test_data() -> dict:
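Project_test_data now leaves the new many-to-many cycles relation out of the create() kwargs, as its inline comment notes. A short usage sketch of what that comment implies (illustrative only):

# A ManyToManyField cannot be passed to objects.create();
# it has to be set after both rows exist.
cycle = models.Cycle.objects.create(**Cycle_test_data())
project = models.Project.objects.create(**Project_test_data())
project.cycles.set([cycle])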
@@ -144,31 +134,40 @@ def SchedulingSet_test_data(name="my_scheduling_set", project: models.Project=None) -> dict:
             "generator_template": models.GeneratorTemplate.objects.create(**GeneratorTemplate_test_data()),
             "generator_source": None}
 
-def SchedulingUnitDraft_test_data(name="my_scheduling_unit_draft", scheduling_set: models.SchedulingSet=None) -> dict:
+def SchedulingUnitDraft_test_data(name="my_scheduling_unit_draft", scheduling_set: models.SchedulingSet=None, template: models.SchedulingUnitTemplate=None, requirements_doc: dict=None) -> dict:
     if scheduling_set is None:
         scheduling_set = models.SchedulingSet.objects.create(**SchedulingSet_test_data())
 
+    if template is None:
+        template = models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data())
+
+    if requirements_doc is None:
+        requirements_doc = get_default_json_object_for_schema(template.schema)
+
     return {"name": name,
             "description": "",
             "tags": [],
-            "requirements_doc": {},
+            "requirements_doc": requirements_doc,
            "copy_reason": models.CopyReason.objects.get(value='template'),
             "generator_instance_doc": "para",
             "copies": None,
             "scheduling_set": scheduling_set,
-            "requirements_template": models.SchedulingUnitTemplate.objects.create(**SchedulingUnitTemplate_test_data())}
+            "requirements_template": template }
 
-def TaskDraft_test_data(name: str="my_task_draft", specifications_template: models.TaskTemplate=None, scheduling_unit_draft: models.SchedulingUnitDraft=None) -> dict:
+def TaskDraft_test_data(name: str="my_task_draft", specifications_template: models.TaskTemplate=None, specifications_doc: dict=None, scheduling_unit_draft: models.SchedulingUnitDraft=None) -> dict:
     if specifications_template is None:
         specifications_template = models.TaskTemplate.objects.create(**TaskTemplate_test_data())
 
+    if specifications_doc is None:
+        specifications_doc = get_default_json_object_for_schema(specifications_template.schema)
+
     if scheduling_unit_draft is None:
         scheduling_unit_draft = models.SchedulingUnitDraft.objects.create(**SchedulingUnitDraft_test_data())
 
     return {"name": name,
             "description": "",
             "tags": [],
-            "specifications_doc": get_default_json_object_for_schema(specifications_template.schema),
+            "specifications_doc": specifications_doc,
             "copy_reason": models.CopyReason.objects.get(value='template'),
             "copies": None,
             "scheduling_unit_draft": scheduling_unit_draft,
@@ -246,6 +245,31 @@ def SubtaskTemplate_test_data(schema: object=None, version:str=None) -> dict:
             "queue": False,
             "tags": ["TMSS", "TESTING"]}
 
+def TaskSchedulingRelationDraft_test_data(first: models.TaskDraft = None, second: models.TaskDraft = None) -> dict:
+    if first is None:
+        first = models.TaskDraft.objects.create(**TaskDraft_test_data())
+
+    if second is None:
+        second = models.TaskDraft.objects.create(**TaskDraft_test_data())
+    return {"tags": [],
+            "first": first,
+            "second": second,
+            "placement": models.SchedulingRelationPlacement.objects.get(value='after'),
+            "time_offset":60}
+
+def TaskSchedulingRelationBlueprint_test_data(first: models.TaskBlueprint = None, second: models.TaskBlueprint = None) -> dict:
+    if first is None:
+        first = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())
+
+    if second is None:
+        second = models.TaskBlueprint.objects.create(**TaskBlueprint_test_data())
+
+    return {"tags": [],
+            "first": first,
+            "second": second,
+            "placement": models.SchedulingRelationPlacement.objects.get(value='after'),
+            "time_offset":60}
+
 def DataproductSpecificationsTemplate_test_data(version:str=None) -> dict:
     if version is None:
         version = str(uuid.uuid4())
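Both factories above now derive their documents from the template's schema instead of hard-coding {}. The same pattern in isolation (a sketch; all names come from the diff itself):

template = models.TaskTemplate.objects.create(**TaskTemplate_test_data())
# get_default_json_object_for_schema fills in the schema's defaults, so the
# document always matches the template it references.
specifications_doc = get_default_json_object_for_schema(template.schema)
task_draft = models.TaskDraft.objects.create(**TaskDraft_test_data(specifications_doc=specifications_doc))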
diff --git a/SAS/TMSS/test/tmss_test_data_rest.py b/SAS/TMSS/test/tmss_test_data_rest.py
index 865e74c738db8703dc70ea105a9ff1ea1dc448b3..90384f75fe83544a48407494bf0f4efb59055aac 100644
--- a/SAS/TMSS/test/tmss_test_data_rest.py
+++ b/SAS/TMSS/test/tmss_test_data_rest.py
@@ -25,10 +25,12 @@ from datetime import datetime
 import uuid
 import requests
 import json
+from lofar.common.json_utils import get_default_json_object_for_schema
 
 class TMSSRESTTestDataCreator():
     def __init__(self, django_api_url: str, auth: requests.auth.HTTPBasicAuth):
-        self.django_api_url = django_api_url
+        self.django_api_url = django_api_url[:-1] if django_api_url.endswith('/') else django_api_url
+
         self.auth = auth
 
     def get_response_as_json_object(self, url):
@@ -108,9 +110,9 @@ class TMSSRESTTestDataCreator():
         if output_of_url is None:
             output_of_url = self.post_data_and_get_url(self.TaskTemplate(), '/task_template/')
 
-        return {"role": self.django_api_url + '/role/%s/'%role,
-                "datatype": self.django_api_url + '/datatype/image/',
-                "dataformats": [self.django_api_url + '/dataformat/Beamformed/'],
+        return {"role": self.django_api_url + '/role/%s'%role,
+                "datatype": self.django_api_url + '/datatype/image',
+                "dataformats": [self.django_api_url + '/dataformat/Beamformed'],
                 "output_of": output_of_url,
                 "input_of": input_of_url,
                 "tags": []}
@@ -126,36 +128,26 @@
                 "tags": [],
                 "start": datetime.utcnow().isoformat(),
                 "stop": datetime.utcnow().isoformat(),
-                "number": 1,
-                "standard_hours": 2,
-                "expert_hours": 3,
-                "filler_hours": 4,
-                "projects": []}
+                "projects": [],
+                "quota": []}
 
     def Project(self, description="my project description"):
         return {"name": 'my_project_' + str(uuid.uuid4()),
                 "description": description,
                 "tags": [],
-                "project_quota": [],
-                "priority": 1,
+                "quota": [],
+                "priority_rank": 1.0,
+                "trigger_priority": 1000,
                 "can_trigger": False,
-                "private_data": True}
-
-    def ResourceUnit(self):
-        return {
-            "tags": [],
-            "description": 'my description ' + str(uuid.uuid4()),
-            "name": 'my_resource_unit_' + str(uuid.uuid4())
-        }
+                "private_data": True,
+                "cycles": []}
 
-    def ResourceType(self, description="my resource_type description", resource_url=None):
-        if resource_url is None:
-            resource_url = self.post_data_and_get_url(self.ResourceUnit(), '/resource_unit/')
+    def ResourceType(self, description="my resource_type description"):
         return {
             "tags": [],
             "description": description,
-            "resource_unit": resource_url,
-            "name": 'my_resource_type_' + str(uuid.uuid4())
+            "name": 'my_resource_type_' + str(uuid.uuid4()),
+            "quantity": self.django_api_url + '/quantity/number'
         }
@@ -189,18 +181,22 @@
                 "generator_source": None,
                 "scheduling_unit_drafts": []}
 
-    def SchedulingUnitDraft(self, name="my_scheduling_unit_draft", scheduling_set_url=None, template_url=None):
+    def SchedulingUnitDraft(self, name="my_scheduling_unit_draft", scheduling_set_url=None, template_url=None, requirements_doc=None):
         if scheduling_set_url is None:
             scheduling_set_url = self.post_data_and_get_url(self.SchedulingSet(), '/scheduling_set/')
 
         if template_url is None:
             template_url = self.post_data_and_get_url(self.SchedulingUnitTemplate(), '/scheduling_unit_template/')
-
+
+        if requirements_doc is None:
+            scheduling_unit_template = self.get_response_as_json_object(template_url)
+            requirements_doc = get_default_json_object_for_schema(scheduling_unit_template['schema'])
+
         return {"name": name,
                 "description": "This is my run draft",
                 "tags": [],
-                "requirements_doc": "{}",
-                "copy_reason": self.django_api_url + '/copy_reason/template/',
+                "requirements_doc": requirements_doc,
+                "copy_reason": self.django_api_url + '/copy_reason/template',
                 "generator_instance_doc": "{}",
                 "copies": None,
                 "scheduling_set": scheduling_set_url,
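TMSSRESTTestDataCreator builds plain dicts; dependent objects are created on demand through post_data_and_get_url, which POSTs the dict and returns the new resource's URL. Typical chaining, sketched from the calls visible above (BASE_URL and AUTH as defined in tmss_test_environment_unittest_setup.py):

creator = TMSSRESTTestDataCreator(BASE_URL, AUTH)
# POST a scheduling set first, then reference its URL from a draft.
scheduling_set_url = creator.post_data_and_get_url(creator.SchedulingSet(), '/scheduling_set/')
draft_url = creator.post_data_and_get_url(creator.SchedulingUnitDraft(scheduling_set_url=scheduling_set_url),
                                          '/scheduling_unit_draft/')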
"specifications_template": template_url, 'task_blueprints': [], 'produced_by': [], 'consumed_by': [], - 'scheduling_relation_first': [], - 'scheduling_relation_second': []} + 'first_to_connect': [], + 'second_to_connect': []} def TaskRelationDraft(self, producer_url=None, consumer_url=None, template_url=None, input_role_url=None, output_role_url=None): @@ -248,7 +244,7 @@ class TMSSRESTTestDataCreator(): return {"tags": [], "selection_doc": "{}", - "dataformat": self.django_api_url + "/dataformat/Beamformed/", + "dataformat": self.django_api_url + "/dataformat/Beamformed", "producer": producer_url, "consumer": consumer_url, "input_role": input_role_url, @@ -294,8 +290,8 @@ class TMSSRESTTestDataCreator(): "subtasks": [], "produced_by": [], "consumed_by": [], - 'scheduling_relation_first': [], - 'scheduling_relation_second': []} + 'first_to_connect': [], + 'second_to_connect': []} def TaskRelationBlueprint(self, draft_url=None, template_url=None, input_role_url=None, output_role_url=None, consumer_url=None, producer_url=None): if draft_url is None: @@ -319,7 +315,7 @@ class TMSSRESTTestDataCreator(): # test data return {"tags": [], "selection_doc": "{}", - "dataformat": self.django_api_url + '/dataformat/MeasurementSet/', + "dataformat": self.django_api_url + '/dataformat/MeasurementSet', "input_role": input_role_url, "output_role": output_role_url, "draft": draft_url, @@ -335,7 +331,7 @@ class TMSSRESTTestDataCreator(): schema = {} if subtask_type_url is None: - subtask_type_url = self.django_api_url + '/subtask_type/observation/' + subtask_type_url = self.django_api_url + '/subtask_type/observation' return {"type": subtask_type_url, "name": name, @@ -346,6 +342,32 @@ class TMSSRESTTestDataCreator(): "queue": False, "tags": ["TMSS", "TESTING"]} + def TaskSchedulingRelationBlueprint(self, first_url=None, second_url=None, placement="after"): + + if first_url is None: + first_url = self.post_data_and_get_url(self.TaskBlueprint(), '/task_blueprint/') + + if second_url is None: + second_url = self.post_data_and_get_url(self.TaskBlueprint(), '/task_blueprint/') + + return {"tags": [], + "first": first_url, + "second": second_url, + "placement": self.django_api_url + '/scheduling_relation_placement/%s'%placement, + "time_offset":60} + + def TaskSchedulingRelationDraft(self, first_url=None, second_url=None, placement="after"): + if first_url is None: + first_url = self.post_data_and_get_url(self.TaskDraft(), '/task_draft/') + + if second_url is None: + second_url = self.post_data_and_get_url(self.TaskDraft(), '/task_draft/') + return {"tags": [], + "first": first_url, + "second": second_url, + "placement": self.django_api_url + '/scheduling_relation_placement/%s'%placement, + "time_offset":60} + def DataproductSpecificationsTemplate(self, name="my_DataproductSpecificationsTemplate", version:str=None) -> dict: if version is None: version = str(uuid.uuid4()) @@ -396,14 +418,14 @@ class TMSSRESTTestDataCreator(): return {"start_time": datetime.utcnow().isoformat(), "stop_time": datetime.utcnow().isoformat(), - "state": self.django_api_url + '/subtask_state/%s/' % (state,), + "state": self.django_api_url + '/subtask_state/%s' % (state,), "specifications_doc": specifications_doc, "task_blueprint": task_blueprint_url, "specifications_template": specifications_template_url, "tags": ["TMSS", "TESTING"], "do_cancel": datetime.utcnow().isoformat(), "priority": 1, - "schedule_method": self.django_api_url + '/schedule_method/manual/', + "schedule_method": self.django_api_url + '/schedule_method/manual', 
"cluster": cluster_url} def SubtaskOutput(self, subtask_url=None): @@ -426,7 +448,7 @@ class TMSSRESTTestDataCreator(): return {"filename": filename, "directory": directory, - "dataformat": "%s/dataformat/%s/" % (self.django_api_url, dataformat), + "dataformat": "%s/dataformat/%s" % (self.django_api_url, dataformat), "deleted_since": None, "pinned_since": None, "specifications_doc": "{}", @@ -443,7 +465,7 @@ class TMSSRESTTestDataCreator(): def AntennaSet(self, name="antennaset1"): return {"name": name, "description": 'My one observation', - "station_type": self.django_api_url + '/station_type/core/', + "station_type": self.django_api_url + '/station_type/core', "rcus": [1,2,3,4,5], "inputs": ['input1', 'input2'], "tags": ['tmss', 'testing']} @@ -462,7 +484,7 @@ class TMSSRESTTestDataCreator(): def DataproductHash(self, algorithm_url=None, hash="my_hash", dataproduct_url=None): if algorithm_url is None: - algorithm_url = self.django_api_url + '/algorithm/md5/' + algorithm_url = self.django_api_url + '/algorithm/md5' if dataproduct_url is None: dataproduct_url = self.post_data_and_get_url(self.Dataproduct(), '/dataproduct/') @@ -504,7 +526,7 @@ class TMSSRESTTestDataCreator(): "task_relation_blueprint": task_relation_blueprint_url, "producer": subtask_output_url, "dataproducts": dataproduct_urls, - "selection_doc": "{}", + "selection_doc": {}, "selection_template": task_relation_selection_template_url, "tags": []} diff --git a/SAS/TMSS/test/tmss_test_environment_unittest_setup.py b/SAS/TMSS/test/tmss_test_environment_unittest_setup.py index 98375bb80e3b66b19320ef3c129d4757f1bbc7b6..d29fbde127a8023ff29ebc6e014b4b3954c87bd5 100644 --- a/SAS/TMSS/test/tmss_test_environment_unittest_setup.py +++ b/SAS/TMSS/test/tmss_test_environment_unittest_setup.py @@ -47,8 +47,8 @@ def tearDownModule(): import json import requests AUTH = requests.auth.HTTPBasicAuth(tmss_test_env.ldap_server.dbcreds.user, tmss_test_env.ldap_server.dbcreds.password) -BASE_URL = tmss_test_env.django_server.url -OIDC_URL = tmss_test_env.django_server.oidc_url +BASE_URL = tmss_test_env.django_server.url[:-1] if tmss_test_env.django_server.url.endswith('/') else tmss_test_env.django_server.url +OIDC_URL = tmss_test_env.django_server.oidc_url[:-1] if tmss_test_env.django_server.oidc_url.endswith('/') else tmss_test_env.django_server.oidc_url from lofar.sas.tmss.test.test_utils import assertDataWithUrls import lofar.sas.tmss.tmss.settings as TMSS_SETTINGS diff --git a/SubSystems/Dragnet/scripts/LOFAR-Dragnet-deploy.sh b/SubSystems/Dragnet/scripts/LOFAR-Dragnet-deploy.sh index 428bac1725c68266fc638d812145435ed19394b5..1f4c6b03448171669e444edb1dc1cff29c02d382 100755 --- a/SubSystems/Dragnet/scripts/LOFAR-Dragnet-deploy.sh +++ b/SubSystems/Dragnet/scripts/LOFAR-Dragnet-deploy.sh @@ -10,21 +10,21 @@ # - install new module env file into /etc/modulefiles/lofar/ # # Jenkins shell command: -# svn export --force https://svn.astron.nl/LOFAR/trunk/SubSystems/Dragnet/scripts/LOFAR-Dragnet-deploy.sh && \ -# ./LOFAR-Dragnet-deploy.sh "$LOFAR_SVN_TAG" && \ +# wget https://git.astron.nl/ro/lofar/-/raw/master/SubSystems/Dragnet/scripts/LOFAR-Dragnet-deploy.sh && \ +# ./LOFAR-Dragnet-deploy.sh "$LOFAR_GIT_TAG" && \ # rm LOFAR-Dragnet-deploy.sh # -# where $LOFAR_SVN_TAG is set by Jenkins. Examples: trunk or tags/LOFAR-Release-2_17_5 or branches/CEP-Pipeline-Task1234 +# where $LOFAR_GIT_TAG is set by Jenkins. 
diff --git a/SubSystems/Dragnet/scripts/LOFAR-Dragnet-deploy.sh b/SubSystems/Dragnet/scripts/LOFAR-Dragnet-deploy.sh
index 428bac1725c68266fc638d812145435ed19394b5..1f4c6b03448171669e444edb1dc1cff29c02d382 100755
--- a/SubSystems/Dragnet/scripts/LOFAR-Dragnet-deploy.sh
+++ b/SubSystems/Dragnet/scripts/LOFAR-Dragnet-deploy.sh
@@ -10,21 +10,21 @@
 # - install new module env file into /etc/modulefiles/lofar/
 #
 # Jenkins shell command:
-#   svn export --force https://svn.astron.nl/LOFAR/trunk/SubSystems/Dragnet/scripts/LOFAR-Dragnet-deploy.sh && \
-#   ./LOFAR-Dragnet-deploy.sh "$LOFAR_SVN_TAG" && \
+#   wget https://git.astron.nl/ro/lofar/-/raw/master/SubSystems/Dragnet/scripts/LOFAR-Dragnet-deploy.sh && \
+#   ./LOFAR-Dragnet-deploy.sh "$LOFAR_GIT_TAG" && \
 #   rm LOFAR-Dragnet-deploy.sh
 #
-# where $LOFAR_SVN_TAG is set by Jenkins. Examples: trunk or tags/LOFAR-Release-2_17_5 or branches/CEP-Pipeline-Task1234
+# where $LOFAR_GIT_TAG is set by Jenkins. Examples: refs/heads/master or refs/tags/LOFAR-Release-2_17_5 or refs/heads/CEP-Pipeline-Task1234
 #
 # $Id$
 
 if [ $# -eq 0 ]; then
-  echo "Usage: $0 LOFAR_SVN_TAG"
-  echo '  LOFAR_SVN_TAG: e.g.: trunk or tags/LOFAR-Release-2_17_5 or branches/CEP-Pipeline-Task1234'
+  echo "Usage: $0 LOFAR_GIT_TAG"
+  echo '  LOFAR_GIT_TAG: e.g.: refs/heads/master or refs/tags/LOFAR-Release-2_17_5 or refs/heads/CEP-Pipeline-Task1234'
   exit 1
 fi
 
-lofar_svn_tag="$1"
+lofar_git_tag="$1"
 shift
 
 # unload all loaded env modules to avoid accidentally depending on pkgs in PATH, LD_LIBRARY_PATH, ...
@@ -32,7 +32,9 @@
 module purge || true
 
 # config: version, paths, hostnames
-lofar_release=$(echo $lofar_svn_tag | cut -d '/' -f 2)  # select tag or branch name, or trunk
+lofar_release=${lofar_git_tag#refs/tags/}   # select tag name
+lofar_release=${lofar_release#refs/heads/}  # select branch name
+lofar_branch=$lofar_release
 lofar_release_tag_prefix=LOFAR-Release-
 lofar_version=${lofar_release#$lofar_release_tag_prefix}  # chop off prefix if there
 
@@ -43,7 +45,7 @@ else
 fi
 echo $lofar_version
 
-lofar_svn_root=https://svn.astron.nl/LOFAR
+lofar_git_repo=https://git.astron.nl/ro/lofar.git
 lofar_versions_root=/opt/lofar_versions
 prefix=$lofar_versions_root/$lofar_release
 #
@@ -55,8 +57,8 @@ buildtype=gnucxx11_optarch  # optarch enables -O3 -march=native
 
 pushd "$tmpdir"
 
-# check out release. Don't svn export LOFAR if you care about version strings, incl what goes into data products.
-svn checkout $lofar_svn_root/$lofar_svn_tag LOFAR > /dev/null
+# check out the requested branch or tag as a shallow single-branch clone.
+git clone $lofar_git_repo --branch $lofar_branch --depth 1 --single-branch LOFAR
 
 # build, install into DESTDIR, and create deploy archive
 mkdir -p $buildtype && cd $buildtype
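The two parameter expansions above map any of the accepted ref forms to a plain name, which git clone --branch accepts for both branches and tags. The same mapping expressed in Python for clarity (illustrative only; the deploy script itself stays in shell):

def ref_to_name(git_ref: str) -> str:
    # refs/tags/X -> X, refs/heads/Y -> Y, anything else unchanged,
    # mirroring ${lofar_git_tag#refs/tags/} and ${lofar_release#refs/heads/}.
    for prefix in ("refs/tags/", "refs/heads/"):
        if git_ref.startswith(prefix):
            return git_ref[len(prefix):]
    return git_ref

assert ref_to_name("refs/tags/LOFAR-Release-2_17_5") == "LOFAR-Release-2_17_5"
assert ref_to_name("refs/heads/CEP-Pipeline-Task1234") == "CEP-Pipeline-Task1234"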